From 594cd56cb6ad16621f7040c2d3a10be9951db776 Mon Sep 17 00:00:00 2001 From: Kagamma Date: Tue, 17 Jan 2023 20:32:47 +0700 Subject: [PATCH] AvPlayer HLE (#58) * Adds some libSceAvPlayer dummy functions, ps4_sceAvPlayerAddSource cache files to 'avplayer_dump' directory * Its better for languageCode to be array of chars * SysLogPrefix * us -> ms * Adds ffmpeg headers * Fix compilation * sceAvPlayerInitEx * spinlock, proper close ffmpeg stuff * Refactor + fix * NextPacket * ReceiveAudio * ReceiveVideo * audio works * Minor fix * Minor * accurate GetTimeInUs * sceAvPlayerPostInit and sceAvPlayerStop * Is not GPU Addr err * Fixing Structure Alignments * The original timeStamp is listed in ms in the documentation * Forgotten "not" and disable Exit(False); (Seems to work, need to test) * Should check patch folder first * Minor * test CI * use parse_filename * _sceAvPlayerInit, _sceAvPlayerInitEx * ps4_sceAvPlayerPostInit, _sceAvPlayerAddSource * fix types * _sceAvPlayerGetAudioData * _sceAvPlayerGetVideoDataEx * _sceAvPlayerStop, _sceAvPlayerClose * Removed outdated comments + long name * Use MemChunk instead of a simple Pointer * fix init value * convert to utf8 Co-authored-by: Pavel <68122101+red-prig@users.noreply.github.com> --- .github/workflows/main.yml | 13 +- .gitignore | 1 + ffmpeg/ffmpeg.inc | 371 ++ ffmpeg/ffmpeg_types.pas | 99 + ffmpeg/libavcodec.pas | 6785 +++++++++++++++++++++++ ffmpeg/libavdevice.pas | 522 ++ ffmpeg/libavfilter.pas | 1482 +++++ ffmpeg/libavformat.pas | 4218 +++++++++++++++ ffmpeg/libavutil.pas | 9586 +++++++++++++++++++++++++++++++++ ffmpeg/libpostproc.pas | 108 + ffmpeg/libswresample.pas | 564 ++ ffmpeg/libswscale.pas | 372 ++ fpPS4.lpi | 9 +- fpPS4.lpr | 1 + src/ps4_libsceavplayer.pas | 866 +++ sys/sys_time.pas | 20 + vulkan/vHostBufferManager.pas | 2 +- 17 files changed, 25014 insertions(+), 5 deletions(-) create mode 100644 ffmpeg/ffmpeg.inc create mode 100644 ffmpeg/ffmpeg_types.pas create mode 100644 ffmpeg/libavcodec.pas create mode 100644 ffmpeg/libavdevice.pas create mode 100644 ffmpeg/libavfilter.pas create mode 100644 ffmpeg/libavformat.pas create mode 100644 ffmpeg/libavutil.pas create mode 100644 ffmpeg/libpostproc.pas create mode 100644 ffmpeg/libswresample.pas create mode 100644 ffmpeg/libswscale.pas create mode 100644 src/ps4_libsceavplayer.pas diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index b9b817d..e275e10 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -38,12 +38,21 @@ jobs: lazbuild -B fpPS4.lpi > nul strip fpPS4.exe + - name: Download + shell: cmd + working-directory: ./ + run: | + curl -k -L -s https://github.com/red-prig/fpps4-bin/raw/main/ffmpeg.zip -o ffmpeg.zip + unzip ffmpeg.zip + - name: Upload artifacts uses: actions/upload-artifact@v2 if: ${{ !startsWith(github.ref, 'refs/tags/') }} with: name: fpPS4 - path: fpPS4.exe + path: | + fpPS4.exe + *.dll if-no-files-found: warn - name: Pack @@ -53,7 +62,7 @@ jobs: run: | mkdir sce_module echo "Put libSceNgs2.prx and etc. 
here" > sce_module/info.txt - zip -9 -qq -r "fpPS4_%GITHUB_REF_NAME%.zip" "fpPS4.exe" "sce_module/info.txt" + zip -9 -qq -r "fpPS4_%GITHUB_REF_NAME%.zip" "fpPS4.exe" "*.dll" "sce_module/info.txt" - name: Release uses: red-prig/action-gh-release@v1 diff --git a/.gitignore b/.gitignore index 2c7eccd..fb01256 100644 --- a/.gitignore +++ b/.gitignore @@ -16,4 +16,5 @@ link.res lib/ backup/ shader_dump/* +avplayer_dump/* savedata/* diff --git a/ffmpeg/ffmpeg.inc b/ffmpeg/ffmpeg.inc new file mode 100644 index 0000000..277757a --- /dev/null +++ b/ffmpeg/ffmpeg.inc @@ -0,0 +1,371 @@ +{$POINTERMATH ON} +{$MINENUMSIZE 4} (* use 4-byte enums *) +{$WRITEABLECONST ON} + + (* + * FF_API_* defines may be placed below to indicate public API that will be + * dropped at a future version bump. The defines themselves are not part of + * the public API and may change, break or disappear at any time. + * + * @note, when bumping the major version it is recommended to manually + * disable each FF_API_* in its own commit instead of disabling them all + * at once through the bump. This improves the git bisect-ability of the change. + *) +const +{$REGION 'libavutil'} + LIBAVUTIL_VERSION_MAJOR = 56; + LIBAVUTIL_VERSION_MAJOR_STR = '56'; + LIBAVUTIL_VERSION_MINOR = 31; + LIBAVUTIL_VERSION_MICRO = 100; + FFMPEG_VERSION = '4.2.2'; + LIBAVUTIL_VERSION_INT = ((LIBAVUTIL_VERSION_MAJOR shl 16) or (LIBAVUTIL_VERSION_MINOR shl 8) or LIBAVUTIL_VERSION_MICRO); + +{$IFNDEF FF_API_VAAPI} +{$IF LIBAVUTIL_VERSION_MAJOR < 57} +{$DEFINE FF_API_VAAPI} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_FRAME_QP} +{$IF LIBAVUTIL_VERSION_MAJOR < 57} +{$DEFINE FF_API_FRAME_QP} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_PLUS1_MINUS1} +{$IF LIBAVUTIL_VERSION_MAJOR < 57} +{$DEFINE FF_API_PLUS1_MINUS1} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_ERROR_FRAME} +{$IF LIBAVUTIL_VERSION_MAJOR < 57} +{$DEFINE FF_API_ERROR_FRAME} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_PKT_PTS} +{$IF LIBAVUTIL_VERSION_MAJOR < 57} +{$DEFINE FF_API_PKT_PTS} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_CRYPTO_SIZE_T} +{$IF LIBAVUTIL_VERSION_MAJOR < 57} +{$DEFINE FF_API_CRYPTO_SIZE_T} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_FRAME_GET_SET} +{$IF LIBAVUTIL_VERSION_MAJOR < 57} +{$DEFINE FF_API_FRAME_GET_SET} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_PSEUDOPAL} +{$IF LIBAVUTIL_VERSION_MAJOR < 57} +{$DEFINE FF_API_PSEUDOPAL} // Отсутсвует +{$ENDIF} +{$ENDIF} +{$ENDREGION} +{$REGION 'libswscale'} + LIBSWSCALE_VERSION_MAJOR = 5; + LIBSWSCALE_VERSION_MAJOR_STR = '5'; + LIBSWSCALE_VERSION_MINOR = 5; + LIBSWSCALE_VERSION_MICRO = 100; +{$ENDREGION} +{$REGION 'libavcodec'} + LIBAVCODEC_VERSION_MAJOR = 58; + LIBAVCODEC_VERSION_MAJOR_STR = '58'; + LIBAVCODEC_VERSION_MINOR = 54; + LIBAVCODEC_VERSION_MICRO = 100; + +{$IFNDEF FF_API_LOWRES} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_LOWRES} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_DEBUG_MV} +{$IF LIBAVCODEC_VERSION_MAJOR < 58} +{$DEFINE FF_API_DEBUG_MV} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_AVCTX_TIMEBASE} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_AVCTX_TIMEBASE} // Отсутсвует +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_CODED_FRAME} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_CODED_FRAME} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_SIDEDATA_ONLY_PKT} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_SIDEDATA_ONLY_PKT} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_VDPAU_PROFILE} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_VDPAU_PROFILE} // Отсутсвует +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_CONVERGENCE_DURATION} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE 
FF_API_CONVERGENCE_DURATION} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_AVPICTURE} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_AVPICTURE} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_AVPACKET_OLD_API} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_AVPACKET_OLD_API} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_RTP_CALLBACK} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_RTP_CALLBACK} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_VBV_DELAY} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_VBV_DELAY} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_CODER_TYPE} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_CODER_TYPE} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_STAT_BITS} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_STAT_BITS} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_PRIVATE_OPT} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_PRIVATE_OPT} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_ASS_TIMING} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_ASS_TIMING} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_OLD_BSF} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_OLD_BSF} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_COPY_CONTEXT} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_COPY_CONTEXT} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_GET_CONTEXT_DEFAULTS} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_GET_CONTEXT_DEFAULTS} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_NVENC_OLD_NAME} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_NVENC_OLD_NAME} // Отсутсвует +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_STRUCT_VAAPI_CONTEXT} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_STRUCT_VAAPI_CONTEXT} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_MERGE_SD_API} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_MERGE_SD_API} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_TAG_STRING} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_TAG_STRING} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_GETCHROMA} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_GETCHROMA} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_CODEC_GET_SET} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_CODEC_GET_SET} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_USER_VISIBLE_AVHWACCEL} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_USER_VISIBLE_AVHWACCEL} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_LOCKMGR} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_LOCKMGR} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_NEXT} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_NEXT} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_UNSANITIZED_BITRATES} +{$IF LIBAVCODEC_VERSION_MAJOR < 59} +{$DEFINE FF_API_UNSANITIZED_BITRATES} +{$ENDIF} +{$ENDIF} +{$ENDREGION} +{$REGION 'avdevice'} + LIBAVDEVICE_VERSION_MAJOR = 58; + LIBAVDEVICE_VERSION_MAJOR_STR = '58'; + LIBAVDEVICE_VERSION_MINOR = 8; + LIBAVDEVICE_VERSION_MICRO = 100; +{$ENDREGION} +{$REGION 'avformat'} + LIBAVFORMAT_VERSION_MAJOR = 58; + LIBAVFORMAT_VERSION_MAJOR_STR = '58'; + LIBAVFORMAT_VERSION_MINOR = 29; + LIBAVFORMAT_VERSION_MICRO = 100; + +{$IFNDEF FF_API_COMPUTE_PKT_FIELDS2} +{$IF LIBAVFORMAT_VERSION_MAJOR < 59} +{$DEFINE FF_API_COMPUTE_PKT_FIELDS2} // Отсутсвует +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_OLD_OPEN_CALLBACKS} +{$IF LIBAVFORMAT_VERSION_MAJOR < 59} +{$DEFINE FF_API_OLD_OPEN_CALLBACKS} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_LAVF_AVCTX} +{$IF LIBAVFORMAT_VERSION_MAJOR < 59} +{$DEFINE FF_API_LAVF_AVCTX} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_HTTP_USER_AGENT} +{$IF LIBAVFORMAT_VERSION_MAJOR < 59} +{$DEFINE FF_API_HTTP_USER_AGENT} // Отсутсвует +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_HLS_WRAP} +{$IF LIBAVFORMAT_VERSION_MAJOR < 59} +{$DEFINE 
FF_API_HLS_WRAP} // Отсутсвует +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_HLS_USE_LOCALTIME} +{$IF LIBAVFORMAT_VERSION_MAJOR < 59} +{$DEFINE FF_API_HLS_USE_LOCALTIME} // Отсутсвует +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_LAVF_KEEPSIDE_FLAG} +{$IF LIBAVFORMAT_VERSION_MAJOR < 59} +{$DEFINE FF_API_LAVF_KEEPSIDE_FLAG} // Отсутсвует +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_OLD_ROTATE_API} +{$IF LIBAVFORMAT_VERSION_MAJOR < 59} +{$DEFINE FF_API_OLD_ROTATE_API} // Отсутсвует +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_FORMAT_GET_SET} +{$IF LIBAVFORMAT_VERSION_MAJOR < 59} +{$DEFINE FF_API_FORMAT_GET_SET} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_OLD_AVIO_EOF_0} +{$IF LIBAVFORMAT_VERSION_MAJOR < 59} +{$DEFINE FF_API_OLD_AVIO_EOF_0} // Отсутсвует +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_LAVF_FFSERVER} +{$IF LIBAVFORMAT_VERSION_MAJOR < 59} +{$DEFINE FF_API_LAVF_FFSERVER} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_FORMAT_FILENAME} +{$IF LIBAVFORMAT_VERSION_MAJOR < 59} +{$DEFINE FF_API_FORMAT_FILENAME} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_OLD_RTSP_OPTIONS} +{$IF LIBAVFORMAT_VERSION_MAJOR < 59} +{$DEFINE FF_API_OLD_RTSP_OPTIONS} // Отсутсвует +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_NEXT} +{$IF LIBAVFORMAT_VERSION_MAJOR < 59} +{$DEFINE FF_API_NEXT} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_DASH_MIN_SEG_DURATION} +{$IF LIBAVFORMAT_VERSION_MAJOR < 59} +{$DEFINE FF_API_DASH_MIN_SEG_DURATION} // Отсутсвует +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_LAVF_MP4A_LATM} +{$IF LIBAVFORMAT_VERSION_MAJOR < 59} +{$DEFINE FF_API_LAVF_MP4A_LATM} // Отсутсвует +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_AVIOFORMAT} +{$IF LIBAVFORMAT_VERSION_MAJOR < 59} +{$DEFINE FF_API_AVIOFORMAT} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_R_FRAME_RATE} + FF_API_R_FRAME_RATE = 1; +{$ENDIF} +{$ENDREGION} +{$REGION 'postproc'} + LIBPOSTPROC_VERSION_MAJOR = 55; + LIBPOSTPROC_VERSION_MAJOR_STR = '55'; + LIBPOSTPROC_VERSION_MINOR = 5; + LIBPOSTPROC_VERSION_MICRO = 100; +{$ENDREGION} +{$REGION 'swresample.h'} + LIBSWRESAMPLE_VERSION_MAJOR = 3; + LIBSWRESAMPLE_VERSION_MAJOR_STR = '3'; + LIBSWRESAMPLE_VERSION_MINOR = 5; + LIBSWRESAMPLE_VERSION_MICRO = 100; + +{$IFNDEF FF_API_SWS_VECTOR} +{$IF LIBSWSCALE_VERSION_MAJOR < 6} +{$DEFINE FF_API_SWS_VECTOR} +{$ENDIF} +{$ENDIF} +{$ENDREGION} +{$REGION 'avfilter.h'} + LIBAVFILTER_VERSION_MAJOR = 7; + LIBAVFILTER_VERSION_MAJOR_STR = '7'; + LIBAVFILTER_VERSION_MINOR = 57; + LIBAVFILTER_VERSION_MICRO = 100; + +{$IFNDEF FF_API_OLD_FILTER_OPTS_ERROR} +{$IF LIBAVFILTER_VERSION_MAJOR < 8} +{$DEFINE FF_API_OLD_FILTER_OPTS_ERROR} // Отсутсвует +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_LAVR_OPTS} +{$IF LIBAVFILTER_VERSION_MAJOR < 8} +{$DEFINE FF_API_LAVR_OPTS} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_FILTER_GET_SET} +{$IF LIBAVFILTER_VERSION_MAJOR < 8} +{$DEFINE FF_API_FILTER_GET_SET} +{$ENDIF} +{$ENDIF} +{$IFNDEF FF_API_NEXT} +{$IF LIBAVFILTER_VERSION_MAJOR < 8} +{$DEFINE FF_API_NEXT} +{$ENDIF} +{$ENDIF} +{$DEFINE FF_INTERNAL_FIELDS} +{$ENDREGION} + swscale_dll = 'swscale-' + LIBSWSCALE_VERSION_MAJOR_STR + '.dll'; + avutil_dll = 'avutil-' + LIBAVUTIL_VERSION_MAJOR_STR + '.dll'; + avcodec_dll = 'avcodec-' + LIBAVCODEC_VERSION_MAJOR_STR + '.dll'; + avdevice_dll = 'avdevice-' + LIBAVDEVICE_VERSION_MAJOR_STR + '.dll'; + avformat_dll = 'avformat-' + LIBAVFORMAT_VERSION_MAJOR_STR + '.dll'; + postproc_dll = 'postproc-' + LIBPOSTPROC_VERSION_MAJOR_STR + '.dll'; + swresample_dll = 'swresample-' + LIBSWRESAMPLE_VERSION_MAJOR_STR + '.dll'; + avfilter_dll = 'avfilter-' + LIBAVFILTER_VERSION_MAJOR_STR + '.dll'; diff --git a/ffmpeg/ffmpeg_types.pas b/ffmpeg/ffmpeg_types.pas new file mode 100644 
index 0000000..a72f668 --- /dev/null +++ b/ffmpeg/ffmpeg_types.pas @@ -0,0 +1,99 @@ +unit ffmpeg_types; + +{$IFDEF FPC} +{$MODE Delphi} +{$ENDIF} + +interface + +Type + Bool = WordBool; + float = Single; + ppDouble = ^pDouble; + + size_t = NativeUInt; + psize_t = ^size_t; + ptrdiff_t = UInt32; + uint32_t = Cardinal; + unsigned = uint32_t; + unsignedint = UInt32; + UINT = unsigned; + unsigned_int = UInt32; + punsigned_int = ^unsigned_int; + unsigned_long = Cardinal; + + unsignedchar = Byte; + unsigned_char = unsignedchar; + punsignedchar = PByte; // ^unsignedchar; + punsigned_char = punsignedchar; + + Int = Integer; + pint = ^Int; + ppint = ^pint; + + int8_t = Int8; + pint8_t = ^int8_t; + + uint8_t = Byte; + puint8_t = PByte; // ^uint8_t; + ppuint8_t = ^puint8_t; + PPByte = ppuint8_t; + + int16_t = int16; + pint16_t = ^int16_t; + uint16_t = UInt16; + puint16_t = ^uint16_t; + + int32_t = Int32; + pint32_t = ^int32_t; + ppint32_t = ^pint32_t; + + int64_t = Int64; + pint64_t = ^int64_t; + uint64_t = UInt64; + puint64_t = ^uint64_t; + + array_uint8_t = array [0 .. 0] of uint8_t; + parray_uint8_t = ^array_uint8_t; + + array_int = array [0 .. 0] of Int; + parray_int = ^array_int; + + array4_int = array [0 .. 3] of Int; + parray4_int = ^array4_int; + + array4_puint8_t = array [0 .. 3] of puint8_t; + parray4_puint8_t = ^array4_puint8_t; + + array4_ptrdiff_t = array [0 .. 3] of ptrdiff_t; + parray4_ptrdiff_t = ^array4_ptrdiff_t; + + time_t = LongInt; + + AnsiCharArray = array [0 .. 0] of pAnsiChar; + pAnsiCharArray = ^AnsiCharArray; + + (* MICROSOFT VC++ STDIO'S FILE DEFINITION *) + _iobuf = record + _ptr: pAnsiChar; + _cnt: Integer; + _base: pAnsiChar; + _flag: Integer; + _file: Integer; + _charbuf: Integer; + _bufsiz: Integer; + _tmpfname: pAnsiChar; + end; + + PFile = ^TFile; + TFile = _iobuf; + + pAVHWAccel = Pointer; + ppAVCodecHWConfigInternal = Pointer; + +const + max_unsigned = $FFFF; + +implementation + +end. diff --git a/ffmpeg/libavcodec.pas b/ffmpeg/libavcodec.pas new file mode 100644 index 0000000..0e03651 --- /dev/null +++ b/ffmpeg/libavcodec.pas @@ -0,0 +1,6785 @@ +unit libavcodec; + +{$IFDEF FPC} +{$MODE Delphi} +{$ENDIF} + +interface + +Uses + ffmpeg_types, libavutil; + +{$I ffmpeg.inc} +{$REGION 'avcodec.h'} + +(* + * Identify the syntax and semantics of the bitstream. + * The principle is roughly: + * Two decoders with the same ID can decode the same streams. + * Two encoders with the same ID can encode compatible streams. + * There may be slight deviations from the principle due to implementation + * details. + * + * If you add a codec ID to this list, add it so that + * 1. no value of an existing codec ID changes (that would break ABI), + * 2. it is as close as possible to similar codecs + * + * After adding new codec IDs, do not forget to add an entry to the codec + * descriptor list and bump libavcodec minor version. 
+*) +type + pAVCodecID = ^AVCodecID; + AVCodecID = ( // + AV_CODEC_ID_NONE, + (* video codecs *) + AV_CODEC_ID_MPEG1VIDEO, // + AV_CODEC_ID_MPEG2VIDEO, // < preferred ID for MPEG-1/2 video decoding + AV_CODEC_ID_H261, // + AV_CODEC_ID_H263, // + AV_CODEC_ID_RV10, // + AV_CODEC_ID_RV20, // + AV_CODEC_ID_MJPEG, // + AV_CODEC_ID_MJPEGB, // + AV_CODEC_ID_LJPEG, // + AV_CODEC_ID_SP5X, // + AV_CODEC_ID_JPEGLS, // + AV_CODEC_ID_MPEG4, // + AV_CODEC_ID_RAWVIDEO, // + AV_CODEC_ID_MSMPEG4V1, // + AV_CODEC_ID_MSMPEG4V2, // + AV_CODEC_ID_MSMPEG4V3, // + AV_CODEC_ID_WMV1, // + AV_CODEC_ID_WMV2, // + AV_CODEC_ID_H263P, // + AV_CODEC_ID_H263I, // + AV_CODEC_ID_FLV1, // + AV_CODEC_ID_SVQ1, // + AV_CODEC_ID_SVQ3, // + AV_CODEC_ID_DVVIDEO, // + AV_CODEC_ID_HUFFYUV, // + AV_CODEC_ID_CYUV, // + AV_CODEC_ID_H264, // + AV_CODEC_ID_INDEO3, // + AV_CODEC_ID_VP3, // + AV_CODEC_ID_THEORA, // + AV_CODEC_ID_ASV1, // + AV_CODEC_ID_ASV2, // + AV_CODEC_ID_FFV1, // + AV_CODEC_ID_4XM, // + AV_CODEC_ID_VCR1, // + AV_CODEC_ID_CLJR, // + AV_CODEC_ID_MDEC, // + AV_CODEC_ID_ROQ, // + AV_CODEC_ID_INTERPLAY_VIDEO, // + AV_CODEC_ID_XAN_WC3, // + AV_CODEC_ID_XAN_WC4, // + AV_CODEC_ID_RPZA, // + AV_CODEC_ID_CINEPAK, // + AV_CODEC_ID_WS_VQA, // + AV_CODEC_ID_MSRLE, // + AV_CODEC_ID_MSVIDEO1, // + AV_CODEC_ID_IDCIN, // + AV_CODEC_ID_8BPS, // + AV_CODEC_ID_SMC, // + AV_CODEC_ID_FLIC, // + AV_CODEC_ID_TRUEMOTION1, // + AV_CODEC_ID_VMDVIDEO, // + AV_CODEC_ID_MSZH, // + AV_CODEC_ID_ZLIB, // + AV_CODEC_ID_QTRLE, // + AV_CODEC_ID_TSCC, // + AV_CODEC_ID_ULTI, // + AV_CODEC_ID_QDRAW, // + AV_CODEC_ID_VIXL, // + AV_CODEC_ID_QPEG, // + AV_CODEC_ID_PNG, // + AV_CODEC_ID_PPM, // + AV_CODEC_ID_PBM, // + AV_CODEC_ID_PGM, // + AV_CODEC_ID_PGMYUV, // + AV_CODEC_ID_PAM, // + AV_CODEC_ID_FFVHUFF, // + AV_CODEC_ID_RV30, // + AV_CODEC_ID_RV40, // + AV_CODEC_ID_VC1, // + AV_CODEC_ID_WMV3, // + AV_CODEC_ID_LOCO, // + AV_CODEC_ID_WNV1, // + AV_CODEC_ID_AASC, // + AV_CODEC_ID_INDEO2, // + AV_CODEC_ID_FRAPS, // + AV_CODEC_ID_TRUEMOTION2, // + AV_CODEC_ID_BMP, // + AV_CODEC_ID_CSCD, // + AV_CODEC_ID_MMVIDEO, // + AV_CODEC_ID_ZMBV, // + AV_CODEC_ID_AVS, // + AV_CODEC_ID_SMACKVIDEO, // + AV_CODEC_ID_NUV, // + AV_CODEC_ID_KMVC, // + AV_CODEC_ID_FLASHSV, // + AV_CODEC_ID_CAVS, // + AV_CODEC_ID_JPEG2000, // + AV_CODEC_ID_VMNC, // + AV_CODEC_ID_VP5, // + AV_CODEC_ID_VP6, // + AV_CODEC_ID_VP6F, // + AV_CODEC_ID_TARGA, // + AV_CODEC_ID_DSICINVIDEO, // + AV_CODEC_ID_TIERTEXSEQVIDEO, // + AV_CODEC_ID_TIFF, // + AV_CODEC_ID_GIF, // + AV_CODEC_ID_DXA, // + AV_CODEC_ID_DNXHD, // + AV_CODEC_ID_THP, // + AV_CODEC_ID_SGI, // + AV_CODEC_ID_C93, // + AV_CODEC_ID_BETHSOFTVID, // + AV_CODEC_ID_PTX, // + AV_CODEC_ID_TXD, // + AV_CODEC_ID_VP6A, // + AV_CODEC_ID_AMV, // + AV_CODEC_ID_VB, // + AV_CODEC_ID_PCX, // + AV_CODEC_ID_SUNRAST, // + AV_CODEC_ID_INDEO4, // + AV_CODEC_ID_INDEO5, // + AV_CODEC_ID_MIMIC, // + AV_CODEC_ID_RL2, // + AV_CODEC_ID_ESCAPE124, // + AV_CODEC_ID_DIRAC, // + AV_CODEC_ID_BFI, // + AV_CODEC_ID_CMV, // + AV_CODEC_ID_MOTIONPIXELS, // + AV_CODEC_ID_TGV, // + AV_CODEC_ID_TGQ, // + AV_CODEC_ID_TQI, // + AV_CODEC_ID_AURA, // + AV_CODEC_ID_AURA2, // + AV_CODEC_ID_V210X, // + AV_CODEC_ID_TMV, // + AV_CODEC_ID_V210, // + AV_CODEC_ID_DPX, // + AV_CODEC_ID_MAD, // + AV_CODEC_ID_FRWU, // + AV_CODEC_ID_FLASHSV2, // + AV_CODEC_ID_CDGRAPHICS, // + AV_CODEC_ID_R210, // + AV_CODEC_ID_ANM, // + AV_CODEC_ID_BINKVIDEO, // + AV_CODEC_ID_IFF_ILBM, // + // AV_CODEC_ID_IFF_BYTERUN1 = AV_CODEC_ID_IFF_ILBM, // + AV_CODEC_ID_KGV1, // + AV_CODEC_ID_YOP, // + AV_CODEC_ID_VP8, // + 
AV_CODEC_ID_PICTOR, // + AV_CODEC_ID_ANSI, // + AV_CODEC_ID_A64_MULTI, // + AV_CODEC_ID_A64_MULTI5, // + AV_CODEC_ID_R10K, // + AV_CODEC_ID_MXPEG, // + AV_CODEC_ID_LAGARITH, // + AV_CODEC_ID_PRORES, // + AV_CODEC_ID_JV, // + AV_CODEC_ID_DFA, // + AV_CODEC_ID_WMV3IMAGE, // + AV_CODEC_ID_VC1IMAGE, // + AV_CODEC_ID_UTVIDEO, // + AV_CODEC_ID_BMV_VIDEO, // + AV_CODEC_ID_VBLE, // + AV_CODEC_ID_DXTORY, // + AV_CODEC_ID_V410, // + AV_CODEC_ID_XWD, // + AV_CODEC_ID_CDXL, // + AV_CODEC_ID_XBM, // + AV_CODEC_ID_ZEROCODEC, // + AV_CODEC_ID_MSS1, // + AV_CODEC_ID_MSA1, // + AV_CODEC_ID_TSCC2, // + AV_CODEC_ID_MTS2, // + AV_CODEC_ID_CLLC, // + AV_CODEC_ID_MSS2, // + AV_CODEC_ID_VP9, // + AV_CODEC_ID_AIC, // + AV_CODEC_ID_ESCAPE130, // + AV_CODEC_ID_G2M, // + AV_CODEC_ID_WEBP, // + AV_CODEC_ID_HNM4_VIDEO, // + AV_CODEC_ID_HEVC, // + // AV_CODEC_ID_H265 = AV_CODEC_ID_HEVC, // + AV_CODEC_ID_FIC, // + AV_CODEC_ID_ALIAS_PIX, // + AV_CODEC_ID_BRENDER_PIX, // + AV_CODEC_ID_PAF_VIDEO, // + AV_CODEC_ID_EXR, // + AV_CODEC_ID_VP7, // + AV_CODEC_ID_SANM, // + AV_CODEC_ID_SGIRLE, // + AV_CODEC_ID_MVC1, // + AV_CODEC_ID_MVC2, // + AV_CODEC_ID_HQX, // + AV_CODEC_ID_TDSC, // + AV_CODEC_ID_HQ_HQA, // + AV_CODEC_ID_HAP, // + AV_CODEC_ID_DDS, // + AV_CODEC_ID_DXV, // + AV_CODEC_ID_SCREENPRESSO, // + AV_CODEC_ID_RSCC, // + AV_CODEC_ID_AVS2, // + + AV_CODEC_ID_Y41P = $8000, // + AV_CODEC_ID_AVRP, // + AV_CODEC_ID_012V, // + AV_CODEC_ID_AVUI, // + AV_CODEC_ID_AYUV, // + AV_CODEC_ID_TARGA_Y216, // + AV_CODEC_ID_V308, // + AV_CODEC_ID_V408, // + AV_CODEC_ID_YUV4, // + AV_CODEC_ID_AVRN, // + AV_CODEC_ID_CPIA, // + AV_CODEC_ID_XFACE, // + AV_CODEC_ID_SNOW, // + AV_CODEC_ID_SMVJPEG, // + AV_CODEC_ID_APNG, // + AV_CODEC_ID_DAALA, // + AV_CODEC_ID_CFHD, // + AV_CODEC_ID_TRUEMOTION2RT, // + AV_CODEC_ID_M101, // + AV_CODEC_ID_MAGICYUV, // + AV_CODEC_ID_SHEERVIDEO, // + AV_CODEC_ID_YLC, // + AV_CODEC_ID_PSD, // + AV_CODEC_ID_PIXLET, // + AV_CODEC_ID_SPEEDHQ, // + AV_CODEC_ID_FMVC, // + AV_CODEC_ID_SCPR, // + AV_CODEC_ID_CLEARVIDEO, // + AV_CODEC_ID_XPM, // + AV_CODEC_ID_AV1, // + AV_CODEC_ID_BITPACKED, // + AV_CODEC_ID_MSCC, // + AV_CODEC_ID_SRGC, // + AV_CODEC_ID_SVG, // + AV_CODEC_ID_GDV, // + AV_CODEC_ID_FITS, // + AV_CODEC_ID_IMM4, // + AV_CODEC_ID_PROSUMER, // + AV_CODEC_ID_MWSC, // + AV_CODEC_ID_WCMV, // + AV_CODEC_ID_RASC, // + AV_CODEC_ID_HYMT, // + AV_CODEC_ID_ARBC, // + AV_CODEC_ID_AGM, // + AV_CODEC_ID_LSCR, // + AV_CODEC_ID_VP4, // +// AV_CODEC_ID_IMM5, // 4.2.2 + + (* various PCM "codecs" *) + AV_CODEC_ID_FIRST_AUDIO = $10000, // < A dummy id pointing at the start of audio codecs + AV_CODEC_ID_PCM_S16LE = $10000, // + AV_CODEC_ID_PCM_S16BE, // + AV_CODEC_ID_PCM_U16LE, // + AV_CODEC_ID_PCM_U16BE, // + AV_CODEC_ID_PCM_S8, // + AV_CODEC_ID_PCM_U8, // + AV_CODEC_ID_PCM_MULAW, // + AV_CODEC_ID_PCM_ALAW, // + AV_CODEC_ID_PCM_S32LE, // + AV_CODEC_ID_PCM_S32BE, // + AV_CODEC_ID_PCM_U32LE, // + AV_CODEC_ID_PCM_U32BE, // + AV_CODEC_ID_PCM_S24LE, // + AV_CODEC_ID_PCM_S24BE, // + AV_CODEC_ID_PCM_U24LE, // + AV_CODEC_ID_PCM_U24BE, // + AV_CODEC_ID_PCM_S24DAUD, // + AV_CODEC_ID_PCM_ZORK, // + AV_CODEC_ID_PCM_S16LE_PLANAR, // + AV_CODEC_ID_PCM_DVD, // + AV_CODEC_ID_PCM_F32BE, // + AV_CODEC_ID_PCM_F32LE, // + AV_CODEC_ID_PCM_F64BE, // + AV_CODEC_ID_PCM_F64LE, // + AV_CODEC_ID_PCM_BLURAY, // + AV_CODEC_ID_PCM_LXF, // + AV_CODEC_ID_S302M, // + AV_CODEC_ID_PCM_S8_PLANAR, // + AV_CODEC_ID_PCM_S24LE_PLANAR, // + AV_CODEC_ID_PCM_S32LE_PLANAR, // + AV_CODEC_ID_PCM_S16BE_PLANAR, // + + AV_CODEC_ID_PCM_S64LE = $10800, // + AV_CODEC_ID_PCM_S64BE, 
// + AV_CODEC_ID_PCM_F16LE, // + AV_CODEC_ID_PCM_F24LE, // + AV_CODEC_ID_PCM_VIDC, // + + (* various ADPCM codecs *) + AV_CODEC_ID_ADPCM_IMA_QT = $11000, // + AV_CODEC_ID_ADPCM_IMA_WAV, // + AV_CODEC_ID_ADPCM_IMA_DK3, // + AV_CODEC_ID_ADPCM_IMA_DK4, // + AV_CODEC_ID_ADPCM_IMA_WS, // + AV_CODEC_ID_ADPCM_IMA_SMJPEG, // + AV_CODEC_ID_ADPCM_MS, // + AV_CODEC_ID_ADPCM_4XM, // + AV_CODEC_ID_ADPCM_XA, // + AV_CODEC_ID_ADPCM_ADX, // + AV_CODEC_ID_ADPCM_EA, // + AV_CODEC_ID_ADPCM_G726, // + AV_CODEC_ID_ADPCM_CT, // + AV_CODEC_ID_ADPCM_SWF, // + AV_CODEC_ID_ADPCM_YAMAHA, // + AV_CODEC_ID_ADPCM_SBPRO_4, // + AV_CODEC_ID_ADPCM_SBPRO_3, // + AV_CODEC_ID_ADPCM_SBPRO_2, // + AV_CODEC_ID_ADPCM_THP, // + AV_CODEC_ID_ADPCM_IMA_AMV, // + AV_CODEC_ID_ADPCM_EA_R1, // + AV_CODEC_ID_ADPCM_EA_R3, // + AV_CODEC_ID_ADPCM_EA_R2, // + AV_CODEC_ID_ADPCM_IMA_EA_SEAD, // + AV_CODEC_ID_ADPCM_IMA_EA_EACS, // + AV_CODEC_ID_ADPCM_EA_XAS, // + AV_CODEC_ID_ADPCM_EA_MAXIS_XA, // + AV_CODEC_ID_ADPCM_IMA_ISS, // + AV_CODEC_ID_ADPCM_G722, // + AV_CODEC_ID_ADPCM_IMA_APC, // + AV_CODEC_ID_ADPCM_VIMA, // + // + AV_CODEC_ID_ADPCM_AFC = $11800, // + AV_CODEC_ID_ADPCM_IMA_OKI, // + AV_CODEC_ID_ADPCM_DTK, // + AV_CODEC_ID_ADPCM_IMA_RAD, // + AV_CODEC_ID_ADPCM_G726LE, // + AV_CODEC_ID_ADPCM_THP_LE, // + AV_CODEC_ID_ADPCM_PSX, // + AV_CODEC_ID_ADPCM_AICA, // + AV_CODEC_ID_ADPCM_IMA_DAT4, // + AV_CODEC_ID_ADPCM_MTAF, // + AV_CODEC_ID_ADPCM_AGM, // + + (* AMR *) // + AV_CODEC_ID_AMR_NB = $12000, // + AV_CODEC_ID_AMR_WB, // + + (* RealAudio codecs *) // + AV_CODEC_ID_RA_144 = $13000, // + AV_CODEC_ID_RA_288, + + (* various DPCM codecs *) + AV_CODEC_ID_ROQ_DPCM = $14000, // + AV_CODEC_ID_INTERPLAY_DPCM, // + AV_CODEC_ID_XAN_DPCM, // + AV_CODEC_ID_SOL_DPCM, // + + AV_CODEC_ID_SDX2_DPCM = $14800, // + AV_CODEC_ID_GREMLIN_DPCM, // + + (* audio codecs *) + AV_CODEC_ID_MP2 = $15000, // + AV_CODEC_ID_MP3, + /// < preferred ID for decoding MPEG audio layer 1, 2 or 3 + AV_CODEC_ID_AAC, // + AV_CODEC_ID_AC3, // + AV_CODEC_ID_DTS, // + AV_CODEC_ID_VORBIS, // + AV_CODEC_ID_DVAUDIO, // + AV_CODEC_ID_WMAV1, // + AV_CODEC_ID_WMAV2, // + AV_CODEC_ID_MACE3, // + AV_CODEC_ID_MACE6, // + AV_CODEC_ID_VMDAUDIO, // + AV_CODEC_ID_FLAC, // + AV_CODEC_ID_MP3ADU, // + AV_CODEC_ID_MP3ON4, // + AV_CODEC_ID_SHORTEN, // + AV_CODEC_ID_ALAC, // + AV_CODEC_ID_WESTWOOD_SND1, // + AV_CODEC_ID_GSM, + /// < as in Berlin toast format + AV_CODEC_ID_QDM2, // + AV_CODEC_ID_COOK, // + AV_CODEC_ID_TRUESPEECH, // + AV_CODEC_ID_TTA, // + AV_CODEC_ID_SMACKAUDIO, // + AV_CODEC_ID_QCELP, // + AV_CODEC_ID_WAVPACK, // + AV_CODEC_ID_DSICINAUDIO, // + AV_CODEC_ID_IMC, // + AV_CODEC_ID_MUSEPACK7, // + AV_CODEC_ID_MLP, // + AV_CODEC_ID_GSM_MS, (* as found in WAV *) + AV_CODEC_ID_ATRAC3, // + AV_CODEC_ID_APE, // + AV_CODEC_ID_NELLYMOSER, // + AV_CODEC_ID_MUSEPACK8, // + AV_CODEC_ID_SPEEX, // + AV_CODEC_ID_WMAVOICE, // + AV_CODEC_ID_WMAPRO, // + AV_CODEC_ID_WMALOSSLESS, // + AV_CODEC_ID_ATRAC3P, // + AV_CODEC_ID_EAC3, // + AV_CODEC_ID_SIPR, // + AV_CODEC_ID_MP1, // + AV_CODEC_ID_TWINVQ, // + AV_CODEC_ID_TRUEHD, // + AV_CODEC_ID_MP4ALS, // + AV_CODEC_ID_ATRAC1, // + AV_CODEC_ID_BINKAUDIO_RDFT, // + AV_CODEC_ID_BINKAUDIO_DCT, // + AV_CODEC_ID_AAC_LATM, // + AV_CODEC_ID_QDMC, // + AV_CODEC_ID_CELT, // + AV_CODEC_ID_G723_1, // + AV_CODEC_ID_G729, // + AV_CODEC_ID_8SVX_EXP, // + AV_CODEC_ID_8SVX_FIB, // + AV_CODEC_ID_BMV_AUDIO, // + AV_CODEC_ID_RALF, // + AV_CODEC_ID_IAC, // + AV_CODEC_ID_ILBC, // + AV_CODEC_ID_OPUS, // + AV_CODEC_ID_COMFORT_NOISE, // + AV_CODEC_ID_TAK, // + AV_CODEC_ID_METASOUND, // 
+ AV_CODEC_ID_PAF_AUDIO, // + AV_CODEC_ID_ON2AVC, // + AV_CODEC_ID_DSS_SP, // + AV_CODEC_ID_CODEC2, // + + AV_CODEC_ID_FFWAVESYNTH = $15800, // + AV_CODEC_ID_SONIC, // + AV_CODEC_ID_SONIC_LS, // + AV_CODEC_ID_EVRC, // + AV_CODEC_ID_SMV, // + AV_CODEC_ID_DSD_LSBF, // + AV_CODEC_ID_DSD_MSBF, // + AV_CODEC_ID_DSD_LSBF_PLANAR, // + AV_CODEC_ID_DSD_MSBF_PLANAR, // + AV_CODEC_ID_4GV, // + AV_CODEC_ID_INTERPLAY_ACM, // + AV_CODEC_ID_XMA1, // + AV_CODEC_ID_XMA2, // + AV_CODEC_ID_DST, // + AV_CODEC_ID_ATRAC3AL, // + AV_CODEC_ID_ATRAC3PAL, // + AV_CODEC_ID_DOLBY_E, // + AV_CODEC_ID_APTX, // + AV_CODEC_ID_APTX_HD, // + AV_CODEC_ID_SBC, // + AV_CODEC_ID_ATRAC9, // + AV_CODEC_ID_HCOM, // +// AV_CODEC_ID_ACELP_KELVIN, // 4.2.2 + + (* subtitle codecs *) // + AV_CODEC_ID_FIRST_SUBTITLE = $17000, + /// < A dummy ID pointing at the start of subtitle codecs. + AV_CODEC_ID_DVD_SUBTITLE = $17000, // + AV_CODEC_ID_DVB_SUBTITLE, // + AV_CODEC_ID_TEXT, + /// < raw UTF-8 text + AV_CODEC_ID_XSUB, // + AV_CODEC_ID_SSA, // + AV_CODEC_ID_MOV_TEXT, // + AV_CODEC_ID_HDMV_PGS_SUBTITLE, // + AV_CODEC_ID_DVB_TELETEXT, // + AV_CODEC_ID_SRT, // + + AV_CODEC_ID_MICRODVD = $17800, // + AV_CODEC_ID_EIA_608, // + AV_CODEC_ID_JACOSUB, // + AV_CODEC_ID_SAMI, // + AV_CODEC_ID_REALTEXT, // + AV_CODEC_ID_STL, // + AV_CODEC_ID_SUBVIEWER1, // + AV_CODEC_ID_SUBVIEWER, // + AV_CODEC_ID_SUBRIP, // + AV_CODEC_ID_WEBVTT, // + AV_CODEC_ID_MPL2, // + AV_CODEC_ID_VPLAYER, // + AV_CODEC_ID_PJS, // + AV_CODEC_ID_ASS, // + AV_CODEC_ID_HDMV_TEXT_SUBTITLE, // + AV_CODEC_ID_TTML, // + AV_CODEC_ID_ARIB_CAPTION, // + + (* other specific kind of codecs (generally used for attachments) *) + AV_CODEC_ID_FIRST_UNKNOWN = $18000, // < A dummy ID pointing at the start of various fake codecs. + AV_CODEC_ID_TTF = $18000, // + + AV_CODEC_ID_SCTE_35, // < Contain timestamp estimated through PCR of program stream. +// AV_CODEC_ID_EPG, // 4.2.2 + AV_CODEC_ID_BINTEXT = $18800, // + AV_CODEC_ID_XBIN, // + AV_CODEC_ID_IDF, // + AV_CODEC_ID_OTF, // + AV_CODEC_ID_SMPTE_KLV, // + AV_CODEC_ID_DVD_NAV, // + AV_CODEC_ID_TIMED_ID3, // + AV_CODEC_ID_BIN_DATA, // + + AV_CODEC_ID_MPEG2TS = $20000, (* < _FAKE_ codec to indicate a raw MPEG-2 TS + * stream (only used by libavformat) *) + AV_CODEC_ID_MPEG4SYSTEMS = $20001, (* < _FAKE_ codec to indicate a MPEG-4 Systems + * stream (only used by libavformat) *) + AV_CODEC_ID_FFMETADATA = $21000, // < Dummy codec for streams containing only metadata information. + AV_CODEC_ID_WRAPPED_AVFRAME = $21001 // < Passthrough codec, AVFrames wrapped in AVPacket + ); + +const + AV_CODEC_ID_IFF_BYTERUN1: AVCodecID = AV_CODEC_ID_IFF_ILBM; + AV_CODEC_ID_H265: AVCodecID = AV_CODEC_ID_HEVC; + + (* + * The codec supports this format via the hw_device_ctx interface. + * + * When selecting this format, AVCodecContext.hw_device_ctx should + * have been set to a device of the specified type before calling + * avcodec_open2(). + *) + AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX = $01; + (* + * The codec supports this format via the hw_frames_ctx interface. + * + * When selecting this format for a decoder, + * AVCodecContext.hw_frames_ctx should be set to a suitable frames + * context inside the get_format() callback. The frames context + * must have been created on a device of the specified type. + *) + AV_CODEC_HW_CONFIG_METHOD_HW_FRAMES_CTX = $02; + (* + * The codec supports this format by some internal method. + * + * This format can be selected without any additional configuration - + * no device or frames context is required. 
+ *) + AV_CODEC_HW_CONFIG_METHOD_INTERNAL = $04; + (* + * The codec supports this format by some ad-hoc method. + * + * Additional settings and/or function calls are required. See the + * codec-specific documentation for details. (Methods requiring + * this sort of configuration are deprecated and others should be + * used in preference.) + *) + AV_CODEC_HW_CONFIG_METHOD_AD_HOC = $08; + + (* + * Codec uses only intra compression. + * Video and audio codecs only. + *) + AV_CODEC_PROP_INTRA_ONLY = (1 shl 0); + (* + * Codec supports lossy compression. Audio and video codecs only. + * @note a codec may support both lossy and lossless + * compression modes + *) + AV_CODEC_PROP_LOSSY = (1 shl 1); + (* + * Codec supports lossless compression. Audio and video codecs only. + *) + AV_CODEC_PROP_LOSSLESS = (1 shl 2); + (* + * Codec supports frame reordering. That is, the coded order (the order in which + * the encoded packets are output by the encoders / stored / input to the + * decoders) may be different from the presentation order of the corresponding + * frames. + * + * For codecs that do not have this property set, PTS and DTS should always be + * equal. + *) + AV_CODEC_PROP_REORDER = (1 shl 3); + (* + * Subtitle codec is bitmap based + * Decoded AVSubtitle data can be read from the AVSubtitleRect->pict field. + *) + AV_CODEC_PROP_BITMAP_SUB = (1 shl 16); + (* + * Subtitle codec is text based. + * Decoded AVSubtitle data can be read from the AVSubtitleRect->ass field. + *) + AV_CODEC_PROP_TEXT_SUB = (1 shl 17); + + (* + * @ingroup lavc_decoding + * Required number of additionally allocated bytes at the end of the input bitstream for decoding. + * This is mainly needed because some optimized bitstream readers read + * 32 or 64 bit at once and could read over the end.
+ * Note: If the first 23 bits of the additional bytes are not 0, then damaged + * MPEG bitstreams could cause overread and segfault. + *) + AV_INPUT_BUFFER_PADDING_SIZE = 64; + + (* + * @ingroup lavc_encoding + * minimum encoding buffer size + * Used to avoid some checks during header writing. + *) + AV_INPUT_BUFFER_MIN_SIZE = 16384; + + (* encoding support + These flags can be passed in AVCodecContext.flags before initialization. + Note: Not everything is supported yet. + *) + + (* + * Allow decoders to produce frames with data planes that are not aligned + * to CPU requirements (e.g. due to cropping). + *) + AV_CODEC_FLAG_UNALIGNED = (1 shl 0); + (* + * Use fixed qscale. + *) + AV_CODEC_FLAG_QSCALE = (1 shl 1); + (* + * 4 MV per MB allowed / advanced prediction for H.263. + *) + AV_CODEC_FLAG_4MV = (1 shl 2); + (* + * Output even those frames that might be corrupted. + *) + AV_CODEC_FLAG_OUTPUT_CORRUPT = (1 shl 3); + (* + * Use qpel MC. + *) + AV_CODEC_FLAG_QPEL = (1 shl 4); + (* + * Don't output frames whose parameters differ from first + * decoded frame in stream. + *) + AV_CODEC_FLAG_DROPCHANGED = (1 shl 5); + (* + * Use internal 2pass ratecontrol in first pass mode. + *) + AV_CODEC_FLAG_PASS1 = (1 shl 9); + (* + * Use internal 2pass ratecontrol in second pass mode. + *) + AV_CODEC_FLAG_PASS2 = (1 shl 10); + (* + * loop filter. + *) + AV_CODEC_FLAG_LOOP_FILTER = (1 shl 11); + (* + * Only decode/encode grayscale. + *) + AV_CODEC_FLAG_GRAY = (1 shl 13); + (* + * error[?] variables will be set during encoding. + *) + AV_CODEC_FLAG_PSNR = (1 shl 15); + (* + * Input bitstream might be truncated at a random location + * instead of only at frame boundaries. + *) + AV_CODEC_FLAG_TRUNCATED = (1 shl 16); + (* + * Use interlaced DCT. + *) + AV_CODEC_FLAG_INTERLACED_DCT = (1 shl 18); + (* + * Force low delay. + *) + AV_CODEC_FLAG_LOW_DELAY = (1 shl 19); + (* + * Place global headers in extradata instead of every keyframe. + *) + AV_CODEC_FLAG_GLOBAL_HEADER = (1 shl 22); + (* + * Use only bitexact stuff =(except =(I);DCT);. + *) + AV_CODEC_FLAG_BITEXACT = (1 shl 23); + (* Fx : Flag for H.263+ extra options *) + (* + * H.263 advanced intra coding / MPEG-4 AC prediction + *) + AV_CODEC_FLAG_AC_PRED = (1 shl 24); + (* + * interlaced motion estimation + *) + AV_CODEC_FLAG_INTERLACED_ME = (1 shl 29); + AV_CODEC_FLAG_CLOSED_GOP = (1 shl 31); + + (* + * Allow non spec compliant speedup tricks. + *) + AV_CODEC_FLAG2_FAST = (1 shl 0); + (* + * Skip bitstream encoding. + *) + AV_CODEC_FLAG2_NO_OUTPUT = (1 shl 2); + (* + * Place global headers at every keyframe instead of in extradata. + *) + AV_CODEC_FLAG2_LOCAL_HEADER = (1 shl 3); + + (* + * timecode is in drop frame format. DEPRECATED!!!! + *) + AV_CODEC_FLAG2_DROP_FRAME_TIMECODE = (1 shl 13)deprecated; + + (* + * Input bitstream might be truncated at a packet boundaries + * instead of only at frame boundaries. + *) + AV_CODEC_FLAG2_CHUNKS = (1 shl 15); + (* + * Discard cropping information from SPS. 
+ *) + AV_CODEC_FLAG2_IGNORE_CROP = (1 shl 16); + + (* + * Show all frames before the first keyframe + *) + AV_CODEC_FLAG2_SHOW_ALL = (1 shl 22); + (* + * Export motion vectors through frame side data + *) + AV_CODEC_FLAG2_EXPORT_MVS = (1 shl 28); + (* + * Do not skip samples and export skip information as frame side data + *) + AV_CODEC_FLAG2_SKIP_MANUAL = (1 shl 29); + (* + * Do not reset ASS ReadOrder field on flush =(subtitles decoding); + *) + AV_CODEC_FLAG2_RO_FLUSH_NOOP = (1 shl 30); + + (* Unsupported options : + * Syntax Arithmetic coding =(SAC); + * Reference Picture Selection + * Independent Segment Decoding *) + (* /Fx *) + (* codec capabilities *) + + (* + * Decoder can use draw_horiz_band callback. + *) + AV_CODEC_CAP_DRAW_HORIZ_BAND = (1 shl 0); + (* + * Codec uses get_buffer=(); for allocating buffers and supports custom allocators. + * If not set, it might not use get_buffer=(); at all or use operations that + * assume the buffer was allocated by avcodec_default_get_buffer. + *) + AV_CODEC_CAP_DR1 = (1 shl 1); + AV_CODEC_CAP_TRUNCATED = (1 shl 3); + (* + * Encoder or decoder requires flushing with NULL input at the end in order to + * give the complete and correct output. + * + * NOTE: If this flag is not set, the codec is guaranteed to never be fed with + * with NULL data. The user can still send NULL data to the public encode + * or decode function, but libavcodec will not pass it along to the codec + * unless this flag is set. + * + * Decoders: + * The decoder has a non-zero delay and needs to be fed with avpkt->data=NULL, + * avpkt->size=0 at the end to get the delayed data until the decoder no longer + * returns frames. + * + * Encoders: + * The encoder needs to be fed with NULL data at the end of encoding until the + * encoder no longer returns data. + * + * NOTE: For encoders implementing the AVCodec.encode2=(); function, setting this + * flag also means that the encoder must set the pts and duration for + * each output packet. If this flag is not set, the pts and duration will + * be determined by libavcodec from the input frame. + *) + AV_CODEC_CAP_DELAY = (1 shl 5); + (* + * Codec can be fed a final frame with a smaller size. + * This can be used to prevent truncation of the last audio samples. + *) + AV_CODEC_CAP_SMALL_LAST_FRAME = (1 shl 6); + + (* + * Codec can output multiple frames per AVPacket + * Normally demuxers return one frame at a time, demuxers which do not do + * are connected to a parser to split what they return into proper frames. + * This flag is reserved to the very rare category of codecs which have a + * bitstream that cannot be split into frames without timeconsuming + * operations like full decoding. Demuxers carrying such bitstreams thus + * may return multiple frames in a packet. This has many disadvantages like + * prohibiting stream copy in many cases thus it should only be considered + * as a last resort. + *) + AV_CODEC_CAP_SUBFRAMES = (1 shl 8); + (* + * Codec is experimental and is thus avoided in favor of non experimental + * encoders + *) + AV_CODEC_CAP_EXPERIMENTAL = (1 shl 9); + (* + * Codec should fill in channel configuration and samplerate instead of container + *) + AV_CODEC_CAP_CHANNEL_CONF = (1 shl 10); + (* + * Codec supports frame-level multithreading. + *) + AV_CODEC_CAP_FRAME_THREADS = (1 shl 12); + (* + * Codec supports slice-based =(or partition-based); multithreading. + *) + AV_CODEC_CAP_SLICE_THREADS = (1 shl 13); + (* + * Codec supports changed parameters at any point. 
+ *) + AV_CODEC_CAP_PARAM_CHANGE = (1 shl 14); + (* + * Codec supports avctx->thread_count == 0 =(auto);. + *) + AV_CODEC_CAP_AUTO_THREADS = (1 shl 15); + (* + * Audio encoder supports receiving a different number of samples in each call. + *) + AV_CODEC_CAP_VARIABLE_FRAME_SIZE = (1 shl 16); + (* + * Decoder is not a preferred choice for probing. + * This indicates that the decoder is not a good choice for probing. + * It could for example be an expensive to spin up hardware decoder, + * or it could simply not provide a lot of useful information about + * the stream. + * A decoder marked with this flag should only be used as last resort + * choice for probing. + *) + AV_CODEC_CAP_AVOID_PROBING = (1 shl 17); + (* + * Codec is intra only. + *) + AV_CODEC_CAP_INTRA_ONLY = $40000000; + (* + * Codec is lossless. + *) + AV_CODEC_CAP_LOSSLESS = $80000000; + + (* + * Codec is backed by a hardware implementation. Typically used to + * identify a non-hwaccel hardware decoder. For information about hwaccels, use + * avcodec_get_hw_config=(); instead. + *) + AV_CODEC_CAP_HARDWARE = (1 shl 18); + + (* + * Codec is potentially backed by a hardware implementation, but not + * necessarily. This is used instead of AV_CODEC_CAP_HARDWARE, if the + * implementation provides some sort of internal fallback. + *) + AV_CODEC_CAP_HYBRID = (1 shl 19); + (* + * This codec takes the reordered_opaque field from input AVFrames + * and returns it in the corresponding field in AVCodecContext after + * encoding. + *) + AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE = (1 shl 20); + +const + (* + * HWAccel is experimental and is thus avoided in favor of non experimental + * codecs + *) + AV_HWACCEL_CODEC_CAP_EXPERIMENTAL = $0200; + + (* + * Hardware acceleration should be used for decoding even if the codec level + * used is unknown or higher than the maximum supported level reported by the + * hardware driver. + * + * It's generally a good idea to pass this flag unless you have a specific + * reason not to, as hardware tends to under-report supported levels. + *) + AV_HWACCEL_FLAG_IGNORE_LEVEL = (1 shl 0); + + (* + * Hardware acceleration can output YUV pixel formats with a different chroma + * sampling than 4:2:0 and/or other than 8 bits per component. + *) + AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH = (1 shl 1); + + (* + * Hardware acceleration should still be attempted for decoding when the + * codec profile does not match the reported capabilities of the hardware. + * + * For example, this can be used to try to decode baseline profile H.264 + * streams in hardware - it will often succeed, because many streams marked + * as baseline profile actually conform to constrained baseline profile. + * + * @warning If the stream is actually not supported then the behaviour is + * undefined, and may include returning entirely incorrect output + * while indicating success. + *) + AV_HWACCEL_FLAG_ALLOW_PROFILE_MISMATCH = (1 shl 2); + +type + + (* + * AVProfile. + *) + AVProfile = record + profile: int; + name: pAnsiChar; // < short name for the profile + end; + + pAVProfile = ^AVProfile; + + (* + * This struct describes the properties of a single codec described by an + * AVCodecID. + * @see avcodec_descriptor_get() + *) + pAVCodecDescriptor = ^AVCodecDescriptor; + + AVCodecDescriptor = record + id: AVCodecID; + _type: AVMediaType; + (* + * Name of the codec described by this descriptor. It is non-empty and + * unique for each codec descriptor. It should contain alphanumeric + * characters and '_' only. 
+ *) + name: pAnsiChar; + (* + * A more descriptive name for this codec. May be NULL. + *) + long_name: pAnsiChar; + (* + * Codec properties, a combination of AV_CODEC_PROP_* flags. + *) + props: int; + (* + * MIME type(s) associated with the codec. + * May be NULL; if not, a NULL-terminated array of MIME types. + * The first item is always non-NULL and is the preferred MIME type. + *) + // const char *const *mime_types; + mime_types: PPAnsiChar; + (* + * If non-NULL, an array of profiles recognized for this codec. + * Terminated with FF_PROFILE_UNKNOWN. + *) + // const struct AVProfile *profiles; + profiles: pAVProfile; + end; + + // AVHWDeviceType = ( + // AV_HWDEVICE_TYPE_VDPAU, + // AV_HWDEVICE_TYPE_CUDA, + // AV_HWDEVICE_TYPE_VAAPI, + // AV_HWDEVICE_TYPE_DXVA2, + // AV_HWDEVICE_TYPE_QSV, + // AV_HWDEVICE_TYPE_VIDEOTOOLBOX, + // AV_HWDEVICE_TYPE_NONE, + // AV_HWDEVICE_TYPE_D3D11VA, + // AV_HWDEVICE_TYPE_DRM + // ); + + pAVCodecHWConfig = ^AVCodecHWConfig; + + AVCodecHWConfig = record + (* + * A hardware pixel format which the codec can use. + *) + pix_fmt: AVPixelFormat; + (* + * Bit set of AV_CODEC_HW_CONFIG_METHOD_* flags, describing the possible + * setup methods which can be used with this configuration. + *) + methods: int; + (* + * The device type associated with the configuration. + * + * Must be set for AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX and + * AV_CODEC_HW_CONFIG_METHOD_HW_FRAMES_CTX, otherwise unused. + *) + device_type: AVHWDeviceType; + end; + + (* + * @ingroup lavc_decoding + *) + AVDiscard = ( // + (* We leave some space between them for extensions (drop some + * keyframes for intra-only or drop just some bidir frames). *) + AVDISCARD_NONE = -16, // < discard nothing + AVDISCARD_DEFAULT = 0, // < discard useless packets like 0 size packets in avi + AVDISCARD_NONREF = 8, // < discard all non reference + AVDISCARD_BIDIR = 16, // < discard all bidirectional frames + AVDISCARD_NONINTRA = 24, // < discard all non intra frames + AVDISCARD_NONKEY = 32, // < discard all frames except keyframes + AVDISCARD_ALL = 48 // < discard all + ); + + AVAudioServiceType = ( // + AV_AUDIO_SERVICE_TYPE_MAIN = 0, AV_AUDIO_SERVICE_TYPE_EFFECTS = 1, AV_AUDIO_SERVICE_TYPE_VISUALLY_IMPAIRED = 2, AV_AUDIO_SERVICE_TYPE_HEARING_IMPAIRED = 3, + AV_AUDIO_SERVICE_TYPE_DIALOGUE = 4, AV_AUDIO_SERVICE_TYPE_COMMENTARY = 5, AV_AUDIO_SERVICE_TYPE_EMERGENCY = 6, AV_AUDIO_SERVICE_TYPE_VOICE_OVER = 7, + AV_AUDIO_SERVICE_TYPE_KARAOKE = 8, AV_AUDIO_SERVICE_TYPE_NB + // < Not part of ABI + ); + + (* + * @ingroup lavc_encoding + *) + RcOverride = record + start_frame: int; + end_frame: int; + qscale: int; // If this is 0 then quality_factor will be used instead. + quality_factor: float; + end; + + pRcOverride = ^RcOverride; + + (* + * Pan Scan area. + * This specifies the area which should be displayed. + * Note there may be multiple such areas for one frame. + *) + AVPanScan = record + (* + * id + * - encoding: Set by user. + * - decoding: Set by libavcodec. + *) + id: int; + + (* + * width and height in 1/16 pel + * - encoding: Set by user. + * - decoding: Set by libavcodec. + *) + width: int; + height: int; + + (* + * position of the top left corner in 1/16 pel for up to 3 fields/frames + * - encoding: Set by user. + * - decoding: Set by libavcodec. + *) + position: array [0 .. 2, 0 .. 1] of int16_t; + end; + + (* + * This structure describes the bitrate properties of an encoded bitstream. It + * roughly corresponds to a subset the VBV parameters for MPEG-2 or HRD + * parameters for H.264/HEVC. 
+ *) + pAVCPBProperties = ^AVCPBProperties; + + AVCPBProperties = record + (* + * Maximum bitrate of the stream, in bits per second. + * Zero if unknown or unspecified. + *) +{$IFDEF FF_API_UNSANITIZED_BITRATES} + max_bitrate: int; +{$ELSE} + max_bitrate: int64_t; +{$ENDIF} + (* + * Minimum bitrate of the stream, in bits per second. + * Zero if unknown or unspecified. + *) +{$IFDEF FF_API_UNSANITIZED_BITRATES} + min_bitrate: int; +{$ELSE} + min_bitrate: int64_t; +{$ENDIF} + (* + * Average bitrate of the stream, in bits per second. + * Zero if unknown or unspecified. + *) +{$IFDEF FF_API_UNSANITIZED_BITRATES} + avg_bitrate: int; +{$ELSE} + avg_bitrate: int64_t; +{$ENDIF} + (* + * The size of the buffer to which the ratecontrol is applied, in bits. + * Zero if unknown or unspecified. + *) + buffer_size: int; + + (* + * The delay between the time the packet this structure is associated with + * is received and the time when it should be decoded, in periods of a 27MHz + * clock. + * + * UINT64_MAX when unknown or unspecified. + *) + vbv_delay: uint64_t; + end; + +const + (* + * The decoder will keep a reference to the frame and may reuse it later. + *) + AV_GET_BUFFER_FLAG_REF = (1 shl 0); + + AV_PKT_FLAG_KEY = $0001; // < The packet contains a keyframe + AV_PKT_FLAG_CORRUPT = $0002; // < The packet content is corrupted + (* + * Flag is used to discard packets which are required to maintain valid + * decoder state but are not required for output and should be dropped + * after decoding. + * *) + AV_PKT_FLAG_DISCARD = $0004; + (* + * The packet comes from a trusted source. + * + * Otherwise-unsafe constructs such as arbitrary pointers to data + * outside the packet may be followed. + *) + AV_PKT_FLAG_TRUSTED = $0008; + (* + * Flag is used to indicate packets that contain frames that can + * be discarded by the decoder. I.e. Non-reference frames. 
+ *) + AV_PKT_FLAG_DISPOSABLE = $0010; + + // AVCodecContext -> int compression_level; + FF_COMPRESSION_DEFAULT = -1; +{$IFDEF FF_API_PRIVATE_OPT} + // AVCodecContext -> int prediction_method; + FF_PRED_LEFT = 0; + FF_PRED_PLANE = 1; + FF_PRED_MEDIAN = 2; +{$ENDIF} + // AVCodecContext -> int ildct_cmp; + FF_CMP_SAD = 0; + FF_CMP_SSE = 1; + FF_CMP_SATD = 2; + FF_CMP_DCT = 3; + FF_CMP_PSNR = 4; + FF_CMP_BIT = 5; + FF_CMP_RD = 6; + FF_CMP_ZERO = 7; + FF_CMP_VSAD = 8; + FF_CMP_VSSE = 9; + FF_CMP_NSSE = 10; + FF_CMP_W53 = 11; + FF_CMP_W97 = 12; + FF_CMP_DCTMAX = 13; + FF_CMP_DCT264 = 14; + FF_CMP_MEDIAN_SAD = 15; + FF_CMP_CHROMA = 256; + + // AVCodecContext -> slice_flags:int; + SLICE_FLAG_CODED_ORDER = $0001; // < draw_horiz_band() is called in coded order instead of display + SLICE_FLAG_ALLOW_FIELD = $0002; // < allow draw_horiz_band() with field slices (MPEG-2 field pics) + SLICE_FLAG_ALLOW_PLANE = $0004; // < allow draw_horiz_band() with 1 component at a time (SVQ1) + + // AVCodecContext -> int mb_decision; + FF_MB_DECISION_SIMPLE = 0; // < uses mb_cmp + FF_MB_DECISION_BITS = 1; // < chooses the one which needs the fewest bits + FF_MB_DECISION_RD = 2; // < rate distortion + +{$IFDEF FF_API_CODER_TYPE} + // AVCodecContext -> int coder_type; + FF_CODER_TYPE_VLC = 0; + FF_CODER_TYPE_AC = 1; + FF_CODER_TYPE_RAW = 2; + FF_CODER_TYPE_RLE = 3; +{$ENDIF} + // AVCodecContext -> int workaround_bugs; + FF_BUG_AUTODETECT = 1; // < autodetection + FF_BUG_XVID_ILACE = 4; + FF_BUG_UMP4 = 8; + FF_BUG_NO_PADDING = 16; + FF_BUG_AMV = 32; + FF_BUG_QPEL_CHROMA = 64; + FF_BUG_STD_QPEL = 128; + FF_BUG_QPEL_CHROMA2 = 256; + FF_BUG_DIRECT_BLOCKSIZE = 512; + FF_BUG_EDGE = 1024; + FF_BUG_HPEL_CHROMA = 2048; + FF_BUG_DC_CLIP = 4096; + FF_BUG_MS = 8192; // < Work around various bugs in Microsoft's broken decoders. + FF_BUG_TRUNCATED = 16384; + FF_BUG_IEDGE = 32768; + + // AVCodecContext -> int strict_std_compliance; + FF_COMPLIANCE_VERY_STRICT = 2; // < Strictly conform to an older more strict version of the spec or reference software. + FF_COMPLIANCE_STRICT = 1; // < Strictly conform to all the things in the spec no matter what consequences. + FF_COMPLIANCE_NORMAL = 0; + FF_COMPLIANCE_UNOFFICIAL = -1; // < Allow unofficial extensions + FF_COMPLIANCE_EXPERIMENTAL = -2; // < Allow nonstandardized experimental things. + + // AVCodecContext -> error_concealment:int; + FF_EC_GUESS_MVS = 1; + FF_EC_DEBLOCK = 2; + FF_EC_FAVOR_INTER = 256; + + // AVCodecContext -> debug:int; + FF_DEBUG_PICT_INFO = 1; + FF_DEBUG_RC = 2; + FF_DEBUG_BITSTREAM = 4; + FF_DEBUG_MB_TYPE = 8; + FF_DEBUG_QP = 16; +{$IFDEF FF_API_DEBUG_MV} + (* + * @deprecated this option does nothing + *) + FF_DEBUG_MV = 32 deprecated; +{$ENDIF} + FF_DEBUG_DCT_COEFF = $00000040; + FF_DEBUG_SKIP = $00000080; + FF_DEBUG_STARTCODE = $00000100; + FF_DEBUG_ER = $00000400; + FF_DEBUG_MMCO = $00000800; + FF_DEBUG_BUGS = $00001000; +{$IFDEF FF_API_DEBUG_MV} + FF_DEBUG_VIS_QP = $00002000; + FF_DEBUG_VIS_MB_TYPE = $00004000; +{$ENDIF} + FF_DEBUG_BUFFERS = $00008000; + FF_DEBUG_THREADS = $00010000; + FF_DEBUG_GREEN_MD = $00800000; + FF_DEBUG_NOMC = $01000000; +{$IFDEF FF_API_DEBUG_MV} + (* + * debug + * - encoding: Set by user. + * - decoding: Set by user. 
+ *) + // AVCodecContext -> int debug_mv; + FF_DEBUG_VIS_MV_P_FOR = $00000001; // visualize forward predicted MVs of P-frames + FF_DEBUG_VIS_MV_B_FOR = $00000002; // visualize forward predicted MVs of B-frames + FF_DEBUG_VIS_MV_B_BACK = $00000004; // visualize backward predicted MVs of B-frames +{$ENDIF} + // AVCodecContext ->err_recognition:int; + (* + * Verify checksums embedded in the bitstream (could be of either encoded or + * decoded data, depending on the codec) and print an error message on mismatch. + * If AV_EF_EXPLODE is also set, a mismatching checksum will result in the + * decoder returning an error. + *) + AV_EF_CRCCHECK = (1 shl 0); + AV_EF_BITSTREAM = (1 shl 1); // < detect bitstream specification deviations + AV_EF_BUFFER = (1 shl 2); // < detect improper bitstream length + AV_EF_EXPLODE = (1 shl 3); // < abort decoding on minor error detection + + AV_EF_IGNORE_ERR = (1 shl 15); // < ignore errors and continue + AV_EF_CAREFUL = (1 shl 16); + // < consider things that violate the spec, are fast to calculate and have not been seen in the wild as errors + AV_EF_COMPLIANT = (1 shl 17); // < consider all spec non compliances as errors + AV_EF_AGGRESSIVE = (1 shl 18); // < consider things that a sane encoder should not do as an error + + // AVCodecContext ->int dct_algo; + FF_DCT_AUTO = 0; + FF_DCT_FASTINT = 1; + FF_DCT_INT = 2; + FF_DCT_MMX = 3; + FF_DCT_ALTIVEC = 5; + FF_DCT_FAAN = 6; + + // AVCodecContext -> int idct_algo; + FF_IDCT_AUTO = 0; + FF_IDCT_INT = 1; + FF_IDCT_SIMPLE = 2; + FF_IDCT_SIMPLEMMX = 3; + FF_IDCT_ARM = 7; + FF_IDCT_ALTIVEC = 8; + FF_IDCT_SIMPLEARM = 10; + FF_IDCT_XVID = 14; + FF_IDCT_SIMPLEARMV5TE = 16; + FF_IDCT_SIMPLEARMV6 = 17; + FF_IDCT_FAAN = 20; + FF_IDCT_SIMPLENEON = 22; + FF_IDCT_NONE = 24; (* Used by XvMC to extract IDCT coefficients with FF_IDCT_PERM_NONE *) + FF_IDCT_SIMPLEAUTO = 128; + + // AVCodecContext -> int thread_type; + FF_THREAD_FRAME = 1; // < Decode more than one frame at once + FF_THREAD_SLICE = 2; // < Decode more than one part of a single frame at once + + // AVCodecContext -> int profile; + FF_PROFILE_UNKNOWN = -99; + FF_PROFILE_RESERVED = -100; + + FF_PROFILE_AAC_MAIN = 0; + FF_PROFILE_AAC_LOW = 1; + FF_PROFILE_AAC_SSR = 2; + FF_PROFILE_AAC_LTP = 3; + FF_PROFILE_AAC_HE = 4; + FF_PROFILE_AAC_HE_V2 = 28; + FF_PROFILE_AAC_LD = 22; + FF_PROFILE_AAC_ELD = 38; + FF_PROFILE_MPEG2_AAC_LOW = 128; + FF_PROFILE_MPEG2_AAC_HE = 131; + + FF_PROFILE_DNXHD = 0; + FF_PROFILE_DNXHR_LB = 1; + FF_PROFILE_DNXHR_SQ = 2; + FF_PROFILE_DNXHR_HQ = 3; + FF_PROFILE_DNXHR_HQX = 4; + FF_PROFILE_DNXHR_444 = 5; + + FF_PROFILE_DTS = 20; + FF_PROFILE_DTS_ES = 30; + FF_PROFILE_DTS_96_24 = 40; + FF_PROFILE_DTS_HD_HRA = 50; + FF_PROFILE_DTS_HD_MA = 60; + FF_PROFILE_DTS_EXPRESS = 70; + + FF_PROFILE_MPEG2_422 = 0; + FF_PROFILE_MPEG2_HIGH = 1; + FF_PROFILE_MPEG2_SS = 2; + FF_PROFILE_MPEG2_SNR_SCALABLE = 3; + FF_PROFILE_MPEG2_MAIN = 4; + FF_PROFILE_MPEG2_SIMPLE = 5; + + FF_PROFILE_H264_CONSTRAINED = (1 shl 9); // 8+1; constraint_set1_flag + FF_PROFILE_H264_INTRA = (1 shl 11); // 8+3; constraint_set3_flag + + FF_PROFILE_H264_BASELINE = 66; + FF_PROFILE_H264_CONSTRAINED_BASELINE = (66 or FF_PROFILE_H264_CONSTRAINED); + FF_PROFILE_H264_MAIN = 77; + FF_PROFILE_H264_EXTENDED = 88; + FF_PROFILE_H264_HIGH = 100; + FF_PROFILE_H264_HIGH_10 = 110; + FF_PROFILE_H264_HIGH_10_INTRA = (110 or FF_PROFILE_H264_INTRA); + FF_PROFILE_H264_MULTIVIEW_HIGH = 118; + FF_PROFILE_H264_HIGH_422 = 122; + FF_PROFILE_H264_HIGH_422_INTRA = (122 or FF_PROFILE_H264_INTRA); + 
FF_PROFILE_H264_STEREO_HIGH = 128; + FF_PROFILE_H264_HIGH_444 = 144; + FF_PROFILE_H264_HIGH_444_PREDICTIVE = 244; + FF_PROFILE_H264_HIGH_444_INTRA = (244 or FF_PROFILE_H264_INTRA); + FF_PROFILE_H264_CAVLC_444 = 44; + + FF_PROFILE_VC1_SIMPLE = 0; + FF_PROFILE_VC1_MAIN = 1; + FF_PROFILE_VC1_COMPLEX = 2; + FF_PROFILE_VC1_ADVANCED = 3; + + FF_PROFILE_MPEG4_SIMPLE = 0; + FF_PROFILE_MPEG4_SIMPLE_SCALABLE = 1; + FF_PROFILE_MPEG4_CORE = 2; + FF_PROFILE_MPEG4_MAIN = 3; + FF_PROFILE_MPEG4_N_BIT = 4; + FF_PROFILE_MPEG4_SCALABLE_TEXTURE = 5; + FF_PROFILE_MPEG4_SIMPLE_FACE_ANIMATION = 6; + FF_PROFILE_MPEG4_BASIC_ANIMATED_TEXTURE = 7; + FF_PROFILE_MPEG4_HYBRID = 8; + FF_PROFILE_MPEG4_ADVANCED_REAL_TIME = 9; + FF_PROFILE_MPEG4_CORE_SCALABLE = 10; + FF_PROFILE_MPEG4_ADVANCED_CODING = 11; + FF_PROFILE_MPEG4_ADVANCED_CORE = 12; + FF_PROFILE_MPEG4_ADVANCED_SCALABLE_TEXTURE = 13; + FF_PROFILE_MPEG4_SIMPLE_STUDIO = 14; + FF_PROFILE_MPEG4_ADVANCED_SIMPLE = 15; + + FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_0 = 1; + FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_1 = 2; + FF_PROFILE_JPEG2000_CSTREAM_NO_RESTRICTION = 32768; + FF_PROFILE_JPEG2000_DCINEMA_2K = 3; + FF_PROFILE_JPEG2000_DCINEMA_4K = 4; + + FF_PROFILE_VP9_0 = 0; + FF_PROFILE_VP9_1 = 1; + FF_PROFILE_VP9_2 = 2; + FF_PROFILE_VP9_3 = 3; + + FF_PROFILE_HEVC_MAIN = 1; + FF_PROFILE_HEVC_MAIN_10 = 2; + FF_PROFILE_HEVC_MAIN_STILL_PICTURE = 3; + FF_PROFILE_HEVC_REXT = 4; + + FF_PROFILE_AV1_MAIN = 0; + FF_PROFILE_AV1_HIGH = 1; + FF_PROFILE_AV1_PROFESSIONAL = 2; + + FF_PROFILE_MJPEG_HUFFMAN_BASELINE_DCT = $C0; + FF_PROFILE_MJPEG_HUFFMAN_EXTENDED_SEQUENTIAL_DCT = $C1; + FF_PROFILE_MJPEG_HUFFMAN_PROGRESSIVE_DCT = $C2; + FF_PROFILE_MJPEG_HUFFMAN_LOSSLESS = $C3; + FF_PROFILE_MJPEG_JPEG_LS = $F7; + + FF_PROFILE_SBC_MSBC = 1; + + FF_PROFILE_PRORES_PROXY = 0; + FF_PROFILE_PRORES_LT = 1; + FF_PROFILE_PRORES_STANDARD = 2; + FF_PROFILE_PRORES_HQ = 3; + FF_PROFILE_PRORES_4444 = 4; + FF_PROFILE_PRORES_XQ = 5; + + FF_PROFILE_ARIB_PROFILE_A = 0; + FF_PROFILE_ARIB_PROFILE_C = 1; + + (* + * level + * - encoding: Set by user. + * - decoding: Set by libavcodec. + *) + // AVCodecContext ->int level; + FF_LEVEL_UNKNOWN = -99; + + // AVCodecContext -> int sub_charenc_mode; + FF_SUB_CHARENC_MODE_DO_NOTHING = -1; + // < do nothing (demuxer outputs a stream supposed to be already in UTF-8, or the codec is bitmap for instance) + FF_SUB_CHARENC_MODE_AUTOMATIC = 0; // < libavcodec will select the mode itself + FF_SUB_CHARENC_MODE_PRE_DECODER = 1; + // < the AVPacket data needs to be recoded to UTF-8 before being fed to the decoder, requires iconv + FF_SUB_CHARENC_MODE_IGNORE = 2; // < neither convert the subtitles, nor check them for valid UTF-8 + + // AVCodecContext -> unsigned properties; + FF_CODEC_PROPERTY_LOSSLESS = $00000001; + FF_CODEC_PROPERTY_CLOSED_CAPTIONS = $00000002; + + // AVCodecContext -> int sub_text_format; + FF_SUB_TEXT_FMT_ASS = 0; +{$IFDEF FF_API_ASS_TIMING} + FF_SUB_TEXT_FMT_ASS_WITH_TIMINGS = 1; +{$ENDIF} + AV_SUBTITLE_FLAG_FORCED = $00000001; + +type + (* + * @defgroup lavc_packet AVPacket + * + * Types and functions for working with AVPacket. + * @{ + *) + AVPacketSideDataType = ( // + (* + * An AV_PKT_DATA_PALETTE side data packet contains exactly AVPALETTE_SIZE + * bytes worth of palette. This side data signals that a new palette is + * present. + *) + AV_PKT_DATA_PALETTE, + + (* + * The AV_PKT_DATA_NEW_EXTRADATA is used to notify the codec or the format + * that the extradata buffer was changed and the receiving side should + * act upon it appropriately. 
The new extradata is embedded in the side + * data buffer and should be immediately used for processing the current + * frame or packet. + *) + AV_PKT_DATA_NEW_EXTRADATA, + + (* + * An AV_PKT_DATA_PARAM_CHANGE side data packet is laid out as follows: + * @code + * u32le param_flags + * if (param_flags & AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_COUNT) + * s32le channel_count + * if (param_flags & AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_LAYOUT) + * u64le channel_layout + * if (param_flags & AV_SIDE_DATA_PARAM_CHANGE_SAMPLE_RATE) + * s32le sample_rate + * if (param_flags & AV_SIDE_DATA_PARAM_CHANGE_DIMENSIONS) + * s32le width + * s32le height + * @endcode + *) + AV_PKT_DATA_PARAM_CHANGE, + + (* + * An AV_PKT_DATA_H263_MB_INFO side data packet contains a number of + * structures with info about macroblocks relevant to splitting the + * packet into smaller packets on macroblock edges (e.g. as for RFC 2190). + * That is, it does not necessarily contain info about all macroblocks, + * as long as the distance between macroblocks in the info is smaller + * than the target payload size. + * Each MB info structure is 12 bytes, and is laid out as follows: + * @code + * u32le bit offset from the start of the packet + * u8 current quantizer at the start of the macroblock + * u8 GOB number + * u16le macroblock address within the GOB + * u8 horizontal MV predictor + * u8 vertical MV predictor + * u8 horizontal MV predictor for block number 3 + * u8 vertical MV predictor for block number 3 + * @endcode + *) + AV_PKT_DATA_H263_MB_INFO, + + (* + * This side data should be associated with an audio stream and contains + * ReplayGain information in form of the AVReplayGain struct. + *) + AV_PKT_DATA_REPLAYGAIN, + + (* + * This side data contains a 3x3 transformation matrix describing an affine + * transformation that needs to be applied to the decoded video frames for + * correct presentation. + * + * See libavutil/display.h for a detailed description of the data. + *) + AV_PKT_DATA_DISPLAYMATRIX, + + (* + * This side data should be associated with a video stream and contains + * Stereoscopic 3D information in form of the AVStereo3D struct. + *) + AV_PKT_DATA_STEREO3D, + + (* + * This side data should be associated with an audio stream and corresponds + * to enum AVAudioServiceType. + *) + AV_PKT_DATA_AUDIO_SERVICE_TYPE, + + (* + * This side data contains quality related information from the encoder. + * @code + * u32le quality factor of the compressed frame. Allowed range is between 1 (good) and FF_LAMBDA_MAX (bad). + * u8 picture type + * u8 error count + * u16 reserved + * u64le[error count] sum of squared differences between encoder in and output + * @endcode + *) + AV_PKT_DATA_QUALITY_STATS, + + (* + * This side data contains an integer value representing the stream index + * of a "fallback" track. A fallback track indicates an alternate + * track to use when the current track can not be decoded for some reason. + * e.g. no decoder available for codec. + *) + AV_PKT_DATA_FALLBACK_TRACK, + + (* + * This side data corresponds to the AVCPBProperties struct. 
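+ *
+ * Editor's note, not part of the upstream header: packet side data of any of
+ * these types is normally read back through av_packet_get_side_data(). A
+ * minimal sketch, assuming that function is imported by these bindings:
+ * @code
+ * var
+ *   sz  : int;
+ *   buf : puint8_t;
+ * begin
+ *   // pkt: pAVPacket previously filled by the demuxer
+ *   buf:=av_packet_get_side_data(pkt, AV_PKT_DATA_CPB_PROPERTIES, @sz);
+ *   if buf<>nil then
+ *     Writeln('CPB side data: ', sz, ' bytes');
+ * end;
+ * @endcode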
+ *) + AV_PKT_DATA_CPB_PROPERTIES, + + (* + * Recommmends skipping the specified number of samples + * @code + * u32le number of samples to skip from start of this packet + * u32le number of samples to skip from end of this packet + * u8 reason for start skip + * u8 reason for end skip (0=padding silence, 1=convergence) + * @endcode + *) + AV_PKT_DATA_SKIP_SAMPLES, + + (* + * An AV_PKT_DATA_JP_DUALMONO side data packet indicates that + * the packet may contain "dual mono" audio specific to Japanese DTV + * and if it is true, recommends only the selected channel to be used. + * @code + * u8 selected channels (0=mail/left, 1=sub/right, 2=both) + * @endcode + *) + AV_PKT_DATA_JP_DUALMONO, + + (* + * A list of zero terminated key/value strings. There is no end marker for + * the list, so it is required to rely on the side data size to stop. + *) + AV_PKT_DATA_STRINGS_METADATA, + + (* + * Subtitle event position + * @code + * u32le x1 + * u32le y1 + * u32le x2 + * u32le y2 + * @endcode + *) + AV_PKT_DATA_SUBTITLE_POSITION, + + (* + * Data found in BlockAdditional element of matroska container. There is + * no end marker for the data, so it is required to rely on the side data + * size to recognize the end. 8 byte id (as found in BlockAddId) followed + * by data. + *) + AV_PKT_DATA_MATROSKA_BLOCKADDITIONAL, + + (* + * The optional first identifier line of a WebVTT cue. + *) + AV_PKT_DATA_WEBVTT_IDENTIFIER, + + (* + * The optional settings (rendering instructions) that immediately + * follow the timestamp specifier of a WebVTT cue. + *) + AV_PKT_DATA_WEBVTT_SETTINGS, + + (* + * A list of zero terminated key/value strings. There is no end marker for + * the list, so it is required to rely on the side data size to stop. This + * side data includes updated metadata which appeared in the stream. + *) + AV_PKT_DATA_METADATA_UPDATE, + + (* + * MPEGTS stream ID as uint8_t, this is required to pass the stream ID + * information from the demuxer to the corresponding muxer. + *) + AV_PKT_DATA_MPEGTS_STREAM_ID, + + (* + * Mastering display metadata (based on SMPTE-2086:2014). This metadata + * should be associated with a video stream and contains data in the form + * of the AVMasteringDisplayMetadata struct. + *) + AV_PKT_DATA_MASTERING_DISPLAY_METADATA, + + (* + * This side data should be associated with a video stream and corresponds + * to the AVSphericalMapping structure. + *) + AV_PKT_DATA_SPHERICAL, + + (* + * Content light level (based on CTA-861.3). This metadata should be + * associated with a video stream and contains data in the form of the + * AVContentLightMetadata struct. + *) + AV_PKT_DATA_CONTENT_LIGHT_LEVEL, + + (* + * ATSC A53 Part 4 Closed Captions. This metadata should be associated with + * a video stream. A53 CC bitstream is stored as uint8_t in AVPacketSideData.data. + * The number of bytes of CC data is AVPacketSideData.size. + *) + AV_PKT_DATA_A53_CC, + + (* + * This side data is encryption initialization data. + * The format is not part of ABI, use av_encryption_init_info_* methods to + * access. + *) + AV_PKT_DATA_ENCRYPTION_INIT_INFO, + + (* + * This side data contains encryption info for how to decrypt the packet. + * The format is not part of ABI, use av_encryption_info_* methods to access. + *) + AV_PKT_DATA_ENCRYPTION_INFO, + + (* + * Active Format Description data consisting of a single byte as specified + * in ETSI TS 101 154 using AVActiveFormatDescription enum. + *) + AV_PKT_DATA_AFD, + + (* + * The number of side data types. 
+ * This is not part of the public API/ABI in the sense that it may + * change when new side data types are added. + * This must stay the last enum value. + * If its value becomes huge, some code using it + * needs to be updated as it assumes it to be smaller than other limits. + *) + AV_PKT_DATA_NB); + + AVPacketSideData = record + data: puint8_t; + size: int; + _type: AVPacketSideDataType; + end; + + pAVPacketSideData = ^AVPacketSideData; + + (* + * This structure stores compressed data. It is typically exported by demuxers + * and then passed as input to decoders, or received as output from encoders and + * then passed to muxers. + * + * For video, it should typically contain one compressed frame. For audio it may + * contain several compressed frames. Encoders are allowed to output empty + * packets, with no compressed data, containing only side data + * (e.g. to update some stream parameters at the end of encoding). + * + * AVPacket is one of the few structs in FFmpeg, whose size is a part of public + * ABI. Thus it may be allocated on stack and no new fields can be added to it + * without libavcodec and libavformat major bump. + * + * The semantics of data ownership depends on the buf field. + * If it is set, the packet data is dynamically allocated and is + * valid indefinitely until a call to av_packet_unref() reduces the + * reference count to 0. + * + * If the buf field is not set av_packet_ref() would make a copy instead + * of increasing the reference count. + * + * The side data is always allocated with av_malloc(), copied by + * av_packet_ref() and freed by av_packet_unref(). + * + * @see av_packet_ref + * @see av_packet_unref + *) + pAVPacket = ^AVPacket; + + AVPacket = record + (* + * A reference to the reference-counted buffer where the packet data is + * stored. + * May be NULL, then the packet data is not reference-counted. + *) + buf: pAVBufferRef; + (* + * Presentation timestamp in AVStream->time_base units; the time at which + * the decompressed packet will be presented to the user. + * Can be AV_NOPTS_VALUE if it is not stored in the file. + * pts MUST be larger or equal to dts as presentation cannot happen before + * decompression, unless one wants to view hex dumps. Some formats misuse + * the terms dts and pts/cts to mean something different. Such timestamps + * must be converted to true pts/dts before they are stored in AVPacket. + *) + pts: int64_t; + (* + * Decompression timestamp in AVStream->time_base units; the time at which + * the packet is decompressed. + * Can be AV_NOPTS_VALUE if it is not stored in the file. + *) + dts: int64_t; + data: puint8_t; + size: int; + stream_index: int; + (* + * A combination of AV_PKT_FLAG values + *) + flags: int; + (* + * Additional packet data that can be provided by the container. + * Packet can contain several types of side information. + *) + side_data: pAVPacketSideData; + side_data_elems: int; + + (* + * Duration of this packet in AVStream->time_base units, 0 if unknown. + * Equals next_pts - this_pts in presentation order. + *) + duration: int64_t; + + pos: int64_t; // < byte position in stream, -1 if unknown + +{$IFDEF FF_API_CONVERGENCE_DURATION} + (* + * @deprecated Same as the duration field, but as int64_t. This was required + * for Matroska subtitles, whose duration values could overflow when the + * duration field was still an int. 
+ *) + // attribute_deprecated + convergence_duration: int64_t deprecated; +{$ENDIF} + end; + + AVSideDataParamChangeFlags = ( // + AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_COUNT = $0001, // + AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_LAYOUT = $0002, // + AV_SIDE_DATA_PARAM_CHANGE_SAMPLE_RATE = $0004, // + AV_SIDE_DATA_PARAM_CHANGE_DIMENSIONS = $0008 // + ); + (* + * @} + *) + + AVCodecInternal = record + + end; + + pAVCodecInternal = ^AVCodecInternal; + + AVFieldOrder = ( // + AV_FIELD_UNKNOWN, AV_FIELD_PROGRESSIVE, AV_FIELD_TT, // < Top coded_first, top displayed first + AV_FIELD_BB, // < Bottom coded first, bottom displayed first + AV_FIELD_TB, // < Top coded first, bottom displayed first + AV_FIELD_BT // < Bottom coded first, top displayed first + ); + + AVCodecDefault = record + end; + + (* + * AVCodec. + *) + pAVCodec = ^avcodec; + ppAVCodec = ^pAVCodec; + pAVCodecContext = ^AVCodecContext; + ppAVCodecContext = ^pAVCodecContext; + pAVCodecDefault = ^AVCodecDefault; + pAVSubtitle = ^AVSubtitle; + + avcodec = record + (* + * Name of the codec implementation. + * The name is globally unique among encoders and among decoders (but an + * encoder and a decoder can share the same name). + * This is the primary way to find a codec from the user perspective. + *) + name: pAnsiChar; + (* + * Descriptive name for the codec, meant to be more human readable than name. + * You should use the NULL_IF_CONFIG_SMALL() macro to define it. + *) + long_name: pAnsiChar; + _type: AVMediaType; + id: AVCodecID; + (* + * Codec capabilities. + * see AV_CODEC_CAP_* + *) + capabilities: int; + supported_framerates: pAVRational; // < array of supported framerates, or NULL if any, array is terminated by {0,0} + pix_fmts: pAVPixelFormat; // < array of supported pixel formats, or NULL if unknown, array is terminated by -1 + supported_samplerates: pInt; // < array of supported audio samplerates, or NULL if unknown, array is terminated by 0 + sample_fmts: pAVSampleFormat; // < array of supported sample formats, or NULL if unknown, array is terminated by -1 + channel_layouts: puint64_t; // < array of support channel layouts, or NULL if unknown. array is terminated by 0 + max_lowres: uint8_t; // < maximum value for lowres supported by the decoder + priv_class: pAVClass; // < AVClass for the private context + profiles: pAVProfile; // < array of recognized profiles, or NULL if unknown, array is terminated by {FF_PROFILE_UNKNOWN} + + (* + * Group name of the codec implementation. + * This is a short symbolic name of the wrapper backing this codec. A + * wrapper uses some kind of external implementation for the codec, such + * as an external library, or a codec implementation provided by the OS or + * the hardware. + * If this field is NULL, this is a builtin, libavcodec native codec. + * If non-NULL, this will be the suffix in AVCodec.name in most cases + * (usually AVCodec.name will be of the form "_"). + *) + wrapper_name: pAnsiChar; + + (* *************************************************************** + * No fields below this line are part of the public API. They + * may not be used outside of libavcodec and can be changed and + * removed at will. + * New public fields should be added right above. + ***************************************************************** + *) + priv_data_size: int; + next: pAVCodec; + (* + * @name Frame-level threading support functions + * @{ + *) + (* + * If defined, called on thread contexts when they are created. + * If the codec allocates writable tables in init(), re-allocate them here. 
+ * priv_data will be set to a copy of the original. + *) + // int (*init_thread_copy)(AVCodecContext *); + init_thread_copy: function(ctx: pAVCodecContext): int; cdecl; + (* + * Copy necessary context variables from a previous thread context to the current one. + * If not defined, the next thread will start automatically; otherwise, the codec + * must call ff_thread_finish_setup(). + * + * dst and src will (rarely) point to the same context, in which case memcpy should be skipped. + *) + // int (*update_thread_context)(AVCodecContext *dst, const AVCodecContext *src); + update_thread_context: function(dst: pAVCodecContext; const src: pAVCodecContext): int; cdecl; + + (* + * Private codec-specific defaults. + *) + defaults: pAVCodecDefault; + + (* + * Initialize codec static data, called from avcodec_register(). + * + * This is not intended for time consuming operations as it is + * run for every codec regardless of that codec being used. + *) + // void (*init_static_data)(struct AVCodec *codec); + init_static_data: procedure(codec: pAVCodec); cdecl; + + // int (*init)(AVCodecContext *); + init: function(ctx: pAVCodecContext): int; cdecl; + // int (*encode_sub)(AVCodecContext *, uint8_t *buf, int buf_size, const struct AVSubtitle *sub); + encode_sub: function(ctx: pAVCodecContext; buf: puint8_t; buf_size: int; const sub: pAVSubtitle): int; cdecl; + (* + * Encode data to an AVPacket. + * + * @param avctx codec context + * @param avpkt output AVPacket (may contain a user-provided buffer) + * @param[in] frame AVFrame containing the raw data to be encoded + * @param[out] got_packet_ptr encoder sets to 0 or 1 to indicate that a + * non-empty packet was returned in avpkt. + * @return 0 on success, negative error code on failure + *) + // int (*encode2)(AVCodecContext *avctx, AVPacket *avpkt, const AVFrame *frame,int *got_packet_ptr); + encode2: function(avctx: pAVCodecContext; avpkt: pAVPacket; const frame: pAVFrame; got_packet_ptr: pInt): int; cdecl; + // int (*decode)(AVCodecContext *, void *outdata, int *outdata_size, AVPacket *avpkt); + decode: function(cnt: pAVCodecContext; outdata: Pointer; outdata_size: pInt; avpkt: pAVPacket): int; cdecl; + // int (*close)(AVCodecContext *); + close: function(ctx: pAVCodecContext): int; cdecl; + (* + * Encode API with decoupled packet/frame dataflow. The API is the + * same as the avcodec_ prefixed APIs (avcodec_send_frame() etc.), except + * that: + * - never called if the codec is closed or the wrong type, + * - if AV_CODEC_CAP_DELAY is not set, drain frames are never sent, + * - only one drain frame is ever passed down, + *) + // int (*send_frame)(AVCodecContext *avctx, const AVFrame *frame); + send_frame: function(avctx: pAVCodecContext; const frame: pAVFrame): int; cdecl; + // int (*receive_packet)(AVCodecContext *avctx, AVPacket *avpkt); + receive_packet: function(avctx: pAVCodecContext; avpkt: pAVPacket): int; cdecl; + + (* + * Decode API with decoupled packet/frame dataflow. This function is called + * to get one output frame. It should call ff_decode_get_packet() to obtain + * input data. + *) + // int (*receive_frame)(AVCodecContext *avctx, AVFrame *frame); + receive_frame: function(avctx: pAVCodecContext; frame: pAVFrame): int; cdecl; + (* + * Flush buffers. + * Will be called when seeking + *) + // void (*flush)(AVCodecContext *); + flush: procedure(ctx: pAVCodecContext); cdecl; + (* + * Internal codec capabilities. 
+ * See FF_CODEC_CAP_* in internal.h + *) + caps_internal: int; + + (* + * Decoding only, a comma-separated list of bitstream filters to apply to + * packets before decoding. + *) + bsfs: pAnsiChar; + + (* + * Array of pointers to hardware configurations supported by the codec, + * or NULL if no hardware supported. The array is terminated by a NULL + * pointer. + * + * The user can only access this field via avcodec_get_hw_config(). + *) + hw_configs: ppAVCodecHWConfigInternal; + end; + + TAVCodecContext_execute = function(c2: pAVCodecContext; arg: Pointer): int; cdecl; + + TAVCodecContext_execute2 = function(c2: pAVCodecContext; arg: Pointer; jobnr: int; threadnr: int): int; cdecl; + + (* + * main external API structure. + * New fields can be added to the end with minor version bumps. + * Removal, reordering and changes to existing fields require a major + * version bump. + * You can use AVOptions (av_opt* / av_set/get*()) to access these fields from user + * applications. + * The name string for AVOptions options matches the associated command line + * parameter name and can be found in libavcodec/options_table.h + * The AVOption/command line parameter names differ in some cases from the C + * structure field names for historic reasons or brevity. + * sizeof(AVCodecContext) must not be used outside libav*. + *) + AVCodecContext = record + (* + * information on struct for av_log + * - set by avcodec_alloc_context3 + *) + av_class: pAVClass; + log_level_offset: int; + + codec_type: AVMediaType; (* see AVMEDIA_TYPE_xxx *) + codec: pAVCodec; + codec_id: AVCodecID; (* see AV_CODEC_ID_xxx *) + + (* + * fourcc (LSB first, so "ABCD" -> ('D'shl24) + ('C'shl16) + ('B'shl8) + 'A'). + * This is used to work around some encoder bugs. + * A demuxer should set this to what is stored in the field used to identify the codec. + * If there are multiple such fields in a container then the demuxer should choose the one + * which maximizes the information about the used codec. + * If the codec tag field in a container is larger than 32 bits then the demuxer should + * remap the longer ID to 32 bits with a table or other structure. Alternatively a new + * extra_codec_tag + size could be added but for this a clear advantage must be demonstrated + * first. + * - encoding: Set by user, if not then the default based on codec_id will be used. + * - decoding: Set by user, will be converted to uppercase by libavcodec during init. + *) + codec_tag: unsigned; + + priv_data: Pointer; + + (* + * Private context used for internal data. + * + * Unlike priv_data, this is not codec-specific. It is used in general + * libavcodec functions. + *) + internal: pAVCodecInternal; + + (* + * Private data of the user, can be used to carry app specific stuff. + * - encoding: Set by user. + * - decoding: Set by user. + *) + opaque: Pointer; + + (* + * the average bitrate + * - encoding: Set by user; unused for constant quantizer encoding. + * - decoding: Set by user, may be overwritten by libavcodec + * if this info is available in the stream + *) + bit_rate: int64_t; + + (* + * number of bits the bitstream is allowed to diverge from the reference. + * the reference can be CBR (for CBR pass1) or VBR (for pass2) + * - encoding: Set by user; unused for constant quantizer encoding. + * - decoding: unused + *) + bit_rate_tolerance: int; + + (* + * Global quality for codecs which cannot change it per frame. + * This should be proportional to MPEG-1/2/4 qscale. + * - encoding: Set by user. 
+ * - decoding: unused + *) + global_quality: int; + + (* + * - encoding: Set by user. + * - decoding: unused + *) + compression_level: int; + + (* + * AV_CODEC_FLAG_*. + * - encoding: Set by user. + * - decoding: Set by user. + *) + flags: int; + + (* + * AV_CODEC_FLAG2_* + * - encoding: Set by user. + * - decoding: Set by user. + *) + flags2: int; + + (* + * some codecs need / can use extradata like Huffman tables. + * MJPEG: Huffman tables + * rv10: additional flags + * MPEG-4: global headers (they can be in the bitstream or here) + * The allocated memory should be AV_INPUT_BUFFER_PADDING_SIZE bytes larger + * than extradata_size to avoid problems if it is read with the bitstream reader. + * The bytewise contents of extradata must not depend on the architecture or CPU endianness. + * Must be allocated with the av_malloc() family of functions. + * - encoding: Set/allocated/freed by libavcodec. + * - decoding: Set/allocated/freed by user. + *) + extradata: puint8_t; + extradata_size: int; + + (* + * This is the fundamental unit of time (in seconds) in terms + * of which frame timestamps are represented. For fixed-fps content, + * timebase should be 1/framerate and timestamp increments should be + * identically 1. + * This often, but not always is the inverse of the frame rate or field rate + * for video. 1/time_base is not the average frame rate if the frame rate is not + * constant. + * + * Like containers, elementary streams also can store timestamps, 1/time_base + * is the unit in which these timestamps are specified. + * As example of such codec time base see ISO/IEC 14496-2:2001(E) + * vop_time_increment_resolution and fixed_vop_rate + * (fixed_vop_rate == 0 implies that it is different from the framerate) + * + * - encoding: MUST be set by user. + * - decoding: the use of this field for decoding is deprecated. + * Use framerate instead. + *) + time_base: AVRational; + + (* + * For some codecs, the time base is closer to the field rate than the frame rate. + * Most notably, H.264 and MPEG-2 specify time_base as half of frame duration + * if no telecine is used ... + * + * Set to time_base ticks per frame. Default 1, e.g., H.264/MPEG-2 set it to 2. + *) + ticks_per_frame: int; + + (* + * Codec delay. + * + * Encoding: Number of frames delay there will be from the encoder input to + * the decoder output. (we assume the decoder matches the spec) + * Decoding: Number of frames delay in addition to what a standard decoder + * as specified in the spec would produce. + * + * Video: + * Number of frames the decoded output will be delayed relative to the + * encoded input. + * + * Audio: + * For encoding, this field is unused (see initial_padding). + * + * For decoding, this is the number of samples the decoder needs to + * output before the decoder's output is valid. When seeking, you should + * start decoding this many samples prior to your desired seek point. + * + * - encoding: Set by libavcodec. + * - decoding: Set by libavcodec. + *) + delay: int; + + (* video only *) + (* + * picture width / height. + * + * @note Those fields may not match the values of the last + * AVFrame output by avcodec_decode_video2 due frame + * reordering. + * + * - encoding: MUST be set by user. + * - decoding: May be set by the user before opening the decoder if known e.g. + * from the container. Some decoders will require the dimensions + * to be set by the caller. During decoding, the decoder may + * overwrite those values as required while parsing the data. 
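+ *
+ * Editor's note, not part of the upstream header: a hedged sketch of priming
+ * a decoder with dimensions known from the container before the codec is
+ * opened, assuming the usual avcodec_open2() binding is imported:
+ * @code
+ * avctx^.width :=1920;
+ * avctx^.height:=1080;
+ * if avcodec_open2(avctx, codec, nil)<0 then Exit;
+ * @endcode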
+ *) + width, height: int; + + (* + * Bitstream width / height, may be different from width/height e.g. when + * the decoded frame is cropped before being output or lowres is enabled. + * + * @note Those field may not match the value of the last + * AVFrame output by avcodec_receive_frame() due frame + * reordering. + * + * - encoding: unused + * - decoding: May be set by the user before opening the decoder if known + * e.g. from the container. During decoding, the decoder may + * overwrite those values as required while parsing the data. + *) + coded_width, coded_height: int; + + (* + * the number of pictures in a group of pictures, or 0 for intra_only + * - encoding: Set by user. + * - decoding: unused + *) + gop_size: int; + + (* + * Pixel format, see AV_PIX_FMT_xxx. + * May be set by the demuxer if known from headers. + * May be overridden by the decoder if it knows better. + * + * @note This field may not match the value of the last + * AVFrame output by avcodec_receive_frame() due frame + * reordering. + * + * - encoding: Set by user. + * - decoding: Set by user if known, overridden by libavcodec while + * parsing the data. + *) + pix_fmt: AVPixelFormat; + + (* + * If non NULL, 'draw_horiz_band' is called by the libavcodec + * decoder to draw a horizontal band. It improves cache usage. Not + * all codecs can do that. You must check the codec capabilities + * beforehand. + * When multithreading is used, it may be called from multiple threads + * at the same time; threads might draw different parts of the same AVFrame, + * or multiple AVFrames, and there is no guarantee that slices will be drawn + * in order. + * The function is also used by hardware acceleration APIs. + * It is called at least once during frame decoding to pass + * the data needed for hardware render. + * In that mode instead of pixel data, AVFrame points to + * a structure specific to the acceleration API. The application + * reads the structure and can change some fields to indicate progress + * or mark state. + * - encoding: unused + * - decoding: Set by user. + * @param height the height of the slice + * @param y the y position of the slice + * @param type 1->top field, 2->bottom field, 3->frame + * @param offset offset into the AVFrame.data from which the slice should be read + *) + // void (*draw_horiz_band)(struct AVCodecContext *s, + // const AVFrame *src, int offset[AV_NUM_DATA_POINTERS], + // int y, int type, int height); + draw_horiz_band: procedure(s: pAVCodecContext; const src: pAVFrame; offset: pAVNDPArray; y, _type, height: int); cdecl; + + (* + * callback to negotiate the pixelFormat + * @param fmt is the list of formats which are supported by the codec, + * it is terminated by -1 as 0 is a valid format, the formats are ordered by quality. + * The first is always the native one. + * @note The callback may be called again immediately if initialization for + * the selected (hardware-accelerated) pixel format failed. + * @warning Behavior is undefined if the callback returns a value not + * in the fmt list of formats. + * @return the chosen format + * - encoding: unused + * - decoding: Set by user, if not set the native format will be chosen. + *) + // enum AVPixelFormat (*get_format)(struct AVCodecContext *s, const enum AVPixelFormat * fmt); + get_format: procedure(s: pAVCodecContext; const fmt: pAVPixelFormat); cdecl; + + (* + * maximum number of B-frames between non-B-frames + * Note: The output will be delayed by max_b_frames+1 relative to the input. + * - encoding: Set by user. 
+ * - decoding: unused + *) + max_b_frames: int; + + (* + * qscale factor between IP and B-frames + * If > 0 then the last P-frame quantizer will be used (q= lastp_q*factor+offset). + * If < 0 then normal ratecontrol will be done (q= -normal_q*factor+offset). + * - encoding: Set by user. + * - decoding: unused + *) + b_quant_factor: float; + +{$IFDEF FF_API_PRIVATE_OPT} + (* @deprecated use encoder private options instead *) + // attribute_deprecated + b_frame_strategy: int deprecated; +{$ENDIF} + (* + * qscale offset between IP and B-frames + * - encoding: Set by user. + * - decoding: unused + *) + b_quant_offset: float; + + (* + * Size of the frame reordering buffer in the decoder. + * For MPEG-2 it is 1 IPB or 0 low delay IP. + * - encoding: Set by libavcodec. + * - decoding: Set by libavcodec. + *) + has_b_frames: int; + +{$IFDEF FF_API_PRIVATE_OPT} + (* @deprecated use encoder private options instead *) + // attribute_deprecated + mpeg_quant: int deprecated; +{$ENDIF} + (* + * qscale factor between P- and I-frames + * If > 0 then the last P-frame quantizer will be used (q = lastp_q * factor + offset). + * If < 0 then normal ratecontrol will be done (q= -normal_q*factor+offset). + * - encoding: Set by user. + * - decoding: unused + *) + i_quant_factor: float; + + (* + * qscale offset between P and I-frames + * - encoding: Set by user. + * - decoding: unused + *) + i_quant_offset: float; + + (* + * luminance masking (0-> disabled) + * - encoding: Set by user. + * - decoding: unused + *) + lumi_masking: float; + + (* + * temporary complexity masking (0-> disabled) + * - encoding: Set by user. + * - decoding: unused + *) + temporal_cplx_masking: float; + + (* + * spatial complexity masking (0-> disabled) + * - encoding: Set by user. + * - decoding: unused + *) + spatial_cplx_masking: float; + + (* + * p block masking (0-> disabled) + * - encoding: Set by user. + * - decoding: unused + *) + p_masking: float; + + (* + * darkness masking (0-> disabled) + * - encoding: Set by user. + * - decoding: unused + *) + dark_masking: float; + + (* + * slice count + * - encoding: Set by libavcodec. + * - decoding: Set by user (or 0). + *) + slice_count: int; + +{$IFDEF FF_API_PRIVATE_OPT} + (* @deprecated use encoder private options instead *) + // attribute_deprecated + prediction_method: int deprecated; +{$ENDIF} + (* + * slice offsets in the frame in bytes + * - encoding: Set/allocated by libavcodec. + * - decoding: Set/allocated by user (or NULL). + *) + slice_offset: pInt; + + (* + * sample aspect ratio (0 if unknown) + * That is the width of a pixel divided by the height of the pixel. + * Numerator and denominator must be relatively prime and smaller than 256 for some video standards. + * - encoding: Set by user. + * - decoding: Set by libavcodec. + *) + sample_aspect_ratio: AVRational; + + (* + * motion estimation comparison function + * - encoding: Set by user. + * - decoding: unused + *) + me_cmp: int; + (* + * subpixel motion estimation comparison function + * - encoding: Set by user. + * - decoding: unused + *) + me_sub_cmp: int; + (* + * macroblock comparison function (not supported yet) + * - encoding: Set by user. + * - decoding: unused + *) + mb_cmp: int; + (* + * interlaced DCT comparison function + * - encoding: Set by user. + * - decoding: unused + *) + ildct_cmp: int; + + (* + * ME diamond size & shape + * - encoding: Set by user. + * - decoding: unused + *) + dia_size: int; + + (* + * amount of previous MV predictors (2a+1 x 2a+1 square) + * - encoding: Set by user. 
+ * - decoding: unused + *) + last_predictor_count: int; + +{$IFDEF FF_API_PRIVATE_OPT} + (* @deprecated use encoder private options instead *) + // attribute_deprecated + pre_me: int deprecated; +{$ENDIF} + (* + * motion estimation prepass comparison function + * - encoding: Set by user. + * - decoding: unused + *) + me_pre_cmp: int; + + (* + * ME prepass diamond size & shape + * - encoding: Set by user. + * - decoding: unused + *) + pre_dia_size: int; + + (* + * subpel ME quality + * - encoding: Set by user. + * - decoding: unused + *) + me_subpel_quality: int; + + (* + * maximum motion estimation search range in subpel units + * If 0 then no limit. + * + * - encoding: Set by user. + * - decoding: unused + *) + me_range: int; + + (* + * slice flags + * - encoding: unused + * - decoding: Set by user. + *) + slice_flags: int; + + (* + * macroblock decision mode + * - encoding: Set by user. + * - decoding: unused + *) + mb_decision: int; + + (* + * custom intra quantization matrix + * Must be allocated with the av_malloc() family of functions, and will be freed in + * avcodec_free_context(). + * - encoding: Set/allocated by user, freed by libavcodec. Can be NULL. + * - decoding: Set/allocated/freed by libavcodec. + *) + intra_matrix: puint16_t; + + (* + * custom inter quantization matrix + * Must be allocated with the av_malloc() family of functions, and will be freed in + * avcodec_free_context(). + * - encoding: Set/allocated by user, freed by libavcodec. Can be NULL. + * - decoding: Set/allocated/freed by libavcodec. + *) + inter_matrix: puint16_t; + +{$IFDEF FF_API_PRIVATE_OPT} + (* @deprecated use encoder private options instead *) + // attribute_deprecated + scenechange_threshold: int deprecated; + + (* @deprecated use encoder private options instead *) + // attribute_deprecated + noise_reduction: int deprecated; +{$ENDIF} + (* + * precision of the intra DC coefficient - 8 + * - encoding: Set by user. + * - decoding: Set by libavcodec + *) + intra_dc_precision: int; + + (* + * Number of macroblock rows at the top which are skipped. + * - encoding: unused + * - decoding: Set by user. + *) + skip_top: int; + + (* + * Number of macroblock rows at the bottom which are skipped. + * - encoding: unused + * - decoding: Set by user. + *) + skip_bottom: int; + + (* + * minimum MB Lagrange multiplier + * - encoding: Set by user. + * - decoding: unused + *) + mb_lmin: int; + + (* + * maximum MB Lagrange multiplier + * - encoding: Set by user. + * - decoding: unused + *) + mb_lmax: int; + +{$IFDEF FF_API_PRIVATE_OPT} + (* + * @deprecated use encoder private options instead + *) + // attribute_deprecated + me_penalty_compensation: int deprecated; +{$ENDIF} + (* + * - encoding: Set by user. + * - decoding: unused + *) + bidir_refine: int; + +{$IFDEF FF_API_PRIVATE_OPT} + (* @deprecated use encoder private options instead *) + // attribute_deprecated + brd_scale: int deprecated; +{$ENDIF} + (* + * minimum GOP size + * - encoding: Set by user. + * - decoding: unused + *) + keyint_min: int; + + (* + * number of reference frames + * - encoding: Set by user. + * - decoding: Set by lavc. + *) + refs: int; + +{$IFDEF FF_API_PRIVATE_OPT} + (* @deprecated use encoder private options instead *) + // attribute_deprecated + chromaoffset: int; +{$ENDIF} + (* + * Note: Value depends upon the compare function used for fullpel ME. + * - encoding: Set by user. 
+ * - decoding: unused + *) + mv0_threshold: int; + +{$IFDEF FF_API_PRIVATE_OPT} + (* @deprecated use encoder private options instead *) + // attribute_deprecated + b_sensitivity: int deprecated; +{$ENDIF} + (* + * Chromaticity coordinates of the source primaries. + * - encoding: Set by user + * - decoding: Set by libavcodec + *) + color_primaries: AVColorPrimaries; + + (* + * Color Transfer Characteristic. + * - encoding: Set by user + * - decoding: Set by libavcodec + *) + color_trc: AVColorTransferCharacteristic; + + (* + * YUV colorspace type. + * - encoding: Set by user + * - decoding: Set by libavcodec + *) + colorspace: AVColorSpace; + + (* + * MPEG vs JPEG YUV range. + * - encoding: Set by user + * - decoding: Set by libavcodec + *) + color_range: AVColorRange; + + (* + * This defines the location of chroma samples. + * - encoding: Set by user + * - decoding: Set by libavcodec + *) + chroma_sample_location: AVChromaLocation; + + (* + * Number of slices. + * Indicates number of picture subdivisions. Used for parallelized + * decoding. + * - encoding: Set by user + * - decoding: unused + *) + slices: int; + + (* Field order + * - encoding: set by libavcodec + * - decoding: Set by user. + *) + field_order: AVFieldOrder; + + (* audio only *) + sample_rate: int; // < samples per second + channels: int; // < number of audio channels + + (* + * audio sample format + * - encoding: Set by user. + * - decoding: Set by libavcodec. + *) + sample_fmt: AVSampleFormat; // < sample format + + (* The following data should not be initialized. *) + (* + * Number of samples per channel in an audio frame. + * + * - encoding: set by libavcodec in avcodec_open2(). Each submitted frame + * except the last must contain exactly frame_size samples per channel. + * May be 0 when the codec has AV_CODEC_CAP_VARIABLE_FRAME_SIZE set, then the + * frame size is not restricted. + * - decoding: may be set by some decoders to indicate constant frame size + *) + frame_size: int; + + (* + * Frame counter, set by libavcodec. + * + * - decoding: total number of frames returned from the decoder so far. + * - encoding: total number of frames passed to the encoder so far. + * + * @note the counter is not incremented if encoding/decoding resulted in + * an error. + *) + frame_number: int; + + (* + * number of bytes per packet if constant and known or 0 + * Used by some WAV based audio codecs. + *) + block_align: int; + + (* + * Audio cutoff bandwidth (0 means "automatic") + * - encoding: Set by user. + * - decoding: unused + *) + cutoff: int; + + (* + * Audio channel layout. + * - encoding: set by user. + * - decoding: set by user, may be overwritten by libavcodec. + *) + channel_layout: uint64_t; + + (* + * Request decoder to use this channel layout if it can (0 for default) + * - encoding: unused + * - decoding: Set by user. + *) + request_channel_layout: uint64_t; + + (* + * Type of service that the audio stream conveys. + * - encoding: Set by user. + * - decoding: Set by libavcodec. + *) + audio_service_type: AVAudioServiceType; + + (* + * desired sample format + * - encoding: Not used. + * - decoding: Set by user. + * Decoder will decode to this format if it can. + *) + request_sample_fmt: AVSampleFormat; + + (* + * This callback is called at the beginning of each frame to get data + * buffer(s) for it. There may be one contiguous buffer for all the data or + * there may be a buffer per each data plane or anything in between. What + * this means is, you may set however many entries in buf[] you feel necessary. 
+ * Each buffer must be reference-counted using the AVBuffer API (see description + * of buf[] below). + * + * The following fields will be set in the frame before this callback is + * called: + * - format + * - width, height (video only) + * - sample_rate, channel_layout, nb_samples (audio only) + * Their values may differ from the corresponding values in + * AVCodecContext. This callback must use the frame values, not the codec + * context values, to calculate the required buffer size. + * + * This callback must fill the following fields in the frame: + * - data[] + * - linesize[] + * - extended_data: + * * if the data is planar audio with more than 8 channels, then this + * callback must allocate and fill extended_data to contain all pointers + * to all data planes. data[] must hold as many pointers as it can. + * extended_data must be allocated with av_malloc() and will be freed in + * av_frame_unref(). + * * otherwise extended_data must point to data + * - buf[] must contain one or more pointers to AVBufferRef structures. Each of + * the frame's data and extended_data pointers must be contained in these. That + * is, one AVBufferRef for each allocated chunk of memory, not necessarily one + * AVBufferRef per data[] entry. See: av_buffer_create(), av_buffer_alloc(), + * and av_buffer_ref(). + * - extended_buf and nb_extended_buf must be allocated with av_malloc() by + * this callback and filled with the extra buffers if there are more + * buffers than buf[] can hold. extended_buf will be freed in + * av_frame_unref(). + * + * If AV_CODEC_CAP_DR1 is not set then get_buffer2() must call + * avcodec_default_get_buffer2() instead of providing buffers allocated by + * some other means. + * + * Each data plane must be aligned to the maximum required by the target + * CPU. + * + * @see avcodec_default_get_buffer2() + * + * Video: + * + * If AV_GET_BUFFER_FLAG_REF is set in flags then the frame may be reused + * (read and/or written to if it is writable) later by libavcodec. + * + * avcodec_align_dimensions2() should be used to find the required width and + * height, as they normally need to be rounded up to the next multiple of 16. + * + * Some decoders do not support linesizes changing between frames. + * + * If frame multithreading is used and thread_safe_callbacks is set, + * this callback may be called from a different thread, but not from more + * than one at once. Does not need to be reentrant. + * + * @see avcodec_align_dimensions2() + * + * Audio: + * + * Decoders request a buffer of a particular size by setting + * AVFrame.nb_samples prior to calling get_buffer2(). The decoder may, + * however, utilize only part of the buffer by setting AVFrame.nb_samples + * to a smaller value in the output frame. + * + * As a convenience, av_samples_get_buffer_size() and + * av_samples_fill_arrays() in libavutil may be used by custom get_buffer2() + * functions to find the required data size and to fill data pointers and + * linesize. In AVFrame.linesize, only linesize[0] may be set for audio + * since all planes must be the same size. + * + * @see av_samples_get_buffer_size(), av_samples_fill_arrays() + * + * - encoding: unused + * - decoding: Set by libavcodec, user can override. 
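+ *
+ * Editor's note, not part of the upstream header: a custom override will
+ * usually delegate the actual allocation to the default implementation. A
+ * minimal sketch, assuming avcodec_default_get_buffer2() is imported:
+ * @code
+ * function my_get_buffer2(s:pAVCodecContext;frame:pAVFrame;flags:int):int; cdecl;
+ * begin
+ *   // inspect frame^.width/height/format here if needed, then let
+ *   // libavcodec perform the reference-counted allocation itself
+ *   Result:=avcodec_default_get_buffer2(s, frame, flags);
+ * end;
+ * // installed before avcodec_open2():  avctx^.get_buffer2:=@my_get_buffer2;
+ * @endcode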
+ *) + // int (*get_buffer2)(struct AVCodecContext *s, AVFrame *frame, int flags); + get_buffer2: function(s: pAVCodecContext; frame: pAVFrame; flags: int): int; cdecl; + + (* + * If non-zero, the decoded audio and video frames returned from + * avcodec_decode_video2() and avcodec_decode_audio4() are reference-counted + * and are valid indefinitely. The caller must free them with + * av_frame_unref() when they are not needed anymore. + * Otherwise, the decoded frames must not be freed by the caller and are + * only valid until the next decode call. + * + * This is always automatically enabled if avcodec_receive_frame() is used. + * + * - encoding: unused + * - decoding: set by the caller before avcodec_open2(). + *) + // attribute_deprecated + refcounted_frames: int deprecated; + + (* - encoding parameters *) + qcompress: float; // < amount of qscale change between easy & hard scenes (0.0-1.0) + qblur: float; // < amount of qscale smoothing over time (0.0-1.0) + + (* + * minimum quantizer + * - encoding: Set by user. + * - decoding: unused + *) + qmin: int; + + (* + * maximum quantizer + * - encoding: Set by user. + * - decoding: unused + *) + qmax: int; + + (* + * maximum quantizer difference between frames + * - encoding: Set by user. + * - decoding: unused + *) + max_qdiff: int; + + (* + * decoder bitstream buffer size + * - encoding: Set by user. + * - decoding: unused + *) + rc_buffer_size: int; + + (* + * ratecontrol override, see RcOverride + * - encoding: Allocated/set/freed by user. + * - decoding: unused + *) + rc_override_count: int; + rc_override: pRcOverride; + + (* + * maximum bitrate + * - encoding: Set by user. + * - decoding: Set by user, may be overwritten by libavcodec. + *) + rc_max_rate: int64_t; + + (* + * minimum bitrate + * - encoding: Set by user. + * - decoding: unused + *) + rc_min_rate: int64_t; + + (* + * Ratecontrol attempt to use, at maximum, of what can be used without an underflow. + * - encoding: Set by user. + * - decoding: unused. + *) + rc_max_available_vbv_use: float; + + (* + * Ratecontrol attempt to use, at least, times the amount needed to prevent a vbv overflow. + * - encoding: Set by user. + * - decoding: unused. + *) + rc_min_vbv_overflow_use: float; + + (* + * Number of bits which should be loaded into the rc buffer before decoding starts. + * - encoding: Set by user. + * - decoding: unused + *) + rc_initial_buffer_occupancy: int; + +{$IFDEF FF_API_CODER_TYPE} + (* + * @deprecated use encoder private options instead + *) + // attribute_deprecated + coder_type: int deprecated; +{$ENDIF} (* FF_API_CODER_TYPE *) +{$IFDEF FF_API_PRIVATE_OPT} + (* @deprecated use encoder private options instead *) + // attribute_deprecated + context_model: int deprecated; +{$ENDIF} +{$IFDEF FF_API_PRIVATE_OPT} + (* @deprecated use encoder private options instead *) + // attribute_deprecated + frame_skip_threshold: int deprecated; + + (* @deprecated use encoder private options instead *) + // attribute_deprecated + frame_skip_factor: int; + + (* @deprecated use encoder private options instead *) + // attribute_deprecated + frame_skip_exp: int deprecated; + + (* @deprecated use encoder private options instead *) + // attribute_deprecated + frame_skip_cmp: int deprecated; +{$ENDIF} (* FF_API_PRIVATE_OPT *) + + (* + * trellis RD quantization + * - encoding: Set by user. 
+ * - decoding: unused + *) + trellis: int; + +{$IFDEF FF_API_PRIVATE_OPT} + (* @deprecated use encoder private options instead *) + // attribute_deprecated + min_prediction_order: int deprecated; + + (* @deprecated use encoder private options instead *) + // attribute_deprecated + max_prediction_order: int deprecated; + + (* @deprecated use encoder private options instead *) + // attribute_deprecated + timecode_frame_start: int64_t deprecated; +{$ENDIF} +{$IFDEF FF_API_RTP_CALLBACK} + (* + * @deprecated unused + *) + (* The RTP callback: This function is called *) + (* every time the encoder has a packet to send. *) + (* It depends on the encoder if the data starts *) + (* with a Start Code (it should). H.263 does. *) + (* mb_nb contains the number of macroblocks *) + (* encoded in the RTP payload. *) + // attribute_deprecated + // void (*rtp_callback)(struct AVCodecContext *avctx, void *data, int size, int mb_nb); + rtp_callback: procedure(avctx: pAVCodecContext; data: Pointer; size: int; mb_nb: int); cdecl; +{$ENDIF} +{$IFDEF FF_API_PRIVATE_OPT} + (* @deprecated use encoder private options instead *) + // attribute_deprecated + rtp_payload_size: int deprecated; (* The size of the RTP payload: the coder will *) + (* do its best to deliver a chunk with size *) + (* below rtp_payload_size, the chunk will start *) + (* with a start code on some codecs like H.263. *) + (* This doesn't take account of any particular *) + (* headers inside the transmitted RTP payload. *) +{$ENDIF} +{$IFDEF FF_API_STAT_BITS} + (* statistics, used for 2-pass encoding *) + // attribute_deprecated + mv_bits: int deprecated; + // attribute_deprecated + header_bits: int deprecated; + // attribute_deprecated + i_tex_bits: int deprecated; + // attribute_deprecated + p_tex_bits: int deprecated; + // attribute_deprecated + i_count: int deprecated; + // attribute_deprecated + p_count: int deprecated; + // attribute_deprecated + skip_count: int deprecated; + // attribute_deprecated + misc_bits: int deprecated; + + (* @deprecated this field is unused *) + // attribute_deprecated + frame_bits: int deprecated; +{$ENDIF} + (* + * pass1 encoding statistics output buffer + * - encoding: Set by libavcodec. + * - decoding: unused + *) + stats_out: pAnsiChar; + + (* + * pass2 encoding statistics input buffer + * Concatenated stuff from stats_out of pass1 should be placed here. + * - encoding: Allocated/set/freed by user. + * - decoding: unused + *) + stats_in: pAnsiChar; + + (* + * Work around bugs in encoders which sometimes cannot be detected automatically. + * - encoding: Set by user + * - decoding: Set by user + *) + workaround_bugs: int; + + (* + * strictly follow the standard (MPEG-4, ...). + * - encoding: Set by user. + * - decoding: Set by user. + * Setting this to STRICT or higher means the encoder and decoder will + * generally do stupid things, whereas setting it to unofficial or lower + * will mean the encoder might produce output that is not supported by all + * spec-compliant decoders. Decoders don't differentiate between normal, + * unofficial and experimental (that is, they always try to decode things + * when they can) unless they are explicitly asked to behave stupidly + * (=strictly conform to the specs) + *) + strict_std_compliance: int; + + (* + * error concealment flags + * - encoding: unused + * - decoding: Set by user. + *) + error_concealment: int; + + (* + * debug + * - encoding: Set by user. + * - decoding: Set by user. 
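+ *
+ * Editor's note, not part of the upstream header: this is a bit mask built
+ * from the FF_DEBUG_* flags of the C headers (e.g. FF_DEBUG_PICT_INFO = 1),
+ * OR-ed together by the caller; the named constant may be declared elsewhere
+ * in this unit.
+ * @code
+ * avctx^.debug:=avctx^.debug or 1; // FF_DEBUG_PICT_INFO in avcodec.h
+ * @endcode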
+ *) + debug: int; + +{$IFDEF FF_API_DEBUG_MV} + (* + * debug + * - encoding: Set by user. + * - decoding: Set by user. + *) + debug_mv: int; +{$ENDIF} + (* + * Error recognition; may misdetect some more or less valid parts as errors. + * - encoding: unused + * - decoding: Set by user. + *) + err_recognition: int; + (* + * opaque 64-bit number (generally a PTS) that will be reordered and + * output in AVFrame.reordered_opaque + * - encoding: Set by libavcodec to the reordered_opaque of the input + * frame corresponding to the last returned packet. Only + * supported by encoders with the + * AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE capability. + * - decoding: Set by user. + *) + reordered_opaque: int64_t; + (* + * Hardware accelerator in use + * - encoding: unused. + * - decoding: Set by libavcodec + *) + hwaccel: pAVHWAccel; + + (* + * Hardware accelerator context. + * For some hardware accelerators, a global context needs to be + * provided by the user. In that case, this holds display-dependent + * data FFmpeg cannot instantiate itself. Please refer to the + * FFmpeg HW accelerator documentation to know how to fill this + * is. e.g. for VA API, this is a struct vaapi_context. + * - encoding: unused + * - decoding: Set by user + *) + hwaccel_context: Pointer; + + (* + * error + * - encoding: Set by libavcodec if flags & AV_CODEC_FLAG_PSNR. + * - decoding: unused + *) + // uint64_t error[AV_NUM_DATA_POINTERS]; + error: TAVNDPArray_uint64_t; + + (* + * DCT algorithm, see FF_DCT_* below + * - encoding: Set by user. + * - decoding: unused + *) + dct_algo: int; + + (* + * IDCT algorithm, see FF_IDCT_* below. + * - encoding: Set by user. + * - decoding: Set by user. + *) + idct_algo: int; + + (* + * bits per sample/pixel from the demuxer (needed for huffyuv). + * - encoding: Set by libavcodec. + * - decoding: Set by user. + *) + bits_per_coded_sample: int; + + (* + * Bits per sample/pixel of internal libavcodec pixel/sample format. + * - encoding: set by user. + * - decoding: set by libavcodec. + *) + bits_per_raw_sample: int; + +{$IFDEF FF_API_LOWRES} + (* + * low resolution decoding, 1-> 1/2 size, 2->1/4 size + * - encoding: unused + * - decoding: Set by user. + *) + lowres: int; +{$ENDIF} +{$IFDEF FF_API_CODED_FRAME} + (* + * the picture in the bitstream + * - encoding: Set by libavcodec. + * - decoding: unused + * + * @deprecated use the quality factor packet side data instead + *) + // attribute_deprecated + coded_frame: pAVFrame deprecated; +{$ENDIF} + (* + * thread count + * is used to decide how many independent tasks should be passed to execute() + * - encoding: Set by user. + * - decoding: Set by user. + *) + thread_count: int; + + (* + * Which multithreading methods to use. + * Use of FF_THREAD_FRAME will increase decoding delay by one frame per thread, + * so clients which cannot provide future frames should not use it. + * + * - encoding: Set by user, otherwise the default is used. + * - decoding: Set by user, otherwise the default is used. + *) + thread_type: int; + + (* + * Which multithreading methods are in use by the codec. + * - encoding: Set by libavcodec. + * - decoding: Set by libavcodec. + *) + active_thread_type: int; + + (* + * Set by the client if its custom get_buffer() callback can be called + * synchronously from another thread, which allows faster multithreaded decoding. + * draw_horiz_band() will be called from other threads regardless of this setting. + * Ignored if the default get_buffer() is used. + * - encoding: Set by user. + * - decoding: Set by user. 
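+ *
+ * Editor's note, not part of the upstream header: a typical way to enable
+ * multithreaded decoding with the fields above (thread_count = 0 lets
+ * libavcodec pick the number of threads automatically):
+ * @code
+ * avctx^.thread_count:=0;
+ * avctx^.thread_type :=FF_THREAD_FRAME or FF_THREAD_SLICE;
+ * @endcode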
+ *) + thread_safe_callbacks: int; + + (* + * The codec may call this to execute several independent things. + * It will return only after finishing all tasks. + * The user may replace this with some multithreaded implementation, + * the default implementation will execute the parts serially. + * @param count the number of things to execute + * - encoding: Set by libavcodec, user can override. + * - decoding: Set by libavcodec, user can override. + *) + // int (* execute)(struct AVCodecContext *c, int (*func)(struct AVCodecContext *c2, void *arg), void *arg2, int *ret, int count, int size); + execute: function(c: pAVCodecContext; func: TAVCodecContext_execute; arg2: Pointer; ret: pInt; count: int; size: int): int; cdecl; + + (* + * The codec may call this to execute several independent things. + * It will return only after finishing all tasks. + * The user may replace this with some multithreaded implementation, + * the default implementation will execute the parts serially. + * Also see avcodec_thread_init and e.g. the --enable-pthread configure option. + * @param c context passed also to func + * @param count the number of things to execute + * @param arg2 argument passed unchanged to func + * @param ret return values of executed functions, must have space for "count" values. May be NULL. + * @param func function that will be called count times, with jobnr from 0 to count-1. + * threadnr will be in the range 0 to c->thread_count-1 < MAX_THREADS and so that no + * two instances of func executing at the same time will have the same threadnr. + * @return always 0 currently, but code should handle a future improvement where when any call to func + * returns < 0 no further calls to func may be done and < 0 is returned. + * - encoding: Set by libavcodec, user can override. + * - decoding: Set by libavcodec, user can override. + *) + // int (* execute2)(struct AVCodecContext *c, int (*func)(struct AVCodecContext *c2, void *arg, int jobnr, int threadnr), void *arg2, int *ret, int count); + execute2: function(c: pAVCodecContext; func: TAVCodecContext_execute2; arg2: Pointer; ret: pInt; count: int): int; cdecl; + + (* + * noise vs. sse weight for the nsse comparison function + * - encoding: Set by user. + * - decoding: unused + *) + nsse_weight: int; + + (* + * profile + * - encoding: Set by user. + * - decoding: Set by libavcodec. + *) + profile: int; + + (* + * level + * - encoding: Set by user. + * - decoding: Set by libavcodec. + *) + level: int; + + (* + * Skip loop filtering for selected frames. + * - encoding: unused + * - decoding: Set by user. + *) + skip_loop_filter: AVDiscard; + + (* + * Skip IDCT/dequantization for selected frames. + * - encoding: unused + * - decoding: Set by user. + *) + skip_idct: AVDiscard; + + (* + * Skip decoding for selected frames. + * - encoding: unused + * - decoding: Set by user. + *) + skip_frame: AVDiscard; + + (* + * Header containing style information for text subtitles. + * For SUBTITLE_ASS subtitle type, it should contain the whole ASS + * [Script Info] and [V4+ Styles] section, plus the [Events] line and + * the Format line following. It shouldn't include any Dialogue line. + * - encoding: Set/allocated/freed by user (before avcodec_open2()) + * - decoding: Set/allocated/freed by libavcodec (by avcodec_open2()) + *) + subtitle_header: puint8_t; + subtitle_header_size: int; + +{$IFDEF FF_API_VBV_DELAY} + (* + * VBV delay coded in the last frame (in periods of a 27 MHz clock). + * Used for compliant TS muxing. + * - encoding: Set by libavcodec. 
+ * - decoding: unused. + * @deprecated this value is now exported as a part of + * AV_PKT_DATA_CPB_PROPERTIES packet side data + *) + // attribute_deprecated + vbv_delay: uint64_t deprecated; +{$ENDIF} +{$IFDEF FF_API_SIDEDATA_ONLY_PKT} + (* + * Encoding only and set by default. Allow encoders to output packets + * that do not contain any encoded data, only side data. + * + * Some encoders need to output such packets, e.g. to update some stream + * parameters at the end of encoding. + * + * @deprecated this field disables the default behaviour and + * it is kept only for compatibility. + *) + // attribute_deprecated + side_data_only_packets: int deprecated; +{$ENDIF} + (* + * Audio only. The number of "priming" samples (padding) inserted by the + * encoder at the beginning of the audio. I.e. this number of leading + * decoded samples must be discarded by the caller to get the original audio + * without leading padding. + * + * - decoding: unused + * - encoding: Set by libavcodec. The timestamps on the output packets are + * adjusted by the encoder so that they always refer to the + * first sample of the data actually contained in the packet, + * including any added padding. E.g. if the timebase is + * 1/samplerate and the timestamp of the first input sample is + * 0, the timestamp of the first output packet will be + * -initial_padding. + *) + initial_padding: int; + + (* + * - decoding: For codecs that store a framerate value in the compressed + * bitstream, the decoder may export it here. { 0, 1} when + * unknown. + * - encoding: May be used to signal the framerate of CFR content to an + * encoder. + *) + framerate: AVRational; + + (* + * Nominal unaccelerated pixel format, see AV_PIX_FMT_xxx. + * - encoding: unused. + * - decoding: Set by libavcodec before calling get_format() + *) + sw_pix_fmt: AVPixelFormat; + + (* + * Timebase in which pkt_dts/pts and AVPacket.dts/pts are. + * - encoding unused. + * - decoding set by user. + *) + pkt_timebase: AVRational; + + (* + * AVCodecDescriptor + * - encoding: unused. + * - decoding: set by libavcodec. + *) + codec_descriptor: pAVCodecDescriptor; + +{$IFNDEF FF_API_LOWRES} + (* + * low resolution decoding, 1-> 1/2 size, 2->1/4 size + * - encoding: unused + * - decoding: Set by user. + *) + lowres: int; +{$ENDIF} + (* + * Current statistics for PTS correction. + * - decoding: maintained and used by libavcodec, not intended to be used by user apps + * - encoding: unused + *) + pts_correction_num_faulty_pts: int64_t; // Number of incorrect PTS values so far + pts_correction_num_faulty_dts: int64_t; // Number of incorrect DTS values so far + pts_correction_last_pts: int64_t; // PTS of the last frame + pts_correction_last_dts: int64_t; // DTS of the last frame + + (* + * Character encoding of the input subtitles file. + * - decoding: set by user + * - encoding: unused + *) + sub_charenc: pAnsiChar; + + (* + * Subtitles character encoding mode. Formats or codecs might be adjusting + * this setting (if they are doing the conversion themselves for instance). + * - decoding: set by libavcodec + * - encoding: unused + *) + sub_charenc_mode: int; + + (* + * Skip processing alpha if supported by codec. + * Note that if the format uses pre-multiplied alpha (common with VP6, + * and recommended due to better video quality/compression) + * the image will look as if alpha-blended onto a black background. + * However for formats that do not use pre-multiplied alpha + * there might be serious artefacts (though e.g. 
libswscale currently + * assumes pre-multiplied alpha anyway). + * + * - decoding: set by user + * - encoding: unused + *) + skip_alpha: int; + + (* + * Number of samples to skip after a discontinuity + * - decoding: unused + * - encoding: set by libavcodec + *) + seek_preroll: int; + +{$IFNDEF FF_API_DEBUG_MV} + (* + * debug motion vectors + * - encoding: Set by user. + * - decoding: Set by user. + *) + debug_mv: int; +{$ENDIF} + (* + * custom intra quantization matrix + * - encoding: Set by user, can be NULL. + * - decoding: unused. + *) + chroma_intra_matrix: puint16_t; + + (* + * dump format separator. + * can be ", " or "\n " or anything else + * - encoding: Set by user. + * - decoding: Set by user. + *) + dump_separator: puint8_t; + + (* + * ',' separated list of allowed decoders. + * If NULL then all are allowed + * - encoding: unused + * - decoding: set by user + *) + codec_whitelist: pAnsiChar; + + (* + * Properties of the stream that gets decoded + * - encoding: unused + * - decoding: set by libavcodec + *) + properties: unsigned; + + (* + * Additional data associated with the entire coded stream. + * + * - decoding: unused + * - encoding: may be set by libavcodec after avcodec_open2(). + *) + coded_side_data: pAVPacketSideData; + nb_coded_side_data: int; + + (* + * A reference to the AVHWFramesContext describing the input (for encoding) + * or output (decoding) frames. The reference is set by the caller and + * afterwards owned (and freed) by libavcodec - it should never be read by + * the caller after being set. + * + * - decoding: This field should be set by the caller from the get_format() + * callback. The previous reference (if any) will always be + * unreffed by libavcodec before the get_format() call. + * + * If the default get_buffer2() is used with a hwaccel pixel + * format, then this AVHWFramesContext will be used for + * allocating the frame buffers. + * + * - encoding: For hardware encoders configured to use a hwaccel pixel + * format, this field should be set by the caller to a reference + * to the AVHWFramesContext describing input frames. + * AVHWFramesContext.format must be equal to + * AVCodecContext.pix_fmt. + * + * This field should be set before avcodec_open2() is called. + *) + hw_frames_ctx: pAVBufferRef; + + (* + * Control the form of AVSubtitle.rects[N]->ass + * - decoding: set by user + * - encoding: unused + *) + sub_text_format: int; + (* + * Audio only. The amount of padding (in samples) appended by the encoder to + * the end of the audio. I.e. this number of decoded samples must be + * discarded by the caller from the end of the stream to get the original + * audio without any trailing padding. + * + * - decoding: unused + * - encoding: unused + *) + trailing_padding: int; + + (* + * The number of pixels per image to maximally accept. + * + * - decoding: set by user + * - encoding: set by user + *) + max_pixels: int64_t; + + (* + * A reference to the AVHWDeviceContext describing the device which will + * be used by a hardware encoder/decoder. The reference is set by the + * caller and afterwards owned (and freed) by libavcodec. + * + * This should be used if either the codec device does not require + * hardware frames or any that are used are to be allocated internally by + * libavcodec. If the user wishes to supply any of the frames used as + * encoder input or decoder output then hw_frames_ctx should be used + * instead. 
When hw_frames_ctx is set in get_format() for a decoder, this + * field will be ignored while decoding the associated stream segment, but + * may again be used on a following one after another get_format() call. + * + * For both encoders and decoders this field should be set before + * avcodec_open2() is called and must not be written to thereafter. + * + * Note that some decoders may require this field to be set initially in + * order to support hw_frames_ctx at all - in that case, all frames + * contexts used must be created on the same device. + *) + hw_device_ctx: pAVBufferRef; + + (* + * Bit set of AV_HWACCEL_FLAG_* flags, which affect hardware accelerated + * decoding (if active). + * - encoding: unused + * - decoding: Set by user (either before avcodec_open2(), or in the + * AVCodecContext.get_format callback) + *) + hwaccel_flags: int; + + (* + * Video decoding only. Certain video codecs support cropping, meaning that + * only a sub-rectangle of the decoded frame is intended for display. This + * option controls how cropping is handled by libavcodec. + * + * When set to 1 (the default), libavcodec will apply cropping internally. + * I.e. it will modify the output frame width/height fields and offset the + * data pointers (only by as much as possible while preserving alignment, or + * by the full amount if the AV_CODEC_FLAG_UNALIGNED flag is set) so that + * the frames output by the decoder refer only to the cropped area. The + * crop_* fields of the output frames will be zero. + * + * When set to 0, the width/height fields of the output frames will be set + * to the coded dimensions and the crop_* fields will describe the cropping + * rectangle. Applying the cropping is left to the caller. + * + * @warning When hardware acceleration with opaque output frames is used, + * libavcodec is unable to apply cropping from the top/left border. + * + * @note when this option is set to zero, the width/height fields of the + * AVCodecContext and output AVFrames have different meanings. The codec + * context fields store display dimensions (with the coded dimensions in + * coded_width/height), while the frame fields store the coded dimensions + * (with the display dimensions being determined by the crop_* fields). + *) + apply_cropping: int; + + (* + * Video decoding only. Sets the number of extra hardware frames which + * the decoder will allocate for use by the caller. This must be set + * before avcodec_open2() is called. + * + * Some hardware decoders require all frames that they will use for + * output to be defined in advance before decoding starts. For such + * decoders, the hardware frame pool must therefore be of a fixed size. + * The extra frames set here are on top of any number that the decoder + * needs internally in order to operate normally (for example, frames + * used as reference pictures). + *) + extra_hw_frames: int; + (* + * The percentage of damaged samples to discard a frame. + * + * - decoding: set by user + * - encoding: unused + *) + discard_damaged_percentage: int; + end; + + MpegEncContext = record + end; + + pMpegEncContext = ^MpegEncContext; + + (* + * @defgroup lavc_hwaccel AVHWAccel + * + * @note Nothing in this structure should be accessed by the user. At some + * point in future it will not be externally visible at all. + * + * @{ + *) + AVHWAccel = record + (* + * Name of the hardware accelerated codec. + * The name is globally unique among encoders and among decoders (but an + * encoder and a decoder can share the same name). 
+ *) + name: pAnsiChar; + + (* + * Type of codec implemented by the hardware accelerator. + * + * See AVMEDIA_TYPE_xxx + *) + _type: AVMediaType; + + (* + * Codec implemented by the hardware accelerator. + * + * See AV_CODEC_ID_xxx + *) + id: AVCodecID; + + (* + * Supported pixel format. + * + * Only hardware accelerated formats are supported here. + *) + pix_fmt: AVPixelFormat; + + (* + * Hardware accelerated codec capabilities. + * see AV_HWACCEL_CODEC_CAP_* + *) + capabilities: int; + + (* *************************************************************** + * No fields below this line are part of the public API. They + * may not be used outside of libavcodec and can be changed and + * removed at will. + * New public fields should be added right above. + ***************************************************************** + *) + + (* + * Allocate a custom buffer + *) + // int (*alloc_frame)(AVCodecContext *avctx, AVFrame *frame); + alloc_frame: function(avctx: pAVCodecContext; frame: pAVFrame): int; cdecl; + + (* + * Called at the beginning of each frame or field picture. + * + * Meaningful frame information (codec specific) is guaranteed to + * be parsed at this point. This function is mandatory. + * + * Note that buf can be NULL along with buf_size set to 0. + * Otherwise, this means the whole frame is available at this point. + * + * @param avctx the codec context + * @param buf the frame data buffer base + * @param buf_size the size of the frame in bytes + * @return zero if successful, a negative value otherwise + *) + // int (*start_frame)(AVCodecContext *avctx, const uint8_t *buf, uint32_t buf_size); + start_frame: function(avctx: pAVCodecContext; const buf: puint8_t; buf_size: uint32_t): int; cdecl; + + (* + * Callback for parameter data (SPS/PPS/VPS etc). + * + * Useful for hardware decoders which keep persistent state about the + * video parameters, and need to receive any changes to update that state. + * + * @param avctx the codec context + * @param type the nal unit type + * @param buf the nal unit data buffer + * @param buf_size the size of the nal unit in bytes + * @return zero if successful, a negative value otherwise + *) + // int (*decode_params)(AVCodecContext *avctx, int type, const uint8_t *buf, uint32_t buf_size); + decode_params: function(avctx: pAVCodecContext; _type: int; const buf: puint8_t; buf_size: uint32_t): int; cdecl; + + (* + * Callback for each slice. + * + * Meaningful slice information (codec specific) is guaranteed to + * be parsed at this point. This function is mandatory. + * The only exception is XvMC, that works on MB level. + * + * @param avctx the codec context + * @param buf the slice data buffer base + * @param buf_size the size of the slice in bytes + * @return zero if successful, a negative value otherwise + *) + // int (*decode_slice)(AVCodecContext *avctx, const uint8_t *buf, uint32_t buf_size); + decode_slice: function(avctx: pAVCodecContext; const buf: puint8_t; buf_size: uint32_t): int; cdecl; + + (* + * Called at the end of each frame or field picture. + * + * The whole picture is parsed at this point and can now be sent + * to the hardware accelerator. This function is mandatory. + * + * @param avctx the codec context + * @return zero if successful, a negative value otherwise + *) + // int (*end_frame)(AVCodecContext *avctx); + end_frame: function(avctx: pAVCodecContext): int; cdecl; + + (* + * Size of per-frame hardware accelerator private data. 
+ * + * Private data is allocated with av_mallocz() before + * AVCodecContext.get_buffer() and deallocated after + * AVCodecContext.release_buffer(). + *) + frame_priv_data_size: int; + + (* + * Called for every Macroblock in a slice. + * + * XvMC uses it to replace the ff_mpv_reconstruct_mb(). + * Instead of decoding to raw picture, MB parameters are + * stored in an array provided by the video driver. + * + * @param s the mpeg context + *) + // void (*decode_mb)(struct MpegEncContext *s); + decode_mb: procedure(s: pMpegEncContext); cdecl; + + (* + * Initialize the hwaccel private data. + * + * This will be called from ff_get_format(), after hwaccel and + * hwaccel_context are set and the hwaccel private data in AVCodecInternal + * is allocated. + *) + // int (*init)(AVCodecContext *avctx); + init: function(avctx: pAVCodecContext): int; cdecl; + + (* + * Uninitialize the hwaccel private data. + * + * This will be called from get_format() or avcodec_close(), after hwaccel + * and hwaccel_context are already uninitialized. + *) + // int (*uninit)(AVCodecContext *avctx); + uninit: function(avctx: pAVCodecContext): int; cdecl; + + (* + * Size of the private data to allocate in + * AVCodecInternal.hwaccel_priv_data. + *) + priv_data_size: int; + + (* + * Internal hwaccel capabilities. + *) + caps_internal: int; + + (* + * Fill the given hw_frames context with current codec parameters. Called + * from get_format. Refer to avcodec_get_hw_frames_parameters() for + * details. + * + * This CAN be called before AVHWAccel.init is called, and you must assume + * that avctx->hwaccel_priv_data is invalid. + *) + // int (*frame_params)(AVCodecContext *avctx, AVBufferRef *hw_frames_ctx); + frame_params: function(avctx: pAVCodecContext; hw_frames_ctx: pAVBufferRef): int; cdecl; + end; + +{$IFDEF FF_API_AVPICTURE} + + (* + * Picture data structure. + * + * Up to four components can be stored into it, the last component is + * alpha. + * @deprecated use AVFrame or imgutils functions instead + *) + pAVPicture = ^AVPicture; + + AVPicture = record + // attribute_deprecated + data: TAVNDPArray_puint8_t deprecated; // < pointers to the image data planes + + // attribute_deprecated + linesize: TAVNDPArray_int deprecated; // < number of bytes per line + end deprecated 'use AVFrame or imgutils functions instead'; + +{$ENDIF} + + AVSubtitleType = ( // + SUBTITLE_NONE, SUBTITLE_BITMAP, // < A bitmap, pict will be set + (* + * Plain text, the text field must be set by the decoder and is + * authoritative. ass and pict fields may contain approximations. + *) + SUBTITLE_TEXT, + (* + * Formatted text, the ass field must be set by the decoder and is + * authoritative. pict and text fields may contain approximations. + *) + SUBTITLE_ASS); + + AVSubtitleRect = record + x: int; // < top left corner of pict, undefined when pict is not set + y: int; // < top left corner of pict, undefined when pict is not set + w: int; // < width of pict, undefined when pict is not set + h: int; // < height of pict, undefined when pict is not set + nb_colors: int; // < number of colors in pict, undefined when pict is not set + +{$IFDEF FF_API_AVPICTURE} + (* + * @deprecated unused + *) + // attribute_deprecated + pict: AVPicture deprecated; +{$ENDIF} + (* + * data+linesize for the bitmap of this subtitle. + * Can be set for text/ass as well once they are rendered. 
+ *) + data: puint8_t_array_4; + linesize: Tint_array_4; + + _type: AVSubtitleType; + + text: pAnsiChar; // < 0 terminated plain UTF-8 text + + (* + * 0 terminated ASS/SSA compatible event line. + * The presentation of this is unaffected by the other values in this + * struct. + *) + ass: pAnsiChar; + + flags: int; + end; + + pAVSubtitleRect = ^AVSubtitleRect; + ppAVSubtitleRect = ^pAVSubtitleRect; + + AVSubtitle = record + format: uint16_t; (* 0 = graphics *) + start_display_time: uint32_t; (* relative to packet pts, in ms *) + end_display_time: uint32_t; (* relative to packet pts, in ms *) + num_rects: unsigned; + rects: ppAVSubtitleRect; + pts: int64_t; // < Same as packet pts, in AV_TIME_BASE + end; + + (* + * This struct describes the properties of an encoded stream. + * + * sizeof(AVCodecParameters) is not a part of the public ABI, this struct must + * be allocated with avcodec_parameters_alloc() and freed with + * avcodec_parameters_free(). + *) + pAVCodecParameters = ^AVCodecParameters; + + AVCodecParameters = record + (* + * General type of the encoded data. + *) + codec_type: AVMediaType; + (* + * Specific type of the encoded data (the codec used). + *) + codec_id: AVCodecID; + + (* + * Additional information about the codec (corresponds to the AVI FOURCC). + *) + codec_tag: // uint32_t; + packed record + case Integer of + 0: + (tag: Cardinal); + 1: + (fourcc: array [0 .. 3] of AnsiChar); + 2: + (fourbb: array [0 .. 3] of Byte); + end; + + (* + * Extra binary data needed for initializing the decoder, codec-dependent. + * + * Must be allocated with av_malloc() and will be freed by + * avcodec_parameters_free(). The allocated size of extradata must be at + * least extradata_size + AV_INPUT_BUFFER_PADDING_SIZE, with the padding + * bytes zeroed. + *) + extradata: puint8_t; + (* + * Size of the extradata content in bytes. + *) + extradata_size: int; + + (* + * - video: the pixel format, the value corresponds to enum AVPixelFormat. + * - audio: the sample format, the value corresponds to enum AVSampleFormat. + *) + format: int; + + (* + * The average bitrate of the encoded data (in bits per second). + *) + bit_rate: int64_t; + + (* + * The number of bits per sample in the codedwords. + * + * This is basically the bitrate per sample. It is mandatory for a bunch of + * formats to actually decode them. It's the number of bits for one sample in + * the actual coded bitstream. + * + * This could be for example 4 for ADPCM + * For PCM formats this matches bits_per_raw_sample + * Can be 0 + *) + bits_per_coded_sample: int; + + (* + * This is the number of valid bits in each output sample. If the + * sample format has more bits, the least significant bits are additional + * padding bits, which are always 0. Use right shifts to reduce the sample + * to its actual size. For example, audio formats with 24 bit samples will + * have bits_per_raw_sample set to 24, and format set to AV_SAMPLE_FMT_S32. + * To get the original sample use "(int32_t)sample >> 8"." + * + * For ADPCM this might be 12 or 16 or similar + * Can be 0 + *) + bits_per_raw_sample: int; + + (* + * Codec-specific bitstream restrictions that the stream conforms to. + *) + profile: int; + level: int; + + (* + * Video only. The dimensions of the video frame in pixels. + *) + width: int; + height: int; + + (* + * Video only. The aspect ratio (width / height) which a single pixel + * should have when displayed. 
+ * + * When the aspect ratio is unknown / undefined, the numerator should be + * set to 0 (the denominator may have any value). + *) + sample_aspect_ratio: AVRational; + + (* + * Video only. The order of the fields in interlaced video. + *) + field_order: AVFieldOrder; + + (* + * Video only. Additional colorspace characteristics. + *) + color_range: AVColorRange; + color_primaries: AVColorPrimaries; + color_trc: AVColorTransferCharacteristic; + color_space: AVColorSpace; + chroma_location: AVChromaLocation; + + (* + * Video only. Number of delayed frames. + *) + video_delay: int; + + (* + * Audio only. The channel layout bitmask. May be 0 if the channel layout is + * unknown or unspecified, otherwise the number of bits set must be equal to + * the channels field. + *) + channel_layout: uint64_t; + (* + * Audio only. The number of audio channels. + *) + channels: int; + (* + * Audio only. The number of audio samples per second. + *) + sample_rate: int; + (* + * Audio only. The number of bytes per coded audio frame, required by some + * formats. + * + * Corresponds to nBlockAlign in WAVEFORMATEX. + *) + block_align: int; + (* + * Audio only. Audio frame size, if known. Required by some formats to be static. + *) + frame_size: int; + + (* + * Audio only. The amount of padding (in samples) inserted by the encoder at + * the beginning of the audio. I.e. this number of leading decoded samples + * must be discarded by the caller to get the original audio without leading + * padding. + *) + initial_padding: int; + (* + * Audio only. The amount of padding (in samples) appended by the encoder to + * the end of the audio. I.e. this number of decoded samples must be + * discarded by the caller from the end of the stream to get the original + * audio without any trailing padding. + *) + trailing_padding: int; + (* + * Audio only. Number of samples to skip after a discontinuity. + *) + seek_preroll: int; + end; + +{$IFDEF FF_API_CODEC_GET_SET} + + (* + * Accessors for some AVCodecContext fields. These used to be provided for ABI + * compatibility, and do not need to be used anymore. 
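+ *
+ * A minimal Pascal sketch of the preferred direct field access (avctx and desc
+ * are assumed locals; avctx comes from avcodec_alloc_context3()):
+ * @code
+ *   avctx^.pkt_timebase.num := 1;
+ *   avctx^.pkt_timebase.den := 90000;
+ *   desc := avctx^.codec_descriptor; // instead of av_codec_get_codec_descriptor()
+ * @endcode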
+ *) + // attribute_deprecated + // AVRational av_codec_get_pkt_timebase(const AVCodecContext * avctx); +function av_codec_get_pkt_timebase(const avctx: pAVCodecContext): AVRational; cdecl; external avcodec_dll; deprecated; +// attribute_deprecated +// void av_codec_set_pkt_timebase(AVCodecContext * avctx, AVRational val); +procedure av_codec_set_pkt_timebase(avctx: pAVCodecContext; val: AVRational); cdecl; external avcodec_dll; deprecated; + +// attribute_deprecated +// const AVCodecDescriptor * av_codec_get_codec_descriptor(const AVCodecContext * avctx); +function av_codec_get_codec_descriptor(const avctx: pAVCodecContext): pAVCodecDescriptor; cdecl; external avcodec_dll; + +// attribute_deprecated +// void av_codec_set_codec_descriptor(AVCodecContext * avctx, const AVCodecDescriptor * desc); +procedure av_codec_set_codec_descriptor(avctx: pAVCodecContext; const desc: pAVCodecDescriptor); cdecl; external avcodec_dll; + +// attribute_deprecated +// unsigned av_codec_get_codec_properties(const AVCodecContext * avctx); +function av_codec_get_codec_properties(const avctx: pAVCodecContext): unsigned; cdecl; external avcodec_dll; deprecated; + +{$IFDEF FF_API_LOWRES} +// attribute_deprecated +// int av_codec_get_lowres(const AVCodecContext *avctx); +function av_codec_get_lowres(const avctx: pAVCodecContext): int; cdecl; external avcodec_dll; deprecated; + +// attribute_deprecated +// void av_codec_set_lowres(AVCodecContext *avctx, int val); +procedure av_codec_set_lowres(avctx: pAVCodecContext; val: int); cdecl; external avcodec_dll; deprecated; + +{$ENDIF} +// attribute_deprecated +// int av_codec_get_seek_preroll(const AVCodecContext * avctx); +function av_codec_get_seek_preroll(const avctx: pAVCodecContext): int; cdecl; external avcodec_dll; deprecated; + +// attribute_deprecated +// void av_codec_set_seek_preroll(AVCodecContext * avctx, int val); +procedure av_codec_set_seek_preroll(avctx: pAVCodecContext; val: int); cdecl; external avcodec_dll; deprecated; + +// attribute_deprecated +// uint16_t * av_codec_get_chroma_intra_matrix(const AVCodecContext * avctx); +function av_codec_get_chroma_intra_matrix(const avctx: pAVCodecContext): puint16_t; cdecl; external avcodec_dll; deprecated; + +// attribute_deprecated +// void av_codec_set_chroma_intra_matrix(AVCodecContext * avctx, uint16_t * val); +procedure av_codec_set_chroma_intra_matrix(avctx: pAVCodecContext; val: puint16_t); cdecl; external avcodec_dll; deprecated; +{$ENDIF} +{$IFDEF FF_API_CODEC_GET_SET} +// attribute_deprecated +// int av_codec_get_max_lowres(const avcodec * codec); +function av_codec_get_max_lowres(const codec: pAVCodec): int; cdecl; external avcodec_dll; deprecated; +{$ENDIF} +(* + * Retrieve supported hardware configurations for a codec. + * + * Values of index from zero to some maximum return the indexed configuration + * descriptor; all other values return NULL. If the codec does not support + * any hardware configurations then it will always return NULL. +*) +// const AVCodecHWConfig * avcodec_get_hw_config(const avcodec * codec, int index); +function avcodec_get_hw_config(const codec: pAVCodec; index: int): pAVCodecHWConfig; cdecl; external avcodec_dll; + +const + AV_PKT_DATA_QUALITY_FACTOR: AVPacketSideDataType = AV_PKT_DATA_QUALITY_STATS; // DEPRECATED + + (* + * Iterate over all registered codecs. + * + * @param opaque a pointer where libavcodec will store the iteration state. Must + * point to NULL to start the iteration. 
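+ * A hedged Pascal usage sketch (it and codec are assumed locals of type
+ * Pointer and pAVCodec respectively):
+ * @code
+ *   it := nil;
+ *   codec := av_codec_iterate(it);
+ *   while codec <> nil do
+ *   begin
+ *     // inspect codec^.name, codec^.id, ... here
+ *     codec := av_codec_iterate(it);
+ *   end;
+ * @endcode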
+ * + * @return the next registered codec or NULL when the iteration is + * finished + *) + // const avcodec * av_codec_iterate(void * * opaque); +function av_codec_iterate(var opaque: Pointer): pAVCodec; cdecl; external avcodec_dll; + +{$IFDEF FF_API_NEXT} +(* + * If c is NULL, returns the first registered codec, + * if c is non-NULL, returns the next registered codec after c, + * or NULL if c is the last one. +*) +// attribute_deprecated avcodec * av_codec_next(const avcodec * c); +function av_codec_next(const c: pAVCodec): pAVCodec; cdecl; external avcodec_dll; +{$ENDIF} +(* + * Return the LIBAVCODEC_VERSION_INT constant. +*) +// unsigned avcodec_version(void); +function avcodec_version(): unsigned; cdecl; external avcodec_dll; + +(* + * Return the libavcodec build-time configuration. +*) +// const char * avcodec_configuration(void); +function avcodec_configuration(): pAnsiChar; cdecl; external avcodec_dll; + +(* + * Return the libavcodec license. +*) +// const char * avcodec_license(void); +function avcodec_license(): pAnsiChar; cdecl; external avcodec_dll; + +{$IFDEF FF_API_NEXT} +(* + * Register the codec codec and initialize libavcodec. + * + * @warning either this function or avcodec_register_all() must be called + * before any other libavcodec functions. + * + * @see avcodec_register_all() +*) +// attribute_deprecated void avcodec_register(avcodec * codec); +procedure avcodec_register(codec: pAVCodec); cdecl; external avcodec_dll; deprecated; + +(* + * Register all the codecs, parsers and bitstream filters which were enabled at + * configuration time. If you do not call this function you can select exactly + * which formats you want to support, by using the individual registration + * functions. + * + * @see avcodec_register + * @see av_register_codec_parser + * @see av_register_bitstream_filter +*) +// attribute_deprecated void avcodec_register_all(void); +procedure avcodec_register_all(); cdecl; external avcodec_dll; deprecated; +{$ENDIF} +(* + * Allocate an AVCodecContext and set its fields to default values. The + * resulting struct should be freed with avcodec_free_context(). + * + * @param codec if non-NULL, allocate private data and initialize defaults + * for the given codec. It is illegal to then call avcodec_open2() + * with a different codec. + * If NULL, then the codec-specific defaults won't be initialized, + * which may result in suboptimal default settings (this is + * important mainly for encoders, e.g. libx264). + * + * @return An AVCodecContext filled with default values or NULL on failure. +*) +// AVCodecContext * avcodec_alloc_context3(const avcodec * codec); +function avcodec_alloc_context3(const codec: pAVCodec): pAVCodecContext; cdecl; external avcodec_dll; + +(* + * Free the codec context and everything associated with it and write NULL to + * the provided pointer. +*) +// void avcodec_free_context(AVCodecContext * * avctx); +procedure avcodec_free_context(var avctx: pAVCodecContext); cdecl; external avcodec_dll; + +{$IFDEF FF_API_GET_CONTEXT_DEFAULTS} +(* + * @deprecated This function should not be used, as closing and opening a codec + * context multiple time is not supported. A new codec context should be + * allocated for each new use. +*) +// int avcodec_get_context_defaults3(AVCodecContext * s, const avcodec * codec); +function avcodec_get_context_defaults3(s: pAVCodecContext; const codec: pAVCodec): int; cdecl; external avcodec_dll; +{$ENDIF} +(* + * Get the AVClass for AVCodecContext. 
It can be used in combination with + * AV_OPT_SEARCH_FAKE_OBJ for examining options. + * + * @see av_opt_find(). +*) +// const AVClass * avcodec_get_class(void); +function avcodec_get_class(): pAVClass; cdecl; external avcodec_dll; + +{$IFDEF FF_API_COPY_CONTEXT} +(* + * Get the AVClass for AVFrame. It can be used in combination with + * AV_OPT_SEARCH_FAKE_OBJ for examining options. + * + * @see av_opt_find(). +*) +// const AVClass * avcodec_get_frame_class(void); +function avcodec_get_frame_class(): pAVClass; cdecl; external avcodec_dll; + +(* + * Get the AVClass for AVSubtitleRect. It can be used in combination with + * AV_OPT_SEARCH_FAKE_OBJ for examining options. + * + * @see av_opt_find(). +*) +// const AVClass * avcodec_get_subtitle_rect_class(void); +function avcodec_get_subtitle_rect_class(): pAVClass; cdecl; external avcodec_dll; + +(* + * Copy the settings of the source AVCodecContext into the destination + * AVCodecContext. The resulting destination codec context will be + * unopened, i.e. you are required to call avcodec_open2() before you + * can use this AVCodecContext to decode/encode video/audio data. + * + * @param dest target codec context, should be initialized with + * avcodec_alloc_context3(NULL), but otherwise uninitialized + * @param src source codec context + * @return AVERROR() on error (e.g. memory allocation error), 0 on success + * + * @deprecated The semantics of this function are ill-defined and it should not + * be used. If you need to transfer the stream parameters from one codec context + * to another, use an intermediate AVCodecParameters instance and the + * avcodec_parameters_from_context() / avcodec_parameters_to_context() + * functions. +*) +// attribute_deprecated int avcodec_copy_context(AVCodecContext * dest, const AVCodecContext * src); +function avcodec_copy_context(dest: pAVCodecContext; const src: pAVCodecContext): int; cdecl; external avcodec_dll; deprecated; +{$ENDIF} +(* + * Allocate a new AVCodecParameters and set its fields to default values + * (unknown/invalid/0). The returned struct must be freed with + * avcodec_parameters_free(). +*) +// AVCodecParameters * avcodec_parameters_alloc(void); +function avcodec_parameters_alloc(): pAVCodecParameters; cdecl; external avcodec_dll; + +(* + * Free an AVCodecParameters instance and everything associated with it and + * write NULL to the supplied pointer. +*) +// void avcodec_parameters_free(AVCodecParameters * * par); +procedure avcodec_parameters_free(var par: pAVCodecParameters); cdecl; external avcodec_dll; + +(* + * Copy the contents of src to dst. Any allocated fields in dst are freed and + * replaced with newly allocated duplicates of the corresponding fields in src. + * + * @return >= 0 on success, a negative AVERROR code on failure. +*) +// int avcodec_parameters_copy(AVCodecParameters * dst, const AVCodecParameters * src); +function avcodec_parameters_copy(dst: pAVCodecParameters; const src: pAVCodecParameters): int; cdecl; external avcodec_dll; + +(* + * Fill the parameters struct based on the values from the supplied codec + * context. Any allocated fields in par are freed and replaced with duplicates + * of the corresponding fields in codec. 
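+ *
+ * A hedged Pascal sketch (enc_ctx and par are assumed locals; error handling
+ * trimmed):
+ * @code
+ *   par := avcodec_parameters_alloc();
+ *   if avcodec_parameters_from_context(par, enc_ctx) < 0 then
+ *     Writeln('failed to export codec parameters');
+ *   // ... hand par to a muxer stream, or feed it into another context
+ *   //     via avcodec_parameters_to_context() ...
+ *   avcodec_parameters_free(par);
+ * @endcode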
+ * + * @return >= 0 on success, a negative AVERROR code on failure +*) +// int avcodec_parameters_from_context(AVCodecParameters * par, const AVCodecContext * codec); +function avcodec_parameters_from_context(par: pAVCodecParameters; const codec: pAVCodecContext): int; cdecl; external avcodec_dll; + +(* + * Fill the codec context based on the values from the supplied codec + * parameters. Any allocated fields in codec that have a corresponding field in + * par are freed and replaced with duplicates of the corresponding field in par. + * Fields in codec that do not have a counterpart in par are not touched. + * + * @return >= 0 on success, a negative AVERROR code on failure. +*) +// int avcodec_parameters_to_context(AVCodecContext * codec, const AVCodecParameters * par); +function avcodec_parameters_to_context(codec: pAVCodecContext; const par: pAVCodecParameters): int; cdecl; external avcodec_dll; + +(* + * Initialize the AVCodecContext to use the given AVCodec. Prior to using this + * function the context has to be allocated with avcodec_alloc_context3(). + * + * The functions avcodec_find_decoder_by_name(), avcodec_find_encoder_by_name(), + * avcodec_find_decoder() and avcodec_find_encoder() provide an easy way for + * retrieving a codec. + * + * @warning This function is not thread safe! + * + * @note Always call this function before using decoding routines (such as + * @ref avcodec_receive_frame()). + * + * @code + * avcodec_register_all(); + * av_dict_set(&opts, "b", "2.5M", 0); + * codec = avcodec_find_decoder(AV_CODEC_ID_H264); + * if (!codec) + * exit(1); + * + * context = avcodec_alloc_context3(codec); + * + * if (avcodec_open2(context, codec, opts) < 0) + * exit(1); + * @endcode + * + * @param avctx The context to initialize. + * @param codec The codec to open this context for. If a non-NULL codec has been + * previously passed to avcodec_alloc_context3() or + * for this context, then this parameter MUST be either NULL or + * equal to the previously passed codec. + * @param options A dictionary filled with AVCodecContext and codec-private options. + * On return this object will be filled with options that were not found. + * + * @return zero on success, a negative value on error + * @see avcodec_alloc_context3(), avcodec_find_decoder(), avcodec_find_encoder(), + * av_dict_set(), av_opt_find(). +*) +// int avcodec_open2(AVCodecContext * avctx, const avcodec *codec, AVDictionary **options); +function avcodec_open2(avctx: pAVCodecContext; const codec: pAVCodec; options: ppAVDictionary): int; cdecl; external avcodec_dll; + +(* + * Close a given AVCodecContext and free all the data associated with it + * (but not the AVCodecContext itself). + * + * Calling this function on an AVCodecContext that hasn't been opened will free + * the codec-specific data allocated in avcodec_alloc_context3() with a non-NULL + * codec. Subsequent calls will do nothing. + * + * @note Do not use this function. Use avcodec_free_context() to destroy a + * codec context (either open or closed). Opening and closing a codec context + * multiple times is not supported anymore -- use multiple codec contexts + * instead. +*) +// int avcodec_close(AVCodecContext * avctx); +function avcodec_close(avctx: pAVCodecContext): int; cdecl; external avcodec_dll; + +(* + * Free all allocated data in the given subtitle struct. + * + * @param sub AVSubtitle to free. 
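+ *
+ * Hedged Pascal sketch (sub and got_sub are assumed to have been filled in by
+ * avcodec_decode_subtitle2()):
+ * @code
+ *   if got_sub <> 0 then
+ *     avsubtitle_free(@sub);
+ * @endcode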
+*) +// void avsubtitle_free(AVSubtitle * sub); +procedure avsubtitle_free(sub: pAVSubtitle); cdecl; external avcodec_dll; + +(* + * Allocate an AVPacket and set its fields to default values. The resulting + * struct must be freed using av_packet_free(). + * + * @return An AVPacket filled with default values or NULL on failure. + * + * @note this only allocates the AVPacket itself, not the data buffers. Those + * must be allocated through other means such as av_new_packet. + * + * @see av_new_packet +*) +// AVPacket *av_packet_alloc(void); +function av_packet_alloc(): pAVPacket; cdecl; external avcodec_dll; + +(* + * Create a new packet that references the same data as src. + * + * This is a shortcut for av_packet_alloc()+av_packet_ref(). + * + * @return newly created AVPacket on success, NULL on error. + * + * @see av_packet_alloc + * @see av_packet_ref +*) +// AVPacket * av_packet_clone(const AVPacket * src); +function av_packet_clone(const src: pAVPacket): pAVPacket; cdecl; external avcodec_dll; + +(* + * Free the packet, if the packet is reference counted, it will be + * unreferenced first. + * + * @param pkt packet to be freed. The pointer will be set to NULL. + * @note passing NULL is a no-op. +*) +// void av_packet_free(AVPacket * * pkt); +procedure av_packet_free(var pkt: pAVPacket); cdecl; external avcodec_dll; + +(* + * Initialize optional fields of a packet with default values. + * + * Note, this does not touch the data and size members, which have to be + * initialized separately. + * + * @param pkt packet +*) +// void av_init_packet(AVPacket * pkt); +procedure av_init_packet(pkt: pAVPacket); cdecl; overload; external avcodec_dll; +procedure av_init_packet(var pkt: AVPacket); cdecl; overload; external avcodec_dll; + +(* + * Allocate the payload of a packet and initialize its fields with + * default values. + * + * @param pkt packet + * @param size wanted payload size + * @return 0 if OK, AVERROR_xxx otherwise +*) +// int av_new_packet(AVPacket * pkt, int size); +function av_new_packet(pkt: pAVPacket; size: int): int; cdecl; external avcodec_dll; + +(* + * Reduce packet size, correctly zeroing padding + * + * @param pkt packet + * @param size new size +*) +// void av_shrink_packet(AVPacket * pkt, int size); +procedure av_shrink_packet(pkt: pAVPacket; size: int); cdecl; external avcodec_dll; + +(* + * Increase packet size, correctly zeroing padding + * + * @param pkt packet + * @param grow_by number of bytes by which to increase the size of the packet +*) +// int av_grow_packet(AVPacket * pkt, int grow_by); +function av_grow_packet(pkt: pAVPacket; grow_by: int): int; cdecl; external avcodec_dll; + +(* + * Initialize a reference-counted packet from av_malloc()ed data. + * + * @param pkt packet to be initialized. This function will set the data, size, + * and buf fields, all others are left untouched. + * @param data Data allocated by av_malloc() to be used as packet data. If this + * function returns successfully, the data is owned by the underlying AVBuffer. + * The caller may not access the data through other means. + * @param size size of data in bytes, without the padding. I.e. the full buffer + * size is assumed to be size + AV_INPUT_BUFFER_PADDING_SIZE. 
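+ *
+ * Hedged Pascal sketch (payload_size, buf and pkt are assumed locals; av_malloc
+ * and av_free are assumed to come from the libavutil unit):
+ * @code
+ *   buf := av_malloc(payload_size + AV_INPUT_BUFFER_PADDING_SIZE);
+ *   // ... copy payload_size bytes of encoded data into buf and zero the padding ...
+ *   if av_packet_from_data(pkt, buf, payload_size) < 0 then
+ *     av_free(buf); // on failure the buffer is still owned by the caller
+ * @endcode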
+ * + * @return 0 on success, a negative AVERROR on error +*) +// int av_packet_from_data(AVPacket * pkt, uint8_t * data, int size); +function av_packet_from_data(pkt: pAVPacket; data: puint8_t; size: int): int; cdecl; external avcodec_dll; + +{$IFDEF FF_API_AVPACKET_OLD_API} +(* + * @warning This is a hack - the packet memory allocation stuff is broken. The + * packet is allocated if it was not really allocated. + * + * @deprecated Use av_packet_ref or av_packet_make_refcounted +*) +// attribute_deprecated int av_dup_packet(AVPacket * pkt); +function av_dup_packet(pkt: pAVPacket): int; cdecl; external avcodec_dll; deprecated; +(* + * Copy packet, including contents + * + * @return 0 on success, negative AVERROR on fail + * + * @deprecated Use av_packet_ref +*) +// attribute_deprecated int av_copy_packet(AVPacket * dst, const AVPacket * src); +function av_copy_packet(dst: pAVPacket; const src: pAVPacket): int; cdecl; external avcodec_dll; deprecated; + +(* + * Copy packet side data + * + * @return 0 on success, negative AVERROR on fail + * + * @deprecated Use av_packet_copy_props +*) +// attribute_deprecated int av_copy_packet_side_data(AVPacket * dst, const AVPacket * src); +function av_copy_packet_side_data(dst: pAVPacket; const src: pAVPacket): int; cdecl; external avcodec_dll; deprecated; + +(* + * Free a packet. + * + * @deprecated Use av_packet_unref + * + * @param pkt packet to free +*) +// attribute_deprecated void av_free_packet(AVPacket * pkt); +procedure av_free_packet(pkt: pAVPacket); cdecl; overload; external avcodec_dll; deprecated; +procedure av_free_packet(Var pkt: AVPacket); cdecl; overload; external avcodec_dll; deprecated; +{$ENDIF} +(* + * Allocate new information of a packet. + * + * @param pkt packet + * @param type side information type + * @param size side information size + * @return pointer to fresh allocated data or NULL otherwise +*) +// uint8_t * av_packet_new_side_data(AVPacket * pkt, enum AVPacketSideDataType type , int size); +function av_packet_new_side_data(pkt: pAVPacket; _type: AVPacketSideDataType; size: int): puint8_t; cdecl; external avcodec_dll; + +(* + * Wrap an existing array as a packet side data. + * + * @param pkt packet + * @param type side information type + * @param data the side data array. It must be allocated with the av_malloc() + * family of functions. The ownership of the data is transferred to + * pkt. + * @param size side information size + * @return a non-negative number on success, a negative AVERROR code on + * failure. On failure, the packet is unchanged and the data remains + * owned by the caller. +*) +// int av_packet_add_side_data(AVPacket * pkt, enum AVPacketSideDataType type , uint8_t * data, size_t size); +function av_packet_add_side_data(pkt: pAVPacket; _type: AVPacketSideDataType; data: puint8_t; size: size_t): int; cdecl; external avcodec_dll; + +(* + * Shrink the already allocated side data buffer + * + * @param pkt packet + * @param type side information type + * @param size new side information size + * @return 0 on success, < 0 on failure +*) +// int av_packet_shrink_side_data(AVPacket * pkt, enum AVPacketSideDataType type , int size); +function av_packet_shrink_side_data(pkt: pAVPacket; _type: AVPacketSideDataType; size: int): int; cdecl; external avcodec_dll; + +(* + * Get side information from packet. 
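+ *
+ * Hedged Pascal sketch (sd and sd_size are assumed locals):
+ * @code
+ *   sd := av_packet_get_side_data(pkt, AV_PKT_DATA_NEW_EXTRADATA, @sd_size);
+ *   if sd <> nil then
+ *     Writeln('side data present, size = ', sd_size);
+ * @endcode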
+ * + * @param pkt packet + * @param type desired side information type + * @param size pointer for side information size to store (optional) + * @return pointer to data if present or NULL otherwise +*) +// uint8_t * av_packet_get_side_data(const AVPacket * pkt, enum AVPacketSideDataType type , int * size); +function av_packet_get_side_data(const pkt: pAVPacket; _type: AVPacketSideDataType; size: pInt): puint8_t; cdecl; external avcodec_dll; + +{$IFDEF FF_API_MERGE_SD_API} +// attribute_deprecated int av_packet_merge_side_data(AVPacket * pkt); +function av_packet_merge_side_data(pkt: pAVPacket): int; cdecl; external avcodec_dll; deprecated; + +// attribute_deprecated int av_packet_split_side_data(AVPacket * pkt); +function av_packet_split_side_data(pkt: pAVPacket): int; cdecl; external avcodec_dll; deprecated; +{$ENDIF} +// const char * av_packet_side_data_name(enum AVPacketSideDataType type); +function av_packet_side_data_name(_type: AVPacketSideDataType): pAnsiChar; cdecl; external avcodec_dll; + +(* + * Pack a dictionary for use in side_data. + * + * @param dict The dictionary to pack. + * @param size pointer to store the size of the returned data + * @return pointer to data if successful, NULL otherwise +*) +// uint8_t * av_packet_pack_dictionary(AVDictionary * dict, int * size); +function av_packet_pack_dictionary(dict: pAVDictionary; size: pInt): puint8_t; cdecl; external avcodec_dll; +(* + * Unpack a dictionary from side_data. + * + * @param data data from side_data + * @param size size of the data + * @param dict the metadata storage dictionary + * @return 0 on success, < 0 on failure +*) +// int av_packet_unpack_dictionary(const uint8_t * data, int size, AVDictionary **dict); +function av_packet_unpack_dictionary(const data: puint8_t; size: int; var dict: pAVDictionary): int; cdecl; external avcodec_dll; + +(* + * Convenience function to free all the side data stored. + * All the other fields stay untouched. + * + * @param pkt packet +*) +// void av_packet_free_side_data(AVPacket * pkt); +procedure av_packet_free_side_data(pkt: pAVPacket); cdecl; external avcodec_dll; + +(* + * Setup a new reference to the data described by a given packet + * + * If src is reference-counted, setup dst as a new reference to the + * buffer in src. Otherwise allocate a new buffer in dst and copy the + * data from src into it. + * + * All the other fields are copied from src. + * + * @see av_packet_unref + * + * @param dst Destination packet + * @param src Source packet + * + * @return 0 on success, a negative AVERROR on error. +*) +// int av_packet_ref(AVPacket * dst, const AVPacket * src); +function av_packet_ref(dst: pAVPacket; const src: pAVPacket): int; cdecl; external avcodec_dll; + +(* + * Wipe the packet. + * + * Unreference the buffer referenced by the packet and reset the + * remaining packet fields to their default values. + * + * @param pkt The packet to be unreferenced. +*) +// void av_packet_unref(AVPacket * pkt); +procedure av_packet_unref(pkt: pAVPacket); cdecl; external avcodec_dll; overload; +procedure av_packet_unref(var pkt: AVPacket); cdecl; external avcodec_dll; overload; + +(* + * Move every field in src to dst and reset src. 
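+ *
+ * Hedged Pascal sketch (src_pkt and dst_pkt are assumed AVPacket records):
+ * @code
+ *   av_packet_move_ref(@dst_pkt, src_pkt); // src_pkt is blank afterwards and may be reused
+ * @endcode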
+ * + * @see av_packet_unref + * + * @param src Source packet, will be reset + * @param dst Destination packet +*) +// void av_packet_move_ref(AVPacket * dst, AVPacket * src); +procedure av_packet_move_ref(dst: pAVPacket; src: pAVPacket); cdecl; external avcodec_dll; overload; +procedure av_packet_move_ref(dst: pAVPacket; var src: AVPacket); cdecl; external avcodec_dll; overload; + +(* + * Copy only "properties" fields from src to dst. + * + * Properties for the purpose of this function are all the fields + * beside those related to the packet data (buf, data, size) + * + * @param dst Destination packet + * @param src Source packet + * + * @return 0 on success AVERROR on failure. +*) +// int av_packet_copy_props(AVPacket * dst, const AVPacket * src); +function av_packet_copy_props(dst: pAVPacket; const src: pAVPacket): int; cdecl; external avcodec_dll; + +(* + * Ensure the data described by a given packet is reference counted. + * + * @note This function does not ensure that the reference will be writable. + * Use av_packet_make_writable instead for that purpose. + * + * @see av_packet_ref + * @see av_packet_make_writable + * + * @param pkt packet whose data should be made reference counted. + * + * @return 0 on success, a negative AVERROR on error. On failure, the + * packet is unchanged. +*) +// int av_packet_make_refcounted(AVPacket * pkt); +function av_packet_make_refcounted(pkt: pAVPacket): int; cdecl; external avcodec_dll; + +(* + * Create a writable reference for the data described by a given packet, + * avoiding data copy if possible. + * + * @param pkt Packet whose data should be made writable. + * + * @return 0 on success, a negative AVERROR on failure. On failure, the + * packet is unchanged. +*) +// int av_packet_make_writable(AVPacket * pkt); +function av_packet_make_writable(pkt: pAVPacket): int; cdecl; external avcodec_dll; + +(* + * Convert valid timing fields (timestamps / durations) in a packet from one + * timebase to another. Timestamps with unknown values (AV_NOPTS_VALUE) will be + * ignored. + * + * @param pkt packet on which the conversion will be performed + * @param tb_src source timebase, in which the timing fields in pkt are + * expressed + * @param tb_dst destination timebase, to which the timing fields will be + * converted +*) +// void av_packet_rescale_ts(AVPacket * pkt, AVRational tb_src, AVRational tb_dst); +procedure av_packet_rescale_ts(pkt: pAVPacket; tb_src: AVRational; tb_dst: AVRational); cdecl; external avcodec_dll; + +(* + * Find a registered decoder with a matching codec ID. + * + * @param id AVCodecID of the requested decoder + * @return A decoder if one was found, NULL otherwise. +*) +// avcodec * avcodec_find_decoder(enum AVCodecID id); +function avcodec_find_decoder(id: AVCodecID): pAVCodec; cdecl; external avcodec_dll; + +(* + * Find a registered decoder with the specified name. + * + * @param name name of the requested decoder + * @return A decoder if one was found, NULL otherwise. +*) +// avcodec * avcodec_find_decoder_by_name(const char * name); +function avcodec_find_decoder_by_name(const name: pAnsiChar): pAVCodec; cdecl; external avcodec_dll; + +(* + * The default callback for AVCodecContext.get_buffer2(). It is made public so + * it can be called by custom get_buffer2() implementations for decoders without + * AV_CODEC_CAP_DR1 set. 
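+ *
+ * Hedged Pascal sketch of a pass-through custom callback (the name is
+ * illustrative):
+ * @code
+ *   function my_get_buffer2(s: pAVCodecContext; frame: pAVFrame; flags: int): int; cdecl;
+ *   begin
+ *     // custom bookkeeping or allocation policy could go here
+ *     Result := avcodec_default_get_buffer2(s, frame, flags);
+ *   end;
+ * @endcode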
+*)
+// int avcodec_default_get_buffer2(AVCodecContext * s, AVFrame * frame, int flags);
+function avcodec_default_get_buffer2(s: pAVCodecContext; frame: pAVFrame; flags: int): int; cdecl; external avcodec_dll;
+
+(*
+ * Modify width and height values so that they will result in a memory
+ * buffer that is acceptable for the codec if you do not use any horizontal
+ * padding.
+ *
+ * May only be used if a codec with AV_CODEC_CAP_DR1 has been opened.
+*)
+// void avcodec_align_dimensions(AVCodecContext * s, int * width, int * height);
+procedure avcodec_align_dimensions(s: pAVCodecContext; width: pInt; height: pInt); cdecl; external avcodec_dll;
+
+(*
+ * Modify width and height values so that they will result in a memory
+ * buffer that is acceptable for the codec if you also ensure that all
+ * line sizes are a multiple of the respective linesize_align[i].
+ *
+ * May only be used if a codec with AV_CODEC_CAP_DR1 has been opened.
+*)
+// void avcodec_align_dimensions2(AVCodecContext * s, int * width, int * height, int linesize_align[AV_NUM_DATA_POINTERS]);
+procedure avcodec_align_dimensions2(s: pAVCodecContext; var width: int; var height: int; linesize_align: TAVNDPArray_int); cdecl; external avcodec_dll;
+
+(*
+ * Converts AVChromaLocation to swscale x/y chroma position.
+ *
+ * The positions represent the chroma (0,0) position in a coordinates system
+ * with luma (0,0) representing the origin and luma(1,1) representing 256,256
+ *
+ * @param xpos horizontal chroma sample position
+ * @param ypos vertical chroma sample position
+*)
+// int avcodec_enum_to_chroma_pos(int * xpos, int * ypos, enum AVChromaLocation pos);
+function avcodec_enum_to_chroma_pos(var xpos: int; var ypos: int; pos: AVChromaLocation): int; cdecl; external avcodec_dll;
+
+(*
+ * Converts swscale x/y chroma position to AVChromaLocation.
+ *
+ * The positions represent the chroma (0,0) position in a coordinates system
+ * with luma (0,0) representing the origin and luma(1,1) representing 256,256
+ *
+ * @param xpos horizontal chroma sample position
+ * @param ypos vertical chroma sample position
+*)
+// enum AVChromaLocation avcodec_chroma_pos_to_enum(int xpos, int ypos);
+function avcodec_chroma_pos_to_enum(xpos, ypos: int): AVChromaLocation; cdecl; external avcodec_dll;
+
+(*
+ * Decode the audio frame of size avpkt->size from avpkt->data into frame.
+ *
+ * Some decoders may support multiple frames in a single AVPacket. Such
+ * decoders would then just decode the first frame and the return value would be
+ * less than the packet size. In this case, avcodec_decode_audio4 has to be
+ * called again with an AVPacket containing the remaining data in order to
+ * decode the second frame, etc... Even if no frames are returned, the packet
+ * needs to be fed to the decoder with remaining data until it is completely
+ * consumed or an error occurs.
+ *
+ * Some decoders (those marked with AV_CODEC_CAP_DELAY) have a delay between input
+ * and output. This means that for some packets they will not immediately
+ * produce decoded output and need to be flushed at the end of decoding to get
+ * all the decoded data. Flushing is done by calling this function with packets
+ * with avpkt->data set to NULL and avpkt->size set to 0 until it stops
+ * returning samples. It is safe to flush even those decoders that are not
+ * marked with AV_CODEC_CAP_DELAY, then no samples will be returned.
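+ *
+ * Since this function is deprecated, a hedged Pascal sketch of the replacement
+ * avcodec_send_packet()/avcodec_receive_frame() loop may be useful (ret, pkt,
+ * frame are assumed locals; av_frame_unref() is assumed to come from the
+ * libavutil unit; error handling trimmed):
+ * @code
+ *   ret := avcodec_send_packet(avctx, pkt);
+ *   while ret >= 0 do
+ *   begin
+ *     ret := avcodec_receive_frame(avctx, frame);
+ *     if ret < 0 then
+ *       break; // AVERROR(EAGAIN): feed more input; AVERROR_EOF: fully flushed
+ *     // ... consume the decoded frame^ here ...
+ *     av_frame_unref(frame);
+ *   end;
+ * @endcode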
+ * + * @warning The input buffer, avpkt->data must be AV_INPUT_BUFFER_PADDING_SIZE + * larger than the actual read bytes because some optimized bitstream + * readers read 32 or 64 bits at once and could read over the end. + * + * @note The AVCodecContext MUST have been opened with @ref avcodec_open2() + * before packets may be fed to the decoder. + * + * @param avctx the codec context + * @param[out] frame The AVFrame in which to store decoded audio samples. + * The decoder will allocate a buffer for the decoded frame by + * calling the AVCodecContext.get_buffer2() callback. + * When AVCodecContext.refcounted_frames is set to 1, the frame is + * reference counted and the returned reference belongs to the + * caller. The caller must release the frame using av_frame_unref() + * when the frame is no longer needed. The caller may safely write + * to the frame if av_frame_is_writable() returns 1. + * When AVCodecContext.refcounted_frames is set to 0, the returned + * reference belongs to the decoder and is valid only until the + * next call to this function or until closing or flushing the + * decoder. The caller may not write to it. + * @param[out] got_frame_ptr Zero if no frame could be decoded, otherwise it is + * non-zero. Note that this field being set to zero + * does not mean that an error has occurred. For + * decoders with AV_CODEC_CAP_DELAY set, no given decode + * call is guaranteed to produce a frame. + * @param[in] avpkt The input AVPacket containing the input buffer. + * At least avpkt->data and avpkt->size should be set. Some + * decoders might also require additional fields to be set. + * @return A negative error code is returned if an error occurred during + * decoding, otherwise the number of bytes consumed from the input + * AVPacket is returned. + * + * @deprecated Use avcodec_send_packet() and avcodec_receive_frame(). +*) +// attribute_deprecated int avcodec_decode_audio4(AVCodecContext * avctx, AVFrame * frame, int * got_frame_ptr, const AVPacket * avpkt); +function avcodec_decode_audio4(avctx: pAVCodecContext; frame: pAVFrame; var got_frame_ptr: int; const avpkt: pAVPacket): int; cdecl; external avcodec_dll; + deprecated; + +(* + * Decode the video frame of size avpkt->size from avpkt->data into picture. + * Some decoders may support multiple frames in a single AVPacket, such + * decoders would then just decode the first frame. + * + * @warning The input buffer must be AV_INPUT_BUFFER_PADDING_SIZE larger than + * the actual read bytes because some optimized bitstream readers read 32 or 64 + * bits at once and could read over the end. + * + * @warning The end of the input buffer buf should be set to 0 to ensure that + * no overreading happens for damaged MPEG streams. + * + * @note Codecs which have the AV_CODEC_CAP_DELAY capability set have a delay + * between input and output, these need to be fed with avpkt->data=NULL, + * avpkt->size=0 at the end to return the remaining frames. + * + * @note The AVCodecContext MUST have been opened with @ref avcodec_open2() + * before packets may be fed to the decoder. + * + * @param avctx the codec context + * @param[out] picture The AVFrame in which the decoded video frame will be stored. + * Use av_frame_alloc() to get an AVFrame. The codec will + * allocate memory for the actual bitmap by calling the + * AVCodecContext.get_buffer2() callback. + * When AVCodecContext.refcounted_frames is set to 1, the frame is + * reference counted and the returned reference belongs to the + * caller. 
The caller must release the frame using av_frame_unref() + * when the frame is no longer needed. The caller may safely write + * to the frame if av_frame_is_writable() returns 1. + * When AVCodecContext.refcounted_frames is set to 0, the returned + * reference belongs to the decoder and is valid only until the + * next call to this function or until closing or flushing the + * decoder. The caller may not write to it. + * + * @param[in] avpkt The input AVPacket containing the input buffer. + * You can create such packet with av_init_packet() and by then setting + * data and size, some decoders might in addition need other fields like + * flags&AV_PKT_FLAG_KEY. All decoders are designed to use the least + * fields possible. + * @param[in,out] got_picture_ptr Zero if no frame could be decompressed, otherwise, it is nonzero. + * @return On error a negative value is returned, otherwise the number of bytes + * used or zero if no frame could be decompressed. + * + * @deprecated Use avcodec_send_packet() and avcodec_receive_frame(). +*) +// attribute_deprecated int avcodec_decode_video2(AVCodecContext * avctx, AVFrame * picture, int * got_picture_ptr, const AVPacket * avpkt); +function avcodec_decode_video2(avctx: pAVCodecContext; picture: pAVFrame; var got_picture_ptr: int; const avpkt: pAVPacket): int; cdecl; external avcodec_dll; + deprecated; + +(* + * Decode a subtitle message. + * Return a negative value on error, otherwise return the number of bytes used. + * If no subtitle could be decompressed, got_sub_ptr is zero. + * Otherwise, the subtitle is stored in *sub. + * Note that AV_CODEC_CAP_DR1 is not available for subtitle codecs. This is for + * simplicity, because the performance difference is expect to be negligible + * and reusing a get_buffer written for video codecs would probably perform badly + * due to a potentially very different allocation pattern. + * + * Some decoders (those marked with AV_CODEC_CAP_DELAY) have a delay between input + * and output. This means that for some packets they will not immediately + * produce decoded output and need to be flushed at the end of decoding to get + * all the decoded data. Flushing is done by calling this function with packets + * with avpkt->data set to NULL and avpkt->size set to 0 until it stops + * returning subtitles. It is safe to flush even those decoders that are not + * marked with AV_CODEC_CAP_DELAY, then no subtitles will be returned. + * + * @note The AVCodecContext MUST have been opened with @ref avcodec_open2() + * before packets may be fed to the decoder. + * + * @param avctx the codec context + * @param[out] sub The Preallocated AVSubtitle in which the decoded subtitle will be stored, + * must be freed with avsubtitle_free if *got_sub_ptr is set. + * @param[in,out] got_sub_ptr Zero if no subtitle could be decompressed, otherwise, it is nonzero. + * @param[in] avpkt The input AVPacket containing the input buffer. +*) +// int avcodec_decode_subtitle2(AVCodecContext * avctx, AVSubtitle * sub, int * got_sub_ptr, AVPacket * avpkt); +function avcodec_decode_subtitle2(avctx: pAVCodecContext; var sub: AVSubtitle; var got_sub_ptr: int; avpkt: pAVPacket): int; cdecl; external avcodec_dll; + +(* + * Supply raw packet data as input to a decoder. + * + * Internally, this call will copy relevant AVCodecContext fields, which can + * influence decoding per-packet, and apply them when the packet is actually + * decoded. 
(For example AVCodecContext.skip_frame, which might direct the + * decoder to drop the frame contained by the packet sent with this function.) + * + * @warning The input buffer, avpkt->data must be AV_INPUT_BUFFER_PADDING_SIZE + * larger than the actual read bytes because some optimized bitstream + * readers read 32 or 64 bits at once and could read over the end. + * + * @warning Do not mix this API with the legacy API (like avcodec_decode_video2()) + * on the same AVCodecContext. It will return unexpected results now + * or in future libavcodec versions. + * + * @note The AVCodecContext MUST have been opened with @ref avcodec_open2() + * before packets may be fed to the decoder. + * + * @param avctx codec context + * @param[in] avpkt The input AVPacket. Usually, this will be a single video + * frame, or several complete audio frames. + * Ownership of the packet remains with the caller, and the + * decoder will not write to the packet. The decoder may create + * a reference to the packet data (or copy it if the packet is + * not reference-counted). + * Unlike with older APIs, the packet is always fully consumed, + * and if it contains multiple frames (e.g. some audio codecs), + * will require you to call avcodec_receive_frame() multiple + * times afterwards before you can send a new packet. + * It can be NULL (or an AVPacket with data set to NULL and + * size set to 0); in this case, it is considered a flush + * packet, which signals the end of the stream. Sending the + * first flush packet will return success. Subsequent ones are + * unnecessary and will return AVERROR_EOF. If the decoder + * still has frames buffered, it will return them after sending + * a flush packet. + * + * @return 0 on success, otherwise negative error code: + * AVERROR(EAGAIN): input is not accepted in the current state - user + * must read output with avcodec_receive_frame() (once + * all output is read, the packet should be resent, and + * the call will not fail with EAGAIN). + * AVERROR_EOF: the decoder has been flushed, and no new packets can + * be sent to it (also returned if more than 1 flush + * packet is sent) + * AVERROR(EINVAL): codec not opened, it is an encoder, or requires flush + * AVERROR(ENOMEM): failed to add packet to internal queue, or similar + * other errors: legitimate decoding errors +*) +// int avcodec_send_packet(AVCodecContext * avctx, const AVPacket * avpkt); +function avcodec_send_packet(avctx: pAVCodecContext; const avpkt: pAVPacket): int; cdecl; overload; external avcodec_dll; +function avcodec_send_packet(avctx: pAVCodecContext; var avpkt: AVPacket): int; cdecl; overload; external avcodec_dll; + +(* + * Return decoded output data from a decoder. + * + * @param avctx codec context + * @param frame This will be set to a reference-counted video or audio + * frame (depending on the decoder type) allocated by the + * decoder. Note that the function will always call + * av_frame_unref(frame) before doing anything else. + * + * @return + * 0: success, a frame was returned + * AVERROR(EAGAIN): output is not available in this state - user must try + * to send new input + * AVERROR_EOF: the decoder has been fully flushed, and there will be + * no more output frames + * AVERROR(EINVAL): codec not opened, or it is an encoder + * AVERROR_INPUT_CHANGED: current decoded frame has changed parameters + * with respect to first decoded frame. Applicable + * when flag AV_CODEC_FLAG_DROPCHANGED is set. 
+ * other negative values: legitimate decoding errors +*) +// int avcodec_receive_frame(AVCodecContext * avctx, AVFrame * frame); +function avcodec_receive_frame(avctx: pAVCodecContext; frame: pAVFrame): int; cdecl; external avcodec_dll; + +(* + * Supply a raw video or audio frame to the encoder. Use avcodec_receive_packet() + * to retrieve buffered output packets. + * + * @param avctx codec context + * @param[in] frame AVFrame containing the raw audio or video frame to be encoded. + * Ownership of the frame remains with the caller, and the + * encoder will not write to the frame. The encoder may create + * a reference to the frame data (or copy it if the frame is + * not reference-counted). + * It can be NULL, in which case it is considered a flush + * packet. This signals the end of the stream. If the encoder + * still has packets buffered, it will return them after this + * call. Once flushing mode has been entered, additional flush + * packets are ignored, and sending frames will return + * AVERROR_EOF. + * + * For audio: + * If AV_CODEC_CAP_VARIABLE_FRAME_SIZE is set, then each frame + * can have any number of samples. + * If it is not set, frame->nb_samples must be equal to + * avctx->frame_size for all frames except the last. + * The final frame may be smaller than avctx->frame_size. + * @return 0 on success, otherwise negative error code: + * AVERROR(EAGAIN): input is not accepted in the current state - user + * must read output with avcodec_receive_packet() (once + * all output is read, the packet should be resent, and + * the call will not fail with EAGAIN). + * AVERROR_EOF: the encoder has been flushed, and no new frames can + * be sent to it + * AVERROR(EINVAL): codec not opened, refcounted_frames not set, it is a + * decoder, or requires flush + * AVERROR(ENOMEM): failed to add packet to internal queue, or similar + * other errors: legitimate decoding errors +*) +// int avcodec_send_frame(AVCodecContext * avctx, const AVFrame * frame); +function avcodec_send_frame(avctx: pAVCodecContext; const frame: pAVFrame): int; cdecl; external avcodec_dll; + +(* + * Read encoded data from the encoder. + * + * @param avctx codec context + * @param avpkt This will be set to a reference-counted packet allocated by the + * encoder. Note that the function will always call + * av_frame_unref(frame) before doing anything else. + * @return 0 on success, otherwise negative error code: + * AVERROR(EAGAIN): output is not available in the current state - user + * must try to send input + * AVERROR_EOF: the encoder has been fully flushed, and there will be + * no more output packets + * AVERROR(EINVAL): codec not opened, or it is an encoder + * other errors: legitimate decoding errors +*) +// int avcodec_receive_packet(AVCodecContext * avctx, AVPacket * avpkt); +function avcodec_receive_packet(avctx: pAVCodecContext; avpkt: pAVPacket): int; cdecl; overload; external avcodec_dll; +function avcodec_receive_packet(avctx: pAVCodecContext; var avpkt: AVPacket): int; cdecl; overload; external avcodec_dll; + +(* + * Create and return a AVHWFramesContext with values adequate for hardware + * decoding. This is meant to get called from the get_format callback, and is + * a helper for preparing a AVHWFramesContext for AVCodecContext.hw_frames_ctx. + * This API is for decoding with certain hardware acceleration modes/APIs only. + * + * The returned AVHWFramesContext is not initialized. The caller must do this + * with av_hwframe_ctx_init(). 
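+ *
+ * A rough Pascal sketch of a get_format callback built on this helper, using
+ * the declarations from this binding. av_hwframe_ctx_init() lives in
+ * libavutil.pas, and the use of avctx^.hw_device_ctx / fmt^ as the device and
+ * hwaccel format is an assumption made only for illustration:
+ * @code
+ *  function my_get_format(avctx: pAVCodecContext; const fmt: pAVPixelFormat): AVPixelFormat; cdecl;
+ *  var
+ *   frames_ref: pAVBufferRef;
+ *  begin
+ *   frames_ref := nil;
+ *   // fmt^ is taken as the hwaccel format suggested first by the decoder
+ *   if (avcodec_get_hw_frames_parameters(avctx, avctx^.hw_device_ctx, fmt^, frames_ref) >= 0) and
+ *      (av_hwframe_ctx_init(frames_ref) >= 0) then
+ *   begin
+ *    avctx^.hw_frames_ctx := frames_ref;
+ *    Exit(fmt^);
+ *   end;
+ *   // error cleanup (av_buffer_unref on a half-initialized frames_ref) omitted
+ *   Result := avcodec_default_get_format(avctx, fmt); // fall back to the default choice
+ *  end;
+ * @endcode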
+ * + * Calling this function is not a requirement, but makes it simpler to avoid + * codec or hardware API specific details when manually allocating frames. + * + * Alternatively to this, an API user can set AVCodecContext.hw_device_ctx, + * which sets up AVCodecContext.hw_frames_ctx fully automatically, and makes + * it unnecessary to call this function or having to care about + * AVHWFramesContext initialization at all. + * + * There are a number of requirements for calling this function: + * + * - It must be called from get_format with the same avctx parameter that was + * passed to get_format. Calling it outside of get_format is not allowed, and + * can trigger undefined behavior. + * - The function is not always supported (see description of return values). + * Even if this function returns successfully, hwaccel initialization could + * fail later. (The degree to which implementations check whether the stream + * is actually supported varies. Some do this check only after the user's + * get_format callback returns.) + * - The hw_pix_fmt must be one of the choices suggested by get_format. If the + * user decides to use a AVHWFramesContext prepared with this API function, + * the user must return the same hw_pix_fmt from get_format. + * - The device_ref passed to this function must support the given hw_pix_fmt. + * - After calling this API function, it is the user's responsibility to + * initialize the AVHWFramesContext (returned by the out_frames_ref parameter), + * and to set AVCodecContext.hw_frames_ctx to it. If done, this must be done + * before returning from get_format (this is implied by the normal + * AVCodecContext.hw_frames_ctx API rules). + * - The AVHWFramesContext parameters may change every time time get_format is + * called. Also, AVCodecContext.hw_frames_ctx is reset before get_format. So + * you are inherently required to go through this process again on every + * get_format call. + * - It is perfectly possible to call this function without actually using + * the resulting AVHWFramesContext. One use-case might be trying to reuse a + * previously initialized AVHWFramesContext, and calling this API function + * only to test whether the required frame parameters have changed. + * - Fields that use dynamically allocated values of any kind must not be set + * by the user unless setting them is explicitly allowed by the documentation. + * If the user sets AVHWFramesContext.free and AVHWFramesContext.user_opaque, + * the new free callback must call the potentially set previous free callback. + * This API call may set any dynamically allocated fields, including the free + * callback. + * + * The function will set at least the following fields on AVHWFramesContext + * (potentially more, depending on hwaccel API): + * + * - All fields set by av_hwframe_ctx_alloc(). + * - Set the format field to hw_pix_fmt. + * - Set the sw_format field to the most suited and most versatile format. (An + * implication is that this will prefer generic formats over opaque formats + * with arbitrary restrictions, if possible.) + * - Set the width/height fields to the coded frame size, rounded up to the + * API-specific minimum alignment. + * - Only _if_ the hwaccel requires a pre-allocated pool: set the initial_pool_size + * field to the number of maximum reference surfaces possible with the codec, + * plus 1 surface for the user to work (meaning the user can safely reference + * at most 1 decoded surface at a time), plus additional buffering introduced + * by frame threading. 
If the hwaccel does not require pre-allocation, the + * field is left to 0, and the decoder will allocate new surfaces on demand + * during decoding. + * - Possibly AVHWFramesContext.hwctx fields, depending on the underlying + * hardware API. + * + * Essentially, out_frames_ref returns the same as av_hwframe_ctx_alloc(), but + * with basic frame parameters set. + * + * The function is stateless, and does not change the AVCodecContext or the + * device_ref AVHWDeviceContext. + * + * @param avctx The context which is currently calling get_format, and which + * implicitly contains all state needed for filling the returned + * AVHWFramesContext properly. + * @param device_ref A reference to the AVHWDeviceContext describing the device + * which will be used by the hardware decoder. + * @param hw_pix_fmt The hwaccel format you are going to return from get_format. + * @param out_frames_ref On success, set to a reference to an _uninitialized_ + * AVHWFramesContext, created from the given device_ref. + * Fields will be set to values required for decoding. + * Not changed if an error is returned. + * @return zero on success, a negative value on error. The following error codes + * have special semantics: + * AVERROR(ENOENT): the decoder does not support this functionality. Setup + * is always manual, or it is a decoder which does not + * support setting AVCodecContext.hw_frames_ctx at all, + * or it is a software format. + * AVERROR(EINVAL): it is known that hardware decoding is not supported for + * this configuration, or the device_ref is not supported + * for the hwaccel referenced by hw_pix_fmt. +*) +// int avcodec_get_hw_frames_parameters(AVCodecContext * avctx, AVBufferRef * device_ref, enum AVPixelFormat hw_pix_fmt, +// AVBufferRef * * out_frames_ref); +function avcodec_get_hw_frames_parameters(avctx: pAVCodecContext; device_ref: pAVBufferRef; hw_pix_fmt: AVPixelFormat; var out_frames_ref: pAVBufferRef): int; + cdecl; external avcodec_dll; + +const + AV_PARSER_PTS_NB = 4; + + PARSER_FLAG_COMPLETE_FRAMES = $0001; + PARSER_FLAG_ONCE = $0002; + // Set if the parser has a valid file offset + PARSER_FLAG_FETCHED_OFFSET = $0004; + PARSER_FLAG_USE_CODEC_TS = $1000; + +type + + TAPPNArray_int64_t = array [0 .. AV_PARSER_PTS_NB - 1] of int64_t; + + AVPictureStructure = ( // + AV_PICTURE_STRUCTURE_UNKNOWN, // < unknown + AV_PICTURE_STRUCTURE_TOP_FIELD, // < coded as top field + AV_PICTURE_STRUCTURE_BOTTOM_FIELD, // < coded as bottom field + AV_PICTURE_STRUCTURE_FRAME // < coded as frame + ); + + pAVCodecParserContext = ^AVCodecParserContext; + pAVCodecParser = ^AVCodecParser; + + AVCodecParserContext = record + priv_data: Pointer; + parser: pAVCodecParser; + frame_offset: int64_t; (* offset of the current frame *) + cur_offset: int64_t; (* current offset + (incremented by each av_parser_parse()) *) + next_frame_offset: int64_t; (* offset of the next frame *) + (* video info *) + pict_type: int; (* XXX: Put it back in AVCodecContext. *) + (* + * This field is used for proper frame duration computation in lavf. + * It signals, how much longer the frame duration of the current frame + * is compared to normal frame duration. + * + * frame_duration = (1 + repeat_pict) * time_base + * + * It is used by codecs like H.264 to display telecined material. + *) + repeat_pict: int; (* XXX: Put it back in AVCodecContext. 
*) + pts: int64_t; (* pts of the current frame *) + dts: int64_t; (* dts of the current frame *) + + (* private data *) + last_pts: int64_t; + last_dts: int64_t; + fetch_timestamp: int; + + cur_frame_start_index: int; + cur_frame_offset: TAPPNArray_int64_t; + cur_frame_pts: TAPPNArray_int64_t; + cur_frame_dts: TAPPNArray_int64_t; + + flags: int; + + offset: int64_t; // < byte offset from starting packet start + cur_frame_end: TAPPNArray_int64_t; + + (* + * Set by parser to 1 for key frames and 0 for non-key frames. + * It is initialized to -1, so if the parser doesn't set this flag, + * old-style fallback using AV_PICTURE_TYPE_I picture type as key frames + * will be used. + *) + key_frame: int; + +{$IFDEF FF_API_CONVERGENCE_DURATION} + (* + * @deprecated unused + *) + // attribute_deprecated + convergence_duration: int64_t deprecated; +{$ENDIF} + // Timestamp generation support: + (* + * Synchronization point for start of timestamp generation. + * + * Set to >0 for sync point, 0 for no sync point and <0 for undefined + * (default). + * + * For example, this corresponds to presence of H.264 buffering period + * SEI message. + *) + dts_sync_point: int; + + (* + * Offset of the current timestamp against last timestamp sync point in + * units of AVCodecContext.time_base. + * + * Set to INT_MIN when dts_sync_point unused. Otherwise, it must + * contain a valid timestamp offset. + * + * Note that the timestamp of sync point has usually a nonzero + * dts_ref_dts_delta, which refers to the previous sync point. Offset of + * the next frame after timestamp sync point will be usually 1. + * + * For example, this corresponds to H.264 cpb_removal_delay. + *) + dts_ref_dts_delta: int; + + (* + * Presentation delay of current frame in units of AVCodecContext.time_base. + * + * Set to INT_MIN when dts_sync_point unused. Otherwise, it must + * contain valid non-negative timestamp delta (presentation time of a frame + * must not lie in the past). + * + * This delay represents the difference between decoding and presentation + * time of the frame. + * + * For example, this corresponds to H.264 dpb_output_delay. + *) + pts_dts_delta: int; + + (* + * Position of the packet in file. + * + * Analogous to cur_frame_pts/dts + *) + cur_frame_pos: TAPPNArray_int64_t; + + (* + * Byte position of currently parsed frame in stream. + *) + pos: int64_t; + + (* + * Previous frame byte position. + *) + last_pos: int64_t; + + (* + * Duration of the current frame. + * For audio, this is in units of 1 / AVCodecContext.sample_rate. + * For all other types, this is in units of AVCodecContext.time_base. + *) + duration: int; + + field_order: AVFieldOrder; + + (* + * Indicate whether a picture is coded as a frame, top field or bottom field. + * + * For example, H.264 field_pic_flag equal to 0 corresponds to + * AV_PICTURE_STRUCTURE_FRAME. An H.264 picture with field_pic_flag + * equal to 1 and bottom_field_flag equal to 0 corresponds to + * AV_PICTURE_STRUCTURE_TOP_FIELD. + *) + picture_structure: AVPictureStructure; + + (* + * Picture number incremented in presentation or output order. + * This field may be reinitialized at the first picture of a new sequence. + * + * For example, this corresponds to H.264 PicOrderCnt. + *) + output_picture_number: int; + + (* + * Dimensions of the decoded video intended for presentation. + *) + width: int; + height: int; + + (* + * Dimensions of the coded video. 
+ *) + coded_width: int; + coded_height: int; + + (* + * The format of the coded data, corresponds to enum AVPixelFormat for video + * and for enum AVSampleFormat for audio. + * + * Note that a decoder can have considerable freedom in how exactly it + * decodes the data, so the format reported here might be different from the + * one returned by a decoder. + *) + format: int; + end; + + Tcodec_ids_array_5_int = array [0 .. 4] of int; + + AVCodecParser = record + codec_ids: Tcodec_ids_array_5_int; (* several codec IDs are permitted *) + priv_data_size: int; + // int (*parser_init)(AVCodecParserContext *s); + parser_init: function(s: pAVCodecParserContext): int; cdecl; + (* This callback never returns an error, a negative value means that + * the frame start was in a previous packet. *) + // int (*parser_parse)(AVCodecParserContext *s, AVCodecContext *avctx, const uint8_t **poutbuf, int *poutbuf_size, const uint8_t *buf, int buf_size); + parser_parse: function(s: pAVCodecParserContext; avctx: pAVCodecContext; const poutbuf: puint8_t; poutbuf_size: pInt; const buf: puint8_t; buf_size: int) + : int; cdecl; + + // void (*parser_close)(AVCodecParserContext *s); + parser_close: procedure(s: pAVCodecParserContext); cdecl; + // int (*split)(AVCodecContext *avctx, const uint8_t *buf, int buf_size); + split: function(avctx: pAVCodecContext; const buf: puint8_t; buf_size: int): int; cdecl; + next: pAVCodecParser; + end; + + (* + * Iterate over all registered codec parsers. + * + * @param opaque a pointer where libavcodec will store the iteration state. Must + * point to NULL to start the iteration. + * + * @return the next registered codec parser or NULL when the iteration is + * finished + *) + // const AVCodecParser * av_parser_iterate(void * * opaque); +function av_parser_iterate(var opaque: Pointer): pAVCodecParser; cdecl; external avcodec_dll; + +// attribute_deprecated AVCodecParser * av_parser_next(const AVCodecParser * c); +function av_parser_next(const c: pAVCodecParser): pAVCodecParser; cdecl; external avcodec_dll; deprecated; + +// attribute_deprecated void av_register_codec_parser(AVCodecParser * parser); +procedure av_register_codec_parser(parser: pAVCodecParser); cdecl; external avcodec_dll; deprecated; + +// AVCodecParserContext * av_parser_init(int codec_id); +function av_parser_init(codec_id: int): pAVCodecParserContext; cdecl; external avcodec_dll; deprecated; + +(* + * Parse a packet. + * + * @param s parser context. + * @param avctx codec context. + * @param poutbuf set to pointer to parsed buffer or NULL if not yet finished. + * @param poutbuf_size set to size of parsed buffer or zero if not yet finished. + * @param buf input buffer. + * @param buf_size buffer size in bytes without the padding. I.e. the full buffer + size is assumed to be buf_size + AV_INPUT_BUFFER_PADDING_SIZE. + To signal EOF, this should be 0 (so that the last frame + can be output). + * @param pts input presentation timestamp. + * @param dts input decoding timestamp. + * @param pos input byte position in stream. + * @return the number of bytes of the input bitstream used. 
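+ *
+ * A rough Pascal equivalent of the C example below, using the declarations
+ * from this binding (myparser/avctx are assumed to be initialized elsewhere,
+ * and decode_frame() is a hypothetical consumer of the parsed buffer):
+ * @code
+ *  while in_len > 0 do
+ *  begin
+ *   len := av_parser_parse2(myparser, avctx, data, size,
+ *                           in_data, in_len, pts, dts, pos);
+ *   Inc(in_data, len);
+ *   Dec(in_len, len);
+ *   if size > 0 then
+ *    decode_frame(data, size);
+ *  end;
+ * @endcode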
+ * + * Example: + * @code + * while(in_len){ + * len = av_parser_parse2(myparser, AVCodecContext, &data, &size, + * in_data, in_len, + * pts, dts, pos); + * in_data += len; + * in_len -= len; + * + * if(size) + * decode_frame(data, size); + * } + * @endcode +*) +// int av_parser_parse2(AVCodecParserContext * s, AVCodecContext * avctx, uint8_t * * poutbuf, int * poutbuf_size, const uint8_t * buf, +// int buf_size, int64_t pts, int64_t dts, int64_t pos); +function av_parser_parse2(s: pAVCodecParserContext; avctx: pAVCodecContext; var poutbuf: puint8_t; var poutbuf_size: int; const buf: puint8_t; buf_size: int; + pts: int64_t; dts: int64_t; pos: int64_t): int; cdecl; external avcodec_dll; + +(* + * @return 0 if the output buffer is a subset of the input, 1 if it is allocated and must be freed + * @deprecated use AVBitStreamFilter +*) +// int av_parser_change(AVCodecParserContext * s, AVCodecContext * avctx, uint8_t **poutbuf, int * poutbuf_size, const uint8_t *buf, +// int buf_size, int keyframe); +function av_parser_change(s: pAVCodecParserContext; avctx: pAVCodecContext; var poutbuf: puint8_t; var poutbuf_size: int; const buf: puint8_t; buf_size: int; + keyframe: int): int; cdecl; external avcodec_dll; deprecated 'deprecated use AVBitStreamFilter'; + +// void av_parser_close(AVCodecParserContext * s); +procedure av_parser_close(s: pAVCodecParserContext); cdecl; external avcodec_dll; deprecated; + +(* + * Find a registered encoder with a matching codec ID. + * + * @param id AVCodecID of the requested encoder + * @return An encoder if one was found, NULL otherwise. +*) +// avcodec * avcodec_find_encoder(enum AVCodecID id); +function avcodec_find_encoder(id: AVCodecID): pAVCodec; cdecl; external avcodec_dll; + +(* + * Find a registered encoder with the specified name. + * + * @param name name of the requested encoder + * @return An encoder if one was found, NULL otherwise. +*) +// avcodec * avcodec_find_encoder_by_name(const char * name); +function avcodec_find_encoder_by_name(const name: pAnsiChar): pAVCodec; cdecl; external avcodec_dll; + +(* + * Encode a frame of audio. + * + * Takes input samples from frame and writes the next output packet, if + * available, to avpkt. The output packet does not necessarily contain data for + * the most recent frame, as encoders can delay, split, and combine input frames + * internally as needed. + * + * @param avctx codec context + * @param avpkt output AVPacket. + * The user can supply an output buffer by setting + * avpkt->data and avpkt->size prior to calling the + * function, but if the size of the user-provided data is not + * large enough, encoding will fail. If avpkt->data and + * avpkt->size are set, avpkt->destruct must also be set. All + * other AVPacket fields will be reset by the encoder using + * av_init_packet(). If avpkt->data is NULL, the encoder will + * allocate it. The encoder will set avpkt->size to the size + * of the output packet. + * + * If this function fails or produces no output, avpkt will be + * freed using av_packet_unref(). + * @param[in] frame AVFrame containing the raw audio data to be encoded. + * May be NULL when flushing an encoder that has the + * AV_CODEC_CAP_DELAY capability set. + * If AV_CODEC_CAP_VARIABLE_FRAME_SIZE is set, then each frame + * can have any number of samples. + * If it is not set, frame->nb_samples must be equal to + * avctx->frame_size for all frames except the last. + * The final frame may be smaller than avctx->frame_size. 
+ * @param[out] got_packet_ptr This field is set to 1 by libavcodec if the + * output packet is non-empty, and to 0 if it is + * empty. If the function returns an error, the + * packet can be assumed to be invalid, and the + * value of got_packet_ptr is undefined and should + * not be used. + * @return 0 on success, negative error code on failure + * + * @deprecated use avcodec_send_frame()/avcodec_receive_packet() instead +*) +// attribute_deprecated +// int avcodec_encode_audio2(AVCodecContext * avctx, AVPacket * avpkt, const AVFrame * frame, int * got_packet_ptr); +function avcodec_encode_audio2(avctx: pAVCodecContext; avpkt: pAVPacket; const frame: pAVFrame; var got_packet_ptr: int): int; cdecl; external avcodec_dll; + deprecated; + +(* + * Encode a frame of video. + * + * Takes input raw video data from frame and writes the next output packet, if + * available, to avpkt. The output packet does not necessarily contain data for + * the most recent frame, as encoders can delay and reorder input frames + * internally as needed. + * + * @param avctx codec context + * @param avpkt output AVPacket. + * The user can supply an output buffer by setting + * avpkt->data and avpkt->size prior to calling the + * function, but if the size of the user-provided data is not + * large enough, encoding will fail. All other AVPacket fields + * will be reset by the encoder using av_init_packet(). If + * avpkt->data is NULL, the encoder will allocate it. + * The encoder will set avpkt->size to the size of the + * output packet. The returned data (if any) belongs to the + * caller, he is responsible for freeing it. + * + * If this function fails or produces no output, avpkt will be + * freed using av_packet_unref(). + * @param[in] frame AVFrame containing the raw video data to be encoded. + * May be NULL when flushing an encoder that has the + * AV_CODEC_CAP_DELAY capability set. + * @param[out] got_packet_ptr This field is set to 1 by libavcodec if the + * output packet is non-empty, and to 0 if it is + * empty. If the function returns an error, the + * packet can be assumed to be invalid, and the + * value of got_packet_ptr is undefined and should + * not be used. 
+ * @return 0 on success, negative error code on failure + * + * @deprecated use avcodec_send_frame()/avcodec_receive_packet() instead +*) +// attribute_deprecated +// int avcodec_encode_video2(AVCodecContext * avctx, AVPacket * avpkt, const AVFrame * frame, int * got_packet_ptr); +function avcodec_encode_video2(avctx: pAVCodecContext; avpkt: pAVPacket; const frame: pAVFrame; var got_packet_ptr: int): int; cdecl; overload; + external avcodec_dll; deprecated; +function avcodec_encode_video2(avctx: pAVCodecContext; var avpkt: AVPacket; const frame: pAVFrame; var got_packet_ptr: int): int; cdecl; overload; + external avcodec_dll; deprecated; + +// int avcodec_encode_subtitle(AVCodecContext * avctx, uint8_t * buf, int buf_size, const AVSubtitle * sub); +function avcodec_encode_subtitle(avctx: pAVCodecContext; buf: puint8_t; buf_size: int; const sub: pAVSubtitle): int; cdecl; external avcodec_dll; + +{$IFDEF FF_API_AVPICTURE} +(* + * @deprecated unused +*) +// attribute_deprecated +// int avpicture_alloc(AVPicture * picture, enum AVPixelFormat pix_fmt, int width, int height); +function avpicture_alloc(picture: pAVPicture; pix_fmt: AVPixelFormat; width: int; height: int): int; cdecl; overload; external avcodec_dll; deprecated; +function avpicture_alloc(Var picture: AVPicture; pix_fmt: AVPixelFormat; width: int; height: int): int; cdecl; overload; external avcodec_dll; deprecated; + +(* + * @deprecated unused +*) +// attribute_deprecated +// void avpicture_free(AVPicture * picture); +procedure avpicture_free(picture: pAVPicture); cdecl; external avcodec_dll; deprecated; + +(* + * @deprecated use av_image_fill_arrays() instead. +*) +// attribute_deprecated +// int avpicture_fill(AVPicture * picture, const uint8_t * ptr, enum AVPixelFormat pix_fmt, int width, int height); +function avpicture_fill(picture: pAVPicture; const ptr: puint8_t; pix_fmt: AVPixelFormat; width: int; height: int): int; cdecl; external avcodec_dll; + +(* + * @deprecated use av_image_copy_to_buffer() instead. +*) +// attribute_deprecated +// int avpicture_layout(const AVPicture * src, enum AVPixelFormat pix_fmt, int width, int height, unsigned char * dest, int dest_size); +function avpicture_layout(const src: pAVPicture; pix_fmt: AVPixelFormat; width: int; height: int; dest: punsignedchar; dest_size: int): int; cdecl; + external avcodec_dll; deprecated; + +(* + * @deprecated use av_image_get_buffer_size() instead. +*) +// attribute_deprecated +// int avpicture_get_size(enum AVPixelFormat pix_fmt, int width, int height); +function avpicture_get_size(pix_fmt: AVPixelFormat; width: int; height: int): int; cdecl; external avcodec_dll; deprecated; + +(* + * @deprecated av_image_copy() instead. 
+*) +// attribute_deprecated +// void av_picture_copy(AVPicture * dst, const AVPicture * src, enum AVPixelFormat pix_fmt, int width, int height); +procedure av_picture_copy(dst: pAVPicture; const src: pAVPicture; pix_fmt: AVPixelFormat; width: int; height: int); cdecl; external avcodec_dll; deprecated; + +(* + * @deprecated unused +*) +// attribute_deprecated +// int av_picture_crop(AVPicture * dst, const AVPicture * src, enum AVPixelFormat pix_fmt, int top_band, int left_band); +function av_picture_crop(dst: pAVPicture; const src: pAVPicture; pix_fmt: AVPixelFormat; top_band: int; left_band: int): int; cdecl; external avcodec_dll; + +(* + * @deprecated unused +*) +// attribute_deprecated +// int av_picture_pad(AVPicture * dst, const AVPicture * src, int height, int width, enum AVPixelFormat pix_fmt, int padtop, int padbottom, int padleft, int padright, int * color); +function av_picture_pad(dst: pAVPicture; const src: pAVPicture; height: int; width: int; pix_fmt: AVPixelFormat; padtop: int; padbottom: int; padleft: int; + padright: int; var color: int): int; cdecl; external avcodec_dll; deprecated; + +{$ENDIF} +(* + * @defgroup lavc_misc Utility functions + * @ingroup libavc + * + * Miscellaneous utility functions related to both encoding and decoding + * (or neither). + * @{ +*) + +(* + * @defgroup lavc_misc_pixfmt Pixel formats + * + * Functions for working with pixel formats. + * @{ +*) + +{$IFDEF FF_API_GETCHROMA} +(* + * @deprecated Use av_pix_fmt_get_chroma_sub_sample +*) + +// attribute_deprecated +// void avcodec_get_chroma_sub_sample(enum AVPixelFormat pix_fmt, int * h_shift, int * v_shift); +procedure avcodec_get_chroma_sub_sample(pix_fmt: AVPixelFormat; var h_shift, v_shift: int); cdecl; external avcodec_dll; deprecated; +{$ENDIF} +(* + * Return a value representing the fourCC code associated to the + * pixel format pix_fmt, or 0 if no associated fourCC code can be + * found. +*) +// unsigned int avcodec_pix_fmt_to_codec_tag(enum AVPixelFormat pix_fmt); +function avcodec_pix_fmt_to_codec_tag(pix_fmt: AVPixelFormat): unsignedint; cdecl; external avcodec_dll; + +(* + * @deprecated see av_get_pix_fmt_loss() +*) +// int avcodec_get_pix_fmt_loss(enum AVPixelFormat dst_pix_fmt, enum AVPixelFormat src_pix_fmt, int has_alpha); +function avcodec_get_pix_fmt_loss(dst_pix_fmt: AVPixelFormat; src_pix_fmt: AVPixelFormat; has_alpha: int): int; cdecl; external avcodec_dll; + +(* + * Find the best pixel format to convert to given a certain source pixel + * format. When converting from one pixel format to another, information loss + * may occur. For example, when converting from RGB24 to GRAY, the color + * information will be lost. Similarly, other losses occur when converting from + * some formats to other formats. avcodec_find_best_pix_fmt_of_2() searches which of + * the given pixel formats should be used to suffer the least amount of loss. + * The pixel formats from which it chooses one, are determined by the + * pix_fmt_list parameter. + * + * + * @param[in] pix_fmt_list AV_PIX_FMT_NONE terminated array of pixel formats to choose from + * @param[in] src_pix_fmt source pixel format + * @param[in] has_alpha Whether the source pixel format alpha channel is used. + * @param[out] loss_ptr Combination of flags informing you what kind of losses will occur. + * @return The best pixel format to convert to or -1 if none was found. 
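+ *
+ * A minimal Pascal sketch with this binding (the AV_PIX_FMT_* values are the
+ * ones declared in libavutil.pas; the candidate list is illustrative):
+ * @code
+ *  var
+ *   candidates: array[0..2] of AVPixelFormat;
+ *   loss: int;
+ *   best: AVPixelFormat;
+ *  begin
+ *   candidates[0] := AV_PIX_FMT_YUV420P;
+ *   candidates[1] := AV_PIX_FMT_NV12;
+ *   candidates[2] := AV_PIX_FMT_NONE; // the list must be AV_PIX_FMT_NONE terminated
+ *   loss := 0;
+ *   best := avcodec_find_best_pix_fmt_of_list(@candidates[0], AV_PIX_FMT_RGB24, 0, loss);
+ *   // best is the candidate that loses the least information when converting
+ *   // from RGB24; loss describes the kind of loss that will occur.
+ *  end;
+ * @endcode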
+*) +// enum AVPixelFormat avcodec_find_best_pix_fmt_of_list(const enum AVPixelFormat * pix_fmt_list, enum AVPixelFormat src_pix_fmt, int has_alpha, +// int * loss_ptr); +function avcodec_find_best_pix_fmt_of_list(const pix_fmt_list: pAVPixelFormat; src_pix_fmt: AVPixelFormat; has_alpha: int; var loss_ptr: int): AVPixelFormat; + cdecl; external avcodec_dll; + +(* + * @deprecated see av_find_best_pix_fmt_of_2() +*) +// enum AVPixelFormat avcodec_find_best_pix_fmt_of_2(enum AVPixelFormat dst_pix_fmt1, enum AVPixelFormat dst_pix_fmt2, +// enum AVPixelFormat src_pix_fmt, int has_alpha, int * loss_ptr); +function avcodec_find_best_pix_fmt_of_2(dst_pix_fmt1: AVPixelFormat; dst_pix_fmt2: AVPixelFormat; src_pix_fmt: AVPixelFormat; has_alpha: int; var loss_ptr: int) + : AVPixelFormat; cdecl; external avcodec_dll; + +// attribute_deprecated +// enum AVPixelFormat avcodec_find_best_pix_fmt2(enum AVPixelFormat dst_pix_fmt1, enum AVPixelFormat dst_pix_fmt2, +// enum AVPixelFormat src_pix_fmt, int has_alpha, int * loss_ptr); +function avcodec_find_best_pix_fmt2(dst_pix_fmt1: AVPixelFormat; dst_pix_fmt2: AVPixelFormat; src_pix_fmt: AVPixelFormat; has_alpha: int; var loss_ptr: int) + : AVPixelFormat; cdecl; external avcodec_dll; deprecated; + +// enum AVPixelFormat avcodec_default_get_format(struct AVCodecContext * s, const enum AVPixelFormat * fmt); +function avcodec_default_get_format(s: pAVCodecContext; const fmt: pAVPixelFormat): AVPixelFormat; cdecl; external avcodec_dll; + +{$IFDEF FF_API_TAG_STRING} +(* + * Put a string representing the codec tag codec_tag in buf. + * + * @param buf buffer to place codec tag in + * @param buf_size size in bytes of buf + * @param codec_tag codec tag to assign + * @return the length of the string that would have been generated if + * enough space had been available, excluding the trailing null + * + * @deprecated see av_fourcc_make_string() and av_fourcc2str(). +*) +// attribute_deprecated +// size_t av_get_codec_tag_string(char * buf, size_t buf_size, unsigned int codec_tag); +function av_get_codec_tag_string(buf: pAnsiChar; buf_size: size_t; codec_tag: unsignedint): size_t; cdecl; external avcodec_dll; deprecated; +{$ENDIF} +// void avcodec_string(char * buf, int buf_size, AVCodecContext * enc, int encode); +procedure avcodec_string(buf: pAnsiChar; buf_size: int; enc: pAVCodecContext; encode: int); cdecl; external avcodec_dll; + +(* + * Return a name for the specified profile, if available. + * + * @param codec the codec that is searched for the given profile + * @param profile the profile value for which a name is requested + * @return A name for the profile if found, NULL otherwise. +*) +// const char * av_get_profile_name(const avcodec * codec, int profile); +function av_get_profile_name(const codec: pAVCodec; profile: int): pAnsiChar; cdecl; external avcodec_dll; + +(* + * Return a name for the specified profile, if available. + * + * @param codec_id the ID of the codec to which the requested profile belongs + * @param profile the profile value for which a name is requested + * @return A name for the profile if found, NULL otherwise. 
+ * + * @note unlike av_get_profile_name(), which searches a list of profiles + * supported by a specific decoder or encoder implementation, this + * function searches the list of profiles from the AVCodecDescriptor +*) +// const char * avcodec_profile_name(enum AVCodecID codec_id, int profile); +function avcodec_profile_name(codec_id: AVCodecID; profile: int): pAnsiChar; cdecl; external avcodec_dll; + +// int avcodec_default_execute(AVCodecContext * c, int (* func)(AVCodecContext *c2, void *arg2),void *arg, int *ret, int count, int size); +// int avcodec_default_execute2(AVCodecContext *c, int (*func)(AVCodecContext *c2, void *arg2, int, int),void *arg, int *ret, int count); +type + Tavcodec_default_execute_func = function(c2: pAVCodecContext; arg2: Pointer): int; cdecl; + +function avcodec_default_execute(c: pAVCodecContext; func: Tavcodec_default_execute_func; arg: Pointer; var ret: int; count: int; size: int): int; cdecl; + external avcodec_dll; + +// int avcodec_default_execute2(AVCodecContext *c, int (*func)(AVCodecContext *c2, void *arg2, int, int),void *arg, int *ret, int count); +type + Tavcodec_default_execute2_func = function(c2: pAVCodecContext; arg2: Pointer; p1, p2: int): int; cdecl; + +function avcodec_default_execute2(c: pAVCodecContext; func: Tavcodec_default_execute2_func; arg: Pointer; var ret: int; count: int): int; cdecl; + external avcodec_dll; + +// FIXME func typedef + +(* + * Fill AVFrame audio data and linesize pointers. + * + * The buffer buf must be a preallocated buffer with a size big enough + * to contain the specified samples amount. The filled AVFrame data + * pointers will point to this buffer. + * + * AVFrame extended_data channel pointers are allocated if necessary for + * planar audio. + * + * @param frame the AVFrame + * frame->nb_samples must be set prior to calling the + * function. This function fills in frame->data, + * frame->extended_data, frame->linesize[0]. + * @param nb_channels channel count + * @param sample_fmt sample format + * @param buf buffer to use for frame data + * @param buf_size size of buffer + * @param align plane size sample alignment (0 = default) + * @return >=0 on success, negative error code on failure + * @todo return the size in bytes required to store the samples in + * case of success, at the next libavutil bump +*) +// int avcodec_fill_audio_frame(AVFrame * frame, int nb_channels, enum AVSampleFormat sample_fmt, const uint8_t * buf, int buf_size, int align); +function avcodec_fill_audio_frame(frame: pAVFrame; nb_channels: int; sample_fmt: AVSampleFormat; const buf: puint8_t; buf_size: int; align: int): int; cdecl; + external avcodec_dll; + +(* + * Reset the internal decoder state / flush internal buffers. Should be called + * e.g. when seeking or when switching to a different stream. + * + * @note when refcounted frames are not used (i.e. avctx->refcounted_frames is 0), + * this invalidates the frames previously returned from the decoder. When + * refcounted frames are used, the decoder just releases any references it might + * keep internally, but the caller's reference remains valid. +*) +// void avcodec_flush_buffers(AVCodecContext * avctx); +procedure avcodec_flush_buffers(avctx: pAVCodecContext); cdecl; external avcodec_dll; + +(* + * Return codec bits per sample. + * + * @param[in] codec_id the codec + * @return Number of bits per sample or zero if unknown for the given codec. 
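+ *
+ * For instance (Pascal; AV_CODEC_ID_PCM_S16LE is the value from the AVCodecID
+ * enum of this binding, bits/bytes_per_sample are plain int locals):
+ * @code
+ *  bits := av_get_bits_per_sample(AV_CODEC_ID_PCM_S16LE); // 16 for signed 16-bit PCM
+ *  if bits > 0 then
+ *   bytes_per_sample := bits div 8;
+ * @endcode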
+*) +// int av_get_bits_per_sample(enum AVCodecID codec_id); +function av_get_bits_per_sample(codec_id: AVCodecID): int; cdecl; external avcodec_dll; + +(* + * Return the PCM codec associated with a sample format. + * @param be endianness, 0 for little, 1 for big, + * -1 (or anything else) for native + * @return AV_CODEC_ID_PCM_* or AV_CODEC_ID_NONE +*) +// enum AVCodecID av_get_pcm_codec(enum AVSampleFormat fmt, int be); +function av_get_pcm_codec(fmt: AVSampleFormat; be: int): AVCodecID; cdecl; external avcodec_dll; + +(* + * Return codec bits per sample. + * Only return non-zero if the bits per sample is exactly correct, not an + * approximation. + * + * @param[in] codec_id the codec + * @return Number of bits per sample or zero if unknown for the given codec. +*) +// int av_get_exact_bits_per_sample(enum AVCodecID codec_id); +function av_get_exact_bits_per_sample(codec_id: AVCodecID): int; cdecl; external avcodec_dll; + +(* + * Return audio frame duration. + * + * @param avctx codec context + * @param frame_bytes size of the frame, or 0 if unknown + * @return frame duration, in samples, if known. 0 if not able to + * determine. +*) +// int av_get_audio_frame_duration(AVCodecContext * avctx, int frame_bytes); +function av_get_audio_frame_duration(avctx: pAVCodecContext; frame_bytes: int): int; cdecl; external avcodec_dll; + +(* + * This function is the same as av_get_audio_frame_duration(), except it works + * with AVCodecParameters instead of an AVCodecContext. +*) +// int av_get_audio_frame_duration2(AVCodecParameters * par, int frame_bytes); +function av_get_audio_frame_duration2(par: pAVCodecParameters; frame_bytes: int): int; cdecl; external avcodec_dll; + +Type + pAVBitStreamFilter = ^AVBitStreamFilter; + +{$IFDEF FF_API_OLD_BSF} + pAVBitStreamFilterContext = ^AVBitStreamFilterContext; + + AVBitStreamFilterContext = record + priv_data: Pointer; + filter: pAVBitStreamFilter; + parser: pAVCodecParserContext; + next: pAVBitStreamFilterContext; + (* + * Internal default arguments, used if NULL is passed to av_bitstream_filter_filter(). + * Not for access by library users. + *) + args: pAnsiChar; + end; +{$ENDIF} + + pAVBSFInternal = ^AVBSFInternal; + + AVBSFInternal = record + end; + + (* + * The bitstream filter state. + * + * This struct must be allocated with av_bsf_alloc() and freed with + * av_bsf_free(). + * + * The fields in the struct will only be changed (by the caller or by the + * filter) as described in their documentation, and are to be considered + * immutable otherwise. + *) + pAVBSFContext = ^AVBSFContext; + + AVBSFContext = record + (* + * A class for logging and AVOptions + *) + av_class: pAVClass; + + (* + * The bitstream filter this context is an instance of. + *) + filter: pAVBitStreamFilter; + + (* + * Opaque libavcodec internal data. Must not be touched by the caller in any + * way. + *) + internal: pAVBSFInternal; + + (* + * Opaque filter-specific private data. If filter->priv_class is non-NULL, + * this is an AVOptions-enabled struct. + *) + priv_data: Pointer; + + (* + * Parameters of the input stream. This field is allocated in + * av_bsf_alloc(), it needs to be filled by the caller before + * av_bsf_init(). + *) + par_in: pAVCodecParameters; + + (* + * Parameters of the output stream. This field is allocated in + * av_bsf_alloc(), it is set by the filter in av_bsf_init(). + *) + par_out: pAVCodecParameters; + + (* + * The timebase used for the timestamps of the input packets. Set by the + * caller before av_bsf_init(). 
+ *) + time_base_in: AVRational; + + (* + * The timebase used for the timestamps of the output packets. Set by the + * filter in av_bsf_init(). + *) + time_base_out: AVRational; + end; + + AVBitStreamFilter = record + name: pAnsiChar; + + (* + * A list of codec ids supported by the filter, terminated by + * AV_CODEC_ID_NONE. + * May be NULL, in that case the bitstream filter works with any codec id. + *) + codec_ids: pAVCodecID; + + (* + * A class for the private data, used to declare bitstream filter private + * AVOptions. This field is NULL for bitstream filters that do not declare + * any options. + * + * If this field is non-NULL, the first member of the filter private data + * must be a pointer to AVClass, which will be set by libavcodec generic + * code to this class. + *) + priv_class: pAVClass; + + (* *************************************************************** + * No fields below this line are part of the public API. They + * may not be used outside of libavcodec and can be changed and + * removed at will. + * New public fields should be added right above. + ***************************************************************** + *) + priv_data_size: int; + // int (*init)(AVBSFContext *ctx); + init: function(ctx: pAVBSFContext): int; cdecl; + // int (*filter)(AVBSFContext *ctx, AVPacket *pkt); + filter: function(ctx: pAVBSFContext; pkt: pAVPacket): int; cdecl; + // void (*close)(AVBSFContext *ctx); + close: procedure(ctx: pAVBSFContext); cdecl; + // void (*flush)(AVBSFContext *ctx); + flush: procedure(ctx: pAVBSFContext); cdecl; + end; + +{$IFDEF FF_API_OLD_BSF} + + (* + * @deprecated the old bitstream filtering API (using AVBitStreamFilterContext) + * is deprecated. Use the new bitstream filtering API (using AVBSFContext). + *) + // attribute_deprecated void av_register_bitstream_filter(AVBitStreamFilter * bsf); +procedure av_register_bitstream_filter(bsf: pAVBitStreamFilter); cdecl; external avcodec_dll; deprecated; +(* + * @deprecated the old bitstream filtering API (using AVBitStreamFilterContext) + * is deprecated. Use av_bsf_get_by_name(), av_bsf_alloc(), and av_bsf_init() + * from the new bitstream filtering API (using AVBSFContext). +*) +// attribute_deprecated AVBitStreamFilterContext * av_bitstream_filter_init(const char * name); +function av_bitstream_filter_init(const name: pAnsiChar): pAVBitStreamFilterContext; cdecl; external avcodec_dll; deprecated; +(* + * @deprecated the old bitstream filtering API (using AVBitStreamFilterContext) + * is deprecated. Use av_bsf_send_packet() and av_bsf_receive_packet() from the + * new bitstream filtering API (using AVBSFContext). +*) +// attribute_deprecated int av_bitstream_filter_filter(AVBitStreamFilterContext * bsfc, AVCodecContext * avctx, const char * args, +// uint8_t * * poutbuf, int * poutbuf_size, const uint8_t * buf, int buf_size, int keyframe); +function av_bitstream_filter_filter(bsfc: pAVBitStreamFilterContext; avctx: pAVCodecContext; const args: pAnsiChar; var poutbuf: puint8_t; + var poutbuf_size: int; const buf: puint8_t; buf_size: int; keyframe: int): int; cdecl; external avcodec_dll; deprecated; +(* + * @deprecated the old bitstream filtering API (using AVBitStreamFilterContext) + * is deprecated. Use av_bsf_free() from the new bitstream filtering API (using + * AVBSFContext). 
+*) +// attribute_deprecated void av_bitstream_filter_close(AVBitStreamFilterContext * bsf); +procedure av_bitstream_filter_close(bsf: pAVBitStreamFilterContext); cdecl; external avcodec_dll; deprecated; +(* + * @deprecated the old bitstream filtering API (using AVBitStreamFilterContext) + * is deprecated. Use av_bsf_iterate() from the new bitstream filtering API (using + * AVBSFContext). +*) +// attribute_deprecated const AVBitStreamFilter * av_bitstream_filter_next(const AVBitStreamFilter * f); +function av_bitstream_filter_next(const f: pAVBitStreamFilter): pAVBitStreamFilter; cdecl; external avcodec_dll; deprecated; +{$ENDIF} +(* + * @return a bitstream filter with the specified name or NULL if no such + * bitstream filter exists. +*) +// const AVBitStreamFilter * av_bsf_get_by_name(const char * name); +function av_bsf_get_by_name(const name: pAnsiChar): pAVBitStreamFilter; cdecl; external avcodec_dll; + +(* + * Iterate over all registered bitstream filters. + * + * @param opaque a pointer where libavcodec will store the iteration state. Must + * point to NULL to start the iteration. + * + * @return the next registered bitstream filter or NULL when the iteration is + * finished +*) +// const AVBitStreamFilter * av_bsf_iterate(void * * opaque); +function av_bsf_iterate(var opaque: Pointer): pAVBitStreamFilter; cdecl; external avcodec_dll; +{$IFDEF FF_API_NEXT} +// attribute_deprecated const AVBitStreamFilter * av_bsf_next(void * * opaque); +function av_bsf_next(var opaque: Pointer): pAVBitStreamFilter; cdecl; external avcodec_dll; deprecated; +{$ENDIF} +(* + * Allocate a context for a given bitstream filter. The caller must fill in the + * context parameters as described in the documentation and then call + * av_bsf_init() before sending any data to the filter. + * + * @param filter the filter for which to allocate an instance. + * @param ctx a pointer into which the pointer to the newly-allocated context + * will be written. It must be freed with av_bsf_free() after the + * filtering is done. + * + * @return 0 on success, a negative AVERROR code on failure +*) +// int av_bsf_alloc(const AVBitStreamFilter * filter, AVBSFContext * * ctx); +function av_bsf_alloc(const filter: pAVBitStreamFilter; var ctx: pAVBSFContext): int; cdecl; external avcodec_dll; + +(* + * Prepare the filter for use, after all the parameters and options have been + * set. +*) +// int av_bsf_init(AVBSFContext * ctx); +function av_bsf_init(ctx: pAVBSFContext): int; cdecl; external avcodec_dll; + +(* + * Submit a packet for filtering. + * + * After sending each packet, the filter must be completely drained by calling + * av_bsf_receive_packet() repeatedly until it returns AVERROR(EAGAIN) or + * AVERROR_EOF. + * + * @param pkt the packet to filter. The bitstream filter will take ownership of + * the packet and reset the contents of pkt. pkt is not touched if an error occurs. + * This parameter may be NULL, which signals the end of the stream (i.e. no more + * packets will be sent). That will cause the filter to output any packets it + * may have buffered internally. + * + * @return 0 on success, a negative AVERROR on error. +*) +// int av_bsf_send_packet(AVBSFContext * ctx, AVPacket * pkt); +function av_bsf_send_packet(ctx: pAVBSFContext; pkt: pAVPacket): int; cdecl; external avcodec_dll; + +(* + * Retrieve a filtered packet. + * + * @param[out] pkt this struct will be filled with the contents of the filtered + * packet. 
It is owned by the caller and must be freed using + * av_packet_unref() when it is no longer needed. + * This parameter should be "clean" (i.e. freshly allocated + * with av_packet_alloc() or unreffed with av_packet_unref()) + * when this function is called. If this function returns + * successfully, the contents of pkt will be completely + * overwritten by the returned data. On failure, pkt is not + * touched. + * + * @return 0 on success. AVERROR(EAGAIN) if more packets need to be sent to the + * filter (using av_bsf_send_packet()) to get more output. AVERROR_EOF if there + * will be no further output from the filter. Another negative AVERROR value if + * an error occurs. + * + * @note one input packet may result in several output packets, so after sending + * a packet with av_bsf_send_packet(), this function needs to be called + * repeatedly until it stops returning 0. It is also possible for a filter to + * output fewer packets than were sent to it, so this function may return + * AVERROR(EAGAIN) immediately after a successful av_bsf_send_packet() call. +*) +// int av_bsf_receive_packet(AVBSFContext * ctx, AVPacket * pkt); +function av_bsf_receive_packet(ctx: pAVBSFContext; pkt: pAVPacket): int; cdecl; external avcodec_dll; + +(* * + * Reset the internal bitstream filter state / flush internal buffers. +*) +// void av_bsf_flush(AVBSFContext *ctx); +procedure av_bsf_flush(var ctx: AVBSFContext); cdecl; external avcodec_dll; + +(* + * Free a bitstream filter context and everything associated with it; write NULL + * into the supplied pointer. +*) +// void av_bsf_free(AVBSFContext **ctx); +procedure av_bsf_free(var ctx: pAVBSFContext); cdecl; external avcodec_dll; + +(* + * Get the AVClass for AVBSFContext. It can be used in combination with + * AV_OPT_SEARCH_FAKE_OBJ for examining options. + * + * @see av_opt_find(). +*) +// const AVClass * av_bsf_get_class(void); +function av_bsf_get_class(): pAVClass; cdecl; external avcodec_dll; + +type + (* + * Structure for chain/list of bitstream filters. + * Empty list can be allocated by av_bsf_list_alloc(). + *) + pAVBSFList = ^AVBSFList; + + AVBSFList = record + end; + + (* + * Allocate empty list of bitstream filters. + * The list must be later freed by av_bsf_list_free() + * or finalized by av_bsf_list_finalize(). + * + * @return Pointer to @ref AVBSFList on success, NULL in case of failure + *) + // AVBSFList * av_bsf_list_alloc(void); +function av_bsf_list_alloc(): pAVBSFList; cdecl; external avcodec_dll; + +(* + * Free list of bitstream filters. + * + * @param lst Pointer to pointer returned by av_bsf_list_alloc() +*) +// void av_bsf_list_free(AVBSFList * * lst); +procedure av_bsf_list_free(var lst: pAVBSFList); cdecl; external avcodec_dll; +(* + * Append bitstream filter to the list of bitstream filters. + * + * @param lst List to append to + * @param bsf Filter context to be appended + * + * @return >=0 on success, negative AVERROR in case of failure +*) +// int av_bsf_list_append(AVBSFList * lst, AVBSFContext * bsf); +function av_bsf_list_append(lst: pAVBSFList; bsf: pAVBSFContext): int; cdecl; external avcodec_dll; +(* + * Construct new bitstream filter context given it's name and options + * and append it to the list of bitstream filters. 
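+ *
+ * A hedged Pascal sketch of building and using such a chain with this binding
+ * (the filter name is illustrative, pkt is a pAVPacket read from the demuxer
+ * elsewhere, and ProcessFilteredPacket() is a hypothetical consumer):
+ * @code
+ *  var
+ *   lst : pAVBSFList;
+ *   bsf : pAVBSFContext;
+ *   opts: pAVDictionary;
+ *  begin
+ *   opts := nil;
+ *   bsf  := nil;
+ *   lst  := av_bsf_list_alloc();
+ *   // error cleanup via av_bsf_list_free() is omitted for brevity
+ *   if av_bsf_list_append2(lst, 'h264_mp4toannexb', opts) < 0 then Exit;
+ *   if av_bsf_list_finalize(lst, bsf) < 0 then Exit; // on success lst is consumed and set to nil
+ *   // fill bsf^.par_in and bsf^.time_base_in from the input stream here
+ *   if av_bsf_init(bsf) < 0 then Exit;
+ *   if av_bsf_send_packet(bsf, pkt) = 0 then
+ *    while av_bsf_receive_packet(bsf, pkt) = 0 do
+ *     ProcessFilteredPacket(pkt); // unref the packet when done with it
+ *   av_bsf_free(bsf);
+ *  end;
+ * @endcode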
+ * + * @param lst List to append to + * @param bsf_name Name of the bitstream filter + * @param options Options for the bitstream filter, can be set to NULL + * + * @return >=0 on success, negative AVERROR in case of failure +*) +// int av_bsf_list_append2(AVBSFList * lst, const char * bsf_name, AVDictionary * * options); +function av_bsf_list_append2(lst: pAVBSFList; const bsf_name: pAnsiChar; var options: pAVDictionary): int; cdecl; external avcodec_dll; +(* + * Finalize list of bitstream filters. + * + * This function will transform @ref AVBSFList to single @ref AVBSFContext, + * so the whole chain of bitstream filters can be treated as single filter + * freshly allocated by av_bsf_alloc(). + * If the call is successful, @ref AVBSFList structure is freed and lst + * will be set to NULL. In case of failure, caller is responsible for + * freeing the structure by av_bsf_list_free() + * + * @param lst Filter list structure to be transformed + * @param[out] bsf Pointer to be set to newly created @ref AVBSFContext structure + * representing the chain of bitstream filters + * + * @return >=0 on success, negative AVERROR in case of failure +*) +// int av_bsf_list_finalize(AVBSFList * * lst, AVBSFContext * * bsf); +function av_bsf_list_finalize(var lst: pAVBSFList; var bsf: pAVBSFContext): int; cdecl; external avcodec_dll; +(* + * Parse string describing list of bitstream filters and create single + * @ref AVBSFContext describing the whole chain of bitstream filters. + * Resulting @ref AVBSFContext can be treated as any other @ref AVBSFContext freshly + * allocated by av_bsf_alloc(). + * + * @param str String describing chain of bitstream filters in format + * `bsf1[=opt1=val1:opt2=val2][,bsf2]` + * @param[out] bsf Pointer to be set to newly created @ref AVBSFContext structure + * representing the chain of bitstream filters + * + * @return >=0 on success, negative AVERROR in case of failure +*) +// int av_bsf_list_parse_str(const char * str, AVBSFContext * * bsf); +function av_bsf_list_parse_str(const str: pAnsiChar; var bsf: pAVBSFContext): int; cdecl; external avcodec_dll; +(* + * Get null/pass-through bitstream filter. + * + * @param[out] bsf Pointer to be set to new instance of pass-through bitstream filter + * + * @return +*) +// int av_bsf_get_null_filter(AVBSFContext * * bsf); +function av_bsf_get_null_filter(var bsf: pAVBSFContext): int; cdecl; external avcodec_dll; +(* memory *) + +(* + * Same behaviour av_fast_malloc but the buffer has additional + * AV_INPUT_BUFFER_PADDING_SIZE at the end which will always be 0. + * + * In addition the whole buffer will initially and after resizes + * be 0-initialized so that no uninitialized data will ever appear. +*) +// void av_fast_padded_malloc(void * ptr, unsigned int * size, size_t min_size); +procedure av_fast_padded_malloc(ptr: Pointer; var size: unsignedint; min_size: size_t); cdecl; external avcodec_dll; +(* + * Same behaviour av_fast_padded_malloc except that buffer will always + * be 0-initialized after call. +*) +// void av_fast_padded_mallocz(void * ptr, unsigned int * size, size_t min_size); +procedure av_fast_padded_mallocz(ptr: Pointer; var size: unsignedint; min_size: size_t); cdecl; external avcodec_dll; +(* + * Encode extradata length to a buffer. Used by xiph codecs. + * + * @param s buffer to write to; must be at least (v/255+1) bytes long + * @param v size of extradata in bytes + * @return number of bytes written to the buffer. 
+*) +// unsigned int av_xiphlacing(unsigned char * s, unsigned int v); +function av_xiphlacing(s: punsignedchar; v: unsignedint): unsignedint; cdecl; external avcodec_dll; +{$IFDEF FF_API_USER_VISIBLE_AVHWACCEL} +(* + * Register the hardware accelerator hwaccel. + * + * @deprecated This function doesn't do anything. +*) +// attribute_deprecated void av_register_hwaccel(AVHWAccel * hwaccel); +procedure av_register_hwaccel(hwaccel: pAVHWAccel); cdecl; external avcodec_dll; deprecated 'This function doesn''t do anything'; +(* + * If hwaccel is NULL, returns the first registered hardware accelerator, + * if hwaccel is non-NULL, returns the next registered hardware accelerator + * after hwaccel, or NULL if hwaccel is the last one. + * + * @deprecated AVHWaccel structures contain no user-serviceable parts, so + * this function should not be used. +*) +// attribute_deprecated AVHWAccel * av_hwaccel_next(const AVHWAccel * hwaccel); +function av_hwaccel_next(const hwaccel: pAVHWAccel): pAVHWAccel; cdecl; external avcodec_dll; + deprecated 'AVHWaccel structures contain no user-serviceable parts, so this function should not be used'; + +{$ENDIF} +{$IFDEF FF_API_LOCKMGR} + +type + (* + * Lock operation used by lockmgr + * + * @deprecated Deprecated together with av_lockmgr_register(). + *) + AVLockOp = ( // + AV_LOCK_CREATE, // < Create a mutex + AV_LOCK_OBTAIN, // < Lock the mutex + AV_LOCK_RELEASE, // < Unlock the mutex + AV_LOCK_DESTROY // < Free mutex resources + )deprecated 'Deprecated together with av_lockmgr_register()'; + + (* + * Register a user provided lock manager supporting the operations + * specified by AVLockOp. The "mutex" argument to the function points + * to a (void * ) where the lockmgr should store/get a pointer to a user + * allocated mutex. It is NULL upon AV_LOCK_CREATE and equal to the + * value left by the last call for all other ops. If the lock manager is + * unable to perform the op then it should leave the mutex in the same + * state as when it was called and return a non-zero value. However, + * when called with AV_LOCK_DESTROY the mutex will always be assumed to + * have been successfully destroyed. If av_lockmgr_register succeeds + * it will return a non-negative value, if it fails it will return a + * negative value and destroy all mutex and unregister all callbacks. + * av_lockmgr_register is not thread-safe, it must be called from a + * single thread before any calls which make use of locking are used. + * + * @param cb User defined callback. av_lockmgr_register invokes calls + * to this callback and the previously registered callback. + * The callback will be used to create more than one mutex + * each of which must be backed by its own underlying locking + * mechanism (i.e. do not use a single static object to + * implement your lock manager). If cb is set to NULL the + * lockmgr will be unregistered. + * + * @deprecated This function does nothing, and always returns 0. Be sure to + * build with thread support to get basic thread safety. + *) + // attribute_deprecated int av_lockmgr_register(int (* cb)(void **mutex, enum AVLockOp op)); +type + Tav_lockmgr_register_cb_func = function(var mutex: Pointer; op: AVLockOp): int; cdecl; + +function av_lockmgr_register(cb: Tav_lockmgr_register_cb_func): int; cdecl; external avcodec_dll; +{$ENDIF} +(* + * Get the type of the given codec. 
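+ *
+ * For example (Pascal; the AVMEDIA_TYPE_* values come from the AVMediaType
+ * enum in libavutil.pas, and the Setup* helpers are hypothetical):
+ * @code
+ *  case avcodec_get_type(par^.codec_id) of
+ *   AVMEDIA_TYPE_VIDEO: SetupVideoDecoder(par); // par: pAVCodecParameters from the demuxer
+ *   AVMEDIA_TYPE_AUDIO: SetupAudioDecoder(par);
+ *  end;
+ * @endcode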
+*) +// enum AVMediaType avcodec_get_type(enum AVCodecID codec_id); +function avcodec_get_type(codec_id: AVCodecID): AVMediaType; cdecl; external avcodec_dll; +(* + * Get the name of a codec. + * @return a static string identifying the codec; never NULL +*) +// const char * avcodec_get_name(enum AVCodecID id); +function avcodec_get_name(id: AVCodecID): pAnsiChar; cdecl; external avcodec_dll; +(* + * @return a positive value if s is open (i.e. avcodec_open2() was called on it + * with no corresponding avcodec_close()), 0 otherwise. +*) +// int avcodec_is_open(AVCodecContext * s); +function avcodec_is_open(s: pAVCodecContext): int; cdecl; external avcodec_dll; +(* + * @return a non-zero number if codec is an encoder, zero otherwise +*) +// int av_codec_is_encoder(const avcodec * codec); +function av_codec_is_encoder(const codec: pAVCodec): int; cdecl; external avcodec_dll; + +(* + * @return a non-zero number if codec is a decoder, zero otherwise +*) +// int av_codec_is_decoder(const avcodec * codec); +function av_codec_is_decoder(const codec: pAVCodec): int; cdecl; external avcodec_dll; + +(* + * @return descriptor for given codec ID or NULL if no descriptor exists. +*) +// const AVCodecDescriptor * avcodec_descriptor_get(enum AVCodecID id); +function avcodec_descriptor_get(id: pAVCodecID): pAVCodecDescriptor; cdecl; external avcodec_dll; + +(* + * Iterate over all codec descriptors known to libavcodec. + * + * @param prev previous descriptor. NULL to get the first descriptor. + * + * @return next descriptor or NULL after the last descriptor +*) +// const AVCodecDescriptor * avcodec_descriptor_next(const AVCodecDescriptor * prev); +function avcodec_descriptor_next(const prev: pAVCodecDescriptor): pAVCodecDescriptor; cdecl; external avcodec_dll; + +(* + * @return codec descriptor with the given name or NULL if no such descriptor + * exists. +*) +// const AVCodecDescriptor * avcodec_descriptor_get_by_name(const char * name); +function avcodec_descriptor_get_by_name(const name): pAVCodecDescriptor; cdecl; external avcodec_dll; + +(* + * Allocate a CPB properties structure and initialize its fields to default + * values. + * + * @param size if non-NULL, the size of the allocated struct will be written + * here. This is useful for embedding it in side data. + * + * @return the newly allocated struct or NULL on failure +*) +// AVCPBProperties * av_cpb_properties_alloc(size_t * size); +function av_cpb_properties_alloc(size: psize_t): pAVCPBProperties; cdecl; external avcodec_dll; + +{$ENDREGION} +{$REGION 'ac3_parser.h'} +(* * + * Extract the bitstream ID and the frame size from AC-3 data. +*) +// int av_ac3_parse_header(const uint8_t *buf, size_t size, +// uint8_t *bitstream_id, uint16_t *frame_size); +function av_ac3_parse_header(const buf: puint8_t; size: size_t; var bitstream_id: uint8_t; var frame_size: uint16_t): int; cdecl; external avcodec_dll; +{$ENDREGION} +{$REGION 'adts_parser.h'} +(* * + * Extract the number of samples and frames from AAC data. + * @param[in] buf pointer to AAC data buffer + * @param[out] samples Pointer to where number of samples is written + * @param[out] frames Pointer to where number of frames is written + * @return Returns 0 on success, error code on failure. 
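+ *
+ * A minimal Pascal sketch with this binding (buf is assumed to point at a
+ * complete ADTS frame, e.g. one demuxed AAC packet):
+ * @code
+ *  var
+ *   samples: uint32_t;
+ *   frames : uint8_t;
+ *  begin
+ *   samples := 0;
+ *   frames  := 0;
+ *   if av_adts_header_parse(buf, samples, frames) = 0 then
+ *    Writeln('ADTS: ', frames, ' raw data block(s), ', samples, ' samples');
+ *  end;
+ * @endcode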
+*) +// int av_adts_header_parse(const uint8_t *buf, uint32_t *samples, +// uint8_t *frames); +function av_adts_header_parse(const buf: puint8_t; var samples: uint32_t; var frames: uint8_t): int; cdecl; external avcodec_dll; +{$ENDREGION} +{$REGION 'jni.h'} +(* + * Manually set a Java virtual machine which will be used to retrieve the JNI + * environment. Once a Java VM is set it cannot be changed afterwards, meaning + * you can call multiple times av_jni_set_java_vm with the same Java VM pointer + * however it will error out if you try to set a different Java VM. + * + * @param vm Java virtual machine + * @param log_ctx context used for logging, can be NULL + * @return 0 on success, < 0 otherwise +*) +// int av_jni_set_java_vm(void *vm, void *log_ctx); +function av_jni_set_java_vm(vm: Pointer; log_ctx: Pointer): int; cdecl; external avcodec_dll; +(* + * Get the Java virtual machine which has been set with av_jni_set_java_vm. + * + * @param vm Java virtual machine + * @return a pointer to the Java virtual machine +*) +// void *av_jni_get_java_vm(void *log_ctx); +function av_jni_get_java_vm(log_ctx: Pointer): Pointer; cdecl; external avcodec_dll; +{$ENDREGION} +{$REGION 'vorbis_parser.h'} + +type + pAVVorbisParseContext = ^AVVorbisParseContext; + + AVVorbisParseContext = record + end; + + (* * + * Allocate and initialize the Vorbis parser using headers in the extradata. + *) + // AVVorbisParseContext *av_vorbis_parse_init(const uint8_t *extradata, int extradata_size); +function av_vorbis_parse_init(const extradata: puint8_t; extradata_size: int): pAVVorbisParseContext; cdecl; external avcodec_dll; +(* * + * Free the parser and everything associated with it. +*) +// void av_vorbis_parse_free(AVVorbisParseContext **s); +procedure av_vorbis_parse_free(var s: pAVVorbisParseContext); cdecl; external avcodec_dll; + +const + VORBIS_FLAG_HEADER = $00000001; + VORBIS_FLAG_COMMENT = $00000002; + VORBIS_FLAG_SETUP = $00000004; + + (* * + * Get the duration for a Vorbis packet. + * + * If @p flags is @c NULL, + * special frames are considered invalid. + * + * @param s Vorbis parser context + * @param buf buffer containing a Vorbis frame + * @param buf_size size of the buffer + * @param flags flags for special frames + *) + // int av_vorbis_parse_frame_flags(AVVorbisParseContext *s, const uint8_t *buf, int buf_size, int *flags); +function av_vorbis_parse_frame_flags(s: pAVVorbisParseContext; const buf: puint8_t; buf_size: int; var flags: int): int; cdecl; external avcodec_dll; +(* * + * Get the duration for a Vorbis packet. + * + * @param s Vorbis parser context + * @param buf buffer containing a Vorbis frame + * @param buf_size size of the buffer +*) +// int av_vorbis_parse_frame(AVVorbisParseContext *s, const uint8_t *buf, int buf_size); +function av_vorbis_parse_frame(s: pAVVorbisParseContext; const buf: puint8_t; buf_size: int): int; cdecl; external avcodec_dll; + +// void av_vorbis_parse_reset(AVVorbisParseContext *s); +procedure av_vorbis_parse_reset(s: pAVVorbisParseContext); cdecl; external avcodec_dll; +{$ENDREGION} +{$REGION 'vaapi.h'} +{$IFDEF FF_API_STRUCT_VAAPI_CONTEXT} + +type + (* * + * This structure is used to share data between the FFmpeg library and + * the client video application. + * This shall be zero-allocated and available as + * AVCodecContext.hwaccel_context. All user members can be set once + * during initialization or through each AVCodecContext.get_buffer() + * function call. In any case, they must be valid prior to calling + * decoding functions. 
+ * + * Deprecated: use AVCodecContext.hw_frames_ctx instead. + *) + // struct attribute_deprecated + vaapi_context = record + (* * + * Window system dependent data + * + * - encoding: unused + * - decoding: Set by user + *) + display: Pointer; + + (* * + * Configuration ID + * + * - encoding: unused + * - decoding: Set by user + *) + config_id: uint32_t; + + (* * + * Context ID (video decode pipeline) + * + * - encoding: unused + * - decoding: Set by user + *) + context_id: uint32_t; + end deprecated; + +{$ENDIF} +{$ENDREGION} +{$REGION 'avdct.h'} + +type + (* * + * AVDCT context. + * @note function pointers can be NULL if the specific features have been + * disabled at build time. + *) + pAVDCT = ^AVDCT; + + AVDCT = record + av_class: pAVClass; + // void (*idct)(int16_t *block /* align 16 */); + idct: procedure(block: pint16_t (* align 16 *) ); cdecl; + (* * + * IDCT input permutation. + * Several optimized IDCTs need a permutated input (relative to the + * normal order of the reference IDCT). + * This permutation must be performed before the idct_put/add. + * Note, normally this can be merged with the zigzag/alternate scan
+     * An example to avoid confusion:
+     * - (->decode coeffs -> zigzag reorder -> dequant -> reference IDCT -> ...)
+     * - (x -> reference DCT -> reference IDCT -> x)
+     * - (x -> reference DCT -> simple_mmx_perm = idct_permutation
+     * -> simple_idct_mmx -> x)
+     * - (-> decode coeffs -> zigzag reorder -> simple_mmx_perm -> dequant
+     * -> simple_idct_mmx -> ...)
+     *)
+    idct_permutation: array [0 .. 64 - 1] of uint8_t;
+    // void (*fdct)(int16_t *block /* align 16 */);
+    fdct: procedure(block: pint16_t (* align 16 *) ); cdecl;
+    (* *
+     * DCT algorithm.
+     * must use AVOptions to set this field.
+     *)
+    dct_algo: int;
+    (* *
+     * IDCT algorithm.
+     * must use AVOptions to set this field.
+     *)
+    idct_algo: int;
+    // void (*get_pixels)(int16_t *block /* align 16 */,
+    // const uint8_t *pixels /* align 8 */,
+    // ptrdiff_t line_size);
+    get_pixels: procedure(block: pint16_t (* align 16 *); const pixels: puint8_t (* align 8 *); line_size: ptrdiff_t); cdecl;
+    bits_per_sample: int;
+  end;
+
+  (* *
+   * Allocates a AVDCT context.
+   * This needs to be initialized with avcodec_dct_init() after optionally
+   * configuring it with AVOptions.
+   *
+   * To free it use av_free()
+   *)
+  // AVDCT *avcodec_dct_alloc(void);
+function avcodec_dct_alloc(): pAVDCT; cdecl; external avcodec_dll;
+
+// int avcodec_dct_init(AVDCT *);
+function avcodec_dct_init(p: pAVDCT): int; cdecl; external avcodec_dll;
+
+// const AVClass *avcodec_dct_get_class(void);
+function avcodec_dct_get_class(): pAVClass; cdecl; external avcodec_dll;
+{$ENDREGION}
+{$REGION 'mediacodec.h'}
+
+(* *
+ * This structure holds a reference to a android/view/Surface object that will
+ * be used as output by the decoder.
+ *
+*)
+type
+  pAVMediaCodecContext = ^AVMediaCodecContext;
+
+  AVMediaCodecContext = record
+    (* *
+     * android/view/Surface object reference.
+     *)
+    surface: Pointer;
+  end;
+
+  (* *
+   * Allocate and initialize a MediaCodec context.
+   *
+   * When decoding with MediaCodec is finished, the caller must free the
+   * MediaCodec context with av_mediacodec_default_free.
+   *
+   * @return a pointer to a newly allocated AVMediaCodecContext on success, NULL otherwise
+   *)
+  // AVMediaCodecContext *av_mediacodec_alloc_context(void);
+function av_mediacodec_alloc_context(): pAVMediaCodecContext; cdecl; external avcodec_dll;
+(* *
+ * Convenience function that sets up the MediaCodec context.
+ *
+ * @param avctx codec context
+ * @param ctx MediaCodec context to initialize
+ * @param surface reference to an android/view/Surface
+ * @return 0 on success, < 0 otherwise
+*)
+// int av_mediacodec_default_init(AVCodecContext * avctx, AVMediaCodecContext * ctx, void * surface);
+function av_mediacodec_default_init(avctx: pAVCodecContext; ctx: pAVMediaCodecContext; surface: Pointer): int; cdecl; external avcodec_dll;
+(* *
+ * This function must be called to free the MediaCodec context initialized with
+ * av_mediacodec_default_init().
+ *
+ * @param avctx codec context
+*)
+// void av_mediacodec_default_free(AVCodecContext * avctx);
+procedure av_mediacodec_default_free(avctx: pAVCodecContext); cdecl; external avcodec_dll;
+
+(* *
+ * Opaque structure representing a MediaCodec buffer to render.
+*)
+type
+  pAVMediaCodecBuffer = ^AVMediaCodecBuffer;
+
+  AVMediaCodecBuffer = record
+  end;
+
+  (* *
+   * Release a MediaCodec buffer and render it to the surface that is associated
+   * with the decoder.
This function should only be called once on a given
+   * buffer, once released the underlying buffer returns to the codec, thus
+   * subsequent calls to this function will have no effect.
+   *
+   * @param buffer the buffer to render
+   * @param render 1 to release and render the buffer to the surface or 0 to
+   * discard the buffer
+   * @return 0 on success, < 0 otherwise
+   *)
+  // int av_mediacodec_release_buffer(AVMediaCodecBuffer * buffer, int render);
+function av_mediacodec_release_buffer(buffer: pAVMediaCodecBuffer; render: int): int; cdecl; external avcodec_dll;
+
+(*
+ * Release a MediaCodec buffer and render it at the given time to the surface
+ * that is associated with the decoder. The timestamp must be within one second
+ * of the current java/lang/System#nanoTime() (which is implemented using
+ * CLOCK_MONOTONIC on Android). See the Android MediaCodec documentation
+ * of android/media/MediaCodec#releaseOutputBuffer(int,long) for more details.
+ *
+ * @param buffer the buffer to render
+ * @param time timestamp in nanoseconds of when to render the buffer
+ * @return 0 on success, < 0 otherwise
+*)
+// int av_mediacodec_render_buffer_at_time(AVMediaCodecBuffer *buffer, int64_t time);
+function av_mediacodec_render_buffer_at_time(buffer: pAVMediaCodecBuffer; time: int64_t): int; cdecl; external avcodec_dll;
+{$ENDREGION}
+{$REGION 'avfft.h'}
+
+type
+  FFTSample = float;
+  pFFTSample = ^FFTSample;
+
+  pFFTComplex = ^FFTComplex;
+
+  FFTComplex = record
+    re, im: FFTSample;
+  end;
+
+  pFFTContext = ^FFTContext;
+
+  FFTContext = record
+
+  end;
+
+  (* *
+   * Set up a complex FFT.
+   * @param nbits log2 of the length of the input array
+   * @param inverse if 0 perform the forward transform, if 1 perform the inverse
+   *)
+  // FFTContext *av_fft_init(int nbits, int inverse);
+function av_fft_init(nbits: int; inverse: int): pFFTContext; cdecl; external avcodec_dll;
+(* *
+ * Do the permutation needed BEFORE calling ff_fft_calc().
+*)
+// void av_fft_permute(FFTContext *s, FFTComplex *z);
+procedure av_fft_permute(s: pFFTContext; z: pFFTComplex); cdecl; external avcodec_dll;
+(* *
+ * Do a complex FFT with the parameters defined in av_fft_init(). The
+ * input data must be permuted before. No 1.0/sqrt(n) normalization is done.
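+ *
+ * Usage sketch (illustrative only; a forward 1024-point transform, the buffer
+ * contents are assumed to be filled by the caller):
+ *
+ *   var
+ *     ctx: pFFTContext;
+ *     buf: array [0 .. 1023] of FFTComplex; // 1 shl 10 points
+ *   begin
+ *     ctx := av_fft_init(10, 0);            // 0 = forward transform
+ *     if ctx <> nil then
+ *     begin
+ *       av_fft_permute(ctx, @buf[0]);       // reorder input first
+ *       av_fft_calc(ctx, @buf[0]);          // in-place complex FFT
+ *       av_fft_end(ctx);
+ *     end;
+ *   end;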
+*) +// void av_fft_calc(FFTContext *s, FFTComplex *z); +procedure av_fft_calc(s: pFFTContext; z: pFFTComplex); cdecl; external avcodec_dll; + +// void av_fft_end(FFTContext *s); +procedure av_fft_end(s: pFFTContext); cdecl; external avcodec_dll; + +// FFTContext *av_mdct_init(int nbits, int inverse, double scale); +function av_mdct_init(nbits: int; inverse: int; scale: double): pFFTContext; cdecl; external avcodec_dll; + +// void av_imdct_calc(FFTContext *s, FFTSample *output, const FFTSample *input); +procedure av_imdct_calc(s: pFFTContext; output: pFFTSample; const input: pFFTSample); cdecl; external avcodec_dll; + +// void av_imdct_half(FFTContext *s, FFTSample *output, const FFTSample *input); +procedure av_imdct_half(s: pFFTContext; output: pFFTSample; const input: pFFTSample); cdecl; external avcodec_dll; + +// void av_mdct_calc(FFTContext *s, FFTSample *output, const FFTSample *input); +procedure av_mdct_calc(s: pFFTContext; output: pFFTSample; const input: pFFTSample); cdecl; external avcodec_dll; + +// void av_mdct_end(FFTContext *s); +procedure av_mdct_end(s: pFFTContext); cdecl; external avcodec_dll; + +Type + (* Real Discrete Fourier Transform *) + + RDFTransformType = (DFT_R2C, IDFT_C2R, IDFT_R2C, DFT_C2R); + + pRDFTContext = ^RDFTContext; + + RDFTContext = record + + end; + + (* * + * Set up a real FFT. + * @param nbits log2 of the length of the input array + * @param trans the type of transform + *) + // RDFTContext *av_rdft_init(int nbits, enum RDFTransformType trans); +function av_rdft_init(nbits: int; trans: RDFTransformType): pRDFTContext; cdecl; external avcodec_dll; + +// void av_rdft_calc(RDFTContext *s, FFTSample *data); +procedure av_rdft_calc(s: pRDFTContext; data: pFFTSample); cdecl; external avcodec_dll; + +// void av_rdft_end(RDFTContext *s); +procedure av_rdft_end(s: pRDFTContext); cdecl; external avcodec_dll; +(* Discrete Cosine Transform *) + +type + pDCTContext = ^DCTContext; + + DCTContext = record + end; + + DCTTransformType = ( // + DCT_II = 0, DCT_III, DCT_I, DST_I); + + (* * + * Set up DCT. + * + * @param nbits size of the input array: + * (1 << nbits) for DCT-II, DCT-III and DST-I + * (1 << nbits) + 1 for DCT-I + * @param type the type of transform + * + * @note the first element of the input of DST-I is ignored + *) + // DCTContext *av_dct_init(int nbits, enum DCTTransformType type); +function av_dct_init(nbits: int; _type: DCTTransformType): pDCTContext; cdecl; external avcodec_dll; + +// void av_dct_calc(DCTContext *s, FFTSample *data); +procedure av_dct_calc(s: pDCTContext; data: pFFTSample); cdecl; external avcodec_dll; + +// void av_dct_end (DCTContext *s); +procedure av_dct_end(s: pDCTContext); cdecl; external avcodec_dll; +{$ENDREGION} +{$REGION 'dv_profile.h'} + +const + (* minimum number of bytes to read from a DV stream in order to + * determine the profile *) + DV_PROFILE_BYTES = (6 * 80); (* 6 DIF blocks *) + +type + (* + * AVDVProfile is used to express the differences between various + * DV flavors. For now it's primarily used for differentiating + * 525/60 and 625/50, but the plans are to use it for various + * DV specs as well (e.g. SMPTE314M vs. IEC 61834). + *) + Taudio_shuffle = array [0 .. 
8] of uint8_t; + paudio_shuffle = ^Taudio_shuffle; + pAVDVProfile = ^AVDVProfile; + + AVDVProfile = record + dsf: int; (* value of the dsf in the DV header *) + video_stype: int; (* stype for VAUX source pack *) + frame_size: int; (* total size of one frame in bytes *) + difseg_size: int; (* number of DIF segments per DIF channel *) + n_difchan: int; (* number of DIF channels per frame *) + time_base: AVRational; (* 1/framerate *) + ltc_divisor: int; (* FPS from the LTS standpoint *) + height: int; (* picture height in pixels *) + width: int; (* picture width in pixels *) + sar: array [0 .. 1] of AVRational; (* sample aspect ratios for 4:3 and 16:9 *) + pix_fmt: AVPixelFormat; (* picture pixel format *) + bpm: int; (* blocks per macroblock *) + block_sizes: puint8_t; (* AC block sizes, in bits *) + audio_stride: int; (* size of audio_shuffle table *) + audio_min_samples: array [0 .. 2] of int; (* min amount of audio samples *) + (* for 48kHz, 44.1kHz and 32kHz *) + audio_samples_dist: array [0 .. 4] of int; (* how many samples are supposed to be *) + (* in each frame in a 5 frames window *) + audio_shuffle: paudio_shuffle; (* PCM shuffling table *) + end; + + (* * + * Get a DV profile for the provided compressed frame. + * + * @param sys the profile used for the previous frame, may be NULL + * @param frame the compressed data buffer + * @param buf_size size of the buffer in bytes + * @return the DV profile for the supplied data or NULL on failure + *) + // const AVDVProfile *av_dv_frame_profile(const AVDVProfile *sys, + // const uint8_t *frame, unsigned buf_size); +function av_dv_frame_profile(const sys: pAVDVProfile; const frame: puint8_t; buf_size: unsigned): pAVDVProfile; cdecl; external avcodec_dll; +(* * + * Get a DV profile for the provided stream parameters. +*) +// const AVDVProfile *av_dv_codec_profile(int width, int height, enum AVPixelFormat pix_fmt); +function av_dv_codec_profile(width: int; height: int; pix_fmt: AVPixelFormat): pAVDVProfile; cdecl; external avcodec_dll; +(* * + * Get a DV profile for the provided stream parameters. + * The frame rate is used as a best-effort parameter. +*) +// const AVDVProfile *av_dv_codec_profile2(int width, int height, enum AVPixelFormat pix_fmt, AVRational frame_rate); +function av_dv_codec_profile2(width: int; height: int; pix_fmt: AVPixelFormat; frame_rate: AVRational): pAVDVProfile; cdecl; external avcodec_dll; +{$ENDREGION} +{$REGION 'dirac.h'} + +const + (* * + * The spec limits the number of wavelet decompositions to 4 for both + * level 1 (VC-2) and 128 (long-gop default). + * 5 decompositions is the maximum before >16-bit buffers are needed. + * Schroedinger allows this for DD 9,7 and 13,7 wavelets only, limiting + * the others to 4 decompositions (or 3 for the fidelity filter). + * + * We use this instead of MAX_DECOMPOSITIONS to save some memory. 
+ *) + MAX_DWT_LEVELS = 5; + +type + (* * + * Parse code values: + * + * Dirac Specification -> + * 9.6.1 Table 9.1 + * + * VC-2 Specification -> + * 10.4.1 Table 10.1 + *) + + DiracParseCodes = (DIRAC_PCODE_SEQ_HEADER = $00, DIRAC_PCODE_END_SEQ = $10, DIRAC_PCODE_AUX = $20, DIRAC_PCODE_PAD = $30, DIRAC_PCODE_PICTURE_CODED = $08, + DIRAC_PCODE_PICTURE_RAW = $48, DIRAC_PCODE_PICTURE_LOW_DEL = $C8, DIRAC_PCODE_PICTURE_HQ = $E8, DIRAC_PCODE_INTER_NOREF_CO1 = $0A, + DIRAC_PCODE_INTER_NOREF_CO2 = $09, DIRAC_PCODE_INTER_REF_CO1 = $0D, DIRAC_PCODE_INTER_REF_CO2 = $0E, DIRAC_PCODE_INTRA_REF_CO = $0C, + DIRAC_PCODE_INTRA_REF_RAW = $4C, DIRAC_PCODE_INTRA_REF_PICT = $CC, DIRAC_PCODE_MAGIC = $42424344); + + DiracVersionInfo = record + major: int; + minor: int; + end; + + pAVDiracSeqHeader = ^AVDiracSeqHeader; + + AVDiracSeqHeader = record + width: unsigned; + height: unsigned; + chroma_format: uint8_t; + // < 0: 444 1: 422 2: 420 + + interlaced: uint8_t; + top_field_first: uint8_t; + + frame_rate_index: uint8_t; + // < index into dirac_frame_rate[] + aspect_ratio_index: uint8_t; + // < index into dirac_aspect_ratio[] + + clean_width: uint16_t; + clean_height: uint16_t; + clean_left_offset: uint16_t; + clean_right_offset: uint16_t; + + pixel_range_index: uint8_t; + // < index into dirac_pixel_range_presets[] + color_spec_index: uint8_t; + // < index into dirac_color_spec_presets[] + + profile: int; + level: int; + + framerate: AVRational; + sample_aspect_ratio: AVRational; + + pix_fmt: AVPixelFormat; + color_range: AVColorRange; + color_primaries: AVColorPrimaries; + color_trc: AVColorTransferCharacteristic; + colorspace: AVColorSpace; + + version: DiracVersionInfo; + bit_depth: int; + end; + + (* * + * Parse a Dirac sequence header. + * + * @param dsh this function will allocate and fill an AVDiracSeqHeader struct + * and write it into this pointer. The caller must free it with + * av_free(). + * @param buf the data buffer + * @param buf_size the size of the data buffer in bytes + * @param log_ctx if non-NULL, this function will log errors here + * @return 0 on success, a negative AVERROR code on failure + *) + // int av_dirac_parse_sequence_header(AVDiracSeqHeader **dsh, + // const uint8_t *buf, size_t buf_size, + // void *log_ctx); +function av_dirac_parse_sequence_header(var dsh: pAVDiracSeqHeader; const buf: puint8_t; buf_size: size_t; log_ctx: Pointer): int; cdecl; external avcodec_dll; +{$ENDREGION} + +implementation + +end. + \ No newline at end of file diff --git a/ffmpeg/libavdevice.pas b/ffmpeg/libavdevice.pas new file mode 100644 index 0000000..0ebd557 --- /dev/null +++ b/ffmpeg/libavdevice.pas @@ -0,0 +1,522 @@ +unit libavdevice; + +{$IFDEF FPC} +{$MODE Delphi} +{$ENDIF} + +interface + +Uses + ffmpeg_types, libavutil, libavcodec, libavformat; + +{$I ffmpeg.inc} + +(* * + * @defgroup lavd libavdevice + * Special devices muxing/demuxing library. + * + * Libavdevice is a complementary library to @ref libavf "libavformat". It + * provides various "special" platform-specific muxers and demuxers, e.g. for + * grabbing devices, audio capture and playback etc. As a consequence, the + * (de)muxers in libavdevice are of the AVFMT_NOFILE type (they use their own + * I/O functions). The filename passed to avformat_open_input() often does not + * refer to an actually existing file, but has some special device-specific + * meaning - e.g. for xcbgrab it is the display name. + * + * To use libavdevice, simply call avdevice_register_all() to register all + * compiled muxers and demuxers. 
They all use standard libavformat API. + * +*) + +(* * + * Return the LIBAVDEVICE_VERSION_INT constant. +*) +// unsigned avdevice_version(void); +function avdevice_version(): unsigned; cdecl; external avdevice_dll; + +(* * + * Return the libavdevice build-time configuration. +*) +// const char *avdevice_configuration(void); +function avdevice_configuration(): pAnsiChar; cdecl; external avdevice_dll; + +(* * + * Return the libavdevice license. +*) +// const char *avdevice_license(void); +function avdevice_license(): pAnsiChar; cdecl; external avdevice_dll; + +(* * + * Initialize libavdevice and register all the input and output devices. +*) +// void avdevice_register_all(void); +procedure avdevice_register_all(); cdecl; external avdevice_dll; + +(* * + * Audio input devices iterator. + * + * If d is NULL, returns the first registered input audio/video device, + * if d is non-NULL, returns the next registered input audio/video device after d + * or NULL if d is the last one. +*) +// AVInputFormat *av_input_audio_device_next(AVInputFormat *d); +function av_input_audio_device_next(d: pAVInputFormat): pAVInputFormat; cdecl; external avdevice_dll; + +(* * + * Video input devices iterator. + * + * If d is NULL, returns the first registered input audio/video device, + * if d is non-NULL, returns the next registered input audio/video device after d + * or NULL if d is the last one. +*) +// AVInputFormat *av_input_video_device_next(AVInputFormat *d); +function av_input_video_device_next(d: pAVInputFormat): pAVInputFormat; cdecl; external avdevice_dll; + +(* * + * Audio output devices iterator. + * + * If d is NULL, returns the first registered output audio/video device, + * if d is non-NULL, returns the next registered output audio/video device after d + * or NULL if d is the last one. +*) +// AVOutputFormat *av_output_audio_device_next(AVOutputFormat *d); +function av_output_audio_device_next(d: pAVOutputFormat): pAVOutputFormat; cdecl; external avdevice_dll; + +(* * + * Video output devices iterator. + * + * If d is NULL, returns the first registered output audio/video device, + * if d is non-NULL, returns the next registered output audio/video device after d + * or NULL if d is the last one. +*) +// AVOutputFormat *av_output_video_device_next(AVOutputFormat *d); +function av_output_video_device_next(d: pAVOutputFormat): pAVOutputFormat; cdecl; external avdevice_dll; + +type + pAVDeviceRect = ^AVDeviceRect; + + AVDeviceRect = record + x: int; (* *< x coordinate of top left corner *) + y: int; (* *< y coordinate of top left corner *) + width: int; (* *< width *) + height: int; (* *< height *) + end; + + (* * + * Message types used by avdevice_app_to_dev_control_message(). + *) + AVAppToDevMessageType = array [0 .. 3] of AnsiChar; + +const + (* * + * Dummy message. + *) + AV_APP_TO_DEV_NONE: AVAppToDevMessageType = ('N', 'O', 'N', 'E'); + + (* * + * Window size change message. + * + * Message is sent to the device every time the application changes the size + * of the window device renders to. + * Message should also be sent right after window is created. + * + * data: AVDeviceRect: new window size. + *) + AV_APP_TO_DEV_WINDOW_SIZE: AVAppToDevMessageType = ('G', 'E', 'O', 'M'); + + (* * + * Repaint request message. + * + * Message is sent to the device when window has to be repainted. + * + * data: AVDeviceRect: area required to be repainted. + * NULL: whole area is required to be repainted. 
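+   *
+   * Usage sketch (illustrative only; fmt_ctx is a hypothetical device
+   * AVFormatContext, and avdevice_app_to_dev_control_message is declared
+   * further down in this unit):
+   *
+   *   var r: AVDeviceRect;
+   *   ...
+   *   r.x := 0; r.y := 0; r.width := 640; r.height := 480;
+   *   avdevice_app_to_dev_control_message(fmt_ctx, AV_APP_TO_DEV_WINDOW_REPAINT, @r, SizeOf(r));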
+ *) + AV_APP_TO_DEV_WINDOW_REPAINT: AVAppToDevMessageType = ('R', 'E', 'P', 'A'); + + (* * + * Request pause/play. + * + * Application requests pause/unpause playback. + * Mostly usable with devices that have internal buffer. + * By default devices are not paused. + * + * data: NULL + *) + AV_APP_TO_DEV_PAUSE: AVAppToDevMessageType = ('P', 'A', 'U', ' '); + AV_APP_TO_DEV_PLAY: AVAppToDevMessageType = ('P', 'L', 'A', 'Y'); + AV_APP_TO_DEV_TOGGLE_PAUSE: AVAppToDevMessageType = ('P', 'A', 'U', 'T'); + + (* * + * Volume control message. + * + * Set volume level. It may be device-dependent if volume + * is changed per stream or system wide. Per stream volume + * change is expected when possible. + * + * data: double: new volume with range of 0.0 - 1.0. + *) + AV_APP_TO_DEV_SET_VOLUME: AVAppToDevMessageType = ('S', 'V', 'O', 'L'); + + (* * + * Mute control messages. + * + * Change mute state. It may be device-dependent if mute status + * is changed per stream or system wide. Per stream mute status + * change is expected when possible. + * + * data: NULL. + *) + AV_APP_TO_DEV_MUTE: AVAppToDevMessageType = (' ', 'M', 'U', 'T'); + AV_APP_TO_DEV_UNMUTE: AVAppToDevMessageType = ('U', 'M', 'U', 'T'); + AV_APP_TO_DEV_TOGGLE_MUTE: AVAppToDevMessageType = ('T', 'M', 'U', 'T'); + + (* * + * Get volume/mute messages. + * + * Force the device to send AV_DEV_TO_APP_VOLUME_LEVEL_CHANGED or + * AV_DEV_TO_APP_MUTE_STATE_CHANGED command respectively. + * + * data: NULL. + *) + AV_APP_TO_DEV_GET_VOLUME: AVAppToDevMessageType = ('G', 'V', 'O', 'L'); + AV_APP_TO_DEV_GET_MUTE: AVAppToDevMessageType = ('G', 'M', 'U', 'T'); + +type + AVDevToAppMessageType = array [0 .. 3] of AnsiChar; + +const + (* * + * Message types used by avdevice_dev_to_app_control_message(). + *) + (* * + * Dummy message. + *) + AV_DEV_TO_APP_NONE: AVDevToAppMessageType = ('N', 'O', 'N', 'E'); + + (* * + * Create window buffer message. + * + * Device requests to create a window buffer. Exact meaning is device- + * and application-dependent. Message is sent before rendering first + * frame and all one-shot initializations should be done here. + * Application is allowed to ignore preferred window buffer size. + * + * @note: Application is obligated to inform about window buffer size + * with AV_APP_TO_DEV_WINDOW_SIZE message. + * + * data: AVDeviceRect: preferred size of the window buffer. + * NULL: no preferred size of the window buffer. + *) + AV_DEV_TO_APP_CREATE_WINDOW_BUFFER: AVDevToAppMessageType = ('B', 'C', 'R', 'E'); + + (* * + * Prepare window buffer message. + * + * Device requests to prepare a window buffer for rendering. + * Exact meaning is device- and application-dependent. + * Message is sent before rendering of each frame. + * + * data: NULL. + *) + AV_DEV_TO_APP_PREPARE_WINDOW_BUFFER: AVDevToAppMessageType = ('B', 'P', 'R', 'E'); + + (* * + * Display window buffer message. + * + * Device requests to display a window buffer. + * Message is sent when new frame is ready to be displayed. + * Usually buffers need to be swapped in handler of this message. + * + * data: NULL. + *) + AV_DEV_TO_APP_DISPLAY_WINDOW_BUFFER: AVDevToAppMessageType = ('B', 'D', 'I', 'S'); + + (* * + * Destroy window buffer message. + * + * Device requests to destroy a window buffer. + * Message is sent when device is about to be destroyed and window + * buffer is not required anymore. + * + * data: NULL. + *) + AV_DEV_TO_APP_DESTROY_WINDOW_BUFFER: AVDevToAppMessageType = ('B', 'D', 'E', 'S'); + + (* * + * Buffer fullness status messages. 
+ * + * Device signals buffer overflow/underflow. + * + * data: NULL. + *) + AV_DEV_TO_APP_BUFFER_OVERFLOW: AVDevToAppMessageType = ('B', 'O', 'F', 'L'); + AV_DEV_TO_APP_BUFFER_UNDERFLOW: AVDevToAppMessageType = ('B', 'U', 'F', 'L'); + + (* * + * Buffer readable/writable. + * + * Device informs that buffer is readable/writable. + * When possible, device informs how many bytes can be read/write. + * + * @warning Device may not inform when number of bytes than can be read/write changes. + * + * data: int64_t: amount of bytes available to read/write. + * NULL: amount of bytes available to read/write is not known. + *) + AV_DEV_TO_APP_BUFFER_READABLE: AVDevToAppMessageType = ('B', 'R', 'D', ' '); + AV_DEV_TO_APP_BUFFER_WRITABLE: AVDevToAppMessageType = ('B', 'W', 'R', ' '); + + (* * + * Mute state change message. + * + * Device informs that mute state has changed. + * + * data: int: 0 for not muted state, non-zero for muted state. + *) + AV_DEV_TO_APP_MUTE_STATE_CHANGED: AVDevToAppMessageType = ('C', 'M', 'U', 'T'); + + (* * + * Volume level change message. + * + * Device informs that volume level has changed. + * + * data: double: new volume with range of 0.0 - 1.0. + *) + AV_DEV_TO_APP_VOLUME_LEVEL_CHANGED: AVDevToAppMessageType = ('C', 'V', 'O', 'L'); + + (* * + * Send control message from application to device. + * + * @param s device context. + * @param type message type. + * @param data message data. Exact type depends on message type. + * @param data_size size of message data. + * @return >= 0 on success, negative on error. + * AVERROR(ENOSYS) when device doesn't implement handler of the message. + *) + // int avdevice_app_to_dev_control_message(struct AVFormatContext *s, + // enum AVAppToDevMessageType type, + // void *data, size_t data_size); +function avdevice_app_to_dev_control_message(s: pAVFormatContext; _type: AVAppToDevMessageType; data: Pointer; data_size: size_t): int; + cdecl; external avdevice_dll; +(* * + * Send control message from device to application. + * + * @param s device context. + * @param type message type. + * @param data message data. Can be NULL. + * @param data_size size of message data. + * @return >= 0 on success, negative on error. + * AVERROR(ENOSYS) when application doesn't implement handler of the message. +*) +// int avdevice_dev_to_app_control_message(struct AVFormatContext *s, +// enum AVDevToAppMessageType type, +// void *data, size_t data_size); +function avdevice_dev_to_app_control_message(s: pAVFormatContext; _type: AVDevToAppMessageType; data: Pointer; data_size: size_t): int; + cdecl; external avdevice_dll; + +(* * + * Following API allows user to probe device capabilities (supported codecs, + * pixel formats, sample formats, resolutions, channel counts, etc). + * It is build on top op AVOption API. + * Queried capabilities make it possible to set up converters of video or audio + * parameters that fit to the device. + * + * List of capabilities that can be queried: + * - Capabilities valid for both audio and video devices: + * - codec: supported audio/video codecs. + * type: AV_OPT_TYPE_INT (AVCodecID value) + * - Capabilities valid for audio devices: + * - sample_format: supported sample formats. + * type: AV_OPT_TYPE_INT (AVSampleFormat value) + * - sample_rate: supported sample rates. + * type: AV_OPT_TYPE_INT + * - channels: supported number of channels. + * type: AV_OPT_TYPE_INT + * - channel_layout: supported channel layouts. 
+ * type: AV_OPT_TYPE_INT64 + * - Capabilities valid for video devices: + * - pixel_format: supported pixel formats. + * type: AV_OPT_TYPE_INT (AVPixelFormat value) + * - window_size: supported window sizes (describes size of the window size presented to the user). + * type: AV_OPT_TYPE_IMAGE_SIZE + * - frame_size: supported frame sizes (describes size of provided video frames). + * type: AV_OPT_TYPE_IMAGE_SIZE + * - fps: supported fps values + * type: AV_OPT_TYPE_RATIONAL + * + * Value of the capability may be set by user using av_opt_set() function + * and AVDeviceCapabilitiesQuery object. Following queries will + * limit results to the values matching already set capabilities. + * For example, setting a codec may impact number of formats or fps values + * returned during next query. Setting invalid value may limit results to zero. + * + * Example of the usage basing on opengl output device: + * + * @code + * AVFormatContext *oc = NULL; + * AVDeviceCapabilitiesQuery *caps = NULL; + * AVOptionRanges *ranges; + * int ret; + * + * if ((ret = avformat_alloc_output_context2(&oc, NULL, "opengl", NULL)) < 0) + * goto fail; + * if (avdevice_capabilities_create(&caps, oc, NULL) < 0) + * goto fail; + * + * //query codecs + * if (av_opt_query_ranges(&ranges, caps, "codec", AV_OPT_MULTI_COMPONENT_RANGE)) < 0) + * goto fail; + * //pick codec here and set it + * av_opt_set(caps, "codec", AV_CODEC_ID_RAWVIDEO, 0); + * + * //query format + * if (av_opt_query_ranges(&ranges, caps, "pixel_format", AV_OPT_MULTI_COMPONENT_RANGE)) < 0) + * goto fail; + * //pick format here and set it + * av_opt_set(caps, "pixel_format", AV_PIX_FMT_YUV420P, 0); + * + * //query and set more capabilities + * + * fail: + * //clean up code + * avdevice_capabilities_free(&query, oc); + * avformat_free_context(oc); + * @endcode +*) +type + (* * + * Structure describes device capabilities. + * + * It is used by devices in conjunction with av_device_capabilities AVOption table + * to implement capabilities probing API based on AVOption API. Should not be used directly. + *) + pAVDeviceCapabilitiesQuery = ^AVDeviceCapabilitiesQuery; + + AVDeviceCapabilitiesQuery = record + av_class: pAVClass; + device_context: pAVFormatContext; + codec: AVCodecID; + sample_format: AVSampleFormat; + pixel_format: AVPixelFormat; + sample_rate: int; + channels: int; + channel_layout: int64_t; + window_width: int; + window_height: int; + frame_width: int; + frame_height: int; + fps: AVRational; + end; + + (* * + * AVOption table used by devices to implement device capabilities API. Should not be used by a user. + *) + // extern const AVOption av_device_capabilities[]; + + (* * + * Initialize capabilities probing API based on AVOption API. + * + * avdevice_capabilities_free() must be called when query capabilities API is + * not used anymore. + * + * @param[out] caps Device capabilities data. Pointer to a NULL pointer must be passed. + * @param s Context of the device. + * @param device_options An AVDictionary filled with device-private options. + * On return this parameter will be destroyed and replaced with a dict + * containing options that were not found. May be NULL. + * The same options must be passed later to avformat_write_header() for output + * devices or avformat_open_input() for input devices, or at any other place + * that affects device-private options. + * + * @return >= 0 on success, negative otherwise. 
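+   *
+   * Usage sketch (illustrative only; fmt_ctx is a hypothetical device
+   * AVFormatContext):
+   *
+   *   var
+   *     caps: pAVDeviceCapabilitiesQuery;
+   *     opts: pAVDictionary;
+   *   begin
+   *     caps := nil; opts := nil;
+   *     if avdevice_capabilities_create(caps, fmt_ctx, opts) >= 0 then
+   *     begin
+   *       // query ranges / set options on caps here
+   *       avdevice_capabilities_free(caps, fmt_ctx);
+   *     end;
+   *   end;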
+ *) + // int avdevice_capabilities_create(AVDeviceCapabilitiesQuery **caps, AVFormatContext *s, + // AVDictionary **device_options); +function avdevice_capabilities_create(var caps: pAVDeviceCapabilitiesQuery; s: pAVFormatContext; var device_options: pAVDictionary): int; + cdecl; external avdevice_dll; +(* * + * Free resources created by avdevice_capabilities_create() + * + * @param caps Device capabilities data to be freed. + * @param s Context of the device. +*) +// void avdevice_capabilities_free(AVDeviceCapabilitiesQuery **caps, AVFormatContext *s); +procedure avdevice_capabilities_free(var caps: pAVDeviceCapabilitiesQuery; s: pAVFormatContext); cdecl; external avdevice_dll; + +type + (* * + * Structure describes basic parameters of the device. + *) + pAVDeviceInfo = ^AVDeviceInfo; + ppAVDeviceInfo = ^pAVDeviceInfo; + + AVDeviceInfo = record + device_name: pAnsiChar; (* *< device name, format depends on device *) + device_description: pAnsiChar; (* *< human friendly name *) + end; + + (* * + * List of devices. + *) + pAVDeviceInfoList = ^AVDeviceInfoList; + + AVDeviceInfoList = record + devices: ppAVDeviceInfo; (* *< list of autodetected devices *) + nb_devices: int; (* *< number of autodetected devices *) + default_device: int; (* *< index of default device or -1 if no default *) + end; + + (* * + * List devices. + * + * Returns available device names and their parameters. + * + * @note: Some devices may accept system-dependent device names that cannot be + * autodetected. The list returned by this function cannot be assumed to + * be always completed. + * + * @param s device context. + * @param[out] device_list list of autodetected devices. + * @return count of autodetected devices, negative on error. + *) + // int avdevice_list_devices(struct AVFormatContext *s, AVDeviceInfoList **device_list); +function avdevice_list_devices(s: pAVFormatContext; var device_list: pAVDeviceInfoList): int; cdecl; external avdevice_dll; +(* * + * Convenient function to free result of avdevice_list_devices(). + * + * @param devices device list to be freed. +*) +// void avdevice_free_list_devices(AVDeviceInfoList **device_list); +procedure avdevice_free_list_devices(var device_list: pAVDeviceInfoList); cdecl; external avdevice_dll; +(* * + * List devices. + * + * Returns available device names and their parameters. + * These are convinient wrappers for avdevice_list_devices(). + * Device context is allocated and deallocated internally. + * + * @param device device format. May be NULL if device name is set. + * @param device_name device name. May be NULL if device format is set. + * @param device_options An AVDictionary filled with device-private options. May be NULL. + * The same options must be passed later to avformat_write_header() for output + * devices or avformat_open_input() for input devices, or at any other place + * that affects device-private options. + * @param[out] device_list list of autodetected devices + * @return count of autodetected devices, negative on error. + * @note device argument takes precedence over device_name when both are set. 
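+ *
+ * Usage sketch (illustrative only; 'dshow' names the DirectShow input format
+ * on Windows and is just an example, and the {$POINTERMATH ON} setting from
+ * ffmpeg.inc is assumed so the devices pointer can be indexed):
+ *
+ *   var
+ *     list: pAVDeviceInfoList;
+ *     i: int;
+ *   begin
+ *     list := nil;
+ *     if avdevice_list_input_sources(nil, 'dshow', nil, list) >= 0 then
+ *     begin
+ *       for i := 0 to list^.nb_devices - 1 do
+ *         Writeln(list^.devices[i]^.device_name);
+ *       avdevice_free_list_devices(list);
+ *     end;
+ *   end;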
+*) +// int avdevice_list_input_sources(struct AVInputFormat *device, const char *device_name, +// AVDictionary *device_options, AVDeviceInfoList **device_list); +function avdevice_list_input_sources(device: pAVInputFormat; const device_name: pAnsiChar; device_options: pAVDictionary; + var device_list: pAVDeviceInfoList): int; cdecl; external avdevice_dll; + +// int avdevice_list_output_sinks(struct AVOutputFormat *device, const char *device_name, +// AVDictionary *device_options, AVDeviceInfoList **device_list); +function avdevice_list_output_sinks(device: pAVOutputFormat; const device_name: pAnsiChar; device_options: pAVDictionary; + var device_list: pAVDeviceInfoList): int; cdecl; external avdevice_dll; + +implementation + +end. diff --git a/ffmpeg/libavfilter.pas b/ffmpeg/libavfilter.pas new file mode 100644 index 0000000..055e8c8 --- /dev/null +++ b/ffmpeg/libavfilter.pas @@ -0,0 +1,1482 @@ +unit libavfilter; + +{$IFDEF FPC} +{$MODE Delphi} +{$ENDIF} + +interface + +Uses + ffmpeg_types, libavutil, libavformat; + +{$I ffmpeg.inc} +{$REGION 'avfilter.h'} +(* * + * Return the LIBAVFILTER_VERSION_INT constant. +*) +// unsigned avfilter_version(void); +function avfilter_version(): unsigned; cdecl; external avfilter_dll; + +(* * + * Return the libavfilter build-time configuration. +*) +// const char *avfilter_configuration(void); +function avfilter_configuration(): pAnsiChar; cdecl; external avfilter_dll; +(* * + * Return the libavfilter license. +*) +// const char *avfilter_license(void); +function avfilter_license(): pAnsiChar; cdecl; external avfilter_dll; + +const + (* * + * Process multiple parts of the frame concurrently. + *) + AVFILTER_THREAD_SLICE = (1 shl 0); + +const + (* * + * The number of the filter inputs is not determined just by AVFilter.inputs. + * The filter might add additional inputs during initialization depending on the + * options supplied to it. + *) + AVFILTER_FLAG_DYNAMIC_INPUTS = (1 shl 0); + (* * + * The number of the filter outputs is not determined just by AVFilter.outputs. + * The filter might add additional outputs during initialization depending on + * the options supplied to it. + *) + AVFILTER_FLAG_DYNAMIC_OUTPUTS = (1 shl 1); + (* * + * The filter supports multithreading by splitting frames into multiple parts + * and processing them concurrently. + *) + AVFILTER_FLAG_SLICE_THREADS = (1 shl 2); + (* * + * Some filters support a generic "enable" expression option that can be used + * to enable or disable a filter in the timeline. Filters supporting this + * option have this flag set. When the enable expression is false, the default + * no-op filter_frame() function is called in place of the filter_frame() + * callback defined on each input pad, thus the frame is passed unchanged to + * the next filters. + *) + AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC = (1 shl 16); + (* * + * Same as AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC, except that the filter will + * have its filter_frame() callback(s) called as usual even when the enable + * expression is false. The filter will disable filtering within the + * filter_frame() callback(s) itself, for example executing code depending on + * the AVFilterContext->is_disabled value. + *) + AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL = (1 shl 17); + (* * + * Handy mask to test whether the filter supports or no the timeline feature + * (internally or generically). 
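+   *
+   * Usage sketch (illustrative only; filter is a hypothetical pAVFilter):
+   *
+   *   if (filter^.flags and AVFILTER_FLAG_SUPPORT_TIMELINE) <> 0 then
+   *     Writeln(filter^.name, ' supports the timeline/enable option');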
+ *) + AVFILTER_FLAG_SUPPORT_TIMELINE = (AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC or AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL); + +type + pAVFilterContext = ^AVFilterContext; + ppAVFilterContext = ^pAVFilterContext; + pAVFilterLink = ^AVFilterLink; + ppAVFilterLink = ^pAVFilterLink; + + pAVFilterPad = ^AVFilterPad; + + AVFilterPad = record + end; + // pAVFilterFormats=^AVFilterFormats; + // AVFilterFormats = record + // end; + + pAVFilterGraph = ^AVFilterGraph; + + pAVFilterCommand = ^AVFilterCommand; + + AVFilterCommand = record + end; + + (* * + * Filter definition. This defines the pads a filter contains, and all the + * callback functions used to interact with the filter. + *) + pAVFilter = ^AVFilter; + + AVFilter = record + (* * + * Filter name. Must be non-NULL and unique among filters. + *) + name: pAnsiChar; + + (* * + * A description of the filter. May be NULL. + * + * You should use the NULL_IF_CONFIG_SMALL() macro to define it. + *) + description: pAnsiChar; + + (* * + * List of inputs, terminated by a zeroed element. + * + * NULL if there are no (static) inputs. Instances of filters with + * AVFILTER_FLAG_DYNAMIC_INPUTS set may have more inputs than present in + * this list. + *) + inputs: pAVFilterPad; + (* * + * List of outputs, terminated by a zeroed element. + * + * NULL if there are no (static) outputs. Instances of filters with + * AVFILTER_FLAG_DYNAMIC_OUTPUTS set may have more outputs than present in + * this list. + *) + outputs: pAVFilterPad; + + (* * + * A class for the private data, used to declare filter private AVOptions. + * This field is NULL for filters that do not declare any options. + * + * If this field is non-NULL, the first member of the filter private data + * must be a pointer to AVClass, which will be set by libavfilter generic + * code to this class. + *) + priv_class: pAVClass; + + (* * + * A combination of AVFILTER_FLAG_* + *) + flags: int; + + (* **************************************************************** + * All fields below this line are not part of the public API. They + * may not be used outside of libavfilter and can be changed and + * removed at will. + * New public fields should be added right above. + ***************************************************************** + *) + + (* * + * Filter pre-initialization function + * + * This callback will be called immediately after the filter context is + * allocated, to allow allocating and initing sub-objects. + * + * If this callback is not NULL, the uninit callback will be called on + * allocation failure. + * + * @return 0 on success, + * AVERROR code on failure (but the code will be + * dropped and treated as ENOMEM by the calling code) + *) + // int (*preinit)(AVFilterContext *ctx); + preinit: function(ctx: pAVFilterContext): int; cdecl; + (* * + * Filter initialization function. + * + * This callback will be called only once during the filter lifetime, after + * all the options have been set, but before links between filters are + * established and format negotiation is done. + * + * Basic filter initialization should be done here. Filters with dynamic + * inputs and/or outputs should create those inputs/outputs here based on + * provided options. No more changes to this filter's inputs/outputs can be + * done after this callback. + * + * This callback must not assume that the filter links exist or frame + * parameters are known. + * + * @ref AVFilter.uninit "uninit" is guaranteed to be called even if + * initialization fails, so this callback does not have to clean up on + * failure. 
+ * + * @return 0 on success, a negative AVERROR on failure + *) + // int (*init)(AVFilterContext *ctx); + init: function(ctx: pAVFilterContext): int; cdecl; + (* * + * Should be set instead of @ref AVFilter.init "init" by the filters that + * want to pass a dictionary of AVOptions to nested contexts that are + * allocated during init. + * + * On return, the options dict should be freed and replaced with one that + * contains all the options which could not be processed by this filter (or + * with NULL if all the options were processed). + * + * Otherwise the semantics is the same as for @ref AVFilter.init "init". + *) + // int (*init_dict)(AVFilterContext *ctx, AVDictionary **options); + init_dict: function(ctx: pAVFilterContext; var options: pAVDictionary): int; cdecl; + (* * + * Filter uninitialization function. + * + * Called only once right before the filter is freed. Should deallocate any + * memory held by the filter, release any buffer references, etc. It does + * not need to deallocate the AVFilterContext.priv memory itself. + * + * This callback may be called even if @ref AVFilter.init "init" was not + * called or failed, so it must be prepared to handle such a situation. + *) + // void (*uninit)(AVFilterContext *ctx); + uninit: procedure(ctx: pAVFilterContext); cdecl; + (* * + * Query formats supported by the filter on its inputs and outputs. + * + * This callback is called after the filter is initialized (so the inputs + * and outputs are fixed), shortly before the format negotiation. This + * callback may be called more than once. + * + * This callback must set AVFilterLink.out_formats on every input link and + * AVFilterLink.in_formats on every output link to a list of pixel/sample + * formats that the filter supports on that link. For audio links, this + * filter must also set @ref AVFilterLink.in_samplerates "in_samplerates" / + * @ref AVFilterLink.out_samplerates "out_samplerates" and + * @ref AVFilterLink.in_channel_layouts "in_channel_layouts" / + * @ref AVFilterLink.out_channel_layouts "out_channel_layouts" analogously. + * + * This callback may be NULL for filters with one input, in which case + * libavfilter assumes that it supports all input formats and preserves + * them on output. + * + * @return zero on success, a negative value corresponding to an + * AVERROR code otherwise + *) + // int (*query_formats)(AVFilterContext *); + query_formats: function(p: pAVFilterContext): int; cdecl; + priv_size: int; // < size of private data to allocate for the filter + + flags_internal: int; // < Additional flags for avfilter internal use only. + + (* * + * Used by the filter registration system. Must not be touched by any other + * code. + *) + next: pAVFilter; + + (* * + * Make the filter instance process a command. + * + * @param cmd the command to process, for handling simplicity all commands must be alphanumeric only + * @param arg the argument for the command + * @param res a buffer with size res_size where the filter(s) can return a response. This must not change when the command is not supported. + * @param flags if AVFILTER_CMD_FLAG_FAST is set and the command would be + * time consuming then a filter should treat it like an unsupported command + * + * @returns >=0 on success otherwise an error code. 
+ * AVERROR(ENOSYS) on unsupported commands + *) + // int (*process_command)(AVFilterContext *, const char *cmd, const char *arg, char *res, int res_len, int flags); + process_command: function(ctx: pAVFilterContext; const cmd: pAnsiChar; const arg: pAnsiChar; res: pAnsiChar; res_len: int; flags: int): int; cdecl; + (* * + * Filter initialization function, alternative to the init() + * callback. Args contains the user-supplied parameters, opaque is + * used for providing binary data. + *) + // int (*init_opaque)(AVFilterContext *ctx, void *opaque); + init_opaque: function(ctx: pAVFilterContext; opaque: pointer): int; cdecl; + (* * + * Filter activation function. + * + * Called when any processing is needed from the filter, instead of any + * filter_frame and request_frame on pads. + * + * The function must examine inlinks and outlinks and perform a single + * step of processing. If there is nothing to do, the function must do + * nothing and not return an error. If more steps are or may be + * possible, it must use ff_filter_set_ready() to schedule another + * activation. + *) + // int (*activate)(AVFilterContext *ctx); + activate: function(ctx: pAVFilterContext): int; cdecl; + end; + + pAVFilterInternal = ^AVFilterInternal; + + AVFilterInternal = record + end; + (* * An instance of a filter *) + + AVFilterContext = record + av_class: pAVClass; // < needed for av_log() and filters common options + + filter: pAVFilter; // < the AVFilter of which this is an instance + + name: pAnsiChar; // < name of this filter instance + + input_pads: pAVFilterPad; // < array of input pads + inputs: ppAVFilterLink; // < array of pointers to input links + nb_inputs: unsigned; // < number of input pads + + output_pads: pAVFilterPad; // < array of output pads + outputs: ppAVFilterLink; // < array of pointers to output links + nb_outputs: unsigned; // < number of output pads + + priv: pointer; // < private data for use by the filter + + graph: pAVFilterGraph; // < filtergraph this filter belongs to + + (* * + * Type of multithreading being allowed/used. A combination of + * AVFILTER_THREAD_* flags. + * + * May be set by the caller before initializing the filter to forbid some + * or all kinds of multithreading for this filter. The default is allowing + * everything. + * + * When the filter is initialized, this field is combined using bit AND with + * AVFilterGraph.thread_type to get the final mask used for determining + * allowed threading types. I.e. a threading type needs to be set in both + * to be allowed. + * + * After the filter is initialized, libavfilter sets this field to the + * threading type that is actually used (0 for no multithreading). + *) + thread_type: int; + + (* * + * An opaque struct for libavfilter internal use. + *) + internal: pAVFilterInternal; + + command_queue: pAVFilterCommand; + + enable_str: pAnsiChar; // < enable expression string + enable: pointer; // < parsed expression (AVExpr*) + var_values: pdouble; // < variable values for the enable expression + is_disabled: int; // < the enabled state from the last expression evaluation + + (* * + * For filters which will create hardware frames, sets the device the + * filter should create them in. All other filters will ignore this field: + * in particular, a filter which consumes or processes hardware frames will + * instead use the hw_frames_ctx field in AVFilterLink to carry the + * hardware context information. + *) + hw_device_ctx: pAVBufferRef; + + (* * + * Max number of threads allowed in this filter instance. 
+ * If <= 0, its value is ignored. + * Overrides global number of threads set per filter graph. + *) + nb_threads: int; + + (* * + * Ready status of the filter. + * A non-0 value means that the filter needs activating; + * a higher value suggests a more urgent activation. + *) + ready: unsigned; + + (* * + * Sets the number of extra hardware frames which the filter will + * allocate on its output links for use in following filters or by + * the caller. + * + * Some hardware filters require all frames that they will use for + * output to be defined in advance before filtering starts. For such + * filters, any hardware frame pools used for output must therefore be + * of fixed size. The extra frames set here are on top of any number + * that the filter needs internally in order to operate normally. + * + * This field must be set before the graph containing this filter is + * configured. + *) + extra_hw_frames: int; + end; + + pAVFilterGraphInternal = ^AVFilterGraphInternal; + + AVFilterGraphInternal = record + end; + + (* * + * A function pointer passed to the @ref AVFilterGraph.execute callback to be + * executed multiple times, possibly in parallel. + * + * @param ctx the filter context the job belongs to + * @param arg an opaque parameter passed through from @ref + * AVFilterGraph.execute + * @param jobnr the index of the job being executed + * @param nb_jobs the total number of jobs + * + * @return 0 on success, a negative AVERROR on error + *) + // typedef int (avfilter_action_func)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs); + Tavfilter_action_func = function(ctx: pAVFilterContext; arg: pointer; jobnr: int; b_jobs: int): int; cdecl; + + (* * + * A function executing multiple jobs, possibly in parallel. + * + * @param ctx the filter context to which the jobs belong + * @param func the function to be called multiple times + * @param arg the argument to be passed to func + * @param ret a nb_jobs-sized array to be filled with return values from each + * invocation of func + * @param nb_jobs the number of jobs to execute + * + * @return 0 on success, a negative AVERROR on error + *) + // typedef int (avfilter_execute_func)(AVFilterContext *ctx, avfilter_action_func *func, + // void *arg, int *ret, int nb_jobs); + Tavfilter_execute_func = function(ctx: pAVFilterContext; func: Tavfilter_action_func; arg: pointer; var ret: int; nb_jobs: int): int; cdecl; + + AVFilterGraph = record + av_class: pAVClass; + filters: ppAVFilterContext; + nb_filters: unsigned; + + scale_sws_opts: pAnsiChar; // < sws options to use for the auto-inserted scale filters +{$IFDEF FF_API_LAVR_OPTS} + // attribute_deprecated + resample_lavr_opts: pAnsiChar deprecated; // < libavresample options to use for the auto-inserted resample filters +{$ENDIF} + (* * + * Type of multithreading allowed for filters in this graph. A combination + * of AVFILTER_THREAD_* flags. + * + * May be set by the caller at any point, the setting will apply to all + * filters initialized after that. The default is allowing everything. + * + * When a filter in this graph is initialized, this field is combined using + * bit AND with AVFilterContext.thread_type to get the final mask used for + * determining allowed threading types. I.e. a threading type needs to be + * set in both to be allowed. + *) + thread_type: int; + + (* * + * Maximum number of threads used by filters in this graph. May be set by + * the caller before adding any filters to the filtergraph. 
Zero (the + * default) means that the number of threads is determined automatically. + *) + nb_threads: int; + + (* * + * Opaque object for libavfilter internal use. + *) + internal: pAVFilterGraphInternal; + + (* * + * Opaque user data. May be set by the caller to an arbitrary value, e.g. to + * be used from callbacks like @ref AVFilterGraph.execute. + * Libavfilter will not touch this field in any way. + *) + opaque: pointer; + + (* * + * This callback may be set by the caller immediately after allocating the + * graph and before adding any filters to it, to provide a custom + * multithreading implementation. + * + * If set, filters with slice threading capability will call this callback + * to execute multiple jobs in parallel. + * + * If this field is left unset, libavfilter will use its internal + * implementation, which may or may not be multithreaded depending on the + * platform and build options. + *) + execute: Tavfilter_execute_func; + + aresample_swr_opts: pAnsiChar; // < swr options to use for the auto-inserted aresample filters, Access ONLY through AVOptions + + (* * + * Private fields + * + * The following fields are for internal use only. + * Their type, offset, number and semantic can change without notice. + *) + + sink_links: ppAVFilterLink; + sink_links_count: int; + + disable_auto_convert: unsigned; + end; + + (* * + * A link between two filters. This contains pointers to the source and + * destination filters between which this link exists, and the indexes of + * the pads involved. In addition, this link also contains the parameters + * which have been negotiated and agreed upon between the filter, such as + * image dimensions, format, etc. + * + * Applications must not normally access the link structure directly. + * Use the buffersrc and buffersink API instead. + * In the future, access to the header may be reserved for filters + * implementation. + *) + Tinit_state = ( // + AVLINK_UNINIT = 0, // < not started + AVLINK_STARTINIT, // < started, but incomplete + AVLINK_INIT // < complete + ); + + AVFilterLink = record + src: pAVFilterContext; // < source filter + srcpad: pAVFilterPad; // < output pad on the source filter + + dst: pAVFilterContext; // < dest filter + dstpad: pAVFilterPad; // < input pad on the dest filter + + _type: AVMediaType; // < filter media type + + (* These parameters apply only to video *) + w: int; // < agreed upon image width + h: int; // < agreed upon image height + sample_aspect_ratio: AVRational; // < agreed upon sample aspect ratio + (* These parameters apply only to audio *) + channel_layout: uint64_t; // < channel layout of current buffer (see libavutil/channel_layout.h) + sample_rate: int; // < samples per second + + format: int; // < agreed upon media format + + (* * + * Define the time base used by the PTS of the frames/samples + * which will pass through this link. + * During the configuration stage, each filter is supposed to + * change only the output timebase, while the timebase of the + * input link is assumed to be an unchangeable property. + *) + time_base: AVRational; + + (* **************************************************************** + * All fields below this line are not part of the public API. They + * may not be used outside of libavfilter and can be changed and + * removed at will. + * New public fields should be added right above. + ***************************************************************** + *) + (* * + * Lists of formats and channel layouts supported by the input and output + * filters respectively. 
These lists are used for negotiating the format + * to actually be used, which will be loaded into the format and + * channel_layout members, above, when chosen. + * + *) + in_formats: pAVFilterFormats; + out_formats: pAVFilterFormats; + + (* * + * Lists of channel layouts and sample rates used for automatic + * negotiation. + *) + in_samplerates: pAVFilterFormats; + out_samplerates: pAVFilterFormats; + in_channel_layouts: pAVFilterChannelLayouts; + out_channel_layouts: pAVFilterChannelLayouts; + + (* * + * Audio only, the destination filter sets this to a non-zero value to + * request that buffers with the given number of samples should be sent to + * it. AVFilterPad.needs_fifo must also be set on the corresponding input + * pad. + * Last buffer before EOF will be padded with silence. + *) + request_samples: int; + + (* * stage of the initialization of the link properties (dimensions, etc) *) + init_state: Tinit_state; + + (* * + * Graph the filter belongs to. + *) + graph: pAVFilterGraph; + + (* * + * Current timestamp of the link, as defined by the most recent + * frame(s), in link time_base units. + *) + current_pts: int64_t; + + (* * + * Current timestamp of the link, as defined by the most recent + * frame(s), in AV_TIME_BASE units. + *) + current_pts_us: int64_t; + + (* * + * Index in the age array. + *) + age_index: int; + + (* * + * Frame rate of the stream on the link, or 1/0 if unknown or variable; + * if left to 0/0, will be automatically copied from the first input + * of the source filter if it exists. + * + * Sources should set it to the best estimation of the real frame rate. + * If the source frame rate is unknown or variable, set this to 1/0. + * Filters should update it if necessary depending on their function. + * Sinks can use it to set a default output frame rate. + * It is similar to the r_frame_rate field in AVStream. + *) + frame_rate: AVRational; + + (* * + * Buffer partially filled with samples to achieve a fixed/minimum size. + *) + partial_buf: pAVFrame; + + (* * + * Size of the partial buffer to allocate. + * Must be between min_samples and max_samples. + *) + partial_buf_size: int; + + (* * + * Minimum number of samples to filter at once. If filter_frame() is + * called with fewer samples, it will accumulate them in partial_buf. + * This field and the related ones must not be changed after filtering + * has started. + * If 0, all related fields are ignored. + *) + min_samples: int; + + (* * + * Maximum number of samples to filter at once. If filter_frame() is + * called with more samples, it will split them. + *) + max_samples: int; + + (* * + * Number of channels. + *) + channels: int; + + (* * + * Link processing flags. + *) + flags: unsigned; + + (* * + * Number of past frames sent through the link. + *) + frame_count_in, frame_count_out: int64_t; + + (* * + * A pointer to a FFFramePool struct. + *) + frame_pool: pointer; + + (* * + * True if a frame is currently wanted on the output of this filter. + * Set when ff_request_frame() is called by the output, + * cleared when a frame is filtered. + *) + frame_wanted_out: int; + + (* * + * For hwaccel pixel formats, this should be a reference to the + * AVHWFramesContext describing the frames. + *) + hw_frames_ctx: pAVBufferRef; + +{$IFNDEF FF_INTERNAL_FIELDS} + (* * + * Internal structure members. + * The fields below this limit are internal for libavfilter's use + * and must in no way be accessed by applications. + *) + reserved: array [0 .. 
$F000 - 1] of AnsiChar; + +{$ELSE} (* FF_INTERNAL_FIELDS *) + + (* * + * Queue of frames waiting to be filtered. + *) + fifo: FFFrameQueue; + + (* * + * If set, the source filter can not generate a frame as is. + * The goal is to avoid repeatedly calling the request_frame() method on + * the same link. + *) + frame_blocked_in: int; + + (* * + * Link input status. + * If not zero, all attempts of filter_frame will fail with the + * corresponding code. + *) + status_in: int; + + (* * + * Timestamp of the input status change. + *) + status_in_pts: int64_t; + + (* * + * Link output status. + * If not zero, all attempts of request_frame will fail with the + * corresponding code. + *) + status_out: int; + +{$ENDIF} (* FF_INTERNAL_FIELDS *) + end; + + (* * + * Link two filters together. + * + * @param src the source filter + * @param srcpad index of the output pad on the source filter + * @param dst the destination filter + * @param dstpad index of the input pad on the destination filter + * @return zero on success + *) + // int avfilter_link(AVFilterContext *src, unsigned srcpad, AVFilterContext *dst, unsigned dstpad); +function avfilter_link(src: pAVFilterContext; srcpad: unsigned; dst: pAVFilterContext; dstpad: unsigned): int; cdecl; external avfilter_dll; +(* * + * Free the link in *link, and set its pointer to NULL. +*) +// void avfilter_link_free(AVFilterLink **link); +procedure avfilter_link_free(var link: pAVFilterLink); cdecl; external avfilter_dll; + +{$IFDEF FF_API_FILTER_GET_SET} +(* * + * Get the number of channels of a link. + * @deprecated Use av_buffersink_get_channels() +*) +// attribute_deprecated +// int avfilter_link_get_channels(AVFilterLink *link); +function avfilter_link_get_channels(link: pAVFilterLink): int; deprecated 'Use av_buffersink_get_channels()'; cdecl; external avfilter_dll; +{$ENDIF} +(* * + * Set the closed field of a link. + * @deprecated applications are not supposed to mess with links, they should + * close the sinks. +*) +// attribute_deprecated +// void avfilter_link_set_closed(AVFilterLink *link, int closed); +procedure avfilter_link_set_closed(link: pAVFilterLink; closed: int); + deprecated 'applications are not supposed to mess with links, they should close the sinks.'; cdecl; external avfilter_dll; +(* * + * Negotiate the media format, dimensions, etc of all inputs to a filter. + * + * @param filter the filter to negotiate the properties for its inputs + * @return zero on successful negotiation +*) +// int avfilter_config_links(AVFilterContext *filter); +function avfilter_config_links(filter: pAVFilterContext): int; cdecl; external avfilter_dll; + +const + AVFILTER_CMD_FLAG_ONE = 1; + // < Stop once a filter understood the command (for target=all for example), fast filters are favored automatically + AVFILTER_CMD_FLAG_FAST = 2; // < Only execute command when its fast (like a video out that supports contrast adjustment in hw) + + (* * + * Make the filter instance process a command. + * It is recommended to use avfilter_graph_send_command(). + *) + // int avfilter_process_command(AVFilterContext *filter, const char *cmd, const char *arg, char *res, int res_len, int flags); +function avfilter_process_command(filter: pAVFilterContext; const cmd: pAnsiChar; const arg: pAnsiChar; res: pAnsiChar; res_len: int; flags: int): int; cdecl; + external avfilter_dll; +(* * + * Iterate over all registered filters. + * + * @param opaque a pointer where libavfilter will store the iteration state. Must + * point to NULL to start the iteration. 
+ * + * @return the next registered filter or NULL when the iteration is + * finished +*) +// const AVFilter *av_filter_iterate(void **opaque); +function av_filter_iterate(var opaque: pointer): pAVFilter; cdecl; external avfilter_dll; + +{$IFDEF FF_API_NEXT} +(* * Initialize the filter system. Register all builtin filters. *) +// attribute_deprecated +// void avfilter_register_all(void); +procedure avfilter_register_all(); deprecated; cdecl; external avfilter_dll; +(* * + * Register a filter. This is only needed if you plan to use + * avfilter_get_by_name later to lookup the AVFilter structure by name. A + * filter can still be instantiated with avfilter_graph_alloc_filter even if it + * is not registered. + * + * @param filter the filter to register + * @return 0 if the registration was successful, a negative value + * otherwise +*) +// attribute_deprecated +// int avfilter_register(AVFilter *filter); +function avfilter_register(filter: pAVFilter): int; deprecated; cdecl; external avfilter_dll; +(* * + * Iterate over all registered filters. + * @return If prev is non-NULL, next registered filter after prev or NULL if + * prev is the last filter. If prev is NULL, return the first registered filter. +*) +// attribute_deprecated +// const AVFilter *avfilter_next(const AVFilter *prev); +function avfilter_next(const prev: pAVFilter): pAVFilter; deprecated; cdecl; external avfilter_dll; +{$ENDIF} +(* * + * Get a filter definition matching the given name. + * + * @param name the filter name to find + * @return the filter definition, if any matching one is registered. + * NULL if none found. +*) +// const AVFilter *avfilter_get_by_name(const char *name); +function avfilter_get_by_name(const name: pAnsiChar): pAVFilter; cdecl; external avfilter_dll; +(* * + * Initialize a filter with the supplied parameters. + * + * @param ctx uninitialized filter context to initialize + * @param args Options to initialize the filter with. This must be a + * ':'-separated list of options in the 'key=value' form. + * May be NULL if the options have been set directly using the + * AVOptions API or there are no options that need to be set. + * @return 0 on success, a negative AVERROR on failure +*) +// int avfilter_init_str(AVFilterContext *ctx, const char *args); +function avfilter_init_str(ctx: pAVFilterContext; const args: pAnsiChar): int; cdecl; external avfilter_dll; +(* * + * Initialize a filter with the supplied dictionary of options. + * + * @param ctx uninitialized filter context to initialize + * @param options An AVDictionary filled with options for this filter. On + * return this parameter will be destroyed and replaced with + * a dict containing options that were not found. This dictionary + * must be freed by the caller. + * May be NULL, then this function is equivalent to + * avfilter_init_str() with the second parameter set to NULL. + * @return 0 on success, a negative AVERROR on failure + * + * @note This function and avfilter_init_str() do essentially the same thing, + * the difference is in the manner in which the options are passed. It is up to the + * calling code to choose whichever is more preferable. The two functions also + * behave differently when some of the provided options are not declared as + * supported by the filter. In such a case, avfilter_init_str() will fail, but + * this function will leave those extra options in the options AVDictionary and + * continue as usual.
+*) +// int avfilter_init_dict(AVFilterContext *ctx, AVDictionary **options); +function avfilter_init_dict(ctx: pAVFilterContext; var options: pAVDictionary): int; cdecl; external avfilter_dll; +(* * + * Free a filter context. This will also remove the filter from its + * filtergraph's list of filters. + * + * @param filter the filter to free +*) +// void avfilter_free(AVFilterContext *filter); +procedure avfilter_free(filter: pAVFilterContext); cdecl; external avfilter_dll; +(* * + * Insert a filter in the middle of an existing link. + * + * @param link the link into which the filter should be inserted + * @param filt the filter to be inserted + * @param filt_srcpad_idx the input pad on the filter to connect + * @param filt_dstpad_idx the output pad on the filter to connect + * @return zero on success +*) +// int avfilter_insert_filter(AVFilterLink *link, AVFilterContext *filt, +// unsigned filt_srcpad_idx, unsigned filt_dstpad_idx); +function avfilter_insert_filter(link: pAVFilterLink; filt: pAVFilterContext; filt_srcpad_idx: unsigned; filt_dstpad_idx: unsigned): int; cdecl; + external avfilter_dll; +(* * + * @return AVClass for AVFilterContext. + * + * @see av_opt_find(). +*) +// const AVClass *avfilter_get_class(void); +function avfilter_get_class(): pAVClass; cdecl; external avfilter_dll; + +(* * + * Allocate a filter graph. + * + * @return the allocated filter graph on success or NULL. +*) +// AVFilterGraph *avfilter_graph_alloc(void); +function avfilter_graph_alloc(): pAVFilterGraph; cdecl; external avfilter_dll; +(* * + * Create a new filter instance in a filter graph. + * + * @param graph graph in which the new filter will be used + * @param filter the filter to create an instance of + * @param name Name to give to the new instance (will be copied to + * AVFilterContext.name). This may be used by the caller to identify + * different filters, libavfilter itself assigns no semantics to + * this parameter. May be NULL. + * + * @return the context of the newly created filter instance (note that it is + * also retrievable directly through AVFilterGraph.filters or with + * avfilter_graph_get_filter()) on success or NULL on failure. +*) +// AVFilterContext *avfilter_graph_alloc_filter(AVFilterGraph *graph, +// const AVFilter *filter, +// const char *name); +function avfilter_graph_alloc_filter(graph: pAVFilterGraph; const filter: pAVFilter; const name: pAnsiChar): pAVFilterContext; cdecl; external avfilter_dll; +(* * + * Get a filter instance identified by instance name from graph. + * + * @param graph filter graph to search through. + * @param name filter instance name (should be unique in the graph). + * @return the pointer to the found filter instance or NULL if it + * cannot be found. +*) +// AVFilterContext *avfilter_graph_get_filter(AVFilterGraph *graph, const char *name); +function avfilter_graph_get_filter(graph: pAVFilterGraph; const name: pAnsiChar): pAVFilterContext; cdecl; external avfilter_dll; +(* * + * Create and add a filter instance into an existing graph. + * The filter instance is created from the filter filt and inited + * with the parameters args and opaque. + * + * In case of success put in *filt_ctx the pointer to the created + * filter instance, otherwise set *filt_ctx to NULL. 
+ * + * @param name the instance name to give to the created filter instance + * @param graph_ctx the filter graph + * @return a negative AVERROR error code in case of failure, a non + * negative value otherwise +*) +// int avfilter_graph_create_filter(AVFilterContext **filt_ctx, const AVFilter *filt, +// const char *name, const char *args, void *opaque, +// AVFilterGraph *graph_ctx); +function avfilter_graph_create_filter(var filt_ctx: pAVFilterContext; const filt: pAVFilter; const name: pAnsiChar; const args: pAnsiChar; opaque: pointer; + graph_ctx: pAVFilterGraph): int; cdecl; external avfilter_dll; +(* * + * Enable or disable automatic format conversion inside the graph. + * + * Note that format conversion can still happen inside explicitly inserted + * scale and aresample filters. + * + * @param flags any of the AVFILTER_AUTO_CONVERT_* constants +*) +// void avfilter_graph_set_auto_convert(AVFilterGraph *graph, unsigned flags); +procedure avfilter_graph_set_auto_convert(graph: pAVFilterGraph; flags: unsigned); cdecl; external avfilter_dll; + +const + + AVFILTER_AUTO_CONVERT_ALL = 0; (* *< all automatic conversions enabled *) + AVFILTER_AUTO_CONVERT_NONE = -1; (* *< all automatic conversions disabled *) + + (* * + * Check validity and configure all the links and formats in the graph. + * + * @param graphctx the filter graph + * @param log_ctx context used for logging + * @return >= 0 in case of success, a negative AVERROR code otherwise + *) + // int avfilter_graph_config(AVFilterGraph *graphctx, void *log_ctx); +function avfilter_graph_config(graphctx: pAVFilterGraph; log_ctx: pointer): int; cdecl; external avfilter_dll; +(* * + * Free a graph, destroy its links, and set *graph to NULL. + * If *graph is NULL, do nothing. +*) +// void avfilter_graph_free(AVFilterGraph **graph); +procedure avfilter_graph_free(var graph: pAVFilterGraph); cdecl; external avfilter_dll; + +type + (* * + * A linked-list of the inputs/outputs of the filter chain. + * + * This is mainly useful for avfilter_graph_parse() / avfilter_graph_parse2(), + * where it is used to communicate open (unlinked) inputs and outputs from and + * to the caller. + * This struct specifies, per each not connected pad contained in the graph, the + * filter context and the pad index required for establishing a link. + *) + pAVFilterInOut = ^AVFilterInOut; + + AVFilterInOut = record + (* * unique name for this input/output in the list *) + name: pAnsiChar; + + (* * filter context associated to this input/output *) + filter_ctx: pAVFilterContext; + + (* * index of the filt_ctx pad to use for linking *) + pad_idx: int; + + (* * next input/input in the list, NULL if this is the last *) + next: pAVFilterInOut; + end; + + (* * + * Allocate a single AVFilterInOut entry. + * Must be freed with avfilter_inout_free(). + * @return allocated AVFilterInOut on success, NULL on failure. + *) + // AVFilterInOut *avfilter_inout_alloc(void); +function avfilter_inout_alloc(): pAVFilterInOut; cdecl; external avfilter_dll; +(* * + * Free the supplied list of AVFilterInOut and set *inout to NULL. + * If *inout is NULL, do nothing. +*) +// void avfilter_inout_free(AVFilterInOut **inout); +procedure avfilter_inout_free(var inout: pAVFilterInOut); cdecl; external avfilter_dll; +(* * + * Add a graph described by a string to a graph. + * + * @note The caller must provide the lists of inputs and outputs, + * which therefore must be known before calling the function. 
+ * + * @note The inputs parameter describes inputs of the already existing + * part of the graph; i.e. from the point of view of the newly created + * part, they are outputs. Similarly the outputs parameter describes + * outputs of the already existing filters, which are provided as + * inputs to the parsed filters. + * + * @param graph the filter graph where to link the parsed graph context + * @param filters string to be parsed + * @param inputs linked list to the inputs of the graph + * @param outputs linked list to the outputs of the graph + * @return zero on success, a negative AVERROR code on error +*) +// int avfilter_graph_parse(AVFilterGraph *graph, const char *filters, +// AVFilterInOut *inputs, AVFilterInOut *outputs, +// void *log_ctx); +function avfilter_graph_parse(graph: pAVFilterGraph; const filters: pAnsiChar; inputs: pAVFilterInOut; outputs: pAVFilterInOut; log_ctx: pointer): int; cdecl; + external avfilter_dll; +(* * + * Add a graph described by a string to a graph. + * + * In the graph filters description, if the input label of the first + * filter is not specified, "in" is assumed; if the output label of + * the last filter is not specified, "out" is assumed. + * + * @param graph the filter graph where to link the parsed graph context + * @param filters string to be parsed + * @param inputs pointer to a linked list to the inputs of the graph, may be NULL. + * If non-NULL, *inputs is updated to contain the list of open inputs + * after the parsing, should be freed with avfilter_inout_free(). + * @param outputs pointer to a linked list to the outputs of the graph, may be NULL. + * If non-NULL, *outputs is updated to contain the list of open outputs + * after the parsing, should be freed with avfilter_inout_free(). + * @return non negative on success, a negative AVERROR code on error +*) +// int avfilter_graph_parse_ptr(AVFilterGraph *graph, const char *filters, +// AVFilterInOut **inputs, AVFilterInOut **outputs, +// void *log_ctx); +function avfilter_graph_parse_ptr(graph: pAVFilterGraph; const filters: pAnsiChar; var inputs: pAVFilterInOut; var outputs: pAVFilterInOut; log_ctx: pointer) + : int; cdecl; external avfilter_dll; +(* * + * Add a graph described by a string to a graph. + * + * @param[in] graph the filter graph where to link the parsed graph context + * @param[in] filters string to be parsed + * @param[out] inputs a linked list of all free (unlinked) inputs of the + * parsed graph will be returned here. It is to be freed + * by the caller using avfilter_inout_free(). + * @param[out] outputs a linked list of all free (unlinked) outputs of the + * parsed graph will be returned here. It is to be freed by the + * caller using avfilter_inout_free(). + * @return zero on success, a negative AVERROR code on error + * + * @note This function returns the inputs and outputs that are left + * unlinked after parsing the graph and the caller then deals with + * them. + * @note This function makes no reference whatsoever to already + * existing parts of the graph and the inputs parameter will on return + * contain inputs of the newly parsed part of the graph. Analogously + * the outputs parameter will contain outputs of the newly created + * filters. 
+*) +// int avfilter_graph_parse2(AVFilterGraph *graph, const char *filters, +// AVFilterInOut **inputs, +// AVFilterInOut **outputs); +function avfilter_graph_parse2(graph: pAVFilterGraph; const filters: pAnsiChar; var inputs: pAVFilterInOut; var outputs: pAVFilterInOut): int; cdecl; + external avfilter_dll; +(* * + * Send a command to one or more filter instances. + * + * @param graph the filter graph + * @param target the filter(s) to which the command should be sent + * "all" sends to all filters + * otherwise it can be a filter or filter instance name + * which will send the command to all matching filters. + * @param cmd the command to send, for handling simplicity all commands must be alphanumeric only + * @param arg the argument for the command + * @param res a buffer with size res_size where the filter(s) can return a response. + * + * @returns >=0 on success otherwise an error code. + * AVERROR(ENOSYS) on unsupported commands +*) +// int avfilter_graph_send_command(AVFilterGraph *graph, const char *target, const char *cmd, const char *arg, char *res, int res_len, int flags); +function avfilter_graph_send_command(graph: pAVFilterGraph; const target: pAnsiChar; const cmd: pAnsiChar; const arg: pAnsiChar; res: pAnsiChar; res_len: int; + flags: int): int; cdecl; external avfilter_dll; +(* * + * Queue a command for one or more filter instances. + * + * @param graph the filter graph + * @param target the filter(s) to which the command should be sent + * "all" sends to all filters + * otherwise it can be a filter or filter instance name + * which will send the command to all matching filters. + * @param cmd the command to sent, for handling simplicity all commands must be alphanumeric only + * @param arg the argument for the command + * @param ts time at which the command should be sent to the filter + * + * @note As this executes commands after this function returns, no return code + * from the filter is provided, also AVFILTER_CMD_FLAG_ONE is not supported. +*) +// int avfilter_graph_queue_command(AVFilterGraph *graph, const char *target, const char *cmd, const char *arg, int flags, double ts); +function avfilter_graph_queue_command(graph: pAVFilterGraph; const target: pAnsiChar; const cmd: pAnsiChar; const arg: pAnsiChar; flags: int; ts: double): int; + cdecl; external avfilter_dll; +(* * + * Dump a graph into a human-readable string representation. + * + * @param graph the graph to dump + * @param options formatting options; currently ignored + * @return a string, or NULL in case of memory allocation failure; + * the string must be freed using av_free +*) +// char *avfilter_graph_dump(AVFilterGraph *graph, const char *options); +function avfilter_graph_dump(graph: pAVFilterGraph; const options: pAnsiChar): pAnsiChar; cdecl; external avfilter_dll; +(* * + * Request a frame on the oldest sink link. + * + * If the request returns AVERROR_EOF, try the next. + * + * Note that this function is not meant to be the sole scheduling mechanism + * of a filtergraph, only a convenience function to help drain a filtergraph + * in a balanced way under normal circumstances. + * + * Also note that AVERROR_EOF does not mean that frames did not arrive on + * some of the sinks during the process. + * When there are multiple sink links, in case the requested link + * returns an EOF, this may cause a filter to flush pending frames + * which are sent to another sink link, although unrequested. 
+ * + * @return the return value of ff_request_frame(), + * or AVERROR_EOF if all links returned AVERROR_EOF +*) +// int avfilter_graph_request_oldest(AVFilterGraph *graph); +function avfilter_graph_request_oldest(graph: pAVFilterGraph): int; cdecl; external avfilter_dll; +{$ENDREGION} +{$REGION 'buffersink.h'} +(* * + * Get a frame with filtered data from sink and put it in frame. + * + * @param ctx pointer to a buffersink or abuffersink filter context. + * @param frame pointer to an allocated frame that will be filled with data. + * The data must be freed using av_frame_unref() / av_frame_free() + * @param flags a combination of AV_BUFFERSINK_FLAG_* flags + * + * @return >= 0 for success, a negative AVERROR code for failure. +*) +// int av_buffersink_get_frame_flags(AVFilterContext *ctx, AVFrame *frame, int flags); +function av_buffersink_get_frame_flags(ctx: pAVFilterContext; frame: pAVFrame; flags: int): int; cdecl; external avfilter_dll; + +const + (* * + * Tell av_buffersink_get_buffer_ref() to read video/samples buffer + * reference, but not remove it from the buffer. This is useful if you + * need only to read a video/samples buffer, without fetching it. + *) + AV_BUFFERSINK_FLAG_PEEK = 1; + + (* * + * Tell av_buffersink_get_buffer_ref() not to request a frame from its input. + * If a frame is already buffered, it is read (and removed from the buffer), + * but if no frame is present, return AVERROR(EAGAIN). + *) + AV_BUFFERSINK_FLAG_NO_REQUEST = 2; + +type + (* * + * Struct to use for initializing a buffersink context. + *) + pAVBufferSinkParams = ^AVBufferSinkParams; + + AVBufferSinkParams = record + pixel_fmts: pAVPixelFormat; + /// < list of allowed pixel formats, terminated by AV_PIX_FMT_NONE + end; + + (* * + * Create an AVBufferSinkParams structure. + * + * Must be freed with av_free(). + *) + // AVBufferSinkParams *av_buffersink_params_alloc(void); +function av_buffersink_params_alloc(): pAVBufferSinkParams; cdecl; external avfilter_dll; + +type + (* * + * Struct to use for initializing an abuffersink context. + *) + pAVABufferSinkParams = ^AVABufferSinkParams; + + AVABufferSinkParams = record + sample_fmts: pAVSampleFormat; + /// < list of allowed sample formats, terminated by AV_SAMPLE_FMT_NONE + channel_layouts: pint64_t; + /// < list of allowed channel layouts, terminated by -1 + channel_counts: pint; + /// < list of allowed channel counts, terminated by -1 + all_channel_counts: int; + /// < if not 0, accept any channel count or layout + sample_rates: pint; + /// < list of allowed sample rates, terminated by -1 + end; + + (* * + * Create an AVABufferSinkParams structure. + * + * Must be freed with av_free(). + *) + // AVABufferSinkParams *av_abuffersink_params_alloc(void); +function av_abuffersink_params_alloc(): pAVABufferSinkParams; cdecl; external avfilter_dll; +(* * + * Set the frame size for an audio buffer sink. + * + * All calls to av_buffersink_get_buffer_ref will return a buffer with + * exactly the specified number of samples, or AVERROR(EAGAIN) if there is + * not enough. The last buffer at EOF will be padded with 0.
+*) +// void av_buffersink_set_frame_size(AVFilterContext *ctx, unsigned frame_size); +procedure av_buffersink_set_frame_size(ctx: pAVFilterContext; frame_size: unsigned); cdecl; external avfilter_dll; +(* * + * @defgroup lavfi_buffersink_accessors Buffer sink accessors + * Get the properties of the stream + * @{ +*) + +// enum AVMediaType av_buffersink_get_type (const AVFilterContext *ctx); +function av_buffersink_get_type(const ctx: pAVFilterContext): AVMediaType; cdecl; external avfilter_dll; + +// AVRational av_buffersink_get_time_base (const AVFilterContext *ctx); +function av_buffersink_get_time_base(const ctx: pAVFilterContext): AVRational; cdecl; external avfilter_dll; + +// int av_buffersink_get_format (const AVFilterContext *ctx); +function av_buffersink_get_format(const ctx: pAVFilterContext): int; cdecl; external avfilter_dll; +// AVRational av_buffersink_get_frame_rate (const AVFilterContext *ctx); +function av_buffersink_get_frame_rate(const ctx: pAVFilterContext): AVRational; cdecl; external avfilter_dll; + +// int av_buffersink_get_w (const AVFilterContext *ctx); +function av_buffersink_get_w(const ctx: pAVFilterContext): int; cdecl; external avfilter_dll; + +// int av_buffersink_get_h (const AVFilterContext *ctx); +function av_buffersink_get_h(const ctx: pAVFilterContext): int; cdecl; external avfilter_dll; + +// AVRational av_buffersink_get_sample_aspect_ratio (const AVFilterContext *ctx); +function av_buffersink_get_sample_aspect_ratio(const ctx: pAVFilterContext): AVRational; cdecl; external avfilter_dll; + +// int av_buffersink_get_channels (const AVFilterContext *ctx); +function av_buffersink_get_channels(const ctx: pAVFilterContext): int; cdecl; external avfilter_dll; + +// uint64_t av_buffersink_get_channel_layout (const AVFilterContext *ctx); +function av_buffersink_get_channel_layout(const ctx: pAVFilterContext): uint64_t; cdecl; external avfilter_dll; + +// int av_buffersink_get_sample_rate (const AVFilterContext *ctx); +function av_buffersink_get_sample_rate(const ctx: pAVFilterContext): int; cdecl; external avfilter_dll; + +// AVBufferRef * av_buffersink_get_hw_frames_ctx (const AVFilterContext *ctx); +function av_buffersink_get_hw_frames_ctx(const ctx: pAVFilterContext): pAVBufferRef; cdecl; external avfilter_dll; +(* * @} *) + +(* * + * Get a frame with filtered data from sink and put it in frame. + * + * @param ctx pointer to a context of a buffersink or abuffersink AVFilter. + * @param frame pointer to an allocated frame that will be filled with data. + * The data must be freed using av_frame_unref() / av_frame_free() + * + * @return + * - >= 0 if a frame was successfully returned. + * - AVERROR(EAGAIN) if no frames are available at this point; more + * input frames must be added to the filtergraph to get more output. + * - AVERROR_EOF if there will be no more output frames on this sink. + * - A different negative AVERROR code in other failure cases. +*) +// int av_buffersink_get_frame(AVFilterContext *ctx, AVFrame *frame); +function av_buffersink_get_frame(ctx: pAVFilterContext; frame: pAVFrame): int; cdecl; external avfilter_dll; +(* * + * Same as av_buffersink_get_frame(), but with the ability to specify the number + * of samples read. This function is less efficient than + * av_buffersink_get_frame(), because it copies the data around. + * + * @param ctx pointer to a context of the abuffersink AVFilter. + * @param frame pointer to an allocated frame that will be filled with data. 
+ * The data must be freed using av_frame_unref() / av_frame_free() + * frame will contain exactly nb_samples audio samples, except at + * the end of stream, when it can contain less than nb_samples. + * + * @return The return codes have the same meaning as for + * av_buffersink_get_samples(). + * + * @warning do not mix this function with av_buffersink_get_frame(). Use only one or + * the other with a single sink, not both. +*) +// int av_buffersink_get_samples(AVFilterContext *ctx, AVFrame *frame, int nb_samples); +function av_buffersink_get_samples(ctx: pAVFilterContext; frame: pAVFrame; nb_samples: int): int; cdecl; external avfilter_dll; +{$ENDREGION} +{$REGION 'buffersrc.h'} + +const + + (* * + * Do not check for format changes. + *) + AV_BUFFERSRC_FLAG_NO_CHECK_FORMAT = 1; + + (* * + * Immediately push the frame to the output. + *) + AV_BUFFERSRC_FLAG_PUSH = 4; + + (* * + * Keep a reference to the frame. + * If the frame is reference-counted, create a new reference; otherwise + * copy the frame data. + *) + AV_BUFFERSRC_FLAG_KEEP_REF = 8; + + (* * + * Get the number of failed requests. + * + * A failed request is when the request_frame method is called while no + * frame is present in the buffer. + * The number is reset when a frame is added. + *) + // unsigned av_buffersrc_get_nb_failed_requests(AVFilterContext *buffer_src); +function av_buffersrc_get_nb_failed_requests(buffer_src: pAVFilterContext): unsigned; cdecl; external avfilter_dll; + +type + (* * + * This structure contains the parameters describing the frames that will be + * passed to this filter. + * + * It should be allocated with av_buffersrc_parameters_alloc() and freed with + * av_free(). All the allocated fields in it remain owned by the caller. + *) + pAVBufferSrcParameters = ^AVBufferSrcParameters; + + AVBufferSrcParameters = record + (* * + * video: the pixel format, value corresponds to enum AVPixelFormat + * audio: the sample format, value corresponds to enum AVSampleFormat + *) + format: int; + (* * + * The timebase to be used for the timestamps on the input frames. + *) + time_base: AVRational; + + (* * + * Video only, the display dimensions of the input frames. + *) + width, height: int; + + (* * + * Video only, the sample (pixel) aspect ratio. + *) + sample_aspect_ratio: AVRational; + + (* * + * Video only, the frame rate of the input video. This field must only be + * set to a non-zero value if input stream has a known constant framerate + * and should be left at its initial value if the framerate is variable or + * unknown. + *) + frame_rate: AVRational; + + (* * + * Video with a hwaccel pixel format only. This should be a reference to an + * AVHWFramesContext instance describing the input frames. + *) + hw_frames_ctx: pAVBufferRef; + + (* * + * Audio only, the audio sampling rate in samples per second. + *) + sample_rate: int; + + (* * + * Audio only, the audio channel layout + *) + channel_layout: uint64_t; + end; + + (* * + * Allocate a new AVBufferSrcParameters instance. It should be freed by the + * caller with av_free(). + *) + // AVBufferSrcParameters *av_buffersrc_parameters_alloc(void); +function av_buffersrc_parameters_alloc(): pAVBufferSrcParameters; cdecl; external avfilter_dll; +(* * + * Initialize the buffersrc or abuffersrc filter with the provided parameters. + * This function may be called multiple times, the later calls override the + * previous ones. Some of the parameters may also be set through AVOptions, then + * whatever method is used last takes precedence.
+ * + * @param ctx an instance of the buffersrc or abuffersrc filter + * @param param the stream parameters. The frames later passed to this filter + * must conform to those parameters. All the allocated fields in + * param remain owned by the caller, libavfilter will make internal + * copies or references when necessary. + * @return 0 on success, a negative AVERROR code on failure. +*) +// int av_buffersrc_parameters_set(AVFilterContext *ctx, AVBufferSrcParameters *param); +function av_buffersrc_parameters_set(ctx: pAVFilterContext; param: pAVBufferSrcParameters): int; cdecl; external avfilter_dll; +(* * + * Add a frame to the buffer source. + * + * @param ctx an instance of the buffersrc filter + * @param frame frame to be added. If the frame is reference counted, this + * function will make a new reference to it. Otherwise the frame data will be + * copied. + * + * @return 0 on success, a negative AVERROR on error + * + * This function is equivalent to av_buffersrc_add_frame_flags() with the + * AV_BUFFERSRC_FLAG_KEEP_REF flag. +*) +// av_warn_unused_result +// int av_buffersrc_write_frame(AVFilterContext *ctx, const AVFrame *frame); +function av_buffersrc_write_frame(ctx: pAVFilterContext; const frame: pAVFrame): int; cdecl; external avfilter_dll; +(* * + * Add a frame to the buffer source. + * + * @param ctx an instance of the buffersrc filter + * @param frame frame to be added. If the frame is reference counted, this + * function will take ownership of the reference(s) and reset the frame. + * Otherwise the frame data will be copied. If this function returns an error, + * the input frame is not touched. + * + * @return 0 on success, a negative AVERROR on error. + * + * @note the difference between this function and av_buffersrc_write_frame() is + * that av_buffersrc_write_frame() creates a new reference to the input frame, + * while this function takes ownership of the reference passed to it. + * + * This function is equivalent to av_buffersrc_add_frame_flags() without the + * AV_BUFFERSRC_FLAG_KEEP_REF flag. +*) +// av_warn_unused_result +// int av_buffersrc_add_frame(AVFilterContext *ctx, AVFrame *frame); +function av_buffersrc_add_frame(ctx: pAVFilterContext; frame: pAVFrame): int; cdecl; external avfilter_dll; +(* * + * Add a frame to the buffer source. + * + * By default, if the frame is reference-counted, this function will take + * ownership of the reference(s) and reset the frame. This can be controlled + * using the flags. + * + * If this function returns an error, the input frame is not touched. + * + * @param buffer_src pointer to a buffer source context + * @param frame a frame, or NULL to mark EOF + * @param flags a combination of AV_BUFFERSRC_FLAG_* + * @return >= 0 in case of success, a negative AVERROR code + * in case of failure +*) +// av_warn_unused_result +// int av_buffersrc_add_frame_flags(AVFilterContext *buffer_src, +// AVFrame *frame, int flags); +function av_buffersrc_add_frame_flags(buffer_src: pAVFilterContext; frame: pAVFrame; flags: int): int; cdecl; external avfilter_dll; +(* * + * Close the buffer source after EOF. + * + * This is similar to passing NULL to av_buffersrc_add_frame_flags() + * except it takes the timestamp of the EOF, i.e. the timestamp of the end + * of the last frame. +*) +// int av_buffersrc_close(AVFilterContext *ctx, int64_t pts, unsigned flags); +function av_buffersrc_close(ctx: pAVFilterContext; pts: int64_t; flags: unsigned): int; cdecl; external avfilter_dll; +{$ENDREGION} + +implementation + +end. 
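The unit above only declares the libavfilter entry points; the patch itself contains no example of driving them. The sketch below is illustrative and not part of the patch: it wires the declarations above into a minimal "abuffer -> volume -> abuffersink" audio graph. The procedure name HalveVolumeOnce is hypothetical; av_frame_alloc, av_frame_unref and av_frame_free are assumed to be exported by the companion libavutil unit of this patch; the option strings follow FFmpeg's documented abuffer/volume options; error handling is deliberately minimal.

(* Usage sketch only. Assumes ffmpeg_types, libavutil and libavfilter from this
   patch are in the uses clause. *)
procedure HalveVolumeOnce(in_frame: pAVFrame);
const
  // Input description for the 'abuffer' source: stereo float-planar at 44.1 kHz.
  SRC_ARGS: pAnsiChar = 'time_base=1/44100:sample_rate=44100:sample_fmt=fltp:channel_layout=stereo';
var
  graph: pAVFilterGraph;
  src, vol, sink: pAVFilterContext;
  out_frame: pAVFrame;
  ret: int;
begin
  graph := avfilter_graph_alloc();
  if graph = nil then Exit;
  src := nil;
  vol := nil;
  sink := nil;
  // Instantiate source, processing filter and sink inside the graph.
  if (avfilter_graph_create_filter(src, avfilter_get_by_name('abuffer'), 'in', SRC_ARGS, nil, graph) < 0) or
     (avfilter_graph_create_filter(vol, avfilter_get_by_name('volume'), 'gain', 'volume=0.5', nil, graph) < 0) or
     (avfilter_graph_create_filter(sink, avfilter_get_by_name('abuffersink'), 'out', nil, nil, graph) < 0) then
  begin
    avfilter_graph_free(graph);
    Exit;
  end;
  // Chain the filters and negotiate formats across the links.
  if (avfilter_link(src, 0, vol, 0) < 0) or
     (avfilter_link(vol, 0, sink, 0) < 0) or
     (avfilter_graph_config(graph, nil) < 0) then
  begin
    avfilter_graph_free(graph);
    Exit;
  end;
  // Feed one frame, then drain everything the sink currently has.
  if av_buffersrc_add_frame_flags(src, in_frame, AV_BUFFERSRC_FLAG_KEEP_REF) >= 0 then
  begin
    out_frame := av_frame_alloc(); // assumed helper from the libavutil unit
    repeat
      ret := av_buffersink_get_frame(sink, out_frame);
      if ret >= 0 then
      begin
        // ...consume out_frame here (e.g. queue it for audio output)...
        av_frame_unref(out_frame);
      end;
    until ret < 0; // AVERROR(EAGAIN) or AVERROR_EOF ends the drain loop
    av_frame_free(out_frame);
  end;
  avfilter_graph_free(graph);
end;

The middle of such a chain could also be built from a textual description with avfilter_graph_parse_ptr(), declared above, which is usually more convenient when the filter list comes from configuration rather than code.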
diff --git a/ffmpeg/libavformat.pas b/ffmpeg/libavformat.pas new file mode 100644 index 0000000..d48ed1d --- /dev/null +++ b/ffmpeg/libavformat.pas @@ -0,0 +1,4218 @@ +unit libavformat; + +{$IFDEF FPC} +{$MODE Delphi} +{$ENDIF} + +interface + +Uses + ffmpeg_types, libavutil, libavcodec; + +{$I ffmpeg.inc} +{$REGION 'formats.h'} + +(* * + * A list of supported formats for one end of a filter link. This is used + * during the format negotiation process to try to pick the best format to + * use to minimize the number of necessary conversions. Each filter gives a + * list of the formats supported by each input and output pad. The list + * given for each pad need not be distinct - they may be references to the + * same list of formats, as is often the case when a filter supports multiple + * formats, but will always output the same format as it is given in input. + * + * In this way, a list of possible input formats and a list of possible + * output formats are associated with each link. When a set of formats is + * negotiated over a link, the input and output lists are merged to form a + * new list containing only the common elements of each list. In the case + * that there were no common elements, a format conversion is necessary. + * Otherwise, the lists are merged, and all other links which reference + * either of the format lists involved in the merge are also affected. + * + * For example, consider the filter chain: + * filter (a) --> (b) filter (b) --> (c) filter + * + * where the letters in parenthesis indicate a list of formats supported on + * the input or output of the link. Suppose the lists are as follows: + * (a) = {A, B} + * (b) = {A, B, C} + * (c) = {B, C} + * + * First, the first link's lists are merged, yielding: + * filter (a) --> (a) filter (a) --> (c) filter + * + * Notice that format list (b) now refers to the same list as filter list (a). + * Next, the lists for the second link are merged, yielding: + * filter (a) --> (a) filter (a) --> (a) filter + * + * where (a) = {B}. + * + * Unfortunately, when the format lists at the two ends of a link are merged, + * we must ensure that all links which reference either pre-merge format list + * get updated as well. Therefore, we have the format list structure store a + * pointer to each of the pointers to itself. +*) +type + pAVFilterFormats = ^AVFilterFormats; + ppAVFilterFormats = ^pAVFilterFormats; + pppAVFilterFormats = ^ppAVFilterFormats; + + AVFilterFormats = record + nb_formats: unsigned; + /// < number of formats + formats: pint; + /// < list of media formats + + refcount: unsigned; + /// < number of references to this list + refs: pppAVFilterFormats; + /// < references to this list + end; + + (* * + * A list of supported channel layouts. + * + * The list works the same as AVFilterFormats, except for the following + * differences: + * - A list with all_layouts = 1 means all channel layouts with a known + * disposition; nb_channel_layouts must then be 0. + * - A list with all_counts = 1 means all channel counts, with a known or + * unknown disposition; nb_channel_layouts must then be 0 and all_layouts 1. + * - The list must not contain a layout with a known disposition and a + * channel count with unknown disposition with the same number of channels + * (e.g. AV_CH_LAYOUT_STEREO and FF_COUNT2LAYOUT(2). 
+ *) + pAVFilterChannelLayouts = ^AVFilterChannelLayouts; + ppAVFilterChannelLayouts = ^pAVFilterChannelLayouts; + pppAVFilterChannelLayouts = ^ppAVFilterChannelLayouts; + + AVFilterChannelLayouts = record + channel_layouts: puint64_t; + /// < list of channel layouts + nb_channel_layouts: int; + /// < number of channel layouts + all_layouts: AnsiChar; + /// < accept any known channel layout + all_counts: AnsiChar; + /// < accept any channel layout or count + refcount: unsigned; + /// < number of references to this list + refs: pppAVFilterChannelLayouts; + /// < references to this list + end; + +{$ENDREGION} +{$REGION 'avformat.h'} + (* * + * @defgroup libavf libavformat + * I/O and Muxing/Demuxing Library + * + * Libavformat (lavf) is a library for dealing with various media container + * formats. Its main two purposes are demuxing - i.e. splitting a media file + * into component streams, and the reverse process of muxing - writing supplied + * data in a specified container format. It also has an @ref lavf_io + * "I/O module" which supports a number of protocols for accessing the data (e.g. + * file, tcp, http and others). + * Unless you are absolutely sure you won't use libavformat's network + * capabilities, you should also call avformat_network_init(). + * + * A supported input format is described by an AVInputFormat struct, conversely + * an output format is described by AVOutputFormat. You can iterate over all + * input/output formats using the av_demuxer_iterate / av_muxer_iterate() functions. + * The protocols layer is not part of the public API, so you can only get the names + * of supported protocols with the avio_enum_protocols() function. + * + * Main lavf structure used for both muxing and demuxing is AVFormatContext, + * which exports all information about the file being read or written. As with + * most Libavformat structures, its size is not part of public ABI, so it cannot be + * allocated on stack or directly with av_malloc(). To create an + * AVFormatContext, use avformat_alloc_context() (some functions, like + * avformat_open_input() might do that for you). + * + * Most importantly an AVFormatContext contains: + * @li the @ref AVFormatContext.iformat "input" or @ref AVFormatContext.oformat + * "output" format. It is either autodetected or set by user for input; + * always set by user for output. + * @li an @ref AVFormatContext.streams "array" of AVStreams, which describe all + * elementary streams stored in the file. AVStreams are typically referred to + * using their index in this array. + * @li an @ref AVFormatContext.pb "I/O context". It is either opened by lavf or + * set by user for input, always set by user for output (unless you are dealing + * with an AVFMT_NOFILE format). + * + * @section lavf_options Passing options to (de)muxers + * It is possible to configure lavf muxers and demuxers using the @ref avoptions + * mechanism. Generic (format-independent) libavformat options are provided by + * AVFormatContext, they can be examined from a user program by calling + * av_opt_next() / av_opt_find() on an allocated AVFormatContext (or its AVClass + * from avformat_get_class()). Private (format-specific) options are provided by + * AVFormatContext.priv_data if and only if AVInputFormat.priv_class / + * AVOutputFormat.priv_class of the corresponding format struct is non-NULL. + * Further options may be provided by the @ref AVFormatContext.pb "I/O context", + * if its AVClass is non-NULL, and the protocols layer. 
See the discussion on + * nesting in @ref avoptions documentation to learn how to access those. + * + * @section urls + * URL strings in libavformat are made of a scheme/protocol, a ':', and a + * scheme specific string. URLs without a scheme and ':' used for local files + * are supported but deprecated. "file:" should be used for local files. + * + * It is important that the scheme string is not taken from untrusted + * sources without checks. + * + * Note that some schemes/protocols are quite powerful, allowing access to + * both local and remote files, parts of them, concatenations of them, local + * audio and video devices and so on. + * + * @{ + * + * @defgroup lavf_decoding Demuxing + * @{ + * Demuxers read a media file and split it into chunks of data (@em packets). A + * @ref AVPacket "packet" contains one or more encoded frames which belongs to a + * single elementary stream. In the lavf API this process is represented by the + * avformat_open_input() function for opening a file, av_read_frame() for + * reading a single packet and finally avformat_close_input(), which does the + * cleanup. + * + * @section lavf_decoding_open Opening a media file + * The minimum information required to open a file is its URL, which + * is passed to avformat_open_input(), as in the following code: + * @code + * const char *url = "file:in.mp3"; + * AVFormatContext *s = NULL; + * int ret = avformat_open_input(&s, url, NULL, NULL); + * if (ret < 0) + * abort(); + * @endcode + * The above code attempts to allocate an AVFormatContext, open the + * specified file (autodetecting the format) and read the header, exporting the + * information stored there into s. Some formats do not have a header or do not + * store enough information there, so it is recommended that you call the + * avformat_find_stream_info() function which tries to read and decode a few + * frames to find missing information. + * + * In some cases you might want to preallocate an AVFormatContext yourself with + * avformat_alloc_context() and do some tweaking on it before passing it to + * avformat_open_input(). One such case is when you want to use custom functions + * for reading input data instead of lavf internal I/O layer. + * To do that, create your own AVIOContext with avio_alloc_context(), passing + * your reading callbacks to it. Then set the @em pb field of your + * AVFormatContext to newly created AVIOContext. + * + * Since the format of the opened file is in general not known until after + * avformat_open_input() has returned, it is not possible to set demuxer private + * options on a preallocated context. Instead, the options should be passed to + * avformat_open_input() wrapped in an AVDictionary: + * @code + * AVDictionary *options = NULL; + * av_dict_set(&options, "video_size", "640x480", 0); + * av_dict_set(&options, "pixel_format", "rgb24", 0); + * + * if (avformat_open_input(&s, url, NULL, &options) < 0) + * abort(); + * av_dict_free(&options); + * @endcode + * This code passes the private options 'video_size' and 'pixel_format' to the + * demuxer. They would be necessary for e.g. the rawvideo demuxer, since it + * cannot know how to interpret raw video data otherwise. If the format turns + * out to be something different than raw video, those options will not be + * recognized by the demuxer and therefore will not be applied. Such unrecognized + * options are then returned in the options dictionary (recognized options are + * consumed). The calling program can handle such unrecognized options as it + * wishes, e.g. 
+ * @code + * AVDictionaryEntry *e; + * if (e = av_dict_get(options, "", NULL, AV_DICT_IGNORE_SUFFIX)) { + * fprintf(stderr, "Option %s not recognized by the demuxer.\n", e->key); + * abort(); + * } + * @endcode + * + * After you have finished reading the file, you must close it with + * avformat_close_input(). It will free everything associated with the file. + * + * @section lavf_decoding_read Reading from an opened file + * Reading data from an opened AVFormatContext is done by repeatedly calling + * av_read_frame() on it. Each call, if successful, will return an AVPacket + * containing encoded data for one AVStream, identified by + * AVPacket.stream_index. This packet may be passed straight into the libavcodec + * decoding functions avcodec_send_packet() or avcodec_decode_subtitle2() if the + * caller wishes to decode the data. + * + * AVPacket.pts, AVPacket.dts and AVPacket.duration timing information will be + * set if known. They may also be unset (i.e. AV_NOPTS_VALUE for + * pts/dts, 0 for duration) if the stream does not provide them. The timing + * information will be in AVStream.time_base units, i.e. it has to be + * multiplied by the timebase to convert them to seconds. + * + * If AVPacket.buf is set on the returned packet, then the packet is + * allocated dynamically and the user may keep it indefinitely. + * Otherwise, if AVPacket.buf is NULL, the packet data is backed by a + * static storage somewhere inside the demuxer and the packet is only valid + * until the next av_read_frame() call or closing the file. If the caller + * requires a longer lifetime, av_packet_make_refcounted() will ensure that + * the data is reference counted, copying the data if necessary. + * In both cases, the packet must be freed with av_packet_unref() when it is no + * longer needed. + * + * @section lavf_decoding_seek Seeking + * @} + * + * @defgroup lavf_encoding Muxing + * @{ + * Muxers take encoded data in the form of @ref AVPacket "AVPackets" and write + * it into files or other output bytestreams in the specified container format. + * + * The main API functions for muxing are avformat_write_header() for writing the + * file header, av_write_frame() / av_interleaved_write_frame() for writing the + * packets and av_write_trailer() for finalizing the file. + * + * At the beginning of the muxing process, the caller must first call + * avformat_alloc_context() to create a muxing context. The caller then sets up + * the muxer by filling the various fields in this context: + * + * - The @ref AVFormatContext.oformat "oformat" field must be set to select the + * muxer that will be used. + * - Unless the format is of the AVFMT_NOFILE type, the @ref AVFormatContext.pb + * "pb" field must be set to an opened IO context, either returned from + * avio_open2() or a custom one. + * - Unless the format is of the AVFMT_NOSTREAMS type, at least one stream must + * be created with the avformat_new_stream() function. The caller should fill + * the @ref AVStream.codecpar "stream codec parameters" information, such as the + * codec @ref AVCodecParameters.codec_type "type", @ref AVCodecParameters.codec_id + * "id" and other parameters (e.g. width / height, the pixel or sample format, + * etc.) as known. The @ref AVStream.time_base "stream timebase" should + * be set to the timebase that the caller desires to use for this stream (note + * that the timebase actually used by the muxer can be different, as will be + * described later). 
+ * - It is advised to manually initialize only the relevant fields in + * AVCodecParameters, rather than using @ref avcodec_parameters_copy() during + * remuxing: there is no guarantee that the codec context values remain valid + * for both input and output format contexts. + * - The caller may fill in additional information, such as @ref + * AVFormatContext.metadata "global" or @ref AVStream.metadata "per-stream" + * metadata, @ref AVFormatContext.chapters "chapters", @ref + * AVFormatContext.programs "programs", etc. as described in the + * AVFormatContext documentation. Whether such information will actually be + * stored in the output depends on what the container format and the muxer + * support. + * + * When the muxing context is fully set up, the caller must call + * avformat_write_header() to initialize the muxer internals and write the file + * header. Whether anything actually is written to the IO context at this step + * depends on the muxer, but this function must always be called. Any muxer + * private options must be passed in the options parameter to this function. + * + * The data is then sent to the muxer by repeatedly calling av_write_frame() or + * av_interleaved_write_frame() (consult those functions' documentation for + * discussion on the difference between them; only one of them may be used with + * a single muxing context, they should not be mixed). Do note that the timing + * information on the packets sent to the muxer must be in the corresponding + * AVStream's timebase. That timebase is set by the muxer (in the + * avformat_write_header() step) and may be different from the timebase + * requested by the caller. + * + * Once all the data has been written, the caller must call av_write_trailer() + * to flush any buffered packets and finalize the output file, then close the IO + * context (if any) and finally free the muxing context with + * avformat_free_context(). + * @} + * + * @defgroup lavf_io I/O Read/Write + * @{ + * @section lavf_io_dirlist Directory listing + * The directory listing API makes it possible to list files on remote servers. + * + * Some of possible use cases: + * - an "open file" dialog to choose files from a remote location, + * - a recursive media finder providing a player with an ability to play all + * files from a given directory. + * + * @subsection lavf_io_dirlist_open Opening a directory + * At first, a directory needs to be opened by calling avio_open_dir() + * supplied with a URL and, optionally, ::AVDictionary containing + * protocol-specific parameters. The function returns zero or positive + * integer and allocates AVIODirContext on success. + * + * @code + * AVIODirContext *ctx = NULL; + * if (avio_open_dir(&ctx, "smb://example.com/some_dir", NULL) < 0) { + * fprintf(stderr, "Cannot open directory.\n"); + * abort(); + * } + * @endcode + * + * This code tries to open a sample directory using smb protocol without + * any additional parameters. + * + * @subsection lavf_io_dirlist_read Reading entries + * Each directory's entry (i.e. file, another directory, anything else + * within ::AVIODirEntryType) is represented by AVIODirEntry. + * Reading consecutive entries from an opened AVIODirContext is done by + * repeatedly calling avio_read_dir() on it. Each call returns zero or + * positive integer if successful. Reading can be stopped right after the + * NULL entry has been read -- it means there are no entries left to be + * read. 
The following code reads all entries from a directory associated + * with ctx and prints their names to standard output. + * @code + * AVIODirEntry *entry = NULL; + * for (;;) { + * if (avio_read_dir(ctx, &entry) < 0) { + * fprintf(stderr, "Cannot list directory.\n"); + * abort(); + * } + * if (!entry) + * break; + * printf("%s\n", entry->name); + * avio_free_directory_entry(&entry); + * } + * @endcode + * @} + * + * @defgroup lavf_codec Demuxers + * @{ + * @defgroup lavf_codec_native Native Demuxers + * @{ + * @} + * @defgroup lavf_codec_wrappers External library wrappers + * @{ + * @} + * @} + * @defgroup lavf_protos I/O Protocols + * @{ + * @} + * @defgroup lavf_internal Internal + * @{ + * @} + * @} + *) + + // #include + // #include (* FILE *) + // #include "libavcodec/avcodec.h" + // #include "libavutil/dict.h" + // #include "libavutil/log.h" + // #include "avio.h" + // #include "libavformat/version.h" + +{$REGION 'avio.h'} + +const + (* * + * Seeking works like for a local file. + *) + AVIO_SEEKABLE_NORMAL = (1 shl 0); + + (* * + * Seeking by timestamp with avio_seek_time() is possible. + *) + AVIO_SEEKABLE_TIME = (1 shl 1); + +type + (* * + * Callback for checking whether to abort blocking functions. + * AVERROR_EXIT is returned in this case by the interrupted + * function. During blocking operations, callback is called with + * opaque as parameter. If the callback returns 1, the + * blocking operation will be aborted. + * + * No members can be added to this struct without a major bump, if + * new elements have been added after this struct in AVFormatContext + * or AVIOContext. + *) + pAVIOInterruptCB = ^AVIOInterruptCB; + + AVIOInterruptCB = record + // int (*callback)(void*); + callback: function(p: pointer): int; cdecl; + opaque: pointer; + end; + + (* * + * Directory entry types. + *) + AVIODirEntryType = ( // + AVIO_ENTRY_UNKNOWN, AVIO_ENTRY_BLOCK_DEVICE, AVIO_ENTRY_CHARACTER_DEVICE, AVIO_ENTRY_DIRECTORY, AVIO_ENTRY_NAMED_PIPE, AVIO_ENTRY_SYMBOLIC_LINK, + AVIO_ENTRY_SOCKET, AVIO_ENTRY_FILE, AVIO_ENTRY_SERVER, AVIO_ENTRY_SHARE, AVIO_ENTRY_WORKGROUP); + + (* * + * Describes single entry of the directory. + * + * Only name and type fields are guaranteed be set. + * Rest of fields are protocol or/and platform dependent and might be unknown. + *) + pAVIODirEntry = ^AVIODirEntry; + + AVIODirEntry = record + name: PAnsiChar; (* *< Filename *) + _type: int; (* *< Type of the entry *) + utf8: int; (* *< Set to 1 when name is encoded with UTF-8, 0 otherwise. + Name can be encoded with UTF-8 even though 0 is set. *) + size: int64_t; (* *< File size in bytes, -1 if unknown. *) + modification_timestamp: int64_t; (* *< Time of last modification in microseconds since unix + epoch, -1 if unknown. *) + access_timestamp: int64_t; (* *< Time of last access in microseconds since unix epoch, + -1 if unknown. *) + status_change_timestamp: int64_t; (* *< Time of last status change in microseconds since unix + epoch, -1 if unknown. *) + user_id: int64_t; (* *< User ID of owner, -1 if unknown. *) + group_id: int64_t; (* *< Group ID of owner, -1 if unknown. *) + filemode: int64_t; (* *< Unix file mode, -1 if unknown. *) + end; + + pURLContext = ^URLContext; + + URLContext = record + end; + + pAVIODirContext = ^AVIODirContext; + + AVIODirContext = record + url_context: pURLContext; + end; + + (* * + * Different data types that can be returned via the AVIO + * write_data_type callback. + *) + AVIODataMarkerType = ( + (* * + * Header data; this needs to be present for the stream to be decodeable. 
+ *) + AVIO_DATA_MARKER_HEADER, + (* * + * A point in the output bytestream where a decoder can start decoding + * (i.e. a keyframe). A demuxer/decoder given the data flagged with + * AVIO_DATA_MARKER_HEADER, followed by any AVIO_DATA_MARKER_SYNC_POINT, + * should give decodeable results. + *) + AVIO_DATA_MARKER_SYNC_POINT, + (* * + * A point in the output bytestream where a demuxer can start parsing + * (for non self synchronizing bytestream formats). That is, any + * non-keyframe packet start point. + *) + AVIO_DATA_MARKER_BOUNDARY_POINT, + (* * + * This is any, unlabelled data. It can either be a muxer not marking + * any positions at all, it can be an actual boundary/sync point + * that the muxer chooses not to mark, or a later part of a packet/fragment + * that is cut into multiple write callbacks due to limited IO buffer size. + *) + AVIO_DATA_MARKER_UNKNOWN, + (* * + * Trailer data, which doesn't contain actual content, but only for + * finalizing the output file. + *) + AVIO_DATA_MARKER_TRAILER, + (* * + * A point in the output bytestream where the underlying AVIOContext might + * flush the buffer depending on latency or buffering requirements. Typically + * means the end of a packet. + *) + AVIO_DATA_MARKER_FLUSH_POINT); + + (* * + * Bytestream IO Context. + * New fields can be added to the end with minor version bumps. + * Removal, reordering and changes to existing fields require a major + * version bump. + * sizeof(AVIOContext) must not be used outside libav*. + * + * @note None of the function pointers in AVIOContext should be called + * directly, they should only be set by the client application + * when implementing custom I/O. Normally these are set to the + * function pointers specified in avio_alloc_context() + *) + pAVIOContext = ^AVIOContext; + + AVIOContext = record + (* * + * A class for private options. + * + * If this AVIOContext is created by avio_open2(), av_class is set and + * passes the options down to protocols. + * + * If this AVIOContext is manually allocated, then av_class may be set by + * the caller. + * + * warning -- this field can be NULL, be sure to not pass this AVIOContext + * to any av_opt_* functions in that case. 
+ *) + av_class: pAVClass; + + (* + * The following shows the relationship between buffer, buf_ptr, + * buf_ptr_max, buf_end, buf_size, and pos, when reading and when writing + * (since AVIOContext is used for both): + * + ********************************************************************************** + * READING + ********************************************************************************** + * + * | buffer_size | + * |---------------------------------------| + * | | + * + * buffer buf_ptr buf_end + * +---------------+-----------------------+ + * |/ / / / / / / /|/ / / / / / /| | + * read buffer: |/ / consumed / | to be read /| | + * |/ / / / / / / /|/ / / / / / /| | + * +---------------+-----------------------+ + * + * pos + * +-------------------------------------------+-----------------+ + * input file: | | | + * +-------------------------------------------+-----------------+ + * + * + ********************************************************************************** + * WRITING + ********************************************************************************** + * + * | buffer_size | + * |--------------------------------------| + * | | + * + * buf_ptr_max + * buffer (buf_ptr) buf_end + * +-----------------------+--------------+ + * |/ / / / / / / / / / / /| | + * write buffer: | / / to be flushed / / | | + * |/ / / / / / / / / / / /| | + * +-----------------------+--------------+ + * buf_ptr can be in this + * due to a backward seek + * + * pos + * +-------------+----------------------------------------------+ + * output file: | | | + * +-------------+----------------------------------------------+ + * + *) + buffer: punsignedchar; (* *< Start of the buffer. *) + buffer_size: int; (* *< Maximum buffer size *) + buf_ptr: punsignedchar; (* *< Current position in the buffer *) + buf_end: punsignedchar; (* *< End of the data, may be less than + buffer+buffer_size if the read function returned + less data than requested, e.g. for streams where + no more data has been received yet. *) + opaque: pointer; (* *< A private pointer, passed to the read/write/seek/... + functions. *) + // int (*read_packet)(void *opaque, uint8_t *buf, int buf_size); + read_packet: function(opaque: pointer; buf: puint8_t; buf_size: int): int; cdecl; + // int (*write_packet)(void *opaque, uint8_t *buf, int buf_size); + write_packet: function(opaque: pointer; buf: puint8_t; buf_size: int): int; cdecl; + // int64_t (*seek)(void *opaque, int64_t offset, int whence); + seek: function(opaque: pointer; offset: int64_t; whence: int): int64_t; cdecl; + pos: int64_t; (* *< position in the file of the current buffer *) + eof_reached: int; (* *< true if was unable to read due to error or eof *) + write_flag: int; (* *< true if open for writing *) + max_packet_size: int; + checksum: unsigned_long; + checksum_ptr: punsignedchar; + // unsigned long (*update_checksum)(unsigned long checksum, const uint8_t *buf, unsigned int size); + update_checksum: function(checksum: unsigned_long; const buf: puint8_t; size: unsigned_int): unsigned_long; cdecl; + error: int; (* *< contains the error code or 0 if no error happened *) + (* * + * Pause or resume playback for network streaming protocols - e.g. MMS. + *) + // int (*read_pause)(void *opaque, int pause); + read_pause: function(opaque: pointer; pause: int): int; cdecl; + (* * + * Seek to a given timestamp in stream with the specified stream_index. + * Needed for some network streaming protocols which don't support seeking + * to byte position. 
+ *) + // int64_t (*read_seek)(void *opaque, int stream_index, int64_t timestamp, int flags); + read_seek: function(opaque: pointer; stream_index: int; timestamp: int64_t; flags: int): int64_t; cdecl; + (* * + * A combination of AVIO_SEEKABLE_ flags or 0 when the stream is not seekable. + *) + seekable: int; + + (* * + * max filesize, used to limit allocations + * This field is internal to libavformat and access from outside is not allowed. + *) + maxsize: int64_t; + + (* * + * avio_read and avio_write should if possible be satisfied directly + * instead of going through a buffer, and avio_seek will always + * call the underlying seek function directly. + *) + direct: int; + + (* * + * Bytes read statistic + * This field is internal to libavformat and access from outside is not allowed. + *) + bytes_read: int64_t; + + (* * + * seek statistic + * This field is internal to libavformat and access from outside is not allowed. + *) + seek_count: int; + + (* * + * writeout statistic + * This field is internal to libavformat and access from outside is not allowed. + *) + writeout_count: int; + + (* * + * Original buffer size + * used internally after probing and ensure seekback to reset the buffer size + * This field is internal to libavformat and access from outside is not allowed. + *) + orig_buffer_size: int; + + (* * + * Threshold to favor readahead over seek. + * This is current internal only, do not use from outside. + *) + short_seek_threshold: int; + + (* * + * ',' separated list of allowed protocols. + *) + protocol_whitelist: PAnsiChar; + + (* * + * ',' separated list of disallowed protocols. + *) + protocol_blacklist: PAnsiChar; + + (* * + * A callback that is used instead of write_packet. + *) + // int (*write_data_type)(void *opaque, uint8_t *buf, int buf_size, enum AVIODataMarkerType type, int64_t time); + write_data_type: function(opaque: pointer; buf: puint8_t; buf_size: int; _type: AVIODataMarkerType; time: int64_t): int; cdecl; + (* * + * If set, don't call write_data_type separately for AVIO_DATA_MARKER_BOUNDARY_POINT, + * but ignore them and treat them as AVIO_DATA_MARKER_UNKNOWN (to avoid needlessly + * small chunks of data returned from the callback). + *) + ignore_boundary_point: int; + + (* * + * Internal, not meant to be used from outside of AVIOContext. + *) + current_type: AVIODataMarkerType; + last_time: int64_t; + + (* * + * A callback that is used instead of short_seek_threshold. + * This is current internal only, do not use from outside. + *) + // int (*short_seek_get)(void *opaque); + short_seek_get: function(opaque: pointer): int; cdecl; + + written: int64_t; + + (* * + * Maximum reached position before a backward seek in the write buffer, + * used keeping track of already written data for a later flush. + *) + buf_ptr_max: punsigned_char; + + (* * + * Try to buffer at least this amount of data before flushing it + *) + min_packet_size: int; + end; + + (* * + * Return the name of the protocol that will handle the passed URL. + * + * NULL is returned if no protocol could be found for the given URL. + * + * @return Name of the protocol or NULL. + *) + // const char *avio_find_protocol_name(const char *url); +function avio_find_protocol_name(const url: PAnsiChar): PAnsiChar; cdecl; external avformat_dll; + +(* * + * Return AVIO_FLAG_* access flags corresponding to the access permissions + * of the resource in url, or a negative value corresponding to an + * AVERROR code in case of failure. The returned access flags are + * masked by the value in flags. 
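+ *
+ * A hedged Pascal usage sketch (AVIO_FLAG_READ is declared further down in
+ * this unit; 'file:movie.mp4' and ret are only illustrative):
+ * @code
+ *  ret:=avio_check('file:movie.mp4',AVIO_FLAG_READ);
+ *  if (ret>=0) and ((ret and AVIO_FLAG_READ)<>0) then
+ *   Writeln('resource looks readable');
+ * @endcode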
+ * + * @note This function is intrinsically unsafe, in the sense that the + * checked resource may change its existence or permission status from + * one call to another. Thus you should not trust the returned value, + * unless you are sure that no other processes are accessing the + * checked resource. +*) +// int avio_check(const char *url, int flags); +function avio_check(const url: PAnsiChar; flags: int): int; cdecl; external avformat_dll; +(* * + * Move or rename a resource. + * + * @note url_src and url_dst should share the same protocol and authority. + * + * @param url_src url to resource to be moved + * @param url_dst new url to resource if the operation succeeded + * @return >=0 on success or negative on error. +*) +// int avpriv_io_move(const char *url_src, const char *url_dst); +function avpriv_io_move(const url_src: PAnsiChar; const url_dst: PAnsiChar): int; cdecl; external avformat_dll; +(* * + * Delete a resource. + * + * @param url resource to be deleted. + * @return >=0 on success or negative on error. +*) +// int avpriv_io_delete(const char *url); +function avpriv_io_delete(const url: PAnsiChar): int; cdecl; external avformat_dll; +(* * + * Open directory for reading. + * + * @param s directory read context. Pointer to a NULL pointer must be passed. + * @param url directory to be listed. + * @param options A dictionary filled with protocol-private options. On return + * this parameter will be destroyed and replaced with a dictionary + * containing options that were not found. May be NULL. + * @return >=0 on success or negative on error. +*) +// int avio_open_dir(AVIODirContext **s, const char *url, AVDictionary **options); +function avio_open_dir(var s: pAVIODirContext; const url: PAnsiChar; options: ppAVDictionary): int; cdecl; external avformat_dll; +(* * + * Get next directory entry. + * + * Returned entry must be freed with avio_free_directory_entry(). In particular + * it may outlive AVIODirContext. + * + * @param s directory read context. + * @param[out] next next entry or NULL when no more entries. + * @return >=0 on success or negative on error. End of list is not considered an + * error. +*) +// int avio_read_dir(AVIODirContext *s, AVIODirEntry **next); +function avio_read_dir(s: pAVIODirContext; var next: pAVIODirEntry): int; cdecl; external avformat_dll; +(* * + * Close directory. + * + * @note Entries created using avio_read_dir() are not deleted and must be + * freeded with avio_free_directory_entry(). + * + * @param s directory read context. + * @return >=0 on success or negative on error. +*) +// int avio_close_dir(AVIODirContext **s); +function avio_close_dir(var s: pAVIODirContext): int; cdecl; external avformat_dll; +(* * + * Free entry allocated by avio_read_dir(). + * + * @param entry entry to be freed. +*) +// void avio_free_directory_entry(AVIODirEntry **entry); +procedure avio_free_directory_entry(var entry: pAVIODirEntry); cdecl; external avformat_dll; +(* * + * Allocate and initialize an AVIOContext for buffered I/O. It must be later + * freed with avio_context_free(). + * + * @param buffer Memory block for input/output operations via AVIOContext. + * The buffer must be allocated with av_malloc() and friends. + * It may be freed and replaced with a new buffer by libavformat. + * AVIOContext.buffer holds the buffer currently in use, + * which must be later freed with av_free(). + * @param buffer_size The buffer size is very important for performance. + * For protocols with fixed blocksize it should be set to this blocksize. 
+ * For others a typical size is a cache page, e.g. 4kb. + * @param write_flag Set to 1 if the buffer should be writable, 0 otherwise. + * @param opaque An opaque pointer to user-specific data. + * @param read_packet A function for refilling the buffer, may be NULL. + * For stream protocols, must never return 0 but rather + * a proper AVERROR code. + * @param write_packet A function for writing the buffer contents, may be NULL. + * The function may not change the input buffers content. + * @param seek A function for seeking to specified byte position, may be NULL. + * + * @return Allocated AVIOContext or NULL on failure. +*) + +// AVIOContext *avio_alloc_context( +// unsigned char *buffer, +// int buffer_size, +// int write_flag, +// void *opaque, +// int (*read_packet)(void *opaque, uint8_t *buf, int buf_size), +// int (*write_packet)(void *opaque, uint8_t *buf, int buf_size), +// int64_t (*seek)(void *opaque, int64_t offset, int whence)); + +type + // int (*read_packet)(void *opaque, uint8_t *buf, int buf_size), + Tavio_alloc_context_read_packet = function(opaque: pointer; buf: puint8_t; buf_size: int): int; cdecl; + // int (*write_packet)(void *opaque, uint8_t *buf, int buf_size), + Tavio_alloc_context_write_packet = Tavio_alloc_context_read_packet; + // int64_t (*seek)(void *opaque, int64_t offset, int whence) + Tavio_alloc_context_seek = function(opaque: pointer; offset: int64_t; whence: int): int64_t; cdecl; + +function avio_alloc_context(buffer: punsigned_char; buffer_size: int; write_flag: int; opaque: pointer; read_packet: Tavio_alloc_context_read_packet; + write_packet: Tavio_alloc_context_write_packet; seek: Tavio_alloc_context_seek): pAVIOContext; cdecl; external avformat_dll; +(* * + * Free the supplied IO context and everything associated with it. + * + * @param s Double pointer to the IO context. This function will write NULL + * into s. +*) +// void avio_context_free(AVIOContext **s); +procedure avio_context_free(var s: pAVIOContext); cdecl; external avformat_dll; + +// void avio_w8(AVIOContext *s, int b); +procedure avio_w8(s: pAVIOContext; b: int); cdecl; external avformat_dll; + +// void avio_write(AVIOContext *s, const unsigned char *buf, int size); +procedure avio_write(s: pAVIOContext; const buf: punsigned_char; size: int); cdecl; external avformat_dll; + +// void avio_wl64(AVIOContext *s, uint64_t val); +procedure avio_wl64(s: pAVIOContext; val: uint64_t); cdecl; external avformat_dll; + +// void avio_wb64(AVIOContext *s, uint64_t val); +procedure avio_wb64(s: pAVIOContext; val: uint64_t); cdecl; external avformat_dll; + +// void avio_wl32(AVIOContext *s, unsigned int val); +procedure avio_wl32(s: pAVIOContext; val: unsigned_int); cdecl; external avformat_dll; + +// void avio_wb32(AVIOContext *s, unsigned int val); +procedure avio_wb32(s: pAVIOContext; val: unsigned_int); cdecl; external avformat_dll; + +// void avio_wl24(AVIOContext *s, unsigned int val); +procedure avio_wl24(s: pAVIOContext; val: unsigned_int); cdecl; external avformat_dll; + +// void avio_wb24(AVIOContext *s, unsigned int val); +procedure avio_wb24(s: pAVIOContext; val: unsigned_int); cdecl; external avformat_dll; + +// void avio_wl16(AVIOContext *s, unsigned int val); +procedure avio_wl16(s: pAVIOContext; val: unsigned_int); cdecl; external avformat_dll; + +// void avio_wb16(AVIOContext *s, unsigned int val); +procedure avio_wb16(s: pAVIOContext; val: unsigned_int); cdecl; external avformat_dll; +(* * + * Write a NULL-terminated string. + * @return number of bytes written. 
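+ *
+ * (Aside on the custom-I/O setup declared just above: a hedged sketch of
+ *  pairing avio_alloc_context() with a read callback. ReadFromGameFile and
+ *  ioState are hypothetical, av_malloc comes from the libavutil binding, and
+ *  for stream protocols the callback must return a negative AVERROR code
+ *  instead of 0.)
+ * @code
+ *  function MyReadPacket(opaque:pointer;buf:puint8_t;buf_size:int):int; cdecl;
+ *  begin
+ *   Result:=ReadFromGameFile(opaque,buf,buf_size); // hypothetical helper
+ *  end;
+ *
+ *  io:=avio_alloc_context(punsigned_char(av_malloc(4096)),4096,0,ioState,
+ *                         @MyReadPacket,nil,nil);
+ * @endcode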
+*) +// int avio_put_str(AVIOContext *s, const char *str); +function avio_put_str(s: pAVIOContext; const str: PAnsiChar): int; cdecl; external avformat_dll; +(* * + * Convert an UTF-8 string to UTF-16LE and write it. + * @param s the AVIOContext + * @param str NULL-terminated UTF-8 string + * + * @return number of bytes written. +*) +// int avio_put_str16le(AVIOContext *s, const char *str); +function avio_put_str16le(s: pAVIOContext; const str: PAnsiChar): int; cdecl; external avformat_dll; +(* * + * Convert an UTF-8 string to UTF-16BE and write it. + * @param s the AVIOContext + * @param str NULL-terminated UTF-8 string + * + * @return number of bytes written. +*) +// int avio_put_str16be(AVIOContext *s, const char *str); +function avio_put_str16be(s: pAVIOContext; const str: PAnsiChar): int; cdecl; external avformat_dll; +(* * + * Mark the written bytestream as a specific type. + * + * Zero-length ranges are omitted from the output. + * + * @param time the stream time the current bytestream pos corresponds to + * (in AV_TIME_BASE units), or AV_NOPTS_VALUE if unknown or not + * applicable + * @param type the kind of data written starting at the current pos +*) +// void avio_write_marker(AVIOContext *s, int64_t time, enum AVIODataMarkerType type); +procedure avio_write_marker(s: pAVIOContext; time: int64_t; _type: AVIODataMarkerType); cdecl; external avformat_dll; + +const + (* * + * ORing this as the "whence" parameter to a seek function causes it to + * return the filesize without seeking anywhere. Supporting this is optional. + * If it is not supported then the seek function will return <0. + *) + AVSEEK_SIZE = $10000; + + (* * + * Passing this flag as the "whence" parameter to a seek function causes it to + * seek by any means (like reopening and linear reading) or other normally unreasonable + * means that can be extremely slow. + * This may be ignored by the seek code. + *) + AVSEEK_FORCE = $20000; + + (* * + * fseek() equivalent for AVIOContext. + * @return new position or AVERROR. + *) + // int64_t avio_seek(AVIOContext *s, int64_t offset, int whence); +function avio_seek(s: pAVIOContext; offset: int64_t; whence: int): int64_t; cdecl; external avformat_dll; +(* * + * Skip given number of bytes forward + * @return new position or AVERROR. +*) +// int64_t avio_skip(AVIOContext *s, int64_t offset); +function avio_skip(s: pAVIOContext; offset: int64_t): int64_t; cdecl; external avformat_dll; +(* * + * ftell() equivalent for AVIOContext. + * @return position or AVERROR. +*) +// static av_always_inline int64_t avio_tell(AVIOContext *s) +// { +// return avio_seek(s, 0, SEEK_CUR); +// } +function avio_tell(s: pAVIOContext): int64_t; inline; + +(* * + * Get the filesize. + * @return filesize or AVERROR +*) +// int64_t avio_size(AVIOContext *s); +function avio_size(s: pAVIOContext): int64_t; cdecl; external avformat_dll; +(* * + * Similar to feof() but also returns nonzero on read errors. + * @return non zero if and only if at end of file or a read error happened when reading. +*) +// int avio_feof(AVIOContext *s); +function avio_feof(s: pAVIOContext): int; cdecl; external avformat_dll; + +(* + * Writes a formatted string to the context. + * @return number of bytes written, < 0 on error. +*) +// int avio_printf(AVIOContext *s, const char *fmt, ...) av_printf_format(2, 3); + +// --> 4.2.2 +(* * + * Write a NULL terminated array of strings to the context. + * Usually you don't need to use this function directly but its macro wrapper, + * avio_print. 
+*) +// void avio_print_string_array(AVIOContext *s, const char *strings[]); +//procedure avio_print_string_array(s: pAVIOContext; const strings: pAnsiCharArray); cdecl; external avformat_dll; //4.2.2 + +(* + * Write strings (const char* ) to the context. + * This is a convenience macro around avio_print_string_array and it + * automatically creates the string array from the variable argument list. + * For simple string concatenations this function is more performant than using + * avio_printf since it does not need a temporary buffer. +*) +// #define avio_print(s, ...) avio_print_string_array(s, (const char*[]){__VA_ARGS__, NULL}) +// <-- 4.2.2 + +(* * + * Force flushing of buffered data. + * + * For write streams, force the buffered data to be immediately written to the output, + * without to wait to fill the internal buffer. + * + * For read streams, discard all currently buffered data, and advance the + * reported file position to that of the underlying stream. This does not + * read new data, and does not perform any seeks. +*) +// void avio_flush(AVIOContext *s); +procedure avio_flush(s: pAVIOContext); cdecl; external avformat_dll; +(* * + * Read size bytes from AVIOContext into buf. + * @return number of bytes read or AVERROR +*) +// int avio_read(AVIOContext *s, unsigned char *buf, int size); +function avio_read(s: pAVIOContext; buf: punsigned_char; size: int): int; cdecl; external avformat_dll; +(* * + * Read size bytes from AVIOContext into buf. Unlike avio_read(), this is allowed + * to read fewer bytes than requested. The missing bytes can be read in the next + * call. This always tries to read at least 1 byte. + * Useful to reduce latency in certain cases. + * @return number of bytes read or AVERROR +*) +// int avio_read_partial(AVIOContext *s, unsigned char *buf, int size); +function avio_read_partial(s: pAVIOContext; buf: punsigned_char; size: int): int; cdecl; external avformat_dll; +(* * + * @name Functions for reading from AVIOContext + * @{ + * + * @note return 0 if EOF, so you cannot use it if EOF handling is + * necessary +*) +// int avio_r8 (AVIOContext *s); +function avio_r8(s: pAVIOContext): int; cdecl; external avformat_dll; +// unsigned int avio_rl16(AVIOContext *s); +function avio_rl16(s: pAVIOContext): unsigned_int; cdecl; external avformat_dll; + +// unsigned int avio_rl24(AVIOContext *s); +function avio_rl24(s: pAVIOContext): unsigned_int; cdecl; external avformat_dll; + +// unsigned int avio_rl32(AVIOContext *s); +function avio_rl32(s: pAVIOContext): unsigned_int; cdecl; external avformat_dll; + +// uint64_t avio_rl64(AVIOContext *s); +function avio_rl64(s: pAVIOContext): uint64_t; cdecl; external avformat_dll; + +// unsigned int avio_rb16(AVIOContext *s); +function avio_rb16(s: pAVIOContext): unsigned_int; cdecl; external avformat_dll; + +// unsigned int avio_rb24(AVIOContext *s); +function avio_rb24(s: pAVIOContext): unsigned_int; cdecl; external avformat_dll; + +// unsigned int avio_rb32(AVIOContext *s); +function avio_rb32(s: pAVIOContext): unsigned_int; cdecl; external avformat_dll; + +// uint64_t avio_rb64(AVIOContext *s); +function avio_rb64(s: pAVIOContext): uint64_t; cdecl; external avformat_dll; +(* * + * @} +*) + +(* * + * Read a string from pb into buf. The reading will terminate when either + * a NULL character was encountered, maxlen bytes have been read, or nothing + * more can be read from pb. The result is guaranteed to be NULL-terminated, it + * will be truncated if buf is too small. 
+ * Note that the string is not interpreted or validated in any way, it + * might get truncated in the middle of a sequence for multi-byte encodings. + * + * @return number of bytes read (is always <= maxlen). + * If reading ends on EOF or error, the return value will be one more than + * bytes actually read. +*) +// int avio_get_str(AVIOContext *pb, int maxlen, char *buf, int buflen); +function avio_get_str(pb: pAVIOContext; maxlen: int; buf: PAnsiChar; buflen: int): int; cdecl; external avformat_dll; +(* * + * Read a UTF-16 string from pb and convert it to UTF-8. + * The reading will terminate when either a null or invalid character was + * encountered or maxlen bytes have been read. + * @return number of bytes read (is always <= maxlen) +*) +// int avio_get_str16le(AVIOContext *pb, int maxlen, char *buf, int buflen); +function avio_get_str16le(pb: pAVIOContext; maxlen: int; buf: PAnsiChar; buflen: int): int; cdecl; external avformat_dll; + +// int avio_get_str16be(AVIOContext *pb, int maxlen, char *buf, int buflen); +function avio_get_str16be(pb: pAVIOContext; maxlen: int; buf: PAnsiChar; buflen: int): int; cdecl; external avformat_dll; + +const + (* * + * @name URL open modes + * The flags argument to avio_open must be one of the following + * constants, optionally ORed with other flags. + * + *) + AVIO_FLAG_READ = 1; (* *< read-only *) + AVIO_FLAG_WRITE = 2; (* *< write-only *) + AVIO_FLAG_READ_WRITE = (AVIO_FLAG_READ or AVIO_FLAG_WRITE); (* *< read-write pseudo flag *) + + (* * + * Use non-blocking mode. + * If this flag is set, operations on the context will return + * AVERROR(EAGAIN) if they can not be performed immediately. + * If this flag is not set, operations on the context will never return + * AVERROR(EAGAIN). + * Note that this flag does not affect the opening/connecting of the + * context. Connecting a protocol will always block if necessary (e.g. on + * network protocols) but never hang (e.g. on busy devices). + * Warning: non-blocking protocols is work-in-progress; this flag may be + * silently ignored. + *) + AVIO_FLAG_NONBLOCK = 8; + + (* * + * Use direct mode. + * avio_read and avio_write should if possible be satisfied directly + * instead of going through a buffer, and avio_seek will always + * call the underlying seek function directly. + *) + AVIO_FLAG_DIRECT = $8000; + + (* * + * Create and initialize a AVIOContext for accessing the + * resource indicated by url. + * @note When the resource indicated by url has been opened in + * read+write mode, the AVIOContext can be used only for writing. + * + * @param s Used to return the pointer to the created AVIOContext. + * In case of failure the pointed to value is set to NULL. + * @param url resource to access + * @param flags flags which control how the resource indicated by url + * is to be opened + * @return >= 0 in case of success, a negative value corresponding to an + * AVERROR code in case of failure + *) + // int avio_open(AVIOContext **s, const char *url, int flags); +function avio_open(var s: pAVIOContext; const url: PAnsiChar; flags: int): int; cdecl; external avformat_dll; +(* * + * Create and initialize a AVIOContext for accessing the + * resource indicated by url. + * @note When the resource indicated by url has been opened in + * read+write mode, the AVIOContext can be used only for writing. + * + * @param s Used to return the pointer to the created AVIOContext. + * In case of failure the pointed to value is set to NULL. 
+ * @param url resource to access + * @param flags flags which control how the resource indicated by url + * is to be opened + * @param int_cb an interrupt callback to be used at the protocols level + * @param options A dictionary filled with protocol-private options. On return + * this parameter will be destroyed and replaced with a dict containing options + * that were not found. May be NULL. + * @return >= 0 in case of success, a negative value corresponding to an + * AVERROR code in case of failure +*) +// int avio_open2(AVIOContext **s, const char *url, int flags, const AVIOInterruptCB *int_cb, AVDictionary **options); +function avio_open2(var s: pAVIOContext; const url: PAnsiChar; flags: int; const int_cb: pAVIOInterruptCB; var options: pAVDictionary): int; cdecl; + external avformat_dll; +(* * + * Close the resource accessed by the AVIOContext s and free it. + * This function can only be used if s was opened by avio_open(). + * + * The internal buffer is automatically flushed before closing the + * resource. + * + * @return 0 on success, an AVERROR < 0 on error. + * @see avio_closep +*) +// int avio_close(AVIOContext *s); +function avio_close(s: pAVIOContext): int; cdecl; external avformat_dll; +(* * + * Close the resource accessed by the AVIOContext *s, free it + * and set the pointer pointing to it to NULL. + * This function can only be used if s was opened by avio_open(). + * + * The internal buffer is automatically flushed before closing the + * resource. + * + * @return 0 on success, an AVERROR < 0 on error. + * @see avio_close +*) +// int avio_closep(AVIOContext **s); +function avio_closep(var s: pAVIOContext): int; cdecl; external avformat_dll; +(* * + * Open a write only memory stream. + * + * @param s new IO context + * @return zero if no error. +*) +// int avio_open_dyn_buf(AVIOContext **s); +function avio_open_dyn_buf(var s: pAVIOContext): int; cdecl; external avformat_dll; +(* * + * Return the written size and a pointer to the buffer. + * The AVIOContext stream is left intact. + * The buffer must NOT be freed. + * No padding is added to the buffer. + * + * @param s IO context + * @param pbuffer pointer to a byte buffer + * @return the length of the byte buffer +*) +// int avio_get_dyn_buf(AVIOContext *s, uint8_t **pbuffer); +function avio_get_dyn_buf(s: pAVIOContext; var pbuffer: puint8_t): int; cdecl; external avformat_dll; +(* * + * Return the written size and a pointer to the buffer. The buffer + * must be freed with av_free(). + * Padding of AV_INPUT_BUFFER_PADDING_SIZE is added to the buffer. + * + * @param s IO context + * @param pbuffer pointer to a byte buffer + * @return the length of the byte buffer +*) +// int avio_close_dyn_buf(AVIOContext *s, uint8_t **pbuffer); +function avio_close_dyn_buf(s: pAVIOContext; var pbuffer: puint8_t): int; cdecl; external avformat_dll; +(* * + * Iterate through names of available protocols. + * + * @param opaque A private pointer representing current protocol. + * It must be a pointer to NULL on first iteration and will + * be updated by successive calls to avio_enum_protocols. + * @param output If set to 1, iterate over output protocols, + * otherwise over input protocols. + * + * @return A static string containing the name of current protocol or NULL +*) +// const char *avio_enum_protocols(void **opaque, int output); +function avio_enum_protocols(var opaque: pointer; output: int): PAnsiChar; cdecl; external avformat_dll; +(* * + * Pause and resume playing - only meaningful if using a network streaming + * protocol (e.g. MMS). 
+ * + * @param h IO context from which to call the read_pause function pointer + * @param pause 1 for pause, 0 for resume +*) +// int avio_pause(AVIOContext *h, int pause); +function avio_pause(h: pAVIOContext; pause: int): int; cdecl; external avformat_dll; +(* * + * Seek to a given timestamp relative to some component stream. + * Only meaningful if using a network streaming protocol (e.g. MMS.). + * + * @param h IO context from which to call the seek function pointers + * @param stream_index The stream index that the timestamp is relative to. + * If stream_index is (-1) the timestamp should be in AV_TIME_BASE + * units from the beginning of the presentation. + * If a stream_index >= 0 is used and the protocol does not support + * seeking based on component streams, the call will fail. + * @param timestamp timestamp in AVStream.time_base units + * or if there is no stream specified then in AV_TIME_BASE units. + * @param flags Optional combination of AVSEEK_FLAG_BACKWARD, AVSEEK_FLAG_BYTE + * and AVSEEK_FLAG_ANY. The protocol may silently ignore + * AVSEEK_FLAG_BACKWARD and AVSEEK_FLAG_ANY, but AVSEEK_FLAG_BYTE will + * fail if used and not supported. + * @return >= 0 on success + * @see AVInputFormat::read_seek +*) +// int64_t avio_seek_time(AVIOContext *h, int stream_index, int64_t timestamp, int flags); +function avio_seek_time(h: pAVIOContext; stream_index: int; timestamp: int64_t; flags: int): int64_t; cdecl; external avformat_dll; + +(* * + * Read contents of h into print buffer, up to max_size bytes, or up to EOF. + * + * @return 0 for success (max_size bytes read or EOF reached), negative error + * code otherwise +*) +// int avio_read_to_bprint(AVIOContext *h, struct AVBPrint *pb, size_t max_size); +function avio_read_to_bprint(h: pAVIOContext; pb: pAVBPrint; max_size: size_t): int; cdecl; external avformat_dll; +(* * + * Accept and allocate a client context on a server context. + * @param s the server context + * @param c the client context, must be unallocated + * @return >= 0 on success or a negative value corresponding + * to an AVERROR on failure +*) +// int avio_accept(AVIOContext *s, AVIOContext **c); +function avio_accept(s: pAVIOContext; var c: pAVIOContext): int; cdecl; external avformat_dll; +(* * + * Perform one step of the protocol handshake to accept a new client. + * This function must be called on a client returned by avio_accept() before + * using it as a read/write context. + * It is separate from avio_accept() because it may block. + * A step of the handshake is defined by places where the application may + * decide to change the proceedings. + * For example, on a protocol with a request header and a reply header, each + * one can constitute a step because the application may use the parameters + * from the request to change parameters in the reply; or each individual + * chunk of the request can constitute a step. + * If the handshake is already finished, avio_handshake() does nothing and + * returns 0 immediately. + * + * @param c the client context to perform the handshake on + * @return 0 on a complete and successful handshake + * > 0 if the handshake progressed, but is not complete + * < 0 for an AVERROR code +*) +// int avio_handshake(AVIOContext *c); +function avio_handshake(c: pAVIOContext): int; cdecl; external avformat_dll; +{$ENDREGION} + +const + // AVFormatContext -> int flags; + AVFMT_FLAG_GENPTS = $0001; // < Generate missing pts even if it requires parsing future frames. + AVFMT_FLAG_IGNIDX = $0002; // < Ignore index. 
+ AVFMT_FLAG_NONBLOCK = $0004; // < Do not block when reading packets from input. + AVFMT_FLAG_IGNDTS = $0008; // < Ignore DTS on frames that contain both DTS & PTS + AVFMT_FLAG_NOFILLIN = $0010; // < Do not infer any values from other values, just return what is stored in the container + AVFMT_FLAG_NOPARSE = $0020; + // < Do not use AVParsers, you also must set AVFMT_FLAG_NOFILLIN as the fillin code works on frames and no parsing -> no frames. Also seeking to frames can not work if parsing to find frame boundaries has been disabled + AVFMT_FLAG_NOBUFFER = $0040; // < Do not buffer frames when possible + AVFMT_FLAG_CUSTOM_IO = $0080; // < The caller has supplied a custom AVIOContext, don't avio_close() it. + AVFMT_FLAG_DISCARD_CORRUPT = $0100; // < Discard frames marked corrupted + AVFMT_FLAG_FLUSH_PACKETS = $0200; // < Flush the AVIOContext every packet. + (* * + * When muxing, try to avoid writing any random/volatile data to the output. + * This includes any random IDs, real-time timestamps/dates, muxer version, etc. + * + * This flag is mainly intended for testing. + *) + AVFMT_FLAG_BITEXACT = $0400; +{$IFDEF FF_API_LAVF_MP4A_LATM} + // < Enable RTP MP4A-LATM payload + AVFMT_FLAG_MP4A_LATM = $8000; + /// < Deprecated, does nothing. +{$ENDIF} + AVFMT_FLAG_SORT_DTS = $10000; // < try to interleave outputted packets by dts (using this flag can slow demuxing down) + AVFMT_FLAG_PRIV_OPT = $20000; + // < Enable use of private options by delaying codec open (this could be made default once all code is converted) +{$IFDEF FF_API_LAVF_KEEPSIDE_FLAG} + AVFMT_FLAG_KEEP_SIDE_DATA = $40000; // < Deprecated, does nothing. +{$ENDIF} + AVFMT_FLAG_FAST_SEEK = $80000; // < Enable fast, but inaccurate seeks for some formats + AVFMT_FLAG_SHORTEST = $100000; // < Stop muxing when the shortest stream stops. + AVFMT_FLAG_AUTO_BSF = $200000; // < Add bitstream filters as requested by the muxer + + // AVFormatContext ->int debug; + FF_FDEBUG_TS = $0001; + // AVFormatContext ->int event_flags; + AVFMT_EVENT_FLAG_METADATA_UPDATED = $0001; // < The call resulted in updated metadata. + // AVFormatContext ->int avoid_negative_ts; + AVFMT_AVOID_NEG_TS_AUTO = -1; // < Enabled when required by target format + AVFMT_AVOID_NEG_TS_MAKE_NON_NEGATIVE = 1; // < Shift timestamps so they are non negative + AVFMT_AVOID_NEG_TS_MAKE_ZERO = 2; // < Shift timestamps so that they start at 0 + + // AVProgram + AV_PROGRAM_RUNNING = 1; + // + AVFMTCTX_NOHEADER = $0001; (* *< signal that no header is present + (streams are added dynamically) *) + AVFMTCTX_UNSEEKABLE = $0002; (* *< signal that the stream is definitely + not seekable, and attempts to call the + seek function will fail. For some + network protocols (e.g. HLS), this can + change dynamically at runtime. *) + +const + AVPROBE_SCORE_EXTENSION = 50; // < score for file extension + AVPROBE_SCORE_MIME = 75; // < score for file mime type + AVPROBE_SCORE_MAX = 100; // < maximum score + + AVPROBE_PADDING_SIZE = 32; // < extra allocated bytes at the end of the probe buffer + + AVPROBE_SCORE_RETRY = (AVPROBE_SCORE_MAX div 4); + AVPROBE_SCORE_STREAM_RETRY = (AVPROBE_SCORE_MAX div 4 - 1); + + // Demuxer will use avio_open, no opened file should be provided by the caller. + AVFMT_NOFILE = $0001; + AVFMT_NEEDNUMBER = $0002; (* *< Needs '%d' in filename. *) + AVFMT_SHOW_IDS = $0008; (* *< Show format stream IDs numbers. *) + AVFMT_GLOBALHEADER = $0040; (* *< Format wants global header. *) + AVFMT_NOTIMESTAMPS = $0080; (* *< Format does not need / have any timestamps. 
*) + AVFMT_GENERIC_INDEX = $0100; (* *< Use generic index building code. *) + AVFMT_TS_DISCONT = $0200; (* *< Format allows timestamp discontinuities. Note, muxers always require valid (monotone) timestamps *) + AVFMT_VARIABLE_FPS = $0400; (* *< Format allows variable fps. *) + AVFMT_NODIMENSIONS = $0800; (* *< Format does not need width/height *) + AVFMT_NOSTREAMS = $1000; (* *< Format does not require any streams *) + AVFMT_NOBINSEARCH = $2000; (* *< Format does not allow to fall back on binary search via read_timestamp *) + AVFMT_NOGENSEARCH = $4000; (* *< Format does not allow to fall back on generic search *) + AVFMT_NO_BYTE_SEEK = $8000; (* *< Format does not allow seeking by bytes *) + AVFMT_ALLOW_FLUSH = $10000; + (* *< Format allows flushing. If not set, the muxer will not receive a NULL packet in the write_packet function. *) + AVFMT_TS_NONSTRICT = $20000; (* *< Format does not require strictly + increasing timestamps, but they must + still be monotonic *) + AVFMT_TS_NEGATIVE = $40000; (* *< Format allows muxing negative + timestamps. If not set the timestamp + will be shifted in av_write_frame and + av_interleaved_write_frame so they + start from 0. + The user or muxer can override this through + AVFormatContext.avoid_negative_ts + *) + + AVFMT_SEEK_TO_PTS = $4000000; (* *< Seeking is based on PTS *) + + AVINDEX_KEYFRAME = $0001; + AVINDEX_DISCARD_FRAME = $0002; + + AV_DISPOSITION_DEFAULT = $0001; + AV_DISPOSITION_DUB = $0002; + AV_DISPOSITION_ORIGINAL = $0004; + AV_DISPOSITION_COMMENT = $0008; + AV_DISPOSITION_LYRICS = $0010; + AV_DISPOSITION_KARAOKE = $0020; + + (* * + * Track should be used during playback by default. + * Useful for subtitle track that should be displayed + * even when user did not explicitly ask for subtitles. + *) + AV_DISPOSITION_FORCED = $0040; + AV_DISPOSITION_HEARING_IMPAIRED = $0080; (* *< stream for hearing impaired audiences *) + AV_DISPOSITION_VISUAL_IMPAIRED = $0100; (* *< stream for visual impaired audiences *) + AV_DISPOSITION_CLEAN_EFFECTS = $0200; (* *< stream without voice *) + (* * + * The stream is stored in the file as an attached picture/"cover art" (e.g. + * APIC frame in ID3v2). The first (usually only) packet associated with it + * will be returned among the first few packets read from the file unless + * seeking takes place. It can also be accessed at any time in + * AVStream.attached_pic. + *) + AV_DISPOSITION_ATTACHED_PIC = $0400; + (* * + * The stream is sparse, and contains thumbnail images, often corresponding + * to chapter markers. Only ever used with AV_DISPOSITION_ATTACHED_PIC. + *) + AV_DISPOSITION_TIMED_THUMBNAILS = $0800; + + (* * + * To specify text track kind (different from subtitles default). + *) + AV_DISPOSITION_CAPTIONS = $10000; + AV_DISPOSITION_DESCRIPTIONS = $20000; + AV_DISPOSITION_METADATA = $40000; + AV_DISPOSITION_DEPENDENT = $80000; // < dependent audio stream (mix_type=0 in mpegts) + AV_DISPOSITION_STILL_IMAGE = $100000; + /// < still images in video stream (still_picture_flag=1 in mpegts) + + (* * + * Options for behavior on timestamp wrap detection. + *) + AV_PTS_WRAP_IGNORE = 0; // < ignore the wrap + AV_PTS_WRAP_ADD_OFFSET = 1; // < add the format specific offset on wrap detection + AV_PTS_WRAP_SUB_OFFSET = -1; // < subtract the format specific offset on wrap detection + + AVSTREAM_EVENT_FLAG_METADATA_UPDATED = $0001; // < The call resulted in updated metadata. + (* **************************************************************** + * All fields below this line are not part of the public API. 
They + * may not be used outside of libavformat and can be changed and + * removed at will. + * Internal note: be aware that physically removing these fields + * will break ABI. Replace removed fields with dummy fields, and + * add new fields to AVStreamInternal. + ***************************************************************** + *) + + MAX_STD_TIMEBASES = (30 * 12 + 30 + 3 + 6); + + MAX_REORDER_DELAY = 16; + +type + + pAVFormatContext = ^AVFormatContext; + ppAVFormatContext = ^pAVFormatContext; + pAVFormatInternal = ^AVFormatInternal; + pAVInputFormat = ^AVInputFormat; + pAVCodecTag = ^AVCodecTag; + ppAVCodecTag = ^pAVCodecTag; + pAVProbeData = ^AVProbeData; + pAVDeviceInfoList = ^AVDeviceInfoList; + pAVDeviceCapabilitiesQuery = ^AVDeviceCapabilitiesQuery; + pAVOutputFormat = ^AVOutputFormat; + pAVChapter = ^AVChapter; + ppAVChapter = ^pAVChapter; + pAVStream = ^AVStream; + ppAVStream = ^pAVStream; + pAVProgram = ^AVProgram; + ppAVProgram = ^pAVProgram; + + AVChapter = record + id: int; // < unique ID to identify the chapter + time_base: AVRational; // < time base in which the start/end timestamps are specified + start, _end: int64_t; // < chapter start/end time in time_base units + metadata: pAVDictionary; + end; + + (* * + * Callback used by devices to communicate with application. + *) + // typedef int (*av_format_control_message)(struct AVFormatContext *s, int type, + // void *data, size_t data_size); + Tav_format_control_message = function(s: pAVFormatContext; _type: int; data: pointer; data_size: size_t): int; cdecl; + + // typedef int (*AVOpenCallback)(struct AVFormatContext *s, AVIOContext **pb, const char *url, int flags, + // const AVIOInterruptCB *int_cb, AVDictionary **options); + TAVOpenCallback = function(s: pAVFormatContext; var pb: pAVIOContext; const url: PAnsiChar; flags: int; const int_cb: pAVIOInterruptCB; + var options: pAVDictionary): int; cdecl; + + (* * + * The duration of a video can be estimated through various ways, and this enum can be used + * to know how the duration was estimated. + *) + AVDurationEstimationMethod = ( // + AVFMT_DURATION_FROM_PTS, // < Duration accurately estimated from PTSes + AVFMT_DURATION_FROM_STREAM, // < Duration estimated from a stream with a known duration + AVFMT_DURATION_FROM_BITRATE // < Duration estimated from bitrate (less accurate) + ); + + AVFormatInternal = record + end; + + AVInputFormat = record + (* * + * A comma separated list of short names for the format. New names + * may be appended with a minor bump. + *) + name: PAnsiChar; + + (* * + * Descriptive name for the format, meant to be more human-readable + * than name. You should use the NULL_IF_CONFIG_SMALL() macro + * to define it. + *) + long_name: PAnsiChar; + + (* * + * Can use flags: AVFMT_NOFILE, AVFMT_NEEDNUMBER, AVFMT_SHOW_IDS, + * AVFMT_NOTIMESTAMPS, AVFMT_GENERIC_INDEX, AVFMT_TS_DISCONT, AVFMT_NOBINSEARCH, + * AVFMT_NOGENSEARCH, AVFMT_NO_BYTE_SEEK, AVFMT_SEEK_TO_PTS. + *) + flags: int; + + (* * + * If extensions are defined, then no probe is done. You should + * usually not use extension format guessing because it is not + * reliable enough + *) + extensions: PAnsiChar; + + codec_tag: ppAVCodecTag; + + priv_class: pAVClass; // < AVClass for the private context + + (* * + * Comma-separated list of mime types. + * It is used check for matching mime types while probing. + * @see av_probe_input_format2 + *) + mime_type: PAnsiChar; + + (* **************************************************************** + * No fields below this line are part of the public API. 
They + * may not be used outside of libavformat and can be changed and + * removed at will. + * New public fields should be added right above. + ***************************************************************** + *) + next: pAVInputFormat; + + (* * + * Raw demuxers store their codec ID here. + *) + raw_codec_id: int; + + (* * + * Size of private data so that it can be allocated in the wrapper. + *) + priv_data_size: int; + + (* * + * Tell if a given file has a chance of being parsed as this format. + * The buffer provided is guaranteed to be AVPROBE_PADDING_SIZE bytes + * big so you do not have to check for that unless you need more. + *) + // int (*read_probe)(AVProbeData *); + read_probe: function(const p: pAVProbeData): int; cdecl; + + (* * + * Read the format header and initialize the AVFormatContext + * structure. Return 0 if OK. 'avformat_new_stream' should be + * called to create new streams. + *) + // int (*read_header)(struct AVFormatContext *); + read_header: function(p: pAVFormatContext): int; cdecl; + + (* * + * Read one packet and put it in 'pkt'. pts and flags are also + * set. 'avformat_new_stream' can be called only if the flag + * AVFMTCTX_NOHEADER is used and only in the calling thread (not in a + * background thread). + * @return 0 on success, < 0 on error. + * When returning an error, pkt must not have been allocated + * or must be freed before returning + *) + // int (*read_packet)(struct AVFormatContext *, AVPacket *pkt); + read_packet: function(p: pAVFormatContext; pkt: pAVPacket): int; cdecl; + + (* * + * Close the stream. The AVFormatContext and AVStreams are not + * freed by this function + *) + // int (*read_close)(struct AVFormatContext *); + read_close: function(p: pAVFormatContext): int; cdecl; + (* * + * Seek to a given timestamp relative to the frames in + * stream component stream_index. + * @param stream_index Must not be -1. + * @param flags Selects which direction should be preferred if no exact + * match is available. + * @return >= 0 on success (but not necessarily the new offset) + *) + // int (*read_seek)(struct AVFormatContext *, int stream_index, int64_t timestamp, int flags); + read_seek: function(p: pAVFormatContext; stream_index: int; timestamp: int64_t; flags: int): int; cdecl; + + (* * + * Get the next timestamp in stream[stream_index].time_base units. + * @return the timestamp or AV_NOPTS_VALUE if an error occurred + *) + // int64_t (*read_timestamp)(struct AVFormatContext *s, int stream_index, int64_t *pos, int64_t pos_limit); + read_timestamp: function(s: pAVFormatContext; stream_index: int; var pos: int64_t; pos_limit: int64_t): int64_t; cdecl; + + (* * + * Start/resume playing - only meaningful if using a network-based format + * (RTSP). + *) + // int (*read_play)(struct AVFormatContext *); + read_play: function(p: pAVFormatContext): int; cdecl; + (* * + * Pause playing - only meaningful if using a network-based format + * (RTSP). + *) + // int (*read_pause)(struct AVFormatContext *); + read_pause: function(p: pAVFormatContext): int; cdecl; + (* * + * Seek to timestamp ts. + * Seeking will be done so that the point from which all active streams + * can be presented successfully will be closest to ts and within min/max_ts. + * Active streams are all streams that have AVStream.discard < AVDISCARD_ALL. 
+ *) + // int (*read_seek2)(struct AVFormatContext *s, int stream_index, int64_t min_ts, int64_t ts, int64_t max_ts, int flags); + read_seek2: function(s: pAVFormatContext; stream_index: int; min_ts: int64_t; ts: int64_t; max_ts: int64_t; flags: int): int; cdecl; + + (* * + * Returns device list with it properties. + * @see avdevice_list_devices() for more details. + *) + // int (*get_device_list)(struct AVFormatContext *s, struct AVDeviceInfoList *device_list); + get_device_list: function(s: pAVFormatContext; device_list: pAVDeviceInfoList): int; cdecl; + (* * + * Initialize device capabilities submodule. + * @see avdevice_capabilities_create() for more details. + *) + // int (*create_device_capabilities)(struct AVFormatContext *s, struct AVDeviceCapabilitiesQuery *caps); + create_device_capabilities: function(s: pAVFormatContext; caps: pAVDeviceCapabilitiesQuery): int; cdecl; + (* * + * Free device capabilities submodule. + * @see avdevice_capabilities_free() for more details. + *) + // int (*free_device_capabilities)(struct AVFormatContext *s, struct AVDeviceCapabilitiesQuery *caps); + free_device_capabilities: function(s: pAVFormatContext; caps: pAVDeviceCapabilitiesQuery): int; cdecl; + end; + + (* * + * Format I/O context. + * New fields can be added to the end with minor version bumps. + * Removal, reordering and changes to existing fields require a major + * version bump. + * sizeof(AVFormatContext) must not be used outside libav*, use + * avformat_alloc_context() to create an AVFormatContext. + * + * Fields can be accessed through AVOptions (av_opt* ), + * the name string used matches the associated command line parameter name and + * can be found in libavformat/options_table.h. + * The AVOption/command line parameter names differ in some cases from the C + * structure field names for historic reasons or brevity. + *) + + AVFormatContext = record + (* * + * A class for logging and @ref avoptions. Set by avformat_alloc_context(). + * Exports (de)muxer private options if they exist. + *) + av_class: pAVClass; + + (* * + * The input container format. + * + * Demuxing only, set by avformat_open_input(). + *) + iformat: pAVInputFormat; + + (* * + * The output container format. + * + * Muxing only, must be set by the caller before avformat_write_header(). + *) + oformat: pAVOutputFormat; + + (* * + * Format private data. This is an AVOptions-enabled struct + * if and only if iformat/oformat.priv_class is not NULL. + * + * - muxing: set by avformat_write_header() + * - demuxing: set by avformat_open_input() + *) + priv_data: pointer; + + (* * + * I/O context. + * + * - demuxing: either set by the user before avformat_open_input() (then + * the user must close it manually) or set by avformat_open_input(). + * - muxing: set by the user before avformat_write_header(). The caller must + * take care of closing / freeing the IO context. + * + * Do NOT set this field if AVFMT_NOFILE flag is set in + * iformat/oformat.flags. In such a case, the (de)muxer will handle + * I/O in some other way and this field will be NULL. + *) + pb: pAVIOContext; + + (* stream info *) + (* * + * Flags signalling stream properties. A combination of AVFMTCTX_*. + * Set by libavformat. + *) + ctx_flags: int; + + (* * + * Number of elements in AVFormatContext.streams. + * + * Set by avformat_new_stream(), must not be modified by any other code. + *) + nb_streams: unsigned_int; + (* * + * A list of all streams in the file. New streams are created with + * avformat_new_stream(). 
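+     *
+     * A hedged Pascal sketch of walking this array after demuxer setup
+     * (avformat_open_input, avformat_find_stream_info and
+     * avformat_close_input are assumed to be bound elsewhere in this unit):
+     * @code
+     *  ctx:=nil;
+     *  if avformat_open_input(ctx,'movie.mp4',nil,nil)<0 then Exit;
+     *  if avformat_find_stream_info(ctx,nil)<0 then Exit;
+     *  for i:=0 to int(ctx^.nb_streams)-1 do
+     *   Writeln('stream #',i);  // ctx^.streams[i] is a pAVStream
+     *  avformat_close_input(ctx);
+     * @endcode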
+ * + * - demuxing: streams are created by libavformat in avformat_open_input(). + * If AVFMTCTX_NOHEADER is set in ctx_flags, then new streams may also + * appear in av_read_frame(). + * - muxing: streams are created by the user before avformat_write_header(). + * + * Freed by libavformat in avformat_free_context(). + *) + streams: ppAVStream; + +{$IFDEF FF_API_FORMAT_FILENAME} + (* * + * input or output filename + * + * - demuxing: set by avformat_open_input() + * - muxing: may be set by the caller before avformat_write_header() + * + * @deprecated Use url instead. + *) + // attribute_deprecated + filename: array [0 .. 1024 - 1] of AnsiChar; +{$ENDIF} + (* * + * input or output URL. Unlike the old filename field, this field has no + * length restriction. + * + * - demuxing: set by avformat_open_input(), initialized to an empty + * string if url parameter was NULL in avformat_open_input(). + * - muxing: may be set by the caller before calling avformat_write_header() + * (or avformat_init_output() if that is called first) to a string + * which is freeable by av_free(). Set to an empty string if it + * was NULL in avformat_init_output(). + * + * Freed by libavformat in avformat_free_context(). + *) + url: PAnsiChar; + + (* * + * Position of the first frame of the component, in + * AV_TIME_BASE fractional seconds. NEVER set this value directly: + * It is deduced from the AVStream values. + * + * Demuxing only, set by libavformat. + *) + start_time: int64_t; + + (* * + * Duration of the stream, in AV_TIME_BASE fractional + * seconds. Only set this value if you know none of the individual stream + * durations and also do not set any of them. This is deduced from the + * AVStream values if not set. + * + * Demuxing only, set by libavformat. + *) + duration: int64_t; + + (* * + * Total stream bitrate in bit/s, 0 if not + * available. Never set it directly if the file_size and the + * duration are known as FFmpeg can compute it automatically. + *) + bit_rate: int64_t; + + packet_size: unsigned_int; + max_delay: int; + + (* * + * Flags modifying the (de)muxer behaviour. A combination of AVFMT_FLAG_*. + * Set by the user before avformat_open_input() / avformat_write_header(). + *) + flags: int; + + (* * + * Maximum size of the data read from input for determining + * the input container format. + * Demuxing only, set by the caller before avformat_open_input(). + *) + robesize: int64_t; + + (* * + * Maximum duration (in AV_TIME_BASE units) of the data read + * from input in avformat_find_stream_info(). + * Demuxing only, set by the caller before avformat_find_stream_info(). + * Can be set to 0 to let avformat choose using a heuristic. + *) + ax_analyze_duration: int64_t; + + key: puint8_t; + keylen: int; + + nb_programs: unsigned_int; + programs: ppAVProgram; + + (* * + * Forced video codec_id. + * Demuxing: Set by user. + *) + video_codec_id: AVCodecID; + + (* * + * Forced audio codec_id. + * Demuxing: Set by user. + *) + audio_codec_id: AVCodecID; + + (* * + * Forced subtitle codec_id. + * Demuxing: Set by user. + *) + subtitle_codec_id: AVCodecID; + + (* * + * Maximum amount of memory in bytes to use for the index of each stream. + * If the index exceeds this size, entries will be discarded as + * needed to maintain a smaller size. This can lead to slower or less + * accurate seeking (depends on demuxer). + * Demuxers for which a full in-memory index is mandatory will ignore + * this. 
+ * - muxing: unused + * - demuxing: set by user + *) + max_index_size: unsigned_int; + + (* * + * Maximum amount of memory in bytes to use for buffering frames + * obtained from realtime capture devices. + *) + max_picture_buffer: unsigned_int; + + (* * + * Number of chapters in AVChapter array. + * When muxing, chapters are normally written in the file header, + * so nb_chapters should normally be initialized before write_header + * is called. Some muxers (e.g. mov and mkv) can also write chapters + * in the trailer. To write chapters in the trailer, nb_chapters + * must be zero when write_header is called and non-zero when + * write_trailer is called. + * - muxing: set by user + * - demuxing: set by libavformat + *) + nb_chapters: unsigned_int; + chapters: ppAVChapter; + + (* * + * Metadata that applies to the whole file. + * + * - demuxing: set by libavformat in avformat_open_input() + * - muxing: may be set by the caller before avformat_write_header() + * + * Freed by libavformat in avformat_free_context(). + *) + metadata: pAVDictionary; + + (* * + * Start time of the stream in real world time, in microseconds + * since the Unix epoch (00:00 1st January 1970). That is, pts=0 in the + * stream was captured at this real world time. + * - muxing: Set by the caller before avformat_write_header(). If set to + * either 0 or AV_NOPTS_VALUE, then the current wall-time will + * be used. + * - demuxing: Set by libavformat. AV_NOPTS_VALUE if unknown. Note that + * the value may become known after some number of frames + * have been received. + *) + start_time_realtime: int64_t; + + (* * + * The number of frames used for determining the framerate in + * avformat_find_stream_info(). + * Demuxing only, set by the caller before avformat_find_stream_info(). + *) + fps_probe_size: int; + + (* * + * Error recognition; higher values will detect more errors but may + * misdetect some more or less valid parts as errors. + * Demuxing only, set by the caller before avformat_open_input(). + *) + error_recognition: int; + + (* * + * Custom interrupt callbacks for the I/O layer. + * + * demuxing: set by the user before avformat_open_input(). + * muxing: set by the user before avformat_write_header() + * (mainly useful for AVFMT_NOFILE formats). The callback + * should also be passed to avio_open2() if it's used to + * open the file. + *) + interrupt_callback: AVIOInterruptCB; + + (* * + * Flags to enable debugging. + *) + debug: int; + + (* * + * Maximum buffering duration for interleaving. + * + * To ensure all the streams are interleaved correctly, + * av_interleaved_write_frame() will wait until it has at least one packet + * for each stream before actually writing any packets to the output file. + * When some streams are "sparse" (i.e. there are large gaps between + * successive packets), this can result in excessive buffering. + * + * This field specifies the maximum difference between the timestamps of the + * first and the last packet in the muxing queue, above which libavformat + * will output a packet regardless of whether it has queued a packet for all + * the streams. + * + * Muxing only, set by the caller before avformat_write_header(). + *) + max_interleave_delta: int64_t; + + (* * + * Allow non-standard and experimental extension + * @see AVCodecContext.strict_std_compliance + *) + strict_std_compliance: int; + + (* * + * Flags for the user to detect events happening on the file. Flags must + * be cleared by the user once the event has been handled. + * A combination of AVFMT_EVENT_FLAG_*. 
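+     *
+     * A hedged Pascal sketch of the usual check-and-clear pattern:
+     * @code
+     *  if (ctx^.event_flags and AVFMT_EVENT_FLAG_METADATA_UPDATED)<>0 then
+     *  begin
+     *   // ...re-read ctx^.metadata here...
+     *   ctx^.event_flags:=ctx^.event_flags and (not AVFMT_EVENT_FLAG_METADATA_UPDATED);
+     *  end;
+     * @endcode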
+ *) + event_flags: int; + + (* * + * Maximum number of packets to read while waiting for the first timestamp. + * Decoding only. + *) + max_ts_probe: int; + + (* * + * Avoid negative timestamps during muxing. + * Any value of the AVFMT_AVOID_NEG_TS_* constants. + * Note, this only works when using av_interleaved_write_frame. (interleave_packet_per_dts is in use) + * - muxing: Set by user + * - demuxing: unused + *) + avoid_negative_ts: int; + + (* * + * Transport stream id. + * This will be moved into demuxer private options. Thus no API/ABI compatibility + *) + ts_id: int; + + (* * + * Audio preload in microseconds. + * Note, not all formats support this and unpredictable things may happen if it is used when not supported. + * - encoding: Set by user + * - decoding: unused + *) + audio_preload: int; + + (* * + * Max chunk time in microseconds. + * Note, not all formats support this and unpredictable things may happen if it is used when not supported. + * - encoding: Set by user + * - decoding: unused + *) + max_chunk_duration: int; + + (* * + * Max chunk size in bytes + * Note, not all formats support this and unpredictable things may happen if it is used when not supported. + * - encoding: Set by user + * - decoding: unused + *) + max_chunk_size: int; + + (* * + * forces the use of wallclock timestamps as pts/dts of packets + * This has undefined results in the presence of B frames. + * - encoding: unused + * - decoding: Set by user + *) + use_wallclock_as_timestamps: int; + + (* * + * avio flags, used to force AVIO_FLAG_DIRECT. + * - encoding: unused + * - decoding: Set by user + *) + avio_flags: int; + + (* * + * The duration field can be estimated through various ways, and this field can be used + * to know how the duration was estimated. + * - encoding: unused + * - decoding: Read by user + *) + duration_estimation_method: AVDurationEstimationMethod; + + (* * + * Skip initial bytes when opening stream + * - encoding: unused + * - decoding: Set by user + *) + skip_initial_bytes: int64_t; + + (* * + * Correct single timestamp overflows + * - encoding: unused + * - decoding: Set by user + *) + correct_ts_overflow: unsigned_int; + + (* * + * Force seeking to any (also non key) frames. + * - encoding: unused + * - decoding: Set by user + *) + seek2any: int; + + (* * + * Flush the I/O context after each packet. + * - encoding: Set by user + * - decoding: unused + *) + flush_packets: int; + + (* * + * format probing score. + * The maximal score is AVPROBE_SCORE_MAX, its set when the demuxer probes + * the format. + * - encoding: unused + * - decoding: set by avformat, read by user + *) + probe_score: int; + + (* * + * number of bytes to read maximally to identify format. + * - encoding: unused + * - decoding: set by user + *) + format_probesize: int; + + (* * + * ',' separated list of allowed decoders. + * If NULL then all are allowed + * - encoding: unused + * - decoding: set by user + *) + codec_whitelist: PAnsiChar; + + (* * + * ',' separated list of allowed demuxers. + * If NULL then all are allowed + * - encoding: unused + * - decoding: set by user + *) + format_whitelist: PAnsiChar; + + (* * + * An opaque field for libavformat internal usage. + * Must not be accessed in any way by callers. + *) + internal: pAVFormatInternal; + + (* * + * IO repositioned flag. + * This is set by avformat when the underlaying IO context read pointer + * is repositioned, for example when doing byte based seeking. + * Demuxers can use the flag to detect such changes. 
+ *) + io_repositioned: int; + + (* * + * Forced video codec. + * This allows forcing a specific decoder, even when there are multiple with + * the same codec_id. + * Demuxing: Set by user + *) + video_codec: pAVCodec; + + (* * + * Forced audio codec. + * This allows forcing a specific decoder, even when there are multiple with + * the same codec_id. + * Demuxing: Set by user + *) + audio_codec: pAVCodec; + + (* * + * Forced subtitle codec. + * This allows forcing a specific decoder, even when there are multiple with + * the same codec_id. + * Demuxing: Set by user + *) + subtitle_codec: pAVCodec; + + (* * + * Forced data codec. + * This allows forcing a specific decoder, even when there are multiple with + * the same codec_id. + * Demuxing: Set by user + *) + data_codec: pAVCodec; + + (* * + * Number of bytes to be written as padding in a metadata header. + * Demuxing: Unused. + * Muxing: Set by user via av_format_set_metadata_header_padding. + *) + metadata_header_padding: int; + + (* * + * User data. + * This is a place for some private data of the user. + *) + opaque: pointer; + + (* * + * Callback used by devices to communicate with application. + *) + control_message_cb: Tav_format_control_message; + + (* * + * Output timestamp offset, in microseconds. + * Muxing: set by user + *) + output_ts_offset: int64_t; + + (* * + * dump format separator. + * can be ", " or "\n " or anything else + * - muxing: Set by user. + * - demuxing: Set by user. + *) + dump_separator: puint8_t; + + (* * + * Forced Data codec_id. + * Demuxing: Set by user. + *) + data_codec_id: AVCodecID; + +{$IFDEF FF_API_OLD_OPEN_CALLBACKS} + (* * + * Called to open further IO contexts when needed for demuxing. + * + * This can be set by the user application to perform security checks on + * the URLs before opening them. + * The function should behave like avio_open2(), AVFormatContext is provided + * as contextual information and to reach AVFormatContext.opaque. + * + * If NULL then some simple checks are used together with avio_open2(). + * + * Must not be accessed directly from outside avformat. + * @See av_format_set_open_cb() + * + * Demuxing: Set by user. + * + * @deprecated Use io_open and io_close. + *) + // attribute_deprecated + // int (*open_cb)(struct AVFormatContext *s, AVIOContext **p, const char *url, int flags, const AVIOInterruptCB *int_cb, AVDictionary **options); + open_cb: function(s: pAVFormatContext; var p: pAVIOContext; const url: PAnsiChar; flags: int; const int_cb: pAVIOInterruptCB; var options: pAVDictionary) + : int; cdecl; +{$ENDIF} + (* * + * ',' separated list of allowed protocols. + * - encoding: unused + * - decoding: set by user + *) + protocol_whitelist: PAnsiChar; + + (* * + * A callback for opening new IO streams. + * + * Whenever a muxer or a demuxer needs to open an IO stream (typically from + * avformat_open_input() for demuxers, but for certain formats can happen at + * other times as well), it will call this callback to obtain an IO context. + * + * @param s the format context + * @param pb on success, the newly opened IO context should be returned here + * @param url the url to open + * @param flags a combination of AVIO_FLAG_* + * @param options a dictionary of additional options, with the same + * semantics as in avio_open2() + * @return 0 on success, a negative AVERROR code on failure + * + * @note Certain muxers and demuxers do nesting, i.e. they open one or more + * additional internal format contexts. 
Thus the AVFormatContext pointer + * passed to this callback may be different from the one facing the caller. + * It will, however, have the same 'opaque' field. + *) + // int (*io_open)(struct AVFormatContext *s, AVIOContext **pb, const char *url, int flags, AVDictionary **options); + io_open: function(s: pAVFormatContext; var pb: pAVIOContext; const url: PAnsiChar; flags: int; var options: pAVDictionary): int; cdecl; + + (* * + * A callback for closing the streams opened with AVFormatContext.io_open(). + *) + // void (*io_close)(struct AVFormatContext *s, AVIOContext *pb); + io_close: procedure(s: pAVFormatContext; pb: pAVIOContext); cdecl; + (* * + * ',' separated list of disallowed protocols. + * - encoding: unused + * - decoding: set by user + *) + protocol_blacklist: PAnsiChar; + + (* * + * The maximum number of streams. + * - encoding: unused + * - decoding: set by user + *) + max_streams: int; + (* + * Skip duration calcuation in estimate_timings_from_pts. + * - encoding: unused + * - decoding: set by user + *) + skip_estimate_duration_from_pts: int; + end; + + (* input/output formats *) + + AVCodecTag = record + end; + + (* * + * This structure contains the data a format has to probe a file. + *) + + AVProbeData = record + filename: PAnsiChar; + buf: punsigned_char; (* *< Buffer must have AVPROBE_PADDING_SIZE of extra allocated bytes filled with zero. *) + buf_size: int; (* *< Size of buf except extra allocated bytes *) + mime_type: PAnsiChar; (* *< mime_type, when known. *) + end; + + AVDeviceInfoList = record + + end; + + AVDeviceCapabilitiesQuery = record + + end; + + AVOutputFormat = record + name: PAnsiChar; + (* * + * Descriptive name for the format, meant to be more human-readable + * than name. You should use the NULL_IF_CONFIG_SMALL() macro + * to define it. + *) + long_name: PAnsiChar; + mime_type: PAnsiChar; + extensions: PAnsiChar; (* *< comma-separated filename extensions *) + (* output support *) + audio_codec: AVCodecID; (* *< default audio codec *) + video_codec: AVCodecID; (* *< default video codec *) + subtitle_codec: AVCodecID; (* *< default subtitle codec *) + (* * + * can use flags: AVFMT_NOFILE, AVFMT_NEEDNUMBER, + * AVFMT_GLOBALHEADER, AVFMT_NOTIMESTAMPS, AVFMT_VARIABLE_FPS, + * AVFMT_NODIMENSIONS, AVFMT_NOSTREAMS, AVFMT_ALLOW_FLUSH, + * AVFMT_TS_NONSTRICT, AVFMT_TS_NEGATIVE + *) + flags: int; + + (* * + * List of supported codec_id-codec_tag pairs, ordered by "better + * choice first". The arrays are all terminated by AV_CODEC_ID_NONE. + *) + codec_tag: ppAVCodecTag; + + priv_class: pAVClass; // < AVClass for the private context + + (* **************************************************************** + * No fields below this line are part of the public API. They + * may not be used outside of libavformat and can be changed and + * removed at will. + * New public fields should be added right above. + ***************************************************************** + *) + next: pAVOutputFormat; + (* * + * size of private data so that it can be allocated in the wrapper + *) + priv_data_size: int; + + // int (*write_header)(struct AVFormatContext *); + write_header: function(p: pAVFormatContext): int; cdecl; + (* * + * Write a packet. If AVFMT_ALLOW_FLUSH is set in flags, + * pkt can be NULL in order to flush data buffered in the muxer. + * When flushing, return 0 if there still is more data to flush, + * or 1 if everything was flushed and there is no more buffered + * data. 
+ *) + // int (*write_packet)(struct AVFormatContext *, AVPacket *pkt); + write_packet: function(fc: pAVFormatContext; pkt: pAVPacket): int; cdecl; + + // int (*write_trailer)(struct AVFormatContext *); + write_trailer: function(p: pAVFormatContext): int; cdecl; + (* * + * Currently only used to set pixel format if not YUV420P. + *) + // int (*interleave_packet)(struct AVFormatContext *, AVPacket *out, AVPacket *in, int flush); + interleave_packet: function(p: pAVFormatContext; _out: pAVPacket; _in: pAVPacket; flush: int): int; cdecl; + (* * + * Test if the given codec can be stored in this container. + * + * @return 1 if the codec is supported, 0 if it is not. + * A negative number if unknown. + * MKTAG('A', 'P', 'I', 'C') if the codec is only supported as AV_DISPOSITION_ATTACHED_PIC + *) + // int (*query_codec)(enum AVCodecID id, int std_compliance); + query_codec: function(id: AVCodecID; std_compliance: int): int; cdecl; + + // void (*get_output_timestamp)(struct AVFormatContext *s, int stream, int64_t *dts, int64_t *wall); + get_output_timestamp: procedure(s: pAVFormatContext; stream: int; var dts: int64_t; var wall: int64_t); cdecl; + (* * + * Allows sending messages from application to device. + *) + // int (*control_message)(struct AVFormatContext *s, int type, void *data, size_t data_size); + control_message: function(s: pAVFormatContext; _type: int; data: pointer; data_size: size_t): int; cdecl; + (* * + * Write an uncoded AVFrame. + * + * See av_write_uncoded_frame() for details. + * + * The library will free *frame afterwards, but the muxer can prevent it + * by setting the pointer to NULL. + *) + // int (*write_uncoded_frame)(struct AVFormatContext *, int stream_index, AVFrame **frame, unsigned flags); + write_uncoded_frame: function(p: pAVFormatContext; stream_index: int; var frame: pAVFrame; flags: unsigned): int; cdecl; + (* * + * Returns device list with it properties. + * @see avdevice_list_devices() for more details. + *) + // int (*get_device_list)(struct AVFormatContext *s, struct AVDeviceInfoList *device_list); + get_device_list: function(s: pAVFormatContext; device_list: pAVDeviceInfoList): int; cdecl; + (* * + * Initialize device capabilities submodule. + * @see avdevice_capabilities_create() for more details. + *) + // int (*create_device_capabilities)(struct AVFormatContext *s, struct AVDeviceCapabilitiesQuery *caps); + create_device_capabilities: function(s: pAVFormatContext; caps: pAVDeviceCapabilitiesQuery): int; cdecl; + (* * + * Free device capabilities submodule. + * @see avdevice_capabilities_free() for more details. + *) + // int (*free_device_capabilities)(struct AVFormatContext *s, struct AVDeviceCapabilitiesQuery *caps); + free_device_capabilities: function(s: pAVFormatContext; caps: pAVDeviceCapabilitiesQuery): int; cdecl; + + data_codec: AVCodecID; (* *< default data codec *) + (* * + * Initialize format. May allocate data here, and set any AVFormatContext or + * AVStream parameters that need to be set before packets are sent. + * This method must not write output. + * + * Return 0 if streams were fully configured, 1 if not, negative AVERROR on failure + * + * Any allocations made here must be freed in deinit(). + *) + // int (*init)(struct AVFormatContext *); + init: function(p: pAVFormatContext): int; cdecl; + (* * + * Deinitialize format. If present, this is called whenever the muxer is being + * destroyed, regardless of whether or not the header has been written. + * + * If a trailer is being written, this is called after write_trailer(). 
+ * + * This is called if init() fails as well. + *) + // void (*deinit)(struct AVFormatContext *); + deinit: procedure(p: pAVFormatContext); cdecl; + (* * + * Set up any necessary bitstream filtering and extract any extra data needed + * for the global header. + * Return 0 if more packets from this stream must be checked; 1 if not. + *) + // int (*check_bitstream)(struct AVFormatContext *, const AVPacket *pkt); + check_bitstream: function(p: pAVFormatContext; const pkt: pAVPacket): int; cdecl; + end; + + AVStreamParseType = ( // + AVSTREAM_PARSE_NONE, AVSTREAM_PARSE_FULL, (* *< full parsing and repack *) + AVSTREAM_PARSE_HEADERS, (* *< Only parse headers, do not repack. *) + AVSTREAM_PARSE_TIMESTAMPS, (* *< full parsing and interpolation of timestamps for frames not starting on a packet boundary *) + AVSTREAM_PARSE_FULL_ONCE, (* *< full parsing and repack of the first frame only, only implemented for H.264 currently *) + AVSTREAM_PARSE_FULL_RAW (* *< full parsing and repack with timestamp and position generation by parser for raw + this assumes that each packet in the file contains no demuxer level headers and + just codec level data, otherwise position generation would fail *) + ); + + pAVIndexEntry = ^AVIndexEntry; + + AVIndexEntry = record + pos: int64_t; + timestamp: int64_t; (* *< + * Timestamp in AVStream.time_base units, preferably the time from which on correctly decoded frames are available + * when seeking to this entry. That means preferable PTS on keyframe based formats. + * But demuxers can choose to store a different timestamp, if it is more convenient for the implementation or nothing better + * is known + *) + (* * + * Flag is used to indicate which frame should be discarded after decoding. + *) + // int flags:2; + // int size:30; //Yeah, trying to keep the size of this small to reduce memory requirements (it is 24 vs. 32 bytes due to possible 8-byte alignment). + flag_size: int32; + min_distance: int; (* *< Minimum distance between this and the previous keyframe, used to avoid unneeded searching. *) + end; + + pAVStreamInternal = ^AVStreamInternal; + + AVStreamInternal = record + + end; + + Tduration_error = array [0 .. 1, 0 .. MAX_STD_TIMEBASES] of Double; + pduration_error = ^Tduration_error; + + pAVStream_info = ^AVStream_info; + + AVStream_info = record + last_dts: int64_t; + duration_gcd: int64_t; + duration_count: int; + rfps_duration_sum: int64_t; + duration_error: pduration_error; + codec_info_duration: int64_t; + codec_info_duration_fields: int64_t; + frame_delay_evidence: int; + + (* * + * 0 -> decoder has not been searched for yet. + * >0 -> decoder found + * <0 -> decoder with codec_id == -found_decoder has not been found + *) + found_decoder: int; + + last_duration: int64_t; + + (* * + * Those are used for average framerate estimation. + *) + fps_first_dts: int64_t; + fps_first_dts_idx: int; + fps_last_dts: int64_t; + fps_last_dts_idx: int; + end; + + Tpts_buffer_int64_t = array [0 .. MAX_REORDER_DELAY] of int64_t; + Tpts_reorder_error_count_uint8_t = array [0 .. MAX_REORDER_DELAY] of uint8_t; + + pAVPacketList = ^AVPacketList; + + AVPacketList = record + pkt: AVPacket; + next: pAVPacketList; + end; + + (* * + * Stream structure. + * New fields can be added to the end with minor version bumps. + * Removal, reordering and changes to existing fields require a major + * version bump. + * sizeof(AVStream) must not be used outside libav*. + *) + + AVStream = record + index: int; (* *< stream index in AVFormatContext *) + (* * + * Format-specific stream ID. 
+ * decoding: set by libavformat + * encoding: set by the user, replaced by libavformat if left unset + *) + id: int; +{$IFDEF FF_API_LAVF_AVCTX} + (* * + * @deprecated use the codecpar struct instead + *) + // attribute_deprecated + codec: pAVCodecContext; +{$ENDIF} + priv_data: pointer; + + (* * + * This is the fundamental unit of time (in seconds) in terms + * of which frame timestamps are represented. + * + * decoding: set by libavformat + * encoding: May be set by the caller before avformat_write_header() to + * provide a hint to the muxer about the desired timebase. In + * avformat_write_header(), the muxer will overwrite this field + * with the timebase that will actually be used for the timestamps + * written into the file (which may or may not be related to the + * user-provided one, depending on the format). + *) + time_base: AVRational; + + (* * + * Decoding: pts of the first frame of the stream in presentation order, in stream time base. + * Only set this if you are absolutely 100% sure that the value you set + * it to really is the pts of the first frame. + * This may be undefined (AV_NOPTS_VALUE). + * @note The ASF header does NOT contain a correct start_time the ASF + * demuxer must NOT set this. + *) + start_time: int64_t; + + (* * + * Decoding: duration of the stream, in stream time base. + * If a source file does not specify a duration, but does specify + * a bitrate, this value will be estimated from bitrate and file size. + * + * Encoding: May be set by the caller before avformat_write_header() to + * provide a hint to the muxer about the estimated duration. + *) + duration: int64_t; + + nb_frames: int64_t; // < number of frames in this stream if known or 0 + + disposition: int; (* *< AV_DISPOSITION_* bit field *) + + discard: AVDiscard; // < Selects which packets can be discarded at will and do not need to be demuxed. + + (* * + * sample aspect ratio (0 if unknown) + * - encoding: Set by user. + * - decoding: Set by libavformat. + *) + sample_aspect_ratio: AVRational; + + metadata: pAVDictionary; + + (* * + * Average framerate + * + * - demuxing: May be set by libavformat when creating the stream or in + * avformat_find_stream_info(). + * - muxing: May be set by the caller before avformat_write_header(). + *) + avg_frame_rate: AVRational; + + (* * + * For streams with AV_DISPOSITION_ATTACHED_PIC disposition, this packet + * will contain the attached picture. + * + * decoding: set by libavformat, must not be modified by the caller. + * encoding: unused + *) + attached_pic: AVPacket; + + (* * + * An array of side data that applies to the whole stream (i.e. the + * container does not allow it to change between packets). + * + * There may be no overlap between the side data in this array and side data + * in the packets. I.e. a given side data is either exported by the muxer + * (demuxing) / set by the caller (muxing) in this array, then it never + * appears in the packets, or the side data is exported / sent through + * the packets (always in the first packet where the value becomes known or + * changes), then it does not appear in this array. + * + * - demuxing: Set by libavformat when the stream is created. + * - muxing: May be set by the caller before avformat_write_header(). + * + * Freed by libavformat in avformat_free_context(). + * + * @see av_format_inject_global_side_data() + *) + side_data: pAVPacketSideData; + (* * + * The number of elements in the AVStream.side_data array. 
+ *) + nb_side_data: int; + + (* * + * Flags for the user to detect events happening on the stream. Flags must + * be cleared by the user once the event has been handled. + * A combination of AVSTREAM_EVENT_FLAG_*. + *) + event_flags: int; + (* * + * Real base framerate of the stream. + * This is the lowest framerate with which all timestamps can be + * represented accurately (it is the least common multiple of all + * framerates in the stream). Note, this value is just a guess! + * For example, if the time base is 1/90000 and all frames have either + * approximately 3600 or 1800 timer ticks, then r_frame_rate will be 50/1. + *) + r_frame_rate: AVRational; + +{$IFDEF FF_API_LAVF_FFSERVER} + (* * + * String containing pairs of key and values describing recommended encoder configuration. + * Pairs are separated by ','. + * Keys are separated from values by '='. + * + * @deprecated unused + *) + // attribute_deprecated + recommended_encoder_configuration: PAnsiChar; +{$ENDIF} + (* * + * Codec parameters associated with this stream. Allocated and freed by + * libavformat in avformat_new_stream() and avformat_free_context() + * respectively. + * + * - demuxing: filled by libavformat on stream creation or in + * avformat_find_stream_info() + * - muxing: filled by the caller before avformat_write_header() + *) + codecpar: pAVCodecParameters; + (* * + * Stream information used internally by avformat_find_stream_info() + *) + info: pAVStream_info; + + pts_wrap_bits: int; (* *< number of bits in pts (used for wrapping control) *) + + // Timestamp generation support: + (* * + * Timestamp corresponding to the last dts sync point. + * + * Initialized when AVCodecParserContext.dts_sync_point >= 0 and + * a DTS is received from the underlying container. Otherwise set to + * AV_NOPTS_VALUE by default. + *) + first_dts: int64_t; + cur_dts: int64_t; + last_IP_pts: int64_t; + last_IP_duration: int; + + (* * + * Number of packets to buffer for codec probing + *) + probe_packets: int; + + (* * + * Number of frames that have been demuxed during avformat_find_stream_info() + *) + codec_info_nb_frames: int; + + (* av_read_frame() support *) + need_parsing: AVStreamParseType; + parser: pAVCodecParserContext; + + (* * + * last packet in packet_buffer for this stream when muxing. + *) + last_in_packet_buffer: pAVPacketList; + probe_data: AVProbeData; + + pts_buffer: Tpts_buffer_int64_t; + + index_entries: pAVIndexEntry; (* *< Only used if the format does not + support seeking natively. *) + nb_index_entries: int; + index_entries_allocated_size: unsigned_int; + + (* * + * Stream Identifier + * This is the MPEG-TS stream identifier +1 + * 0 means unknown + *) + stream_identifier: int; + + (* + * Details of the MPEG-TS program which created this stream. + *) + program_num: int; + pmt_version: int; + pmt_stream_idx: int; + + interleaver_chunk_size: int64_t; + interleaver_chunk_duration: int64_t; + + (* * + * stream probing state + * -1 -> probing finished + * 0 -> no probing requested + * rest -> perform probing with request_probe being the minimum score to accept. + * NOT PART OF PUBLIC API + *) + request_probe: int; + (* * + * Indicates that everything up to the next keyframe + * should be discarded. + *) + skip_to_keyframe: int; + + (* * + * Number of samples to skip at the start of the frame decoded from the next packet. 
+ *) + skip_samples: int; + + (* * + * If not 0, the number of samples that should be skipped from the start of + * the stream (the samples are removed from packets with pts==0, which also + * assumes negative timestamps do not happen). + * Intended for use with formats such as mp3 with ad-hoc gapless audio + * support. + *) + start_skip_samples: int64_t; + + (* * + * If not 0, the first audio sample that should be discarded from the stream. + * This is broken by design (needs global sample count), but can't be + * avoided for broken by design formats such as mp3 with ad-hoc gapless + * audio support. + *) + first_discard_sample: int64_t; + + (* * + * The sample after last sample that is intended to be discarded after + * first_discard_sample. Works on frame boundaries only. Used to prevent + * early EOF if the gapless info is broken (considered concatenated mp3s). + *) + last_discard_sample: int64_t; + + (* * + * Number of internally decoded frames, used internally in libavformat, do not access + * its lifetime differs from info which is why it is not in that structure. + *) + nb_decoded_frames: int; + + (* * + * Timestamp offset added to timestamps before muxing + * NOT PART OF PUBLIC API + *) + mux_ts_offset: int64_t; + + (* * + * Internal data to check for wrapping of the time stamp + *) + pts_wrap_reference: int64_t; + + (* * + * Options for behavior, when a wrap is detected. + * + * Defined by AV_PTS_WRAP_ values. + * + * If correction is enabled, there are two possibilities: + * If the first time stamp is near the wrap point, the wrap offset + * will be subtracted, which will create negative time stamps. + * Otherwise the offset will be added. + *) + pts_wrap_behavior: int; + + (* * + * Internal data to prevent doing update_initial_durations() twice + *) + update_initial_durations_done: int; + + (* * + * Internal data to generate dts from pts + *) + pts_reorder_error: Tpts_buffer_int64_t; + pts_reorder_error_count: Tpts_reorder_error_count_uint8_t; + + (* * + * Internal data to analyze DTS and detect faulty mpeg streams + *) + last_dts_for_order_check: int64_t; + dts_ordered: uint8_t; + dts_misordered: uint8_t; + + (* * + * Internal data to inject global side data + *) + inject_global_side_data: int; + + (* * + * display aspect ratio (0 if unknown) + * - encoding: unused + * - decoding: Set by libavformat to calculate sample_aspect_ratio internally + *) + display_aspect_ratio: AVRational; + + (* * + * An opaque field for libavformat internal usage. + * Must not be accessed in any way by callers. + *) + internal: pAVStreamInternal; + end; + + (* * + * New fields can be added to the end with minor version bumps. + * Removal, reordering and changes to existing fields require a major + * version bump. + * sizeof(AVProgram) must not be used outside libav*. + *) + + AVProgram = record + id: int; + flags: int; + discard: AVDiscard; // < selects which program to discard and which to feed to the caller + stream_index: punsigned_int; + nb_stream_indexes: unsigned_int; + metadata: pAVDictionary; + + program_num: int; + pmt_pid: int; + pcr_pid: int; + pmt_version: int; + + (* **************************************************************** + * All fields below this line are not part of the public API. They + * may not be used outside of libavformat and can be changed and + * removed at will. + * New public fields should be added right above. 
+ ***************************************************************** + *) + start_time: int64_t; + end_time: int64_t; + + pts_wrap_reference: int64_t; // < reference dts for wrap detection + pts_wrap_behavior: int; // < behavior on wrap detection + end; + + (* * + * @defgroup metadata_api Public Metadata API + * @{ + * @ingroup libavf + * The metadata API allows libavformat to export metadata tags to a client + * application when demuxing. Conversely it allows a client application to + * set metadata when muxing. + * + * Metadata is exported or set as pairs of key/value strings in the 'metadata' + * fields of the AVFormatContext, AVStream, AVChapter and AVProgram structs + * using the @ref lavu_dict "AVDictionary" API. Like all strings in FFmpeg, + * metadata is assumed to be UTF-8 encoded Unicode. Note that metadata + * exported by demuxers isn't checked to be valid UTF-8 in most cases. + * + * Important concepts to keep in mind: + * - Keys are unique; there can never be 2 tags with the same key. This is + * also meant semantically, i.e., a demuxer should not knowingly produce + * several keys that are literally different but semantically identical. + * E.g., key=Author5, key=Author6. In this example, all authors must be + * placed in the same tag. + * - Metadata is flat, not hierarchical; there are no subtags. If you + * want to store, e.g., the email address of the child of producer Alice + * and actor Bob, that could have key=alice_and_bobs_childs_email_address. + * - Several modifiers can be applied to the tag name. This is done by + * appending a dash character ('-') and the modifier name in the order + * they appear in the list below -- e.g. foo-eng-sort, not foo-sort-eng. + * - language -- a tag whose value is localized for a particular language + * is appended with the ISO 639-2/B 3-letter language code. + * For example: Author-ger=Michael, Author-eng=Mike + * The original/default language is in the unqualified "Author" tag. + * A demuxer should set a default if it sets any translated tag. + * - sorting -- a modified version of a tag that should be used for + * sorting will have '-sort' appended. E.g. artist="The Beatles", + * artist-sort="Beatles, The". + * - Some protocols and demuxers support metadata updates. After a successful + * call to av_read_packet(), AVFormatContext.event_flags or AVStream.event_flags + * will be updated to indicate if metadata changed. In order to detect metadata + * changes on a stream, you need to loop through all streams in the AVFormatContext + * and check their individual event_flags. + * + * - Demuxers attempt to export metadata in a generic format, however tags + * with no generic equivalents are left as they are stored in the container. + * Follows a list of generic tag names: + * + @verbatim + album -- name of the set this work belongs to + album_artist -- main creator of the set/album, if different from artist. + e.g. "Various Artists" for compilation albums. + artist -- main creator of the work + comment -- any additional description of the file. + composer -- who composed the work, if different from artist. + copyright -- name of copyright holder. + creation_time-- date when the file was created, preferably in ISO 8601. + date -- date when the work was created, preferably in ISO 8601. + disc -- number of a subset, e.g. disc in a multi-disc collection. + encoder -- name/settings of the software/hardware that produced the file. + encoded_by -- person/group who created the file. + filename -- original name of the file. + genre -- . 
+ language -- main language in which the work is performed, preferably + in ISO 639-2 format. Multiple languages can be specified by + separating them with commas. + performer -- artist who performed the work, if different from artist. + E.g for "Also sprach Zarathustra", artist would be "Richard + Strauss" and performer "London Philharmonic Orchestra". + publisher -- name of the label/publisher. + service_name -- name of the service in broadcasting (channel name). + service_provider -- name of the service provider in broadcasting. + title -- name of the work. + track -- number of this work in the set, can be in form current/total. + variant_bitrate -- the total bitrate of the bitrate variant that the current stream is part of + @endverbatim + * + * Look in the examples section for an application example how to use the Metadata API. + * + * @} + *) + + (* packet functions *) + + (* * + * Allocate and read the payload of a packet and initialize its + * fields with default values. + * + * @param s associated IO context + * @param pkt packet + * @param size desired payload size + * @return >0 (read size) if OK, AVERROR_xxx otherwise + *) + // int av_get_packet(AVIOContext *s, AVPacket *pkt, int size); +function av_get_packet(s: pAVIOContext; pkt: pAVPacket; size: int): int; cdecl; external avformat_dll; +(* * + * Read data and append it to the current content of the AVPacket. + * If pkt->size is 0 this is identical to av_get_packet. + * Note that this uses av_grow_packet and thus involves a realloc + * which is inefficient. Thus this function should only be used + * when there is no reasonable way to know (an upper bound of) + * the final size. + * + * @param s associated IO context + * @param pkt packet + * @param size amount of data to read + * @return >0 (read size) if OK, AVERROR_xxx otherwise, previous data + * will not be lost even if an error occurs. +*) +// int av_append_packet(AVIOContext *s, AVPacket *pkt, int size); +function av_append_packet(s: pAVIOContext; pkt: pAVPacket; size: int): int; cdecl; external avformat_dll; +(* *********************************************** *) + +{$IFDEF FF_API_FORMAT_GET_SET} +(* * + * Accessors for some AVStream fields. These used to be provided for ABI + * compatibility, and do not need to be used anymore. +*) +// attribute_deprecated +// AVRational av_stream_get_r_frame_rate(const AVStream *s); +function av_stream_get_r_frame_rate(const s: pAVStream): AVRational; cdecl; external avformat_dll; + +// attribute_deprecated +// void av_stream_set_r_frame_rate(AVStream *s, AVRational r); +procedure av_stream_set_r_frame_rate(s: pAVStream; r: AVRational); cdecl; external avformat_dll; +{$IFDEF FF_API_LAVF_FFSERVER} +// attribute_deprecated +// char* av_stream_get_recommended_encoder_configuration(const AVStream *s); +function av_stream_get_recommended_encoder_configuration(const s: pAVStream): PAnsiChar; cdecl; external avformat_dll; + +// attribute_deprecated +// void av_stream_set_recommended_encoder_configuration(AVStream *s, char *configuration); +procedure av_stream_set_recommended_encoder_configuration(s: pAVStream; configuration: PAnsiChar); cdecl; external avformat_dll; +{$ENDIF} +{$ENDIF} +// struct AVCodecParserContext *av_stream_get_parser(const AVStream *s); +function av_stream_get_parser(const s: pAVStream): pAVCodecParserContext; cdecl; external avformat_dll; +(* * + * Returns the pts of the last muxed packet + its duration + * + * the retuned value is undefined when used with a demuxer. 
+*) +// int64_t av_stream_get_end_pts(const AVStream *st); +function av_stream_get_end_pts(const st: pAVStream): int64_t; cdecl; external avformat_dll; + +{$IFDEF FF_API_FORMAT_GET_SET} +(* * + * Accessors for some AVFormatContext fields. These used to be provided for ABI + * compatibility, and do not need to be used anymore. +*) +// attribute_deprecated +// int av_format_get_probe_score(const AVFormatContext *s); +function av_format_get_probe_score(const s: pAVFormatContext): int; cdecl; external avformat_dll; + +// attribute_deprecated +// AVCodec * av_format_get_video_codec(const AVFormatContext *s); +function av_format_get_video_codec(const s: pAVFormatContext): pAVCodec; cdecl; external avformat_dll; + +// attribute_deprecated +// void av_format_set_video_codec(AVFormatContext *s, AVCodec *c); +procedure av_format_set_video_codec(s: pAVFormatContext; c: pAVCodec); cdecl; external avformat_dll; + +// attribute_deprecated +// AVCodec * av_format_get_audio_codec(const AVFormatContext *s); +function av_format_get_audio_codec(const s: pAVFormatContext): pAVCodec; cdecl; external avformat_dll; + +// attribute_deprecated +// void av_format_set_audio_codec(AVFormatContext *s, AVCodec *c); +procedure av_format_set_audio_codec(s: pAVFormatContext; c: pAVCodec); cdecl; external avformat_dll; + +// attribute_deprecated +// AVCodec *av_format_get_subtitle_codec(const AVFormatContext *s); +function av_format_get_subtitle_codec(const s: pAVFormatContext): pAVCodec; cdecl; external avformat_dll; + +// attribute_deprecated +// void av_format_set_subtitle_codec(AVFormatContext *s, AVCodec *c); +procedure av_format_set_subtitle_codec(s: pAVFormatContext; c: pAVCodec); cdecl; external avformat_dll; + +// attribute_deprecated +// AVCodec *av_format_get_data_codec(const AVFormatContext *s); +function av_format_get_data_codec(const s: pAVFormatContext): pAVCodec; cdecl; external avformat_dll; + +// attribute_deprecated +// void av_format_set_data_codec(AVFormatContext *s, AVCodec *c); +procedure av_format_set_data_codec(s: pAVFormatContext; c: pAVCodec); cdecl; external avformat_dll; + +// attribute_deprecated +// int av_format_get_metadata_header_padding(const AVFormatContext *s); +function av_format_get_metadata_header_padding(const s: pAVFormatContext): int; cdecl; external avformat_dll; + +// attribute_deprecated +// void av_format_set_metadata_header_padding(AVFormatContext *s, int c); +procedure av_format_set_metadata_header_padding(s: pAVFormatContext; c: int); cdecl; external avformat_dll; + +// attribute_deprecated +// void *av_format_get_opaque(const AVFormatContext *s); +function av_format_get_opaque(const s: pAVFormatContext): pointer; cdecl; external avformat_dll; + +// attribute_deprecated +// void av_format_set_opaque(AVFormatContext *s, void *opaque); +procedure av_format_set_opaque(s: pAVFormatContext; opaque: pointer); cdecl; external avformat_dll; + +// attribute_deprecated +// av_format_control_message av_format_get_control_message_cb(const AVFormatContext *s); +function av_format_get_control_message_cb(const s: pAVFormatContext): Tav_format_control_message; cdecl; external avformat_dll; + +// attribute_deprecated +// void av_format_set_control_message_cb(AVFormatContext *s, av_format_control_message callback); +procedure av_format_set_control_message_cb(s: pAVFormatContext; callback: Tav_format_control_message); cdecl; external avformat_dll; +{$IFDEF FF_API_OLD_OPEN_CALLBACKS} +// attribute_deprecated AVOpenCallback av_format_get_open_cb(const AVFormatContext *s); +function 
av_format_get_open_cb(const s: pAVFormatContext): TAVOpenCallback; cdecl; external avformat_dll; + +// attribute_deprecated void av_format_set_open_cb(AVFormatContext *s, AVOpenCallback callback); +procedure av_format_set_open_cb(s: pAVFormatContext; callback: TAVOpenCallback); cdecl; external avformat_dll; +{$ENDIF} +{$ENDIF} +(* * + * This function will cause global side data to be injected in the next packet + * of each stream as well as after any subsequent seek. +*) +// void av_format_inject_global_side_data(AVFormatContext *s); +procedure av_format_inject_global_side_data(s: pAVFormatContext); cdecl; external avformat_dll; + +(* * + * Returns the method used to set ctx->duration. + * + * @return AVFMT_DURATION_FROM_PTS, AVFMT_DURATION_FROM_STREAM, or AVFMT_DURATION_FROM_BITRATE. +*) +// enum AVDurationEstimationMethod av_fmt_ctx_get_duration_estimation_method(const AVFormatContext* ctx); +function av_fmt_ctx_get_duration_estimation_method(const ctx: pAVFormatContext): AVDurationEstimationMethod; cdecl; external avformat_dll; + +(* * + * @defgroup lavf_core Core functions + * @ingroup libavf + * + * Functions for querying libavformat capabilities, allocating core structures, + * etc. + * @{ +*) + +(* * + * Return the LIBAVFORMAT_VERSION_INT constant. +*) +// unsigned avformat_version(void); +function avformat_version(): unsigned; cdecl; external avformat_dll; + +(* * + * Return the libavformat build-time configuration. +*) +// const char *avformat_configuration(void); +function avformat_configuration(): PAnsiChar; cdecl; external avformat_dll; + +(* * + * Return the libavformat license. +*) +// const char *avformat_license(void); +function avformat_license(): PAnsiChar; cdecl; external avformat_dll; + +{$IFDEF FF_API_NEXT} +(* * + * Initialize libavformat and register all the muxers, demuxers and + * protocols. If you do not call this function, then you can select + * exactly which formats you want to support. + * + * @see av_register_input_format() + * @see av_register_output_format() +*) +// attribute_deprecated void av_register_all(void); +procedure av_register_all(); cdecl; external avformat_dll; + +// attribute_deprecated void av_register_input_format(AVInputFormat *format); +procedure av_register_input_format(format: pAVInputFormat); cdecl; external avformat_dll; +// attribute_deprecated void av_register_output_format(AVOutputFormat *format); +procedure av_register_output_format(format: pAVOutputFormat); cdecl; external avformat_dll; +{$ENDIF} +(* * + * Do global initialization of network libraries. This is optional, + * and not recommended anymore. + * + * This functions only exists to work around thread-safety issues + * with older GnuTLS or OpenSSL libraries. If libavformat is linked + * to newer versions of those libraries, or if you do not use them, + * calling this function is unnecessary. Otherwise, you need to call + * this function before any other threads using them are started. + * + * This function will be deprecated once support for older GnuTLS and + * OpenSSL libraries is removed, and this function has no purpose + * anymore. +*) +// int avformat_network_init(void); +function avformat_network_init(): int; cdecl; external avformat_dll; + +(* * + * Undo the initialization done by avformat_network_init. Call it only + * once for each time you called avformat_network_init. 
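+ *
+ * Illustrative pairing sketch (not part of the original documentation):
+ *
+ *   if avformat_network_init() = 0 then
+ *   try
+ *     // ...use network based protocols here...
+ *   finally
+ *     avformat_network_deinit();
+ *   end;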
+*) +// int avformat_network_deinit(void); +function avformat_network_deinit(): int; cdecl; external avformat_dll; + +{$IFDEF FF_API_NEXT} +(* * + * If f is NULL, returns the first registered input format, + * if f is non-NULL, returns the next registered input format after f + * or NULL if f is the last one. +*) +// attribute_deprecated +// AVInputFormat *av_iformat_next(const AVInputFormat *f); +function av_iformat_next(const f: pAVInputFormat): pAVInputFormat; cdecl; external avformat_dll; + +(* * + * If f is NULL, returns the first registered output format, + * if f is non-NULL, returns the next registered output format after f + * or NULL if f is the last one. +*) +// attribute_deprecated +// AVOutputFormat *av_oformat_next(const AVOutputFormat *f); +function av_oformat_next(const f: pAVOutputFormat): pAVOutputFormat; cdecl; external avformat_dll; +{$ENDIF} +(* * + * Iterate over all registered muxers. + * + * @param opaque a pointer where libavformat will store the iteration state. Must + * point to NULL to start the iteration. + * + * @return the next registered muxer or NULL when the iteration is + * finished +*) +// const AVOutputFormat *av_muxer_iterate(void **opaque); +function av_muxer_iterate(var opaque: pointer): pAVOutputFormat; cdecl; external avformat_dll; + +(* * + * Iterate over all registered demuxers. + * + * @param opaque a pointer where libavformat will store the iteration state. Must + * point to NULL to start the iteration. + * + * @return the next registered demuxer or NULL when the iteration is + * finished +*) +// const AVInputFormat *av_demuxer_iterate(void **opaque); +function av_demuxer_iterate(var opaque: pointer): pAVInputFormat; cdecl; external avformat_dll; + +(* * + * Allocate an AVFormatContext. + * avformat_free_context() can be used to free the context and everything + * allocated by the framework within it. +*) +// AVFormatContext *avformat_alloc_context(void); +function avformat_alloc_context(): pAVFormatContext; cdecl; external avformat_dll; + +(* * + * Free an AVFormatContext and all its streams. + * @param s context to free +*) +// void avformat_free_context(AVFormatContext *s); +procedure avformat_free_context(s: pAVFormatContext); cdecl; external avformat_dll; + +(* * + * Get the AVClass for AVFormatContext. It can be used in combination with + * AV_OPT_SEARCH_FAKE_OBJ for examining options. + * + * @see av_opt_find(). +*) +// const AVClass *avformat_get_class(void); +function avformat_get_class(): pAVClass; cdecl; external avformat_dll; + +(* * + * Add a new stream to a media file. + * + * When demuxing, it is called by the demuxer in read_header(). If the + * flag AVFMTCTX_NOHEADER is set in s.ctx_flags, then it may also + * be called in read_packet(). + * + * When muxing, should be called by the user before avformat_write_header(). + * + * User is required to call avcodec_close() and avformat_free_context() to + * clean up the allocation by avformat_new_stream(). + * + * @param s media file handle + * @param c If non-NULL, the AVCodecContext corresponding to the new stream + * will be initialized to use this codec. This is needed for e.g. codec-specific + * defaults to be set, so codec should be provided if it is known. + * + * @return newly created stream or NULL on error. +*) +// AVStream *avformat_new_stream(AVFormatContext *s, const AVCodec *c); +function avformat_new_stream(s: pAVFormatContext; const c: pAVCodec): pAVStream; cdecl; external avformat_dll; + +(* * + * Wrap an existing array as stream side data. 
+ * + * @param st stream + * @param type side information type + * @param data the side data array. It must be allocated with the av_malloc() + * family of functions. The ownership of the data is transferred to + * st. + * @param size side information size + * @return zero on success, a negative AVERROR code on failure. On failure, + * the stream is unchanged and the data remains owned by the caller. +*) +// int av_stream_add_side_data(AVStream *st, enum AVPacketSideDataType type, +// uint8_t *data, size_t size); +function av_stream_add_side_data(st: pAVStream; _type: AVPacketSideDataType; data: puint8_t; size: size_t): int; cdecl; external avformat_dll; + +(* * + * Allocate new information from stream. + * + * @param stream stream + * @param type desired side information type + * @param size side information size + * @return pointer to fresh allocated data or NULL otherwise +*) +// uint8_t *av_stream_new_side_data(AVStream *stream, +// enum AVPacketSideDataType type, int size); +function av_stream_new_side_data(stream: pAVStream; _type: AVPacketSideDataType; size: int): puint8_t; cdecl; external avformat_dll; +(* * + * Get side information from stream. + * + * @param stream stream + * @param type desired side information type + * @param size pointer for side information size to store (optional) + * @return pointer to data if present or NULL otherwise +*) +// uint8_t *av_stream_get_side_data(const AVStream *stream, +// enum AVPacketSideDataType type, int *size); +function av_stream_get_side_data(const stream: pAVStream; _type: AVPacketSideDataType; var size: int): puint8_t; cdecl; external avformat_dll; + +// AVProgram *av_new_program(AVFormatContext *s, int id); +function av_new_program(s: pAVFormatContext; id: int): pAVProgram; cdecl; external avformat_dll; + +(* * + * Allocate an AVFormatContext for an output format. + * avformat_free_context() can be used to free the context and + * everything allocated by the framework within it. + * + * @param *ctx is set to the created format context, or to NULL in + * case of failure + * @param oformat format to use for allocating the context, if NULL + * format_name and filename are used instead + * @param format_name the name of output format to use for allocating the + * context, if NULL filename is used instead + * @param filename the name of the filename to use for allocating the + * context, may be NULL + * @return >= 0 in case of success, a negative AVERROR code in case of + * failure +*) +// int avformat_alloc_output_context2(AVFormatContext **ctx, AVOutputFormat *oformat, +// const char *format_name, const char *filename); +function avformat_alloc_output_context2(var ctx: pAVFormatContext; oformat: pAVOutputFormat; const format_name: PAnsiChar; const filename: PAnsiChar): int; + cdecl; external avformat_dll; + +(* * + * @addtogroup lavf_decoding + * @{ +*) + +(* * + * Find AVInputFormat based on the short name of the input format. +*) +// AVInputFormat *av_find_input_format(const char *short_name); +function av_find_input_format(const short_name: PAnsiChar): pAVInputFormat; cdecl; external avformat_dll; + +(* * + * Guess the file format. + * + * @param pd data to be probed + * @param is_opened Whether the file is already opened; determines whether + * demuxers with or without AVFMT_NOFILE are probed. +*) +// AVInputFormat *av_probe_input_format(AVProbeData *pd, int is_opened); +function av_probe_input_format(pd: pAVProbeData; is_opened: int): pAVInputFormat; cdecl; external avformat_dll; + +(* * + * Guess the file format. 
+ * + * @param pd data to be probed + * @param is_opened Whether the file is already opened; determines whether + * demuxers with or without AVFMT_NOFILE are probed. + * @param score_max A probe score larger that this is required to accept a + * detection, the variable is set to the actual detection + * score afterwards. + * If the score is <= AVPROBE_SCORE_MAX / 4 it is recommended + * to retry with a larger probe buffer. +*) +// AVInputFormat *av_probe_input_format2(AVProbeData *pd, int is_opened, int *score_max); +function av_probe_input_format2(pd: pAVProbeData; is_opened: int; var score_max: int): pAVInputFormat; cdecl; external avformat_dll; + +(* * + * Guess the file format. + * + * @param is_opened Whether the file is already opened; determines whether + * demuxers with or without AVFMT_NOFILE are probed. + * @param score_ret The score of the best detection. +*) +// AVInputFormat *av_probe_input_format3(AVProbeData *pd, int is_opened, int *score_ret); +function av_probe_input_format3(pd: pAVProbeData; is_opened: int; var score_ret: int): pAVInputFormat; cdecl; external avformat_dll; + +(* * + * Probe a bytestream to determine the input format. Each time a probe returns + * with a score that is too low, the probe buffer size is increased and another + * attempt is made. When the maximum probe size is reached, the input format + * with the highest score is returned. + * + * @param pb the bytestream to probe + * @param fmt the input format is put here + * @param url the url of the stream + * @param logctx the log context + * @param offset the offset within the bytestream to probe from + * @param max_probe_size the maximum probe buffer size (zero for default) + * @return the score in case of success, a negative value corresponding to an + * the maximal score is AVPROBE_SCORE_MAX + * AVERROR code otherwise +*) +// int av_probe_input_buffer2(AVIOContext *pb, AVInputFormat **fmt, +// const char *url, void *logctx, +// unsigned int offset, unsigned int max_probe_size); +function av_probe_input_buffer2(pb: pAVIOContext; var fmt: pAVInputFormat; const url: PAnsiChar; logctx: pointer; offset: unsigned_int; + max_probe_size: unsigned_int): int; cdecl; external avformat_dll; + +(* * + * Like av_probe_input_buffer2() but returns 0 on success +*) +// int av_probe_input_buffer(AVIOContext *pb, AVInputFormat **fmt, +// const char *url, void *logctx, +// unsigned int offset, unsigned int max_probe_size); +function av_probe_input_buffer(pb: pAVIOContext; var fmt: pAVInputFormat; const url: PAnsiChar; logctx: pointer; offset: unsigned_int; + max_probe_size: unsigned_int): int; cdecl; external avformat_dll; + +(* * + * Open an input stream and read the header. The codecs are not opened. + * The stream must be closed with avformat_close_input(). + * + * @param ps Pointer to user-supplied AVFormatContext (allocated by avformat_alloc_context). + * May be a pointer to NULL, in which case an AVFormatContext is allocated by this + * function and written into ps. + * Note that a user-supplied AVFormatContext will be freed on failure. + * @param url URL of the stream to open. + * @param fmt If non-NULL, this parameter forces a specific input format. + * Otherwise the format is autodetected. + * @param options A dictionary filled with AVFormatContext and demuxer-private options. + * On return this parameter will be destroyed and replaced with a dict containing + * options that were not found. May be NULL. + * + * @return 0 on success, a negative AVERROR on failure. 
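+ *
+ * A minimal Pascal call sketch (illustrative only, not part of the original
+ * FFmpeg documentation; ctx and path are placeholder variables):
+ *
+ *   ctx := nil;
+ *   if avformat_open_input(ctx, PAnsiChar(path), nil, nil) < 0 then Exit;
+ *   // ...use ctx, then release it with avformat_close_input(ctx)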
+ * + * @note If you want to use custom IO, preallocate the format context and set its pb field. +*) +// int avformat_open_input(AVFormatContext **ps, const char *url, AVInputFormat *fmt, AVDictionary **options); +function avformat_open_input(var ps: pAVFormatContext; const url: PAnsiChar; fmt: pAVInputFormat; options: ppAVDictionary): int; cdecl; overload; external avformat_dll; +function avformat_open_input(var ps: pAVFormatContext; const url: PAnsiChar; fmt: pAVInputFormat; var options: pAVDictionary): int; cdecl; overload; external avformat_dll; + +// attribute_deprecated +// int av_demuxer_open(AVFormatContext *ic); +function av_demuxer_open(ic: pAVFormatContext): int; cdecl; external avformat_dll; + +(* * + * Read packets of a media file to get stream information. This + * is useful for file formats with no headers such as MPEG. This + * function also computes the real framerate in case of MPEG-2 repeat + * frame mode. + * The logical file position is not changed by this function; + * examined packets may be buffered for later processing. + * + * @param ic media file handle + * @param options If non-NULL, an ic.nb_streams long array of pointers to + * dictionaries, where i-th member contains options for + * codec corresponding to i-th stream. + * On return each dictionary will be filled with options that were not found. + * @return >=0 if OK, AVERROR_xxx on error + * + * @note this function isn't guaranteed to open all the codecs, so + * options being non-empty at return is a perfectly normal behavior. + * + * @todo Let the user decide somehow what information is needed so that + * we do not waste time getting stuff the user does not need. +*) +// int avformat_find_stream_info(AVFormatContext *ic, AVDictionary **options); +function avformat_find_stream_info(ic: pAVFormatContext; options: ppAVDictionary): int; cdecl; overload; external avformat_dll; +function avformat_find_stream_info(ic: pAVFormatContext; Var options: pAVDictionary): int; cdecl; overload; external avformat_dll; + +(* * + * Find the programs which belong to a given stream. + * + * @param ic media file handle + * @param last the last found program, the search will start after this + * program, or from the beginning if it is NULL + * @param s stream index + * @return the next program which belongs to s, NULL if no program is found or + * the last program is not among the programs of ic. +*) +// AVProgram *av_find_program_from_stream(AVFormatContext *ic, AVProgram *last, int s); +function av_find_program_from_stream(ic: pAVFormatContext; last: pAVProgram; s: int): pAVProgram; cdecl; external avformat_dll; + +// void av_program_add_stream_index(AVFormatContext *ac, int progid, unsigned int idx); +procedure av_program_add_stream_index(ac: pAVFormatContext; progid: int; idx: unsigned_int); cdecl; external avformat_dll; + +(* * + * Find the "best" stream in the file. + * The best stream is determined according to various heuristics as the most + * likely to be what the user expects. + * If the decoder parameter is non-NULL, av_find_best_stream will find the + * default decoder for the stream's codec; streams for which no decoder can + * be found are ignored. + * + * @param ic media file handle + * @param type stream type: video, audio, subtitles, etc. + * @param wanted_stream_nb user-requested stream number, + * or -1 for automatic selection + * @param related_stream try to find a stream related (eg. 
in the same + * program) to this one, or -1 if none + * @param decoder_ret if non-NULL, returns the decoder for the + * selected stream + * @param flags flags; none are currently defined + * @return the non-negative stream number in case of success, + * AVERROR_STREAM_NOT_FOUND if no stream with the requested type + * could be found, + * AVERROR_DECODER_NOT_FOUND if streams were found but no decoder + * @note If av_find_best_stream returns successfully and decoder_ret is not + * NULL, then *decoder_ret is guaranteed to be set to a valid AVCodec. +*) +// int av_find_best_stream(AVFormatContext *ic, +// enum AVMediaType type, +// int wanted_stream_nb, +// int related_stream, +// AVCodec **decoder_ret, +// int flags); +function av_find_best_stream(ic: pAVFormatContext; _type: AVMediaType; wanted_stream_nb: int; related_stream: int; var decoder_ret: pAVCodec; flags: int): int; + cdecl; external avformat_dll; + +(* * + * Return the next frame of a stream. + * This function returns what is stored in the file, and does not validate + * that what is there are valid frames for the decoder. It will split what is + * stored in the file into frames and return one for each call. It will not + * omit invalid data between valid frames so as to give the decoder the maximum + * information possible for decoding. + * + * If pkt->buf is NULL, then the packet is valid until the next + * av_read_frame() or until avformat_close_input(). Otherwise the packet + * is valid indefinitely. In both cases the packet must be freed with + * av_packet_unref when it is no longer needed. For video, the packet contains + * exactly one frame. For audio, it contains an integer number of frames if each + * frame has a known fixed size (e.g. PCM or ADPCM data). If the audio frames + * have a variable size (e.g. MPEG audio), then it contains one frame. + * + * pkt->pts, pkt->dts and pkt->duration are always set to correct + * values in AVStream.time_base units (and guessed if the format cannot + * provide them). pkt->pts can be AV_NOPTS_VALUE if the video format + * has B-frames, so it is better to rely on pkt->dts if you do not + * decompress the payload. + * + * @return 0 if OK, < 0 on error or end of file +*) +// int av_read_frame(AVFormatContext *s, AVPacket *pkt); +function av_read_frame(s: pAVFormatContext; pkt: pAVPacket): int; cdecl; overload; external avformat_dll; +function av_read_frame(s: pAVFormatContext; var pkt: AVPacket): int; cdecl; overload; external avformat_dll; + +(* * + * Seek to the keyframe at timestamp. + * 'timestamp' in 'stream_index'. + * + * @param s media file handle + * @param stream_index If stream_index is (-1), a default + * stream is selected, and timestamp is automatically converted + * from AV_TIME_BASE units to the stream specific time_base. + * @param timestamp Timestamp in AVStream.time_base units + * or, if no stream is specified, in AV_TIME_BASE units. + * @param flags flags which select direction and seeking mode + * @return >= 0 on success +*) +// int av_seek_frame(AVFormatContext *s, int stream_index, int64_t timestamp, int flags); +function av_seek_frame(s: pAVFormatContext; stream_index: int; timestamp: int64_t; flags: int): int; cdecl; external avformat_dll; + +(* * + * Seek to timestamp ts. + * Seeking will be done so that the point from which all active streams + * can be presented successfully will be closest to ts and within min/max_ts. + * Active streams are all streams that have AVStream.discard < AVDISCARD_ALL. 
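+ *
+ * Illustrative Pascal sketch (not part of the original documentation; ctx is
+ * assumed to be an opened AVFormatContext and the target is 60 seconds):
+ *
+ *   ts := 60 * 1000000; // target in AV_TIME_BASE units, i.e. microseconds
+ *   if avformat_seek_file(ctx, -1, Low(Int64), ts, High(Int64), 0) < 0 then
+ *     Writeln('seek failed');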
+ * + * If flags contain AVSEEK_FLAG_BYTE, then all timestamps are in bytes and + * are the file position (this may not be supported by all demuxers). + * If flags contain AVSEEK_FLAG_FRAME, then all timestamps are in frames + * in the stream with stream_index (this may not be supported by all demuxers). + * Otherwise all timestamps are in units of the stream selected by stream_index + * or if stream_index is -1, in AV_TIME_BASE units. + * If flags contain AVSEEK_FLAG_ANY, then non-keyframes are treated as + * keyframes (this may not be supported by all demuxers). + * If flags contain AVSEEK_FLAG_BACKWARD, it is ignored. + * + * @param s media file handle + * @param stream_index index of the stream which is used as time base reference + * @param min_ts smallest acceptable timestamp + * @param ts target timestamp + * @param max_ts largest acceptable timestamp + * @param flags flags + * @return >=0 on success, error code otherwise + * + * @note This is part of the new seek API which is still under construction. + * Thus do not use this yet. It may change at any time, do not expect + * ABI compatibility yet! +*) +// int avformat_seek_file(AVFormatContext *s, int stream_index, int64_t min_ts, int64_t ts, int64_t max_ts, int flags); +function avformat_seek_file(s: pAVFormatContext; stream_index: int; min_ts: int64_t; ts: int64_t; max_ts: int64_t; flags: int): int; cdecl; + external avformat_dll; + +(* * + * Discard all internally buffered data. This can be useful when dealing with + * discontinuities in the byte stream. Generally works only with formats that + * can resync. This includes headerless formats like MPEG-TS/TS but should also + * work with NUT, Ogg and in a limited way AVI for example. + * + * The set of streams, the detected duration, stream parameters and codecs do + * not change when calling this function. If you want a complete reset, it's + * better to open a new AVFormatContext. + * + * This does not flush the AVIOContext (s->pb). If necessary, call + * avio_flush(s->pb) before calling this function. + * + * @param s media file handle + * @return >=0 on success, error code otherwise +*) +// int avformat_flush(AVFormatContext *s); +function avformat_flush(s: pAVFormatContext): int; cdecl; external avformat_dll; + +(* * + * Start playing a network-based stream (e.g. RTSP stream) at the + * current position. +*) +// int av_read_play(AVFormatContext *s); +function av_read_play(s: pAVFormatContext): int; cdecl; external avformat_dll; + +(* * + * Pause a network-based stream (e.g. RTSP stream). + * + * Use av_read_play() to resume it. +*) +// int av_read_pause(AVFormatContext *s); +function av_read_pause(s: pAVFormatContext): int; cdecl; external avformat_dll; + +(* * + * Close an opened input AVFormatContext. Free it and all its contents + * and set *s to NULL. 
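+ *
+ * As an illustration only (not part of the upstream header): a minimal
+ * Free Pascal sketch of the usual open/read/close cycle with the bindings
+ * from this unit. Ctx and Pkt are hypothetical locals, the file name is
+ * made up, and av_packet_unref is assumed to come from the libavcodec unit.
+ * @code
+ *   var
+ *     Ctx: pAVFormatContext;
+ *     Pkt: AVPacket;
+ *   begin
+ *     Ctx := nil;
+ *     if avformat_open_input(Ctx, PAnsiChar('movie.mp4'), nil, ppAVDictionary(nil)) < 0 then Exit;
+ *     avformat_find_stream_info(Ctx, ppAVDictionary(nil));
+ *     while av_read_frame(Ctx, Pkt) >= 0 do   // one demuxed packet per call
+ *     begin
+ *       // ...hand Pkt to a decoder here...
+ *       av_packet_unref(@Pkt);                // release the packet's buffers
+ *     end;
+ *     avformat_close_input(Ctx);              // frees the context, Ctx becomes nil
+ *   end;
+ * @endcode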
+*) +// void avformat_close_input(AVFormatContext **s); +procedure avformat_close_input(var s: pAVFormatContext); cdecl; external avformat_dll; + +(* * + * @} +*) +const + AVSEEK_FLAG_BACKWARD = 1; // < seek backward + AVSEEK_FLAG_BYTE = 2; // < seeking based on position in bytes + AVSEEK_FLAG_ANY = 4; // < seek to any frame, even non-keyframes + AVSEEK_FLAG_FRAME = 8; // < seeking based on frame number + + (* * + * @addtogroup lavf_encoding + * @{ + *) + + AVSTREAM_INIT_IN_WRITE_HEADER = 0; // < stream parameters initialized in avformat_write_header + AVSTREAM_INIT_IN_INIT_OUTPUT = 1; // < stream parameters initialized in avformat_init_output + + (* * + * Allocate the stream private data and write the stream header to + * an output media file. + * + * @param s Media file handle, must be allocated with avformat_alloc_context(). + * Its oformat field must be set to the desired output format; + * Its pb field must be set to an already opened AVIOContext. + * @param options An AVDictionary filled with AVFormatContext and muxer-private options. + * On return this parameter will be destroyed and replaced with a dict containing + * options that were not found. May be NULL. + * + * @return AVSTREAM_INIT_IN_WRITE_HEADER on success if the codec had not already been fully initialized in avformat_init, + * AVSTREAM_INIT_IN_INIT_OUTPUT on success if the codec had already been fully initialized in avformat_init, + * negative AVERROR on failure. + * + * @see av_opt_find, av_dict_set, avio_open, av_oformat_next, avformat_init_output. + *) + // av_warn_unused_result + // int avformat_write_header(AVFormatContext *s, AVDictionary **options); +function avformat_write_header(s: pAVFormatContext; options: ppAVDictionary): int; cdecl; external avformat_dll; + +(* * + * Allocate the stream private data and initialize the codec, but do not write the header. + * May optionally be used before avformat_write_header to initialize stream parameters + * before actually writing the header. + * If using this function, do not pass the same options to avformat_write_header. + * + * @param s Media file handle, must be allocated with avformat_alloc_context(). + * Its oformat field must be set to the desired output format; + * Its pb field must be set to an already opened AVIOContext. + * @param options An AVDictionary filled with AVFormatContext and muxer-private options. + * On return this parameter will be destroyed and replaced with a dict containing + * options that were not found. May be NULL. + * + * @return AVSTREAM_INIT_IN_WRITE_HEADER on success if the codec requires avformat_write_header to fully initialize, + * AVSTREAM_INIT_IN_INIT_OUTPUT on success if the codec has been fully initialized, + * negative AVERROR on failure. + * + * @see av_opt_find, av_dict_set, avio_open, av_oformat_next, avformat_write_header. +*) +// av_warn_unused_result +// int avformat_init_output(AVFormatContext *s, AVDictionary **options); +function avformat_init_output(s: pAVFormatContext; var options: pAVDictionary): int; cdecl; external avformat_dll; + +(* * + * Write a packet to an output media file. + * + * This function passes the packet directly to the muxer, without any buffering + * or reordering. The caller is responsible for correctly interleaving the + * packets if the format requires it. Callers that want libavformat to handle + * the interleaving should call av_interleaved_write_frame() instead of this + * function. + * + * @param s media file handle + * @param pkt The packet containing the data to be written. 
Note that unlike + * av_interleaved_write_frame(), this function does not take + * ownership of the packet passed to it (though some muxers may make + * an internal reference to the input packet). + *
+ * This parameter can be NULL (at any time, not just at the end), in + * order to immediately flush data buffered within the muxer, for + * muxers that buffer up data internally before writing it to the + * output. + *
+ * Packet's @ref AVPacket.stream_index "stream_index" field must be + * set to the index of the corresponding stream in @ref + * AVFormatContext.streams "s->streams". + *
+ * The timestamps (@ref AVPacket.pts "pts", @ref AVPacket.dts "dts") + * must be set to correct values in the stream's timebase (unless the + * output format is flagged with the AVFMT_NOTIMESTAMPS flag, then + * they can be set to AV_NOPTS_VALUE). + * The dts for subsequent packets passed to this function must be strictly + * increasing when compared in their respective timebases (unless the + * output format is flagged with the AVFMT_TS_NONSTRICT, then they + * merely have to be nondecreasing). @ref AVPacket.duration + * "duration") should also be set if known. + * @return < 0 on error, = 0 if OK, 1 if flushed and there is no more data to flush + * + * @see av_interleaved_write_frame() +*) +// int av_write_frame(AVFormatContext *s, AVPacket *pkt); +function av_write_frame(s: pAVFormatContext; pkt: pAVPacket): int; cdecl; external avformat_dll; +(* * + * Write a packet to an output media file ensuring correct interleaving. + * + * This function will buffer the packets internally as needed to make sure the + * packets in the output file are properly interleaved in the order of + * increasing dts. Callers doing their own interleaving should call + * av_write_frame() instead of this function. + * + * Using this function instead of av_write_frame() can give muxers advance + * knowledge of future packets, improving e.g. the behaviour of the mp4 + * muxer for VFR content in fragmenting mode. + * + * @param s media file handle + * @param pkt The packet containing the data to be written. + *
+ * If the packet is reference-counted, this function will take + * ownership of this reference and unreference it later when it sees + * fit. + * The caller must not access the data through this reference after + * this function returns. If the packet is not reference-counted, + * libavformat will make a copy. + *
+ * This parameter can be NULL (at any time, not just at the end), to + * flush the interleaving queues. + *
+ * Packet's @ref AVPacket.stream_index "stream_index" field must be + * set to the index of the corresponding stream in @ref + * AVFormatContext.streams "s->streams". + *
+ * The timestamps (@ref AVPacket.pts "pts", @ref AVPacket.dts "dts") + * must be set to correct values in the stream's timebase (unless the + * output format is flagged with the AVFMT_NOTIMESTAMPS flag, then + * they can be set to AV_NOPTS_VALUE). + * The dts for subsequent packets in one stream must be strictly + * increasing (unless the output format is flagged with the + * AVFMT_TS_NONSTRICT, then they merely have to be nondecreasing). + * @ref AVPacket.duration "duration") should also be set if known. + * + * @return 0 on success, a negative AVERROR on error. Libavformat will always + * take care of freeing the packet, even if this function fails. + * + * @see av_write_frame(), AVFormatContext.max_interleave_delta +*) +// int av_interleaved_write_frame(AVFormatContext *s, AVPacket *pkt); +function av_interleaved_write_frame(s: pAVFormatContext; pkt: pAVPacket): int; cdecl; external avformat_dll; +(* * + * Write an uncoded frame to an output media file. + * + * The frame must be correctly interleaved according to the container + * specification; if not, then av_interleaved_write_frame() must be used. + * + * See av_interleaved_write_frame() for details. +*) +// int av_write_uncoded_frame(AVFormatContext *s, int stream_index, AVFrame *frame); +function av_write_uncoded_frame(s: pAVFormatContext; stream_index: int; frame: pAVFrame): int; cdecl; external avformat_dll; +(* * + * Write an uncoded frame to an output media file. + * + * If the muxer supports it, this function makes it possible to write an AVFrame + * structure directly, without encoding it into a packet. + * It is mostly useful for devices and similar special muxers that use raw + * video or PCM data and will not serialize it into a byte stream. + * + * To test whether it is possible to use it with a given muxer and stream, + * use av_write_uncoded_frame_query(). + * + * The caller gives up ownership of the frame and must not access it + * afterwards. + * + * @return >=0 for success, a negative code on error +*) +// int av_interleaved_write_uncoded_frame(AVFormatContext *s, int stream_index, AVFrame *frame); +function av_interleaved_write_uncoded_frame(s: pAVFormatContext; stream_index: int; frame: pAVFrame): int; cdecl; external avformat_dll; +(* * + * Test whether a muxer supports uncoded frame. + * + * @return >=0 if an uncoded frame can be written to that muxer and stream, + * <0 if not +*) +// int av_write_uncoded_frame_query(AVFormatContext *s, int stream_index); +function av_write_uncoded_frame_query(s: pAVFormatContext; stream_index: int): int; cdecl; external avformat_dll; +(* * + * Write the stream trailer to an output media file and free the + * file private data. + * + * May only be called after a successful call to avformat_write_header. + * + * @param s media file handle + * @return 0 if OK, AVERROR_xxx on error +*) +// int av_write_trailer(AVFormatContext *s); +function av_write_trailer(s: pAVFormatContext): int; cdecl; external avformat_dll; +(* * + * Return the output format in the list of registered output formats + * which best matches the provided parameters, or return NULL if + * there is no match. 
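+ *
+ * For illustration (a sketch added next to the binding, not upstream text;
+ * Ofmt is a hypothetical local):
+ * @code
+ *   // pick a muxer from the output file name alone; nil means no match
+ *   Ofmt := av_guess_format(nil, PAnsiChar('out.mp4'), nil);
+ * @endcode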
+ * + * @param short_name if non-NULL checks if short_name matches with the + * names of the registered formats + * @param filename if non-NULL checks if filename terminates with the + * extensions of the registered formats + * @param mime_type if non-NULL checks if mime_type matches with the + * MIME type of the registered formats +*) +// AVOutputFormat *av_guess_format(const char *short_name, +// const char *filename, +// const char *mime_type); +function av_guess_format(const short_name: PAnsiChar; const filename: PAnsiChar; const mime_type: PAnsiChar): pAVOutputFormat; cdecl; external avformat_dll; +(* * + * Guess the codec ID based upon muxer and filename. +*) +// enum AVCodecID av_guess_codec(AVOutputFormat *fmt, const char *short_name, +// const char *filename, const char *mime_type, +// enum AVMediaType type); +function av_guess_codec(fmt: pAVOutputFormat; const short_name: PAnsiChar; const filename: PAnsiChar; const mime_type: PAnsiChar; _type: AVMediaType) + : AVCodecID; cdecl; external avformat_dll; +(* * + * Get timing information for the data currently output. + * The exact meaning of "currently output" depends on the format. + * It is mostly relevant for devices that have an internal buffer and/or + * work in real time. + * @param s media file handle + * @param stream stream in the media file + * @param[out] dts DTS of the last packet output for the stream, in stream + * time_base units + * @param[out] wall absolute time when that packet whas output, + * in microsecond + * @return 0 if OK, AVERROR(ENOSYS) if the format does not support it + * Note: some formats or devices may not allow to measure dts and wall + * atomically. +*) +// int av_get_output_timestamp(struct AVFormatContext *s, int stream, +// int64_t *dts, int64_t *wall); +function av_get_output_timestamp(s: pAVFormatContext; stream: int; var dts: int64_t; var wall: int64_t): int; cdecl; external avformat_dll; +(* * + * @} +*) + +(* * + * @defgroup lavf_misc Utility functions + * @ingroup libavf + * @{ + * + * Miscellaneous utility functions related to both muxing and demuxing + * (or neither). +*) + +(* * + * Send a nice hexadecimal dump of a buffer to the specified file stream. + * + * @param f The file stream pointer where the dump should be sent to. + * @param buf buffer + * @param size buffer size + * + * @see av_hex_dump_log, av_pkt_dump2, av_pkt_dump_log2 +*) +// void av_hex_dump(FILE *f, const uint8_t *buf, int size); +procedure av_hex_dump(f: pFILE; const buf: puint8_t; size: int); cdecl; external avformat_dll; +(* * + * Send a nice hexadecimal dump of a buffer to the log. + * + * @param avcl A pointer to an arbitrary struct of which the first field is a + * pointer to an AVClass struct. + * @param level The importance level of the message, lower values signifying + * higher importance. + * @param buf buffer + * @param size buffer size + * + * @see av_hex_dump, av_pkt_dump2, av_pkt_dump_log2 +*) +// void av_hex_dump_log(void *avcl, int level, const uint8_t *buf, int size); +procedure av_hex_dump_log(avcl: pointer; level: int; const buf: puint8_t; size: int); cdecl; external avformat_dll; +(* * + * Send a nice dump of a packet to the specified file stream. + * + * @param f The file stream pointer where the dump should be sent to. + * @param pkt packet to dump + * @param dump_payload True if the payload must be displayed, too. 
+ * @param st AVStream that the packet belongs to +*) +// void av_pkt_dump2(FILE *f, const AVPacket *pkt, int dump_payload, const AVStream *st); +procedure av_pkt_dump2(f: pFILE; const pkt: pAVPacket; dump_payload: int; const st: pAVStream); cdecl; external avformat_dll; +(* * + * Send a nice dump of a packet to the log. + * + * @param avcl A pointer to an arbitrary struct of which the first field is a + * pointer to an AVClass struct. + * @param level The importance level of the message, lower values signifying + * higher importance. + * @param pkt packet to dump + * @param dump_payload True if the payload must be displayed, too. + * @param st AVStream that the packet belongs to +*) +// void av_pkt_dump_log2(void *avcl, int level, const AVPacket *pkt, int dump_payload, +// const AVStream *st); +procedure av_pkt_dump_log2(avcl: pointer; level: int; const pkt: pAVPacket; dump_payload: int; const st: pAVStream); cdecl; external avformat_dll; +(* * + * Get the AVCodecID for the given codec tag tag. + * If no codec id is found returns AV_CODEC_ID_NONE. + * + * @param tags list of supported codec_id-codec_tag pairs, as stored + * in AVInputFormat.codec_tag and AVOutputFormat.codec_tag + * @param tag codec tag to match to a codec ID +*) +// enum AVCodecID av_codec_get_id(const struct AVCodecTag * const *tags, unsigned int tag); +function av_codec_get_id(const tags: ppAVCodecTag; tag: unsigned_int): AVCodecID; cdecl; external avformat_dll; +(* * + * Get the codec tag for the given codec id id. + * If no codec tag is found returns 0. + * + * @param tags list of supported codec_id-codec_tag pairs, as stored + * in AVInputFormat.codec_tag and AVOutputFormat.codec_tag + * @param id codec ID to match to a codec tag +*) +// unsigned int av_codec_get_tag(const struct AVCodecTag * const *tags, enum AVCodecID id); +function av_codec_get_tag(const tags: ppAVCodecTag; id: AVCodecID): unsigned_int; cdecl; external avformat_dll; +(* * + * Get the codec tag for the given codec id. + * + * @param tags list of supported codec_id - codec_tag pairs, as stored + * in AVInputFormat.codec_tag and AVOutputFormat.codec_tag + * @param id codec id that should be searched for in the list + * @param tag A pointer to the found tag + * @return 0 if id was not found in tags, > 0 if it was found +*) +// int av_codec_get_tag2(const struct AVCodecTag * const *tags, enum AVCodecID id, +// unsigned int *tag); +function av_codec_get_tag2(const tags: ppAVCodecTag; id: AVCodecID; tag: punsigned_int): int; cdecl; external avformat_dll; + +// int av_find_default_stream_index(AVFormatContext *s); +function av_find_default_stream_index(s: pAVFormatContext): int; cdecl; external avformat_dll; +(* * + * Get the index for a specific timestamp. + * + * @param st stream that the timestamp belongs to + * @param timestamp timestamp to retrieve the index for + * @param flags if AVSEEK_FLAG_BACKWARD then the returned index will correspond + * to the timestamp which is <= the requested one, if backward + * is 0, then it will be >= + * if AVSEEK_FLAG_ANY seek to any frame, only keyframes otherwise + * @return < 0 if no such timestamp could be found +*) +// int av_index_search_timestamp(AVStream *st, int64_t timestamp, int flags); +function av_index_search_timestamp(st: pAVStream; timestamp: int64_t; flags: int): int; cdecl; external avformat_dll; +(* * + * Add an index entry into a sorted list. Update the entry if the list + * already contains it. 
+ * + * @param timestamp timestamp in the time base of the given stream +*) +// int av_add_index_entry(AVStream *st, int64_t pos, int64_t timestamp, +// int size, int distance, int flags); +function av_add_index_entry(st: pAVStream; pos: int64_t; timestamp: int64_t; size: int; distance: int; flags: int): int; cdecl; external avformat_dll; +(* * + * Split a URL string into components. + * + * The pointers to buffers for storing individual components may be null, + * in order to ignore that component. Buffers for components not found are + * set to empty strings. If the port is not found, it is set to a negative + * value. + * + * @param proto the buffer for the protocol + * @param proto_size the size of the proto buffer + * @param authorization the buffer for the authorization + * @param authorization_size the size of the authorization buffer + * @param hostname the buffer for the host name + * @param hostname_size the size of the hostname buffer + * @param port_ptr a pointer to store the port number in + * @param path the buffer for the path + * @param path_size the size of the path buffer + * @param url the URL to split +*) +// void av_url_split(char *proto, int proto_size, +// char *authorization, int authorization_size, +// char *hostname, int hostname_size, +// int *port_ptr, +// char *path, int path_size, +// const char *url); +procedure av_url_split(proto: PAnsiChar; proto_size: int; authorization: PAnsiChar; authorization_size: int; hostname: PAnsiChar; hostname_size: int; + var port_ptr: int; path: PAnsiChar; path_size: int; const url: PAnsiChar); cdecl; external avformat_dll; +(* * + * Print detailed information about the input or output format, such as + * duration, bitrate, streams, container, programs, metadata, side data, + * codec and time base. + * + * @param ic the context to analyze + * @param index index of the stream to dump information about + * @param url the URL to print, such as source or destination file + * @param is_output Select whether the specified context is an input(0) or output(1) +*) +// void av_dump_format(AVFormatContext *ic, +// int index, +// const char *url, +// int is_output); +procedure av_dump_format(ic: pAVFormatContext; index: int; const url: PAnsiChar; is_output: int); cdecl; external avformat_dll; + +const + AV_FRAME_FILENAME_FLAGS_MULTIPLE = 1; // < Allow multiple %d + + (* * + * Return in 'buf' the path with '%d' replaced by a number. + * + * Also handles the '%0nd' format where 'n' is the total number + * of digits and '%%'. + * + * @param buf destination buffer + * @param buf_size destination buffer size + * @param path numbered sequence string + * @param number frame number + * @param flags AV_FRAME_FILENAME_FLAGS_* + * @return 0 if OK, -1 on format error + *) + // int av_get_frame_filename2(char *buf, int buf_size, + // const char *path, int number, int flags); +function av_get_frame_filename2(buf: PAnsiChar; buf_size: int; const path: PAnsiChar; number: int; flags: int): int; cdecl; external avformat_dll; + +// int av_get_frame_filename(char *buf, int buf_size, +// const char *path, int number); +function av_get_frame_filename(buf: PAnsiChar; buf_size: int; const path: PAnsiChar; number: int): int; cdecl; external avformat_dll; +(* * + * Check whether filename actually is a numbered sequence generator. 
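+ *
+ * For example (illustrative, the file names are made up):
+ * @code
+ *   av_filename_number_test(PAnsiChar('frame%04d.png'));  // -> 1, valid sequence pattern
+ *   av_filename_number_test(PAnsiChar('frame.png'));      // -> 0, no numbered placeholder
+ * @endcode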
+ * + * @param filename possible numbered sequence string + * @return 1 if a valid numbered sequence string, 0 otherwise +*) +// int av_filename_number_test(const char *filename); +function av_filename_number_test(const filename: PAnsiChar): int; cdecl; external avformat_dll; +(* * + * Generate an SDP for an RTP session. + * + * Note, this overwrites the id values of AVStreams in the muxer contexts + * for getting unique dynamic payload types. + * + * @param ac array of AVFormatContexts describing the RTP streams. If the + * array is composed by only one context, such context can contain + * multiple AVStreams (one AVStream per RTP stream). Otherwise, + * all the contexts in the array (an AVCodecContext per RTP stream) + * must contain only one AVStream. + * @param n_files number of AVCodecContexts contained in ac + * @param buf buffer where the SDP will be stored (must be allocated by + * the caller) + * @param size the size of the buffer + * @return 0 if OK, AVERROR_xxx on error +*) +// int av_sdp_create(AVFormatContext *ac[], int n_files, char *buf, int size); +function av_sdp_create(ac: ppAVFormatContext; n_files: int; buf: PAnsiChar; size: int): int; cdecl; external avformat_dll; +(* * + * Return a positive value if the given filename has one of the given + * extensions, 0 otherwise. + * + * @param filename file name to check against the given extensions + * @param extensions a comma-separated list of filename extensions +*) +// int av_match_ext(const char *filename, const char *extensions); +function av_match_ext(const filename: PAnsiChar; const extensions: PAnsiChar): int; cdecl; external avformat_dll; +(* * + * Test if the given container can store a codec. + * + * @param ofmt container to check for compatibility + * @param codec_id codec to potentially store in container + * @param std_compliance standards compliance level, one of FF_COMPLIANCE_* + * + * @return 1 if codec with ID codec_id can be stored in ofmt, 0 if it cannot. + * A negative number if this information is not available. +*) +// int avformat_query_codec(const AVOutputFormat *ofmt, enum AVCodecID codec_id, +// int std_compliance); +function avformat_query_codec(const ofmt: pAVOutputFormat; codec_id: AVCodecID; std_compliance: int): int; cdecl; external avformat_dll; +(* * + * @defgroup riff_fourcc RIFF FourCCs + * @{ + * Get the tables mapping RIFF FourCCs to libavcodec AVCodecIDs. The tables are + * meant to be passed to av_codec_get_id()/av_codec_get_tag() as in the + * following code: + * @code + * uint32_t tag = MKTAG('H', '2', '6', '4'); + * const struct AVCodecTag *table[] = { avformat_get_riff_video_tags(), 0 }; + * enum AVCodecID id = av_codec_get_id(table, tag); + * @endcode +*) +(* * + * @return the table mapping RIFF FourCCs for video to libavcodec AVCodecID. +*) +// const struct AVCodecTag *avformat_get_riff_video_tags(void); +function avformat_get_riff_video_tags(): pAVCodecTag; cdecl; external avformat_dll; +(* * + * @return the table mapping RIFF FourCCs for audio to AVCodecID. +*) +// const struct AVCodecTag *avformat_get_riff_audio_tags(void); +function avformat_get_riff_audio_tags(): pAVCodecTag; cdecl; external avformat_dll; +(* * + * @return the table mapping MOV FourCCs for video to libavcodec AVCodecID. +*) +// const struct AVCodecTag *avformat_get_mov_video_tags(void); +function avformat_get_mov_video_tags(): pAVCodecTag; cdecl; external avformat_dll; +(* * + * @return the table mapping MOV FourCCs for audio to AVCodecID. 
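+ *
+ * A Pascal counterpart of the C lookup shown in the @ref riff_fourcc example
+ * above might look like this (a sketch only; Table, Tag and Id are
+ * hypothetical locals):
+ * @code
+ *   var
+ *     Table: array [0 .. 1] of pAVCodecTag;
+ *     Tag: unsigned_int;   // a FourCC read from the container
+ *     Id: AVCodecID;
+ *   begin
+ *     Table[0] := avformat_get_mov_audio_tags();
+ *     Table[1] := nil;                       // the tag list must be nil-terminated
+ *     Id := av_codec_get_id(@Table[0], Tag);
+ *   end;
+ * @endcode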
+*) +// const struct AVCodecTag *avformat_get_mov_audio_tags(void); +function avformat_get_mov_audio_tags(): pAVCodecTag; cdecl; external avformat_dll; +(* * + * @} +*) + +(* * + * Guess the sample aspect ratio of a frame, based on both the stream and the + * frame aspect ratio. + * + * Since the frame aspect ratio is set by the codec but the stream aspect ratio + * is set by the demuxer, these two may not be equal. This function tries to + * return the value that you should use if you would like to display the frame. + * + * Basic logic is to use the stream aspect ratio if it is set to something sane + * otherwise use the frame aspect ratio. This way a container setting, which is + * usually easy to modify can override the coded value in the frames. + * + * @param format the format context which the stream is part of + * @param stream the stream which the frame is part of + * @param frame the frame with the aspect ratio to be determined + * @return the guessed (valid) sample_aspect_ratio, 0/1 if no idea +*) +// AVRational av_guess_sample_aspect_ratio(AVFormatContext *format, AVStream *stream, AVFrame *frame); +function av_guess_sample_aspect_ratio(format: pAVFormatContext; stream: pAVStream; frame: pAVFrame): AVRational; cdecl; external avformat_dll; +(* * + * Guess the frame rate, based on both the container and codec information. + * + * @param ctx the format context which the stream is part of + * @param stream the stream which the frame is part of + * @param frame the frame for which the frame rate should be determined, may be NULL + * @return the guessed (valid) frame rate, 0/1 if no idea +*) +// AVRational av_guess_frame_rate(AVFormatContext *ctx, AVStream *stream, AVFrame *frame); +function av_guess_frame_rate(ctx: pAVFormatContext; stream: pAVStream; frame: pAVFrame): AVRational; cdecl; external avformat_dll; +(* * + * Check if the stream st contained in s is matched by the stream specifier + * spec. + * + * See the "stream specifiers" chapter in the documentation for the syntax + * of spec. + * + * @return >0 if st is matched by spec; + * 0 if st is not matched by spec; + * AVERROR code if spec is invalid + * + * @note A stream specifier can match several streams in the format. +*) +// int avformat_match_stream_specifier(AVFormatContext *s, AVStream *st, +// const char *spec); +function avformat_match_stream_specifier(s: pAVFormatContext; st: pAVStream; const spec: PAnsiChar): int; cdecl; external avformat_dll; + +// int avformat_queue_attached_pictures(AVFormatContext *s); +function avformat_queue_attached_pictures(s: pAVFormatContext): int; cdecl; external avformat_dll; +{$IFDEF FF_API_OLD_BSF} +(* * + * Apply a list of bitstream filters to a packet. + * + * @param codec AVCodecContext, usually from an AVStream + * @param pkt the packet to apply filters to. 
If, on success, the returned + * packet has size == 0 and side_data_elems == 0, it indicates that + * the packet should be dropped + * @param bsfc a NULL-terminated list of filters to apply + * @return >=0 on success; + * AVERROR code on failure +*) +// attribute_deprecated +// int av_apply_bitstream_filters(AVCodecContext *codec, AVPacket *pkt, +// AVBitStreamFilterContext *bsfc); +function av_apply_bitstream_filters(codec: pAVCodecContext; pkt: pAVPacket; bsfc: pAVBitStreamFilterContext): int; cdecl; external avformat_dll; +{$ENDIF} + +type + AVTimebaseSource = ( // + AVFMT_TBCF_AUTO = -1, // + AVFMT_TBCF_DECODER, // + AVFMT_TBCF_DEMUXER // +{$IFDEF FF_API_R_FRAME_RATE} + , AVFMT_TBCF_R_FRAMERATE +{$ENDIF} + ); + + (* * + * Transfer internal timing information from one stream to another. + * + * This function is useful when doing stream copy. + * + * @param ofmt target output format for ost + * @param ost output stream which needs timings copy and adjustments + * @param ist reference input stream to copy timings from + * @param copy_tb define from where the stream codec timebase needs to be imported + *) + // int avformat_transfer_internal_stream_timing_info(const AVOutputFormat *ofmt, + // AVStream *ost, const AVStream *ist, + // enum AVTimebaseSource copy_tb); +function avformat_transfer_internal_stream_timing_info(const ofmt: pAVOutputFormat; ost: pAVStream; const ist: pAVStream; copy_tb: AVTimebaseSource): int; + cdecl; external avformat_dll; +(* * + * Get the internal codec timebase from a stream. + * + * @param st input stream to extract the timebase from +*) +// AVRational av_stream_get_codec_timebase(const AVStream *st); +function av_stream_get_codec_timebase(const st: AVStream): AVRational; cdecl; external avformat_dll; +{$ENDREGION} + +implementation + +function avio_tell(s: pAVIOContext): int64_t; inline; +begin + Result := avio_seek(s, 0, 1 { SEEK_CUR } ); +end; + +end. diff --git a/ffmpeg/libavutil.pas b/ffmpeg/libavutil.pas new file mode 100644 index 0000000..e77126c --- /dev/null +++ b/ffmpeg/libavutil.pas @@ -0,0 +1,9586 @@ +unit libavutil; + +{$IFDEF FPC} +{$MODE Delphi} +{$ENDIF} + +interface + +Uses + ffmpeg_types; + +{$I ffmpeg.inc} +{$REGION 'avconfig.h'} + +const + AV_HAVE_BIGENDIAN = 0; + AV_HAVE_FAST_UNALIGNED = 1; +{$ENDREGION} +{$REGION 'common.h'} + // rounded division & shift + // #define RSHIFT(a,b) ((a) > 0 ? ((a) + ((1<<(b))>>1))>>(b) : ((a) + ((1<<(b))>>1)-1)>>(b)) +function RSHIFT(a, b: int): int; inline; +/// * assume b>0 */ +// #define ROUNDED_DIV(a,b) (((a)>0 ? (a) + ((b)>>1) : (a) - ((b)>>1))/(b)) +function ROUNDED_DIV(a, b: int): int; inline; +// (* Fast a/(1<=0 and b>=0 *) +// #define AV_CEIL_RSHIFT(a,b) (!av_builtin_constant_p(b) ? -((-(a)) >> (b)) : ((a) + (1<<(b)) - 1) >> (b)) +/// * Backwards compat. */ +// #define FF_CEIL_RSHIFT AV_CEIL_RSHIFT + +// #define FFUDIV(a,b) (((a)>0 ?(a):(a)-(b)+1) / (b)) +function FFUDIV(a, b: int): int; inline; +// #define FFUMOD(a,b) ((a)-(b)*FFUDIV(a,b)) +function FFUMOD(a, b: int): int; inline; + +(* * + * Absolute value, Note, INT_MIN / INT64_MIN result in undefined behavior as they + * are not representable as absolute values of their type. This is the same + * as with *abs() + * @see FFNABS() +*) +// #define FFABS(a) ((a) >= 0 ? (a) : (-(a))) +function FFABS(a: int): int; inline; +// #define FFSIGN(a) ((a) > 0 ? 1 : -1) +function FFSIGN(a: int): int; inline; + +(* * + * Negative Absolute value. + * this works for all integers of all types. 
+ * As with many macros, this evaluates its argument twice, it thus must not have + * a sideeffect, that is FFNABS(x++) has undefined behavior. +*) +// #define FFNABS(a) ((a) <= 0 ? (a) : (-(a))) +function FFNABS(a: int): int; inline; + +(* * + * Comparator. + * For two numerical expressions x and y, gives 1 if x > y, -1 if x < y, and 0 + * if x == y. This is useful for instance in a qsort comparator callback. + * Furthermore, compilers are able to optimize this to branchless code, and + * there is no risk of overflow with signed types. + * As with many macros, this evaluates its argument multiple times, it thus + * must not have a side-effect. +*) +// #define FFDIFFSIGN(x,y) (((x)>(y)) - ((x)<(y))) +function FFDIFFSIGN(x, y: int): Boolean; inline; + +// #define FFMAX(a,b) ((a) > (b) ? (a) : (b)) +function FFMAX(a, b: int): int; inline; + +// #define FFMAX3(a,b,c) FFMAX(FFMAX(a,b),c) +// #define FFMIN(a,b) ((a) > (b) ? (b) : (a)) +// #define FFMIN3(a,b,c) FFMIN(FFMIN(a,b),c) + +// #define FFSWAP(type,a,b) do{type SWAP_tmp= b; b= a; a= SWAP_tmp;}while(0) +// #define FF_ARRAY_ELEMS(a) (sizeof(a) / sizeof((a)[0])) + +(* misc math functions *) + +// #ifndef av_log2 +// av_const int av_log2(unsigned v); +// #endif +function av_log2(v: unsigned): int; cdecl; external avutil_dll; + +// #ifndef av_log2_16bit +// av_const int av_log2_16bit(unsigned v); +// #endif +function av_log2_16bit(v: unsigned): int; cdecl; external avutil_dll; + +(* * + * Clip a signed integer value into the amin-amax range. + * @param a value to clip + * @param amin minimum value of the clip range + * @param amax maximum value of the clip range + * @return clipped value +*) +// static av_always_inline av_const int av_clip_c(int a, int amin, int amax) +function av_clip_c(a: int; amin: int; amax: int): int; inline; + +(* * + * Clip a signed 64bit integer value into the amin-amax range. + * @param a value to clip + * @param amin minimum value of the clip range + * @param amax maximum value of the clip range + * @return clipped value +*) +// static av_always_inline av_const int64_t av_clip64_c(int64_t a, int64_t amin, int64_t amax) +function av_clip64_c(a: int64_t; amin: int64_t; amax: int64_t): int64_t; inline; + +(* * + * Clip a signed integer value into the 0-255 range. + * @param a value to clip + * @return clipped value +*) +// static av_always_inline av_const uint8_t av_clip_uint8_c(int a) +function av_clip_uint8_c(a: int): uint8_t; inline; + +(* * + * Clip a signed integer value into the -128,127 range. + * @param a value to clip + * @return clipped value +*) +// static av_always_inline av_const int8_t av_clip_int8_c(int a) +function av_clip_int8_c(a: int): int8_t; inline; + +(* * + * Clip a signed integer value into the 0-65535 range. + * @param a value to clip + * @return clipped value +*) +// static av_always_inline av_const uint16_t av_clip_uint16_c(int a) +function av_clip_uint16_c(a: int): uint16_t; inline; + +(* * + * Clip a signed integer value into the -32768,32767 range. + * @param a value to clip + * @return clipped value +*) +// static av_always_inline av_const int16_t av_clip_int16_c(int a) +function av_clip_int16_c(a: int): int16_t; inline; + +(* * + * Clip a signed 64-bit integer value into the -2147483648,2147483647 range. + * @param a value to clip + * @return clipped value +*) +// static av_always_inline av_const int32_t av_clipl_int32_c(int64_t a) +function av_clipl_int32_c(a: int64_t): int32_t; inline; + +(* * + * Clip a signed integer into the -(2^p),(2^p-1) range. 
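+ *
+ * For example, with p = 8 the clip range is -256..255 (illustrative):
+ * @code
+ *   av_clip_intp2_c(300, 8);   // -> 255
+ *   av_clip_intp2_c(-300, 8);  // -> -256
+ * @endcode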
+ * @param a value to clip + * @param p bit position to clip at + * @return clipped value +*) +// static av_always_inline av_const int av_clip_intp2_c(int a, int p) +function av_clip_intp2_c(a: int; p: int): int; inline; + +(* * + * Clip a signed integer to an unsigned power of two range. + * @param a value to clip + * @param p bit position to clip at + * @return clipped value +*) +// static av_always_inline av_const unsigned av_clip_uintp2_c(int a, int p) +function av_clip_uintp2_c(a, p: int): unsigned; inline; + +(* * + * Clear high bits from an unsigned integer starting with specific bit position + * @param a value to clip + * @param p bit position to clip at + * @return clipped value +*) +// static av_always_inline av_const unsigned av_mod_uintp2_c(unsigned a, unsigned p) +function av_mod_uintp2_c(a, p: unsigned): unsigned; inline; + +(* * + * Add two signed 32-bit values with saturation. + * + * @param a one value + * @param b another value + * @return sum with signed saturation +*) +// static av_always_inline int av_sat_add32_c(int a, int b) +function av_sat_add32_c(a, b: int): int; inline; + +(* * + * Add a doubled value to another value with saturation at both stages. + * + * @param a first value + * @param b value doubled and added to a + * @return sum sat(a + sat(2*b)) with signed saturation +*) +// static av_always_inline int av_sat_dadd32_c(int a, int b) +function av_sat_dadd32_c(a, b: int): int; inline; + +(* * + * Subtract two signed 32-bit values with saturation. + * + * @param a one value + * @param b another value + * @return difference with signed saturation +*) +// static av_always_inline int av_sat_sub32_c(int a, int b) +function av_sat_sub32_c(a, b: int): int; inline; + +(* * + * Subtract a doubled value from another value with saturation at both stages. + * + * @param a first value + * @param b value doubled and subtracted from a + * @return difference sat(a - sat(2*b)) with signed saturation +*) +// static av_always_inline int av_sat_dsub32_c(int a, int b) +function av_sat_dsub32_c(a, b: int): int; inline; + +(* * + * Clip a float value into the amin-amax range. + * @param a value to clip + * @param amin minimum value of the clip range + * @param amax maximum value of the clip range + * @return clipped value +*) +// static av_always_inline av_const float av_clipf_c(float a, float amin, float amax) +function av_clipf_c(a, amin, amax: float): float; inline; + +(* * + * Clip a double value into the amin-amax range. + * @param a value to clip + * @param amin minimum value of the clip range + * @param amax maximum value of the clip range + * @return clipped value +*) +// static av_always_inline av_const double av_clipd_c(double a, double amin, double amax) +function av_clipd_c(a, amin, amax: double): double; inline; + +(* * Compute ceil(log2(x)). 
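+ *
+ * For example (illustrative):
+ * @code
+ *   av_ceil_log2_c(8);  // -> 3
+ *   av_ceil_log2_c(9);  // -> 4
+ * @endcode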
+ * @param x value used to compute ceil(log2(x)) + * @return computed ceiling of log2(x) +*) +// static av_always_inline av_const int av_ceil_log2_c(int x) +function av_ceil_log2_c(x: int): int; inline; + +(* * + * Count number of bits set to one in x + * @param x value to count bits of + * @return the number of bits set to one in x +*) +// static av_always_inline av_const int av_popcount_c(uint32_t x) +function av_popcount_c(x: uint32_t): int; inline; + +// static av_always_inline av_const int av_parity_c(uint32_t v) +function av_parity_c(v: uint32_t): int; inline; + +(* * + * Count number of bits set to one in x + * @param x value to count bits of + * @return the number of bits set to one in x +*) +// static av_always_inline av_const int av_popcount64_c(uint64_t x) +function av_popcount64_c(x: uint64_t): int; inline; + +{$ENDREGION} +{$REGION 'bprint.h'} + +(* * + * Buffer to print data progressively + * + * The string buffer grows as necessary and is always 0-terminated. + * The content of the string is never accessed, and thus is + * encoding-agnostic and can even hold binary data. + * + * Small buffers are kept in the structure itself, and thus require no + * memory allocation at all (unless the contents of the buffer is needed + * after the structure goes out of scope). This is almost as lightweight as + * declaring a local "char buf[512]". + * + * The length of the string can go beyond the allocated size: the buffer is + * then truncated, but the functions still keep account of the actual total + * length. + * + * In other words, buf->len can be greater than buf->size and records the + * total length of what would have been to the buffer if there had been + * enough memory. + * + * Append operations do not need to be tested for failure: if a memory + * allocation fails, data stop being appended to the buffer, but the length + * is still updated. This situation can be tested with + * av_bprint_is_complete(). + * + * The size_max field determines several possible behaviours: + * + * size_max = -1 (= UINT_MAX) or any large value will let the buffer be + * reallocated as necessary, with an amortized linear cost. + * + * size_max = 0 prevents writing anything to the buffer: only the total + * length is computed. The write operations can then possibly be repeated in + * a buffer with exactly the necessary size + * (using size_init = size_max = len + 1). + * + * size_max = 1 is automatically replaced by the exact size available in the + * structure itself, thus ensuring no dynamic memory allocation. The + * internal buffer is large enough to hold a reasonable paragraph of text, + * such as the current paragraph. +*) +type + // FF_PAD_STRUCTURE(AVBPrint, 1024, + // char *str; (**< string so far *) + // unsigned len; (**< length so far *) + // unsigned size; (**< allocated memory *) + // unsigned size_max; (**< maximum allocated memory *) + // char reserved_internal_buffer[1]; + // ) + + FF_PAD_STRUCTURE_AVBPrint = record + str: PAnsiChar; (* *< string so far *) + len: Cardinal; (* *< length so far *) + size: Cardinal; (* *< allocated memory *) + size_max: Cardinal; (* *< maximum allocated memory *) + reserved_internal_buffer: array [0 .. 0] of AnsiChar; + end; + + pAVBPrint = ^AVBPrint; + + AVBPrint = record + str: PAnsiChar; (* *< string so far *) + len: Cardinal; (* *< length so far *) + size: Cardinal; (* *< allocated memory *) + size_max: Cardinal; (* *< maximum allocated memory *) + reserved_internal_buffer: array [0 .. 0] of AnsiChar; + reserved_padding: array [0 .. 
1024 - SizeOf(FF_PAD_STRUCTURE_AVBPrint) - 1] of AnsiChar; + end; +{$ENDREGION} +{$REGION 'channel_layout.h'} + +const + (* * + * @defgroup channel_masks Audio channel masks + * + * A channel layout is a 64-bits integer with a bit set for every channel. + * The number of bits set must be equal to the number of channels. + * The value 0 means that the channel layout is not known. + * @note this data structure is not powerful enough to handle channels + * combinations that have the same channel multiple times, such as + * dual-mono. + * + * @{ + *) + AV_CH_FRONT_LEFT = $00000001; + AV_CH_FRONT_RIGHT = $00000002; + AV_CH_FRONT_CENTER = $00000004; + AV_CH_LOW_FREQUENCY = $00000008; + AV_CH_BACK_LEFT = $00000010; + AV_CH_BACK_RIGHT = $00000020; + AV_CH_FRONT_LEFT_OF_CENTER = $00000040; + AV_CH_FRONT_RIGHT_OF_CENTER = $00000080; + AV_CH_BACK_CENTER = $00000100; + AV_CH_SIDE_LEFT = $00000200; + AV_CH_SIDE_RIGHT = $00000400; + AV_CH_TOP_CENTER = $00000800; + AV_CH_TOP_FRONT_LEFT = $00001000; + AV_CH_TOP_FRONT_CENTER = $00002000; + AV_CH_TOP_FRONT_RIGHT = $00004000; + AV_CH_TOP_BACK_LEFT = $00008000; + AV_CH_TOP_BACK_CENTER = $00010000; + AV_CH_TOP_BACK_RIGHT = $00020000; + AV_CH_STEREO_LEFT = $20000000; + /// < Stereo downmix. + AV_CH_STEREO_RIGHT = $40000000; + /// < See AV_CH_STEREO_LEFT. + AV_CH_WIDE_LEFT = $0000000080000000; + AV_CH_WIDE_RIGHT = $0000000100000000; + AV_CH_SURROUND_DIRECT_LEFT = $0000000200000000; + AV_CH_SURROUND_DIRECT_RIGHT = $0000000400000000; + AV_CH_LOW_FREQUENCY_2 = $0000000800000000; + + (* * Channel mask value used for AVCodecContext.request_channel_layout + to indicate that the user requests the channel order of the decoder output + to be the native codec channel order. *) + AV_CH_LAYOUT_NATIVE = $8000000000000000; + + (* * + * @} + * @defgroup channel_mask_c Audio channel layouts + * @{ + * *) + AV_CH_LAYOUT_MONO = (AV_CH_FRONT_CENTER); + AV_CH_LAYOUT_STEREO = (AV_CH_FRONT_LEFT or AV_CH_FRONT_RIGHT); + AV_CH_LAYOUT_2POINT1 = (AV_CH_LAYOUT_STEREO or AV_CH_LOW_FREQUENCY); + AV_CH_LAYOUT_2_1 = (AV_CH_LAYOUT_STEREO or AV_CH_BACK_CENTER); + AV_CH_LAYOUT_SURROUND = (AV_CH_LAYOUT_STEREO or AV_CH_FRONT_CENTER); + AV_CH_LAYOUT_3POINT1 = (AV_CH_LAYOUT_SURROUND or AV_CH_LOW_FREQUENCY); + AV_CH_LAYOUT_4POINT0 = (AV_CH_LAYOUT_SURROUND or AV_CH_BACK_CENTER); + AV_CH_LAYOUT_4POINT1 = (AV_CH_LAYOUT_4POINT0 or AV_CH_LOW_FREQUENCY); + AV_CH_LAYOUT_2_2 = (AV_CH_LAYOUT_STEREO or AV_CH_SIDE_LEFT or AV_CH_SIDE_RIGHT); + AV_CH_LAYOUT_QUAD = (AV_CH_LAYOUT_STEREO or AV_CH_BACK_LEFT or AV_CH_BACK_RIGHT); + AV_CH_LAYOUT_5POINT0 = (AV_CH_LAYOUT_SURROUND or AV_CH_SIDE_LEFT or AV_CH_SIDE_RIGHT); + AV_CH_LAYOUT_5POINT1 = (AV_CH_LAYOUT_5POINT0 or AV_CH_LOW_FREQUENCY); + AV_CH_LAYOUT_5POINT0_BACK = (AV_CH_LAYOUT_SURROUND or AV_CH_BACK_LEFT or AV_CH_BACK_RIGHT); + AV_CH_LAYOUT_5POINT1_BACK = (AV_CH_LAYOUT_5POINT0_BACK or AV_CH_LOW_FREQUENCY); + AV_CH_LAYOUT_6POINT0 = (AV_CH_LAYOUT_5POINT0 or AV_CH_BACK_CENTER); + AV_CH_LAYOUT_6POINT0_FRONT = (AV_CH_LAYOUT_2_2 or AV_CH_FRONT_LEFT_OF_CENTER or AV_CH_FRONT_RIGHT_OF_CENTER); + AV_CH_LAYOUT_HEXAGONAL = (AV_CH_LAYOUT_5POINT0_BACK or AV_CH_BACK_CENTER); + AV_CH_LAYOUT_6POINT1 = (AV_CH_LAYOUT_5POINT1 or AV_CH_BACK_CENTER); + AV_CH_LAYOUT_6POINT1_BACK = (AV_CH_LAYOUT_5POINT1_BACK or AV_CH_BACK_CENTER); + AV_CH_LAYOUT_6POINT1_FRONT = (AV_CH_LAYOUT_6POINT0_FRONT or AV_CH_LOW_FREQUENCY); + AV_CH_LAYOUT_7POINT0 = (AV_CH_LAYOUT_5POINT0 or AV_CH_BACK_LEFT or AV_CH_BACK_RIGHT); + AV_CH_LAYOUT_7POINT0_FRONT = (AV_CH_LAYOUT_5POINT0 or AV_CH_FRONT_LEFT_OF_CENTER or 
AV_CH_FRONT_RIGHT_OF_CENTER); + AV_CH_LAYOUT_7POINT1 = (AV_CH_LAYOUT_5POINT1 or AV_CH_BACK_LEFT or AV_CH_BACK_RIGHT); + AV_CH_LAYOUT_7POINT1_WIDE = (AV_CH_LAYOUT_5POINT1 or AV_CH_FRONT_LEFT_OF_CENTER or AV_CH_FRONT_RIGHT_OF_CENTER); + AV_CH_LAYOUT_7POINT1_WIDE_BACK = (AV_CH_LAYOUT_5POINT1_BACK or AV_CH_FRONT_LEFT_OF_CENTER or AV_CH_FRONT_RIGHT_OF_CENTER); + AV_CH_LAYOUT_OCTAGONAL = (AV_CH_LAYOUT_5POINT0 or AV_CH_BACK_LEFT or AV_CH_BACK_CENTER or AV_CH_BACK_RIGHT); + AV_CH_LAYOUT_HEXADECAGONAL = (AV_CH_LAYOUT_OCTAGONAL or AV_CH_WIDE_LEFT or AV_CH_WIDE_RIGHT or AV_CH_TOP_BACK_LEFT or AV_CH_TOP_BACK_RIGHT or + AV_CH_TOP_BACK_CENTER or AV_CH_TOP_FRONT_CENTER or AV_CH_TOP_FRONT_LEFT or AV_CH_TOP_FRONT_RIGHT); + AV_CH_LAYOUT_STEREO_DOWNMIX = (AV_CH_STEREO_LEFT or AV_CH_STEREO_RIGHT); + +type + AVMatrixEncoding = ( // + AV_MATRIX_ENCODING_NONE, AV_MATRIX_ENCODING_DOLBY, AV_MATRIX_ENCODING_DPLII, AV_MATRIX_ENCODING_DPLIIX, AV_MATRIX_ENCODING_DPLIIZ, + AV_MATRIX_ENCODING_DOLBYEX, AV_MATRIX_ENCODING_DOLBYHEADPHONE, AV_MATRIX_ENCODING_NB); + + (* * + * Return a channel layout id that matches name, or 0 if no match is found. + * + * name can be one or several of the following notations, + * separated by '+' or '|': + * - the name of an usual channel layout (mono, stereo, 4.0, quad, 5.0, + * 5.0(side), 5.1, 5.1(side), 7.1, 7.1(wide), downmix); + * - the name of a single channel (FL, FR, FC, LFE, BL, BR, FLC, FRC, BC, + * SL, SR, TC, TFL, TFC, TFR, TBL, TBC, TBR, DL, DR); + * - a number of channels, in decimal, followed by 'c', yielding + * the default channel layout for that number of channels (@see + * av_get_default_channel_layout); + * - a channel layout mask, in hexadecimal starting with "0x" (see the + * AV_CH_* macros). + * + * Example: "stereo+FC" = "2c+FC" = "2c+1c" = "0x7" + *) + // uint64_t av_get_channel_layout(const char *name); +function av_get_channel_layout(const name: PAnsiChar): uint64_t; cdecl; external avutil_dll; +(* * + * Return a channel layout and the number of channels based on the specified name. + * + * This function is similar to (@see av_get_channel_layout), but can also parse + * unknown channel layout specifications. + * + * @param[in] name channel layout specification string + * @param[out] channel_layout parsed channel layout (0 if unknown) + * @param[out] nb_channels number of channels + * + * @return 0 on success, AVERROR(EINVAL) if the parsing fails. +*) +// int av_get_extended_channel_layout(const char *name, uint64_t* channel_layout, int* nb_channels); +function av_get_extended_channel_layout(const name: PAnsiChar; var channel_layout: uint64_t; var nb_channels: int): int; cdecl; external avutil_dll; +(* * + * Return a description of a channel layout. + * If nb_channels is <= 0, it is guessed from the channel_layout. + * + * @param buf put here the string containing the channel layout + * @param buf_size size in bytes of the buffer +*) +// void av_get_channel_layout_string(char *buf, int buf_size, int nb_channels, uint64_t channel_layout); +procedure av_get_channel_layout_string(buf: PAnsiChar; buf_size: int; nb_channels: int; channel_layout: uint64_t); cdecl; external avutil_dll; + +(* * + * Append a description of a channel layout to a bprint buffer. +*) +// void av_bprint_channel_layout(struct AVBPrint *bp, int nb_channels, uint64_t channel_layout); +procedure av_bprint_channel_layout(bp: pAVBPrint; nb_channels: int; channel_layout: uint64_t); cdecl; external avutil_dll; +(* * + * Return the number of channels in the channel layout. 
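+ *
+ * For example, using the AV_CH_LAYOUT_* constants defined above (illustrative):
+ * @code
+ *   av_get_channel_layout_nb_channels(AV_CH_LAYOUT_STEREO);   // -> 2
+ *   av_get_channel_layout_nb_channels(AV_CH_LAYOUT_5POINT1);  // -> 6
+ * @endcode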
+*) +// int av_get_channel_layout_nb_channels(uint64_t channel_layout); +function av_get_channel_layout_nb_channels(channel_layout: uint64_t): int; cdecl; external avutil_dll; +(* * + * Return default channel layout for a given number of channels. +*) +// int64_t av_get_default_channel_layout(int nb_channels); +function av_get_default_channel_layout(nb_channels: int): int64_t; cdecl; external avutil_dll; +(* * + * Get the index of a channel in channel_layout. + * + * @param channel a channel layout describing exactly one channel which must be + * present in channel_layout. + * + * @return index of channel in channel_layout on success, a negative AVERROR + * on error. +*) +// int av_get_channel_layout_channel_index(uint64_t channel_layout, uint64_t channel); +function av_get_channel_layout_channel_index(channel_layout: uint64_t; channel: uint64_t): int; cdecl; external avutil_dll; +(* * + * Get the channel with the given index in channel_layout. +*) +// uint64_t av_channel_layout_extract_channel(uint64_t channel_layout, int index); +function av_channel_layout_extract_channel(channel_layout: uint64_t; index: int): uint64_t; cdecl; external avutil_dll; +(* * + * Get the name of a given channel. + * + * @return channel name on success, NULL on error. +*) +// const char *av_get_channel_name(uint64_t channel); +function av_get_channel_name(channel: uint64_t): PAnsiChar; cdecl; external avutil_dll; +(* * + * Get the description of a given channel. + * + * @param channel a channel layout with a single channel + * @return channel description on success, NULL on error +*) +// const char *av_get_channel_description(uint64_t channel); +function av_get_channel_description(channel: uint64_t): PAnsiChar; cdecl; external avutil_dll; +(* * + * Get the value and name of a standard channel layout. + * + * @param[in] index index in an internal list, starting at 0 + * @param[out] layout channel layout mask + * @param[out] name name of the layout + * @return 0 if the layout exists, + * <0 if index is beyond the limits +*) +// int av_get_standard_channel_layout(unsigned index, uint64_t *layout, const char **name); +function av_get_standard_channel_layout(index: unsigned; var layout: uint64_t; const name: ppAnsiChar): int; cdecl; external avutil_dll; +{$ENDREGION} +{$REGION 'dict.h'} + +const + AV_DICT_MATCH_CASE = 1; (* *< Only get an entry with exact-case key match. Only relevant in av_dict_get(). *) + AV_DICT_IGNORE_SUFFIX = 2; (* *< Return first entry in a dictionary whose first part corresponds to the search key, + ignoring the suffix of the found key string. Only relevant in av_dict_get(). *) + AV_DICT_DONT_STRDUP_KEY = 4; (* *< Take ownership of a key that's been + allocated with av_malloc() or another memory allocation function. *) + AV_DICT_DONT_STRDUP_VAL = 8; (* *< Take ownership of a value that's been + allocated with av_malloc() or another memory allocation function. *) + AV_DICT_DONT_OVERWRITE = 16; + // < Don't overwrite existing entries. + AV_DICT_APPEND = 32; (* *< If the entry already exists, append to it. Note that no + delimiter is added, the strings are simply concatenated. *) + AV_DICT_MULTIKEY = 64; (* *< Allow to store several equal keys in the dictionary *) + +Type + AVDictionaryEntry = record + key: PAnsiChar; + value: PAnsiChar; + end; + + pAVDictionaryEntry = ^AVDictionaryEntry; + + AVDictionary = record + end; + + pAVDictionary = ^AVDictionary; + ppAVDictionary = ^pAVDictionary; + + (* * + * Get a dictionary entry with matching key. 
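+ *
+ * A small usage sketch for the dictionary API declared below (illustrative
+ * only; Dict and Entry are hypothetical locals):
+ * @code
+ *   var
+ *     Dict: pAVDictionary;
+ *     Entry: pAVDictionaryEntry;
+ *   begin
+ *     Dict := nil;
+ *     av_dict_set(Dict, PAnsiChar('language'), PAnsiChar('eng'), 0);
+ *     Entry := av_dict_get(Dict, PAnsiChar('language'), nil, 0);  // Entry^.value -> 'eng'
+ *     av_dict_free(Dict);                                         // Dict becomes nil
+ *   end;
+ * @endcode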
+ * + * The returned entry key or value must not be changed, or it will + * cause undefined behavior. + * + * To iterate through all the dictionary entries, you can set the matching key + * to the null string "" and set the AV_DICT_IGNORE_SUFFIX flag. + * + * @param prev Set to the previous matching element to find the next. + * If set to NULL the first matching element is returned. + * @param key matching key + * @param flags a collection of AV_DICT_* flags controlling how the entry is retrieved + * @return found entry or NULL in case no matching entry was found in the dictionary + *) + // AVDictionaryEntry *av_dict_get(const AVDictionary *m, const char *key, + // const AVDictionaryEntry *prev, int flags); +function av_dict_get(const m: pAVDictionary; const key: PAnsiChar; const prev: pAVDictionaryEntry; flags: int): pAVDictionaryEntry; cdecl; external avutil_dll; + +(* * + * Get number of entries in dictionary. + * + * @param m dictionary + * @return number of entries in dictionary +*) +// int av_dict_count(const AVDictionary *m); +function av_dict_count(const m: pAVDictionary): int; cdecl; external avutil_dll; + +(* * + * Set the given entry in *pm, overwriting an existing entry. + * + * Note: If AV_DICT_DONT_STRDUP_KEY or AV_DICT_DONT_STRDUP_VAL is set, + * these arguments will be freed on error. + * + * Warning: Adding a new entry to a dictionary invalidates all existing entries + * previously returned with av_dict_get. + * + * @param pm pointer to a pointer to a dictionary struct. If *pm is NULL + * a dictionary struct is allocated and put in *pm. + * @param key entry key to add to *pm (will either be av_strduped or added as a new key depending on flags) + * @param value entry value to add to *pm (will be av_strduped or added as a new key depending on flags). + * Passing a NULL value will cause an existing entry to be deleted. + * @return >= 0 on success otherwise an error code <0 +*) +// int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags); +function av_dict_set(Var pm: pAVDictionary; const key: PAnsiChar; const value: PAnsiChar; flags: int): int; cdecl; external avutil_dll; + +(* * + * Convenience wrapper for av_dict_set that converts the value to a string + * and stores it. + * + * Note: If AV_DICT_DONT_STRDUP_KEY is set, key will be freed on error. +*) +// int av_dict_set_int(AVDictionary **pm, const char *key, int64_t value, int flags); +function av_dict_set_int(var pm: pAVDictionary; const key: PAnsiChar; value: int64_t; flags: int): int; cdecl; external avutil_dll; + +(* * + * Parse the key/value pairs list and add the parsed entries to a dictionary. + * + * In case of failure, all the successfully set entries are stored in + * *pm. You may need to manually free the created dictionary. + * + * @param key_val_sep a 0-terminated list of characters used to separate + * key from value + * @param pairs_sep a 0-terminated list of characters used to separate + * two pairs from each other + * @param flags flags to use when adding to dictionary. + * AV_DICT_DONT_STRDUP_KEY and AV_DICT_DONT_STRDUP_VAL + * are ignored since the key/value tokens will always + * be duplicated. 
+ * @return 0 on success, negative AVERROR code on failure +*) +// int av_dict_parse_string(AVDictionary **pm, const char *str, +// const char *key_val_sep, const char *pairs_sep, +// int flags); +function av_dict_parse_string(Var pm: pAVDictionary; const str: PAnsiChar; const key_val_sep: PAnsiChar; const pairs_sep: PAnsiChar; flags: int): int; cdecl; + external avutil_dll; + +(* * + * Copy entries from one AVDictionary struct into another. + * @param dst pointer to a pointer to a AVDictionary struct. If *dst is NULL, + * this function will allocate a struct for you and put it in *dst + * @param src pointer to source AVDictionary struct + * @param flags flags to use when setting entries in *dst + * @note metadata is read using the AV_DICT_IGNORE_SUFFIX flag + * @return 0 on success, negative AVERROR code on failure. If dst was allocated + * by this function, callers should free the associated memory. +*) +// int av_dict_copy(AVDictionary **dst, const AVDictionary *src, int flags); +function av_dict_copy(var dst: pAVDictionary; const src: pAVDictionary; flags: int): int; cdecl; external avutil_dll; + +(* * + * Free all the memory allocated for an AVDictionary struct + * and all keys and values. +*) +// void av_dict_free(AVDictionary **m); +procedure av_dict_free(Var m: pAVDictionary); cdecl; external avutil_dll; + +(* * + * Get dictionary entries as a string. + * + * Create a string containing dictionary's entries. + * Such string may be passed back to av_dict_parse_string(). + * @note String is escaped with backslashes ('\'). + * + * @param[in] m dictionary + * @param[out] buffer Pointer to buffer that will be allocated with string containg entries. + * Buffer must be freed by the caller when is no longer needed. + * @param[in] key_val_sep character used to separate key from value + * @param[in] pairs_sep character used to separate two pairs from each other + * @return >= 0 on success, negative on error + * @warning Separators cannot be neither '\\' nor '\0'. They also cannot be the same. +*) +// int av_dict_get_string(const AVDictionary *m, char **buffer, +// const char key_val_sep, const char pairs_sep); +function av_dict_get_string(const m: pAVDictionary; Var buffer: PAnsiChar; const key_val_sep: AnsiChar; const pairs_sep: AnsiChar): int; cdecl; + external avutil_dll; + +{$ENDREGION} +{$REGION 'buffer.h'} + +type + (* * + * A reference counted buffer type. It is opaque and is meant to be used through + * references (AVBufferRef). + *) + AVBuffer = record + end; + + pAVBuffer = ^AVBuffer; + + (* * + * A reference to a data buffer. + * + * The size of this struct is not a part of the public ABI and it is not meant + * to be allocated directly. + *) + AVBufferRef = record + buffer: pAVBuffer; + + (* * + * The data buffer. It is considered writable if and only if + * this is the only reference to the buffer, in which case + * av_buffer_is_writable() returns 1. + *) + data: puint8_t; + (* * + * Size of data in bytes. + *) + size: int; + end; + + pAVBufferRef = ^AVBufferRef; + ppAVBufferRef = ^pAVBufferRef; + + (* * + * Allocate an AVBuffer of the given size using av_malloc(). + * + * @return an AVBufferRef of given size or NULL when out of memory + *) + // AVBufferRef *av_buffer_alloc(int size); +function av_buffer_alloc(size: int): pAVBufferRef; cdecl; external avutil_dll; + +(* * + * Same as av_buffer_alloc(), except the returned buffer will be initialized + * to zero. 
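+ *
+ * A small lifecycle sketch (illustrative only; Buf is a hypothetical local):
+ * @code
+ *   var
+ *     Buf: pAVBufferRef;
+ *   begin
+ *     Buf := av_buffer_allocz(4096);          // zero-filled, reference count = 1
+ *     FillChar(Buf^.data^, Buf^.size, $FF);   // the only reference, so writable
+ *     av_buffer_unref(Buf);                   // last reference dropped, Buf set to nil
+ *   end;
+ * @endcode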
+*) +// AVBufferRef *av_buffer_allocz(int size); +function av_buffer_allocz(size: int): pAVBufferRef; cdecl; external avutil_dll; + +const + (* * + * Always treat the buffer as read-only, even when it has only one + * reference. + *) + AV_BUFFER_FLAG_READONLY = (1 shl 0); + + (* * + * Create an AVBuffer from an existing array. + * + * If this function is successful, data is owned by the AVBuffer. The caller may + * only access data through the returned AVBufferRef and references derived from + * it. + * If this function fails, data is left untouched. + * @param data data array + * @param size size of data in bytes + * @param free a callback for freeing this buffer's data + * @param opaque parameter to be got for processing or passed to free + * @param flags a combination of AV_BUFFER_FLAG_* + * + * @return an AVBufferRef referring to data on success, NULL on failure. + *) + // AVBufferRef *av_buffer_create(uint8_t *data, int size, + // void (*free)(void *opaque, uint8_t *data), + // void *opaque, int flags); + +type + TFreeProc = procedure(opaque: Pointer; data: puint8_t); cdecl; + +function av_buffer_create(data: puint8_t; size: int; freeproc: TFreeProc; opaque: Pointer; flags: int): AVBufferRef; cdecl; external avutil_dll; + +(* * + * Default free callback, which calls av_free() on the buffer data. + * This function is meant to be passed to av_buffer_create(), not called + * directly. +*) +// void av_buffer_default_free(void *opaque, uint8_t *data); +procedure av_buffer_default_free(opaque: Pointer; data: puint8_t); cdecl; external avutil_dll; + +(* * + * Create a new reference to an AVBuffer. + * + * @return a new AVBufferRef referring to the same AVBuffer as buf or NULL on + * failure. +*) +// AVBufferRef *av_buffer_ref(AVBufferRef *buf); +function av_buffer_ref(buf: pAVBufferRef): pAVBufferRef; cdecl; external avutil_dll; +(* * + * Free a given reference and automatically free the buffer if there are no more + * references to it. + * + * @param buf the reference to be freed. The pointer is set to NULL on return. +*) +// void av_buffer_unref(AVBufferRef **buf); +procedure av_buffer_unref(var buf: pAVBufferRef); cdecl; external avutil_dll; + +(* * + * @return 1 if the caller may write to the data referred to by buf (which is + * true if and only if buf is the only reference to the underlying AVBuffer). + * Return 0 otherwise. + * A positive answer is valid until av_buffer_ref() is called on buf. +*) +// int av_buffer_is_writable(const AVBufferRef *buf); +function av_buffer_is_writable(const buf: pAVBufferRef): int; cdecl; external avutil_dll; + +(* * + * @return the opaque parameter set by av_buffer_create. +*) +// void *av_buffer_get_opaque(const AVBufferRef *buf); +function av_buffer_get_opaque(const buf: pAVBufferRef): Pointer; cdecl; external avutil_dll; + +// int av_buffer_get_ref_count(const AVBufferRef *buf); +function av_buffer_get_ref_count(const buf: pAVBufferRef): int; cdecl; external avutil_dll; + +(* * + * Create a writable reference from a given buffer reference, avoiding data copy + * if possible. + * + * @param buf buffer reference to make writable. On success, buf is either left + * untouched, or it is unreferenced and a new writable AVBufferRef is + * written in its place. On failure, buf is left untouched. + * @return 0 on success, a negative AVERROR on failure. +*) +// int av_buffer_make_writable(AVBufferRef **buf); +function av_buffer_make_writable(var buf: pAVBufferRef): int; cdecl; external avutil_dll; + +(* * + * Reallocate a given buffer. 
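+ *
+ * Illustrative Pascal sketch (an assumption about typical use, not taken from
+ * the original header):
+ * @code
+ *   buf:=nil;                         //may start from nil
+ *   if av_buffer_realloc(buf,1024)<0 then
+ *    Exit;                            //buf is left untouched on failure
+ * @endcode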
+ * + * @param buf a buffer reference to reallocate. On success, buf will be + * unreferenced and a new reference with the required size will be + * written in its place. On failure buf will be left untouched. *buf + * may be NULL, then a new buffer is allocated. + * @param size required new buffer size. + * @return 0 on success, a negative AVERROR on failure. + * + * @note the buffer is actually reallocated with av_realloc() only if it was + * initially allocated through av_buffer_realloc(NULL) and there is only one + * reference to it (i.e. the one passed to this function). In all other cases + * a new buffer is allocated and the data is copied. +*) +// int av_buffer_realloc(AVBufferRef **buf, int size); +function av_buffer_realloc(var buf: pAVBufferRef; size: int): int; cdecl; external avutil_dll; + +(* * + * @defgroup lavu_bufferpool AVBufferPool + * @ingroup lavu_data + * + * @{ + * AVBufferPool is an API for a lock-free thread-safe pool of AVBuffers. + * + * Frequently allocating and freeing large buffers may be slow. AVBufferPool is + * meant to solve this in cases when the caller needs a set of buffers of the + * same size (the most obvious use case being buffers for raw video or audio + * frames). + * + * At the beginning, the user must call av_buffer_pool_init() to create the + * buffer pool. Then whenever a buffer is needed, call av_buffer_pool_get() to + * get a reference to a new buffer, similar to av_buffer_alloc(). This new + * reference works in all aspects the same way as the one created by + * av_buffer_alloc(). However, when the last reference to this buffer is + * unreferenced, it is returned to the pool instead of being freed and will be + * reused for subsequent av_buffer_pool_get() calls. + * + * When the caller is done with the pool and no longer needs to allocate any new + * buffers, av_buffer_pool_uninit() must be called to mark the pool as freeable. + * Once all the buffers are released, it will automatically be freed. + * + * Allocating and releasing buffers with this API is thread-safe as long as + * either the default alloc callback is used, or the user-supplied one is + * thread-safe. +*) + +type + (* * + * The buffer pool. This structure is opaque and not meant to be accessed + * directly. It is allocated with av_buffer_pool_init() and freed with + * av_buffer_pool_uninit(). + *) + AVBufferPool = record + end; + + pAVBufferPool = ^AVBufferPool; + + (* * + * Allocate and initialize a buffer pool. + * + * @param size size of each buffer in this pool + * @param alloc a function that will be used to allocate new buffers when the + * pool is empty. May be NULL, then the default allocator will be used + * (av_buffer_alloc()). + * @return newly created buffer pool on success, NULL on error. + *) + // AVBufferPool *av_buffer_pool_init(int size, AVBufferRef* (*alloc)(int size)); +type + Tbuffer_pool_init_proc = function(size: int): pAVBufferRef; cdecl; + +function av_buffer_pool_init(size: int; alloc: Tbuffer_pool_init_proc): pAVBufferPool; cdecl; external avutil_dll; + +(* * + * Allocate and initialize a buffer pool with a more complex allocator. + * + * @param size size of each buffer in this pool + * @param opaque arbitrary user data used by the allocator + * @param alloc a function that will be used to allocate new buffers when the + * pool is empty. + * @param pool_free a function that will be called immediately before the pool + * is freed. I.e. after av_buffer_pool_uninit() is called + * by the caller and all the frames are returned to the pool + * and freed. 
It is intended to uninitialize the user opaque + * data. + * @return newly created buffer pool on success, NULL on error. +*) +type + Tav_buffer_pool_init2_alloc_proc = function(opaque: Pointer; size: int): pAVBufferRef; cdecl; + Tav_buffer_pool_init2_pool_free_proc = procedure(opaque: Pointer); cdecl; + + // AVBufferPool *av_buffer_pool_init2(int size, void *opaque, + // AVBufferRef* (*alloc)(void *opaque, int size), + // void (*pool_free)(void *opaque)); +function av_buffer_pool_init2(size: int; opaque: Pointer; alloc: Tav_buffer_pool_init2_alloc_proc; pool_free: Tav_buffer_pool_init2_pool_free_proc) + : pAVBufferPool; cdecl; external avutil_dll; + +(* * + * Mark the pool as being available for freeing. It will actually be freed only + * once all the allocated buffers associated with the pool are released. Thus it + * is safe to call this function while some of the allocated buffers are still + * in use. + * + * @param pool pointer to the pool to be freed. It will be set to NULL. +*) +// void av_buffer_pool_uninit(AVBufferPool **pool); +procedure av_buffer_pool_uninit(var pool: pAVBufferPool); cdecl; external avutil_dll; + +(* * + * Allocate a new AVBuffer, reusing an old buffer from the pool when available. + * This function may be called simultaneously from multiple threads. + * + * @return a reference to the new buffer on success, NULL on error. +*) +// AVBufferRef *av_buffer_pool_get(AVBufferPool *pool); +function av_buffer_pool_get(pool: pAVBufferPool): pAVBufferRef; cdecl; external avutil_dll; + +{$ENDREGION} +{$REGION 'rational.h'} + +Type + (* * + * Rational number (pair of numerator and denominator). + *) + AVRational = record + num: int; // < Numerator + den: int; // < Denominator + end; + + pAVRational = ^AVRational; + + (* * + * Create an AVRational. + * + * Useful for compilers that do not support compound literals. + * + * @note The return value is not reduced. + * @see av_reduce() + *) + + // static inline AVRational av_make_q(int num, int den) +function av_make_q(_num: int; _den: int): AVRational; inline; + +(* * + * Compare two rationals. + * + * @param a First rational + * @param b Second rational + * + * @return One of the following values: + * - 0 if `a == b` + * - 1 if `a > b` + * - -1 if `a < b` + * - `INT_MIN` if one of the values is of the form `0 / 0` +*) +// static inline int av_cmp_q(AVRational a, AVRational b) +function av_cmp_q(a, b: AVRational): int; inline; + +(* * + * Convert an AVRational to a `double`. + * @param a AVRational to convert + * @return `a` in floating-point form + * @see av_d2q() +*) +// static inline double av_q2d(AVRational a) +function av_q2d(a: AVRational): double; inline; + +(* * + * Reduce a fraction. + * + * This is useful for framerate calculations. + * + * @param[out] dst_num Destination numerator + * @param[out] dst_den Destination denominator + * @param[in] num Source numerator + * @param[in] den Source denominator + * @param[in] max Maximum allowed values for `dst_num` & `dst_den` + * @return 1 if the operation is exact, 0 otherwise +*) +// int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max); +function av_reduce(Var dst_num: int; var dst_den: int; num: int64_t; den: int64_t; max: int64_t): int; cdecl; external avutil_dll; + +(* * + * Multiply two rationals. 
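+ *
+ * For example (an illustrative sketch, not from the original header),
+ * scaling a 1/25 time base by 2/1:
+ * @code
+ *   var r:AVRational;
+ *   r:=av_mul_q(av_make_q(1,25),av_make_q(2,1)); //r = {2,25}
+ *   Writeln(av_q2d(r):0:3);                      //prints 0.080
+ * @endcode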
+ * @param b First rational + * @param c Second rational + * @return b*c +*) +// AVRational av_mul_q(AVRational b, AVRational c) av_const; +function av_mul_q(b, c: AVRational): AVRational; cdecl; external avutil_dll; + +(* * + * Divide one rational by another. + * @param b First rational + * @param c Second rational + * @return b/c +*) +// AVRational av_div_q(AVRational b, AVRational c) av_const; +function av_div_q(b, c: AVRational): AVRational; cdecl; external avutil_dll; + +(* * + * Add two rationals. + * @param b First rational + * @param c Second rational + * @return b+c +*) +// AVRational av_add_q(AVRational b, AVRational c) av_const; +function av_add_q(b, c: AVRational): AVRational; cdecl; external avutil_dll; + +(* * + * Subtract one rational from another. + * @param b First rational + * @param c Second rational + * @return b-c +*) +// AVRational av_sub_q(AVRational b, AVRational c) av_const; +function av_sub_q(b, c: AVRational): AVRational; cdecl; external avutil_dll; + +(* * + * Invert a rational. + * @param q value + * @return 1 / q +*) +// static av_always_inline AVRational av_inv_q(AVRational q) +function av_inv_q(q: AVRational): AVRational; inline; + +(* * + * Convert a double precision floating point number to a rational. + * + * In case of infinity, the returned value is expressed as `{1, 0}` or + * `{-1, 0}` depending on the sign. + * + * @param d `double` to convert + * @param max Maximum allowed numerator and denominator + * @return `d` in AVRational form + * @see av_q2d() +*) +// AVRational av_d2q(double d, int max) av_const; +function av_d2q(d: double; max: int): AVRational; cdecl; external avutil_dll; + +(* * + * Find which of the two rationals is closer to another rational. + * + * @param q Rational to be compared against + * @param q1,q2 Rationals to be tested + * @return One of the following values: + * - 1 if `q1` is nearer to `q` than `q2` + * - -1 if `q2` is nearer to `q` than `q1` + * - 0 if they have the same distance +*) +// int av_nearer_q(AVRational q, AVRational q1, AVRational q2); +function av_nearer_q(q: AVRational; q1: AVRational; q2: AVRational): int; cdecl; external avutil_dll; + +(* * + * Find the value in a list of rationals nearest a given reference rational. + * + * @param q Reference rational + * @param q_list Array of rationals terminated by `{0, 0}` + * @return Index of the nearest value found in the array +*) +// int av_find_nearest_q_idx(AVRational q, const AVRational* q_list); +function av_find_nearest_q_idx(q: AVRational; const q_list: pAVRational): int; cdecl; external avutil_dll; + +(* * + * Convert an AVRational to a IEEE 32-bit `float` expressed in fixed-point + * format. + * + * @param q Rational to be converted + * @return Equivalent floating-point value, expressed as an unsigned 32-bit + * integer. + * @note The returned value is platform-indepedant. 
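+ *
+ * (Illustrative assumption: av_q2intfloat(av_make_q(1,2)) should yield
+ *  $3F000000, the IEEE-754 single-precision bit pattern of 0.5.)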
+*) +// uint32_t av_q2intfloat(AVRational q); +function av_q2intfloat(q: AVRational): uint32_t; cdecl; external avutil_dll; + +{$ENDREGION} +{$REGION 'avutil'} +(* * + * @} +*) + +(* * + * @addtogroup lavu_media Media Type + * @brief Media Type +*) +type + AVMediaType = ( // + AVMEDIA_TYPE_UNKNOWN = -1, // < Usually treated as AVMEDIA_TYPE_DATA + AVMEDIA_TYPE_VIDEO = 0, // + AVMEDIA_TYPE_AUDIO = 1, // + AVMEDIA_TYPE_DATA = 2, // < Opaque data information usually continuous + AVMEDIA_TYPE_SUBTITLE = 3, // + AVMEDIA_TYPE_ATTACHMENT = 4, // < Opaque data information usually sparse + AVMEDIA_TYPE_NB = 5 // + ); + + (* * + * @defgroup lavu_const Constants + * @{ + * + * @defgroup lavu_enc Encoding specific + * + * @note those definition should move to avcodec + * @{ + *) +const + FF_LAMBDA_SHIFT = 7; + FF_LAMBDA_SCALE = (1 shl FF_LAMBDA_SHIFT); + FF_QP2LAMBDA = 118; + // < factor to convert from H.263 QP to lambda + FF_LAMBDA_MAX = (256 * 128 - 1); + + FF_QUALITY_SCALE = FF_LAMBDA_SCALE; // FIXME maybe remove + + (* * + * @} + * @defgroup lavu_time Timestamp specific + * + * FFmpeg internal timebase and timestamp definitions + * + * @{ + *) + + (* * + * @brief Undefined timestamp value + * + * Usually reported by demuxer that work on containers that do not provide + * either pts or dts. + *) + + AV_NOPTS_VALUE = int64_t($8000000000000000); + + (* * + * Internal time base represented as integer + *) + + AV_TIME_BASE = 1000000; + + (* * + * Internal time base represented as fractional value + *) + + AV_TIME_BASE_Q: AVRational = (num: 1; den: AV_TIME_BASE); + + (* * + * @} + * @} + * @defgroup lavu_picture Image related + * + * AVPicture types, pixel formats and basic image planes manipulation. + * + * @{ + *) +type + AVPictureType = ( // + AV_PICTURE_TYPE_NONE = 0, // < Undefined + AV_PICTURE_TYPE_I = 1, // < Intra + AV_PICTURE_TYPE_P = 2, // < Predicted + AV_PICTURE_TYPE_B = 3, // < Bi-dir predicted + AV_PICTURE_TYPE_S = 4, // < S(GMC)-VOP MPEG-4 + AV_PICTURE_TYPE_SI = 5, // < Switching Intra + AV_PICTURE_TYPE_SP = 6, // < Switching Predicted + AV_PICTURE_TYPE_BI = 7 // + ); + // < BI type +{$ENDREGION} +{$REGION 'pixfmt.h'} + +type + (* * + * Pixel format. + * + * @note + * AV_PIX_FMT_RGB32 is handled in an endian-specific manner. An RGBA + * color is put together as: + * (A shl 24) | (R shl 16) | (G shl 8) | B + * This is stored as BGRA on little-endian CPU architectures and ARGB on + * big-endian CPUs. + * + * @par + * When the pixel format is palettized RGB32 (AV_PIX_FMT_PAL8), the palettized + * image data is stored in AVFrame.data[0]. The palette is transported in + * AVFrame.data[1], is 1024 bytes long (256 4-byte entries) and is + * formatted the same as in AV_PIX_FMT_RGB32 described above (i.e., it is + * also endian-specific). Note also that the individual RGB32 palette + * components stored in AVFrame.data[1] should be in the range 0..255. + * This is important as many custom PAL8 video codecs that were designed + * to run on the IBM VGA graphics adapter use 6-bit palette components. + * + * @par + * For all the 8 bits per pixel formats, an RGB32 palette is in data[1] like + * for pal8. This palette is filled in automatically by the function + * allocating the picture. + *) + pAVPixelFormat = ^AVPixelFormat; + AVPixelFormat = ( // + AV_PIX_FMT_NONE = -1, // + AV_PIX_FMT_YUV420P, // < planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples) + AV_PIX_FMT_YUYV422, // < packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr + AV_PIX_FMT_RGB24, // < packed RGB 8:8:8, 24bpp, RGBRGB... 
+ AV_PIX_FMT_BGR24, // < packed RGB 8:8:8, 24bpp, BGRBGR... + AV_PIX_FMT_YUV422P, // < planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples) + AV_PIX_FMT_YUV444P, // < planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples) + AV_PIX_FMT_YUV410P, // < planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples) + AV_PIX_FMT_YUV411P, // < planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) + AV_PIX_FMT_GRAY8, // < Y , 8bpp + AV_PIX_FMT_MONOWHITE, // < Y , 1bpp, 0 is white, 1 is black, in each byte pixels are ordered from the msb to the lsb + AV_PIX_FMT_MONOBLACK, // < Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb + AV_PIX_FMT_PAL8, // < 8 bits with AV_PIX_FMT_RGB32 palette + AV_PIX_FMT_YUVJ420P, // < planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting color_range + AV_PIX_FMT_YUVJ422P, // < planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting color_range + AV_PIX_FMT_YUVJ444P, // < planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting color_range +{$IFDEF FF_API_XVMC} + AV_PIX_FMT_XVMC_MPEG2_MC, // < XVideo Motion Acceleration via common packet passing + AV_PIX_FMT_XVMC_MPEG2_IDCT, // + AV_PIX_FMT_XVMC = AV_PIX_FMT_XVMC_MPEG2_IDCT, // +{$ENDIF} (* FF_API_XVMC *) + AV_PIX_FMT_UYVY422, // < packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1 + AV_PIX_FMT_UYYVYY411, // < packed YUV 4:1:1, 12bpp, Cb Y0 Y1 Cr Y2 Y3 + AV_PIX_FMT_BGR8, // < packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb) + AV_PIX_FMT_BGR4, + // < packed RGB 1:2:1 bitstream, 4bpp, (msb)1B 2G 1R(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits + AV_PIX_FMT_BGR4_BYTE, // < packed RGB 1:2:1, 8bpp, (msb)1B 2G 1R(lsb) + AV_PIX_FMT_RGB8, // < packed RGB 3:3:2, 8bpp, (msb)2R 3G 3B(lsb) + AV_PIX_FMT_RGB4, + // < packed RGB 1:2:1 bitstream, 4bpp, (msb)1R 2G 1B(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits + AV_PIX_FMT_RGB4_BYTE, // < packed RGB 1:2:1, 8bpp, (msb)1R 2G 1B(lsb) + AV_PIX_FMT_NV12, + // < planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V) + AV_PIX_FMT_NV21, // < as above, but U and V bytes are swapped + + AV_PIX_FMT_ARGB, // < packed ARGB 8:8:8:8, 32bpp, ARGBARGB... + AV_PIX_FMT_RGBA, + // < packed RGBA 8:8:8:8, 32bpp, RGBARGBA... + AV_PIX_FMT_ABGR, + // < packed ABGR 8:8:8:8, 32bpp, ABGRABGR... + AV_PIX_FMT_BGRA, + // < packed BGRA 8:8:8:8, 32bpp, BGRABGRA... 
+ + AV_PIX_FMT_GRAY16BE, + // < Y , 16bpp, big-endian + AV_PIX_FMT_GRAY16LE, + // < Y , 16bpp, little-endian + AV_PIX_FMT_YUV440P, + // < planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples) + AV_PIX_FMT_YUVJ440P, + // < planar YUV 4:4:0 full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV440P and setting color_range + AV_PIX_FMT_YUVA420P, + // < planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples) +{$IFDEF FF_API_VDPAU} + AV_PIX_FMT_VDPAU_H264, + // < H.264 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers + AV_PIX_FMT_VDPAU_MPEG1, + // < MPEG-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers + AV_PIX_FMT_VDPAU_MPEG2, + // < MPEG-2 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers + AV_PIX_FMT_VDPAU_WMV3, + // < WMV3 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers + AV_PIX_FMT_VDPAU_VC1, + // < VC-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers +{$ENDIF} + AV_PIX_FMT_RGB48BE, + // < packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big-endian + AV_PIX_FMT_RGB48LE, + // < packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as little-endian + + AV_PIX_FMT_RGB565BE, + // < packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), big-endian + AV_PIX_FMT_RGB565LE, + // < packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), little-endian + AV_PIX_FMT_RGB555BE, + // < packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), big-endian , X=unused/undefined + AV_PIX_FMT_RGB555LE, + // < packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), little-endian, X=unused/undefined + + AV_PIX_FMT_BGR565BE, + // < packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), big-endian + AV_PIX_FMT_BGR565LE, + // < packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), little-endian + AV_PIX_FMT_BGR555BE, + // < packed BGR 5:5:5, 16bpp, (msb)1X 5B 5G 5R(lsb), big-endian , X=unused/undefined + AV_PIX_FMT_BGR555LE, + // < packed BGR 5:5:5, 16bpp, (msb)1X 5B 5G 5R(lsb), little-endian, X=unused/undefined + +{$IFDEF FF_API_VAAPI} + (* * @name Deprecated pixel formats *) + (* *@{ *) + AV_PIX_FMT_VAAPI_MOCO, + // < HW acceleration through VA API at motion compensation entry-point, Picture.data[3] contains a vaapi_render_state struct which contains macroblocks as well as various fields extracted from headers + AV_PIX_FMT_VAAPI_IDCT, + // < HW acceleration through VA API at IDCT entry-point, Picture.data[3] contains a vaapi_render_state struct which contains fields extracted from headers + AV_PIX_FMT_VAAPI_VLD, + // < HW decoding through VA API, Picture.data[3] contains a VASurfaceID + (* *@} *) + AV_PIX_FMT_VAAPI = AV_PIX_FMT_VAAPI_VLD, +{$ELSE} + (* * + * Hardware acceleration through VA-API, data[3] contains a + * VASurfaceID. 
+ *) + AV_PIX_FMT_VAAPI, +{$ENDIF} + AV_PIX_FMT_YUV420P16LE, + // < planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian + AV_PIX_FMT_YUV420P16BE, + // < planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian + AV_PIX_FMT_YUV422P16LE, + // < planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + AV_PIX_FMT_YUV422P16BE, + // < planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + AV_PIX_FMT_YUV444P16LE, + // < planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian + AV_PIX_FMT_YUV444P16BE, + // < planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian +{$IFDEF FF_API_VDPAU} + AV_PIX_FMT_VDPAU_MPEG4, + // < MPEG-4 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers +{$ENDIF} + AV_PIX_FMT_DXVA2_VLD, + // < HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer + + AV_PIX_FMT_RGB444LE, + // < packed RGB 4:4:4, 16bpp, (msb)4X 4R 4G 4B(lsb), little-endian, X=unused/undefined + AV_PIX_FMT_RGB444BE, + // < packed RGB 4:4:4, 16bpp, (msb)4X 4R 4G 4B(lsb), big-endian, X=unused/undefined + AV_PIX_FMT_BGR444LE, + // < packed BGR 4:4:4, 16bpp, (msb)4X 4B 4G 4R(lsb), little-endian, X=unused/undefined + AV_PIX_FMT_BGR444BE, + // < packed BGR 4:4:4, 16bpp, (msb)4X 4B 4G 4R(lsb), big-endian, X=unused/undefined + AV_PIX_FMT_YA8, + // < 8 bits gray, 8 bits alpha + + AV_PIX_FMT_Y400A = AV_PIX_FMT_YA8, + // < alias for AV_PIX_FMT_YA8 + AV_PIX_FMT_GRAY8A = AV_PIX_FMT_YA8, + // < alias for AV_PIX_FMT_YA8 + + AV_PIX_FMT_BGR48BE, + // < packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as big-endian + AV_PIX_FMT_BGR48LE, + // < packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as little-endian + + (* * + * The following 12 formats have the disadvantage of needing 1 format for each bit depth. + * Notice that each 9/10 bits sample is stored in 16 bits with extra padding. + * If you want to support multiple bit depths, then using AV_PIX_FMT_YUV420P16* with the bpp stored separately is better. 
+ *) + AV_PIX_FMT_YUV420P9BE, + // < planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian + AV_PIX_FMT_YUV420P9LE, + // < planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian + AV_PIX_FMT_YUV420P10BE, + // < planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian + AV_PIX_FMT_YUV420P10LE, + // < planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian + AV_PIX_FMT_YUV422P10BE, + // < planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + AV_PIX_FMT_YUV422P10LE, + // < planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + AV_PIX_FMT_YUV444P9BE, + // < planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian + AV_PIX_FMT_YUV444P9LE, + // < planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian + AV_PIX_FMT_YUV444P10BE, + // < planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian + AV_PIX_FMT_YUV444P10LE, + // < planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian + AV_PIX_FMT_YUV422P9BE, + // < planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + AV_PIX_FMT_YUV422P9LE, + // < planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + AV_PIX_FMT_VDA_VLD, + // < hardware decoding through VDA + AV_PIX_FMT_GBRP, + // < planar GBR 4:4:4 24bpp + AV_PIX_FMT_GBR24P = AV_PIX_FMT_GBRP, // alias for #AV_PIX_FMT_GBRP + AV_PIX_FMT_GBRP9BE, + // < planar GBR 4:4:4 27bpp, big-endian + AV_PIX_FMT_GBRP9LE, + // < planar GBR 4:4:4 27bpp, little-endian + AV_PIX_FMT_GBRP10BE, + // < planar GBR 4:4:4 30bpp, big-endian + AV_PIX_FMT_GBRP10LE, + // < planar GBR 4:4:4 30bpp, little-endian + AV_PIX_FMT_GBRP16BE, + // < planar GBR 4:4:4 48bpp, big-endian + AV_PIX_FMT_GBRP16LE, + // < planar GBR 4:4:4 48bpp, little-endian + AV_PIX_FMT_YUVA422P, + // < planar YUV 4:2:2 24bpp, (1 Cr & Cb sample per 2x1 Y & A samples) + AV_PIX_FMT_YUVA444P, + // < planar YUV 4:4:4 32bpp, (1 Cr & Cb sample per 1x1 Y & A samples) + AV_PIX_FMT_YUVA420P9BE, + // < planar YUV 4:2:0 22.5bpp, (1 Cr & Cb sample per 2x2 Y & A samples), big-endian + AV_PIX_FMT_YUVA420P9LE, + // < planar YUV 4:2:0 22.5bpp, (1 Cr & Cb sample per 2x2 Y & A samples), little-endian + AV_PIX_FMT_YUVA422P9BE, + // < planar YUV 4:2:2 27bpp, (1 Cr & Cb sample per 2x1 Y & A samples), big-endian + AV_PIX_FMT_YUVA422P9LE, + // < planar YUV 4:2:2 27bpp, (1 Cr & Cb sample per 2x1 Y & A samples), little-endian + AV_PIX_FMT_YUVA444P9BE, + // < planar YUV 4:4:4 36bpp, (1 Cr & Cb sample per 1x1 Y & A samples), big-endian + AV_PIX_FMT_YUVA444P9LE, + // < planar YUV 4:4:4 36bpp, (1 Cr & Cb sample per 1x1 Y & A samples), little-endian + AV_PIX_FMT_YUVA420P10BE, + // < planar YUV 4:2:0 25bpp, (1 Cr & Cb sample per 2x2 Y & A samples, big-endian) + AV_PIX_FMT_YUVA420P10LE, + // < planar YUV 4:2:0 25bpp, (1 Cr & Cb sample per 2x2 Y & A samples, little-endian) + AV_PIX_FMT_YUVA422P10BE, + // < planar YUV 4:2:2 30bpp, (1 Cr & Cb sample per 2x1 Y & A samples, big-endian) + AV_PIX_FMT_YUVA422P10LE, + // < planar YUV 4:2:2 30bpp, (1 Cr & Cb sample per 2x1 Y & A samples, little-endian) + AV_PIX_FMT_YUVA444P10BE, + // < planar YUV 4:4:4 40bpp, (1 Cr & Cb sample per 1x1 Y & A samples, big-endian) + AV_PIX_FMT_YUVA444P10LE, + // < planar YUV 4:4:4 40bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian) + AV_PIX_FMT_YUVA420P16BE, + // < planar YUV 4:2:0 40bpp, (1 Cr & Cb sample per 2x2 Y & A samples, big-endian) + AV_PIX_FMT_YUVA420P16LE, 
+ // < planar YUV 4:2:0 40bpp, (1 Cr & Cb sample per 2x2 Y & A samples, little-endian) + AV_PIX_FMT_YUVA422P16BE, + // < planar YUV 4:2:2 48bpp, (1 Cr & Cb sample per 2x1 Y & A samples, big-endian) + AV_PIX_FMT_YUVA422P16LE, + // < planar YUV 4:2:2 48bpp, (1 Cr & Cb sample per 2x1 Y & A samples, little-endian) + AV_PIX_FMT_YUVA444P16BE, + // < planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, big-endian) + AV_PIX_FMT_YUVA444P16LE, + // < planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian) + + AV_PIX_FMT_VDPAU, + // < HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface + + AV_PIX_FMT_XYZ12LE, + // < packed XYZ 4:4:4, 36 bpp, (msb) 12X, 12Y, 12Z (lsb), the 2-byte value for each X/Y/Z is stored as little-endian, the 4 lower bits are set to 0 + AV_PIX_FMT_XYZ12BE, + // < packed XYZ 4:4:4, 36 bpp, (msb) 12X, 12Y, 12Z (lsb), the 2-byte value for each X/Y/Z is stored as big-endian, the 4 lower bits are set to 0 + AV_PIX_FMT_NV16, + // < interleaved chroma YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples) + AV_PIX_FMT_NV20LE, + // < interleaved chroma YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + AV_PIX_FMT_NV20BE, + // < interleaved chroma YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + + AV_PIX_FMT_RGBA64BE, + // < packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian + AV_PIX_FMT_RGBA64LE, + // < packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian + AV_PIX_FMT_BGRA64BE, + // < packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian + AV_PIX_FMT_BGRA64LE, + // < packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian + + AV_PIX_FMT_YVYU422, + // < packed YUV 4:2:2, 16bpp, Y0 Cr Y1 Cb + + AV_PIX_FMT_VDA, + // < HW acceleration through VDA, data[3] contains a CVPixelBufferRef + + AV_PIX_FMT_YA16BE, + // < 16 bits gray, 16 bits alpha (big-endian) + AV_PIX_FMT_YA16LE, + // < 16 bits gray, 16 bits alpha (little-endian) + + AV_PIX_FMT_GBRAP, + // < planar GBRA 4:4:4:4 32bpp + AV_PIX_FMT_GBRAP16BE, + // < planar GBRA 4:4:4:4 64bpp, big-endian + AV_PIX_FMT_GBRAP16LE, + // < planar GBRA 4:4:4:4 64bpp, little-endian + (* * + * HW acceleration through QSV, data[3] contains a pointer to the + * mfxFrameSurface1 structure. + *) + AV_PIX_FMT_QSV, + (* * + * HW acceleration though MMAL, data[3] contains a pointer to the + * MMAL_BUFFER_HEADER_T structure. + *) + AV_PIX_FMT_MMAL, + + AV_PIX_FMT_D3D11VA_VLD, + // < HW decoding through Direct3D11, Picture.data[3] contains a ID3D11VideoDecoderOutputView pointer + + (* * + * HW acceleration through CUDA. data[i] contain CUdeviceptr pointers + * exactly as for system memory frames. + *) + AV_PIX_FMT_CUDA, AV_PIX_FMT_0RGB = $123 + 4, + // < packed RGB 8:8:8, 32bpp, XRGBXRGB... X=unused/undefined + AV_PIX_FMT_RGB0, + // < packed RGB 8:8:8, 32bpp, RGBXRGBX... X=unused/undefined + AV_PIX_FMT_0BGR, + // < packed BGR 8:8:8, 32bpp, XBGRXBGR... X=unused/undefined + AV_PIX_FMT_BGR0, + // < packed BGR 8:8:8, 32bpp, BGRXBGRX... 
X=unused/undefined + + AV_PIX_FMT_YUV420P12BE, + // < planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian + AV_PIX_FMT_YUV420P12LE, + // < planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian + AV_PIX_FMT_YUV420P14BE, + // < planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian + AV_PIX_FMT_YUV420P14LE, + // < planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian + AV_PIX_FMT_YUV422P12BE, + // < planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + AV_PIX_FMT_YUV422P12LE, + // < planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + AV_PIX_FMT_YUV422P14BE, + // < planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + AV_PIX_FMT_YUV422P14LE, + // < planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + AV_PIX_FMT_YUV444P12BE, + // < planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian + AV_PIX_FMT_YUV444P12LE, + // < planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian + AV_PIX_FMT_YUV444P14BE, + // < planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian + AV_PIX_FMT_YUV444P14LE, + // < planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian + AV_PIX_FMT_GBRP12BE, + // < planar GBR 4:4:4 36bpp, big-endian + AV_PIX_FMT_GBRP12LE, + // < planar GBR 4:4:4 36bpp, little-endian + AV_PIX_FMT_GBRP14BE, + // < planar GBR 4:4:4 42bpp, big-endian + AV_PIX_FMT_GBRP14LE, + // < planar GBR 4:4:4 42bpp, little-endian + AV_PIX_FMT_YUVJ411P, + // < planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV411P and setting color_range + + AV_PIX_FMT_BAYER_BGGR8, + // < bayer, BGBG..(odd line), GRGR..(even line), 8-bit samples *) + AV_PIX_FMT_BAYER_RGGB8, + // < bayer, RGRG..(odd line), GBGB..(even line), 8-bit samples *) + AV_PIX_FMT_BAYER_GBRG8, + // < bayer, GBGB..(odd line), RGRG..(even line), 8-bit samples *) + AV_PIX_FMT_BAYER_GRBG8, + // < bayer, GRGR..(odd line), BGBG..(even line), 8-bit samples *) + AV_PIX_FMT_BAYER_BGGR16LE, + // < bayer, BGBG..(odd line), GRGR..(even line), 16-bit samples, little-endian *) + AV_PIX_FMT_BAYER_BGGR16BE, + // < bayer, BGBG..(odd line), GRGR..(even line), 16-bit samples, big-endian *) + AV_PIX_FMT_BAYER_RGGB16LE, + // < bayer, RGRG..(odd line), GBGB..(even line), 16-bit samples, little-endian *) + AV_PIX_FMT_BAYER_RGGB16BE, + // < bayer, RGRG..(odd line), GBGB..(even line), 16-bit samples, big-endian *) + AV_PIX_FMT_BAYER_GBRG16LE, + // < bayer, GBGB..(odd line), RGRG..(even line), 16-bit samples, little-endian *) + AV_PIX_FMT_BAYER_GBRG16BE, + // < bayer, GBGB..(odd line), RGRG..(even line), 16-bit samples, big-endian *) + AV_PIX_FMT_BAYER_GRBG16LE, + // < bayer, GRGR..(odd line), BGBG..(even line), 16-bit samples, little-endian *) + AV_PIX_FMT_BAYER_GRBG16BE, + // < bayer, GRGR..(odd line), BGBG..(even line), 16-bit samples, big-endian *) +{$IFNDEF FF_API_XVMC} + AV_PIX_FMT_XVMC, + // < XVideo Motion Acceleration via common packet passing +{$ENDIF} (* !FF_API_XVMC *) + AV_PIX_FMT_YUV440P10LE, + // < planar YUV 4:4:0,20bpp, (1 Cr & Cb sample per 1x2 Y samples), little-endian + AV_PIX_FMT_YUV440P10BE, + // < planar YUV 4:4:0,20bpp, (1 Cr & Cb sample per 1x2 Y samples), big-endian + AV_PIX_FMT_YUV440P12LE, + // < planar YUV 4:4:0,24bpp, (1 Cr & Cb sample per 1x2 Y samples), little-endian + AV_PIX_FMT_YUV440P12BE, + // < planar YUV 4:4:0,24bpp, (1 Cr & Cb sample 
per 1x2 Y samples), big-endian + AV_PIX_FMT_AYUV64LE, + // < packed AYUV 4:4:4,64bpp (1 Cr & Cb sample per 1x1 Y & A samples), little-endian + AV_PIX_FMT_AYUV64BE, + // < packed AYUV 4:4:4,64bpp (1 Cr & Cb sample per 1x1 Y & A samples), big-endian + + AV_PIX_FMT_VIDEOTOOLBOX, + // < hardware decoding through Videotoolbox + + AV_PIX_FMT_P010LE, + // < like NV12, with 10bpp per component, data in the high bits, zeros in the low bits, little-endian + AV_PIX_FMT_P010BE, + // < like NV12, with 10bpp per component, data in the high bits, zeros in the low bits, big-endian + + AV_PIX_FMT_GBRAP12BE, + // < planar GBR 4:4:4:4 48bpp, big-endian + AV_PIX_FMT_GBRAP12LE, + // < planar GBR 4:4:4:4 48bpp, little-endian + + AV_PIX_FMT_GBRAP10BE, + // < planar GBR 4:4:4:4 40bpp, big-endian + AV_PIX_FMT_GBRAP10LE, + // < planar GBR 4:4:4:4 40bpp, little-endian + + AV_PIX_FMT_MEDIACODEC, + // < hardware decoding through MediaCodec + + AV_PIX_FMT_GRAY12BE, + // < Y , 12bpp, big-endian + AV_PIX_FMT_GRAY12LE, + // < Y , 12bpp, little-endian + AV_PIX_FMT_GRAY10BE, + // < Y , 10bpp, big-endian + AV_PIX_FMT_GRAY10LE, + // < Y , 10bpp, little-endian + + AV_PIX_FMT_P016LE, + // < like NV12, with 16bpp per component, little-endian + AV_PIX_FMT_P016BE, + // < like NV12, with 16bpp per component, big-endian + (* * + * Hardware surfaces for Direct3D11. + * + * This is preferred over the legacy AV_PIX_FMT_D3D11VA_VLD. The new D3D11 + * hwaccel API and filtering support AV_PIX_FMT_D3D11 only. + * + * data[0] contains a ID3D11Texture2D pointer, and data[1] contains the + * texture array index of the frame as intptr_t if the ID3D11Texture2D is + * an array texture (or always 0 if it's a normal texture). + *) + AV_PIX_FMT_D3D11, + + AV_PIX_FMT_GRAY9BE, + /// < Y , 9bpp, big-endian + AV_PIX_FMT_GRAY9LE, + /// < Y , 9bpp, little-endian + + AV_PIX_FMT_GBRPF32BE, + /// < IEEE-754 single precision planar GBR 4:4:4, 96bpp, big-endian + AV_PIX_FMT_GBRPF32LE, + /// < IEEE-754 single precision planar GBR 4:4:4, 96bpp, little-endian + AV_PIX_FMT_GBRAPF32BE, + /// < IEEE-754 single precision planar GBRA 4:4:4:4, 128bpp, big-endian + AV_PIX_FMT_GBRAPF32LE, + /// < IEEE-754 single precision planar GBRA 4:4:4:4, 128bpp, little-endian + + (* * + * DRM-managed buffers exposed through PRIME buffer sharing. + * + * data[0] points to an AVDRMFrameDescriptor. + *) + AV_PIX_FMT_DRM_PRIME, + (* * + * Hardware surfaces for OpenCL. + * + * data[i] contain 2D image objects (typed in C as cl_mem, used + * in OpenCL as image2d_t) for each plane of the surface. 
+ *) + AV_PIX_FMT_OPENCL, // + AV_PIX_FMT_GRAY14BE, + /// < Y , 14bpp, big-endian + AV_PIX_FMT_GRAY14LE, + /// < Y , 14bpp, little-endian + + AV_PIX_FMT_GRAYF32BE, + /// < IEEE-754 single precision Y, 32bpp, big-endian + AV_PIX_FMT_GRAYF32LE, + /// < IEEE-754 single precision Y, 32bpp, little-endian + + AV_PIX_FMT_YUVA422P12BE, + /// < planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), 12b alpha, big-endian + AV_PIX_FMT_YUVA422P12LE, + /// < planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), 12b alpha, little-endian + AV_PIX_FMT_YUVA444P12BE, + /// < planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), 12b alpha, big-endian + AV_PIX_FMT_YUVA444P12LE, + /// < planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), 12b alpha, little-endian + + AV_PIX_FMT_NV24, + /// < planar YUV 4:4:4, 24bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V) + AV_PIX_FMT_NV42, + /// < as above, but U and V bytes are swapped + AV_PIX_FMT_NB + // < number of pixel formats, DO NOT USE THIS if you want to link with shared libav* because the number of formats might differ between versions + ); + +const + AVPALETTE_SIZE = 1024; + AVPALETTE_COUNT = 256; + + AV_PIX_FMT_RGB32: AVPixelFormat = AV_PIX_FMT_BGRA; // AV_PIX_FMT_NE(ARGB, BGRA) + AV_PIX_FMT_RGB32_1: AVPixelFormat = AV_PIX_FMT_ABGR; // AV_PIX_FMT_NE(RGBA, ABGR) + AV_PIX_FMT_BGR32: AVPixelFormat = AV_PIX_FMT_RGBA; // AV_PIX_FMT_NE(ABGR, RGBA) + AV_PIX_FMT_BGR32_1: AVPixelFormat = AV_PIX_FMT_ARGB; // AV_PIX_FMT_NE(BGRA, ARGB) + AV_PIX_FMT_0RGB32: AVPixelFormat = AV_PIX_FMT_BGR0; // AV_PIX_FMT_NE(0RGB, BGR0) + AV_PIX_FMT_0BGR32: AVPixelFormat = AV_PIX_FMT_RGB0; // AV_PIX_FMT_NE(0BGR, RGB0) + + AV_PIX_FMT_GRAY10: AVPixelFormat = AV_PIX_FMT_GRAY10LE; // AV_PIX_FMT_NE(GRAY10BE, GRAY10LE) + AV_PIX_FMT_GRAY12: AVPixelFormat = AV_PIX_FMT_GRAY12LE; // AV_PIX_FMT_NE(GRAY12BE, GRAY12LE) + AV_PIX_FMT_GRAY16: AVPixelFormat = AV_PIX_FMT_GRAY16LE; // AV_PIX_FMT_NE(GRAY16BE, GRAY16LE) + AV_PIX_FMT_YA16: AVPixelFormat = AV_PIX_FMT_YA16LE; // AV_PIX_FMT_NE(YA16BE, YA16LE ) + AV_PIX_FMT_RGB48: AVPixelFormat = AV_PIX_FMT_RGB48LE; // AV_PIX_FMT_NE(RGB48BE, RGB48LE ) + AV_PIX_FMT_RGB565: AVPixelFormat = AV_PIX_FMT_RGB565LE; // AV_PIX_FMT_NE(RGB565BE, RGB565LE) + AV_PIX_FMT_RGB555: AVPixelFormat = AV_PIX_FMT_RGB555LE; // AV_PIX_FMT_NE(RGB555BE, RGB555LE) + AV_PIX_FMT_RGB444: AVPixelFormat = AV_PIX_FMT_RGB444LE; // AV_PIX_FMT_NE(RGB444BE, RGB444LE) + AV_PIX_FMT_RGBA64: AVPixelFormat = AV_PIX_FMT_RGBA64LE; // AV_PIX_FMT_NE(RGBA64BE, RGBA64LE) + AV_PIX_FMT_BGR48: AVPixelFormat = AV_PIX_FMT_BGR48LE; // AV_PIX_FMT_NE(BGR48BE, BGR48LE ) + AV_PIX_FMT_BGR565: AVPixelFormat = AV_PIX_FMT_BGR565LE; // AV_PIX_FMT_NE(BGR565BE, BGR565LE) + AV_PIX_FMT_BGR555: AVPixelFormat = AV_PIX_FMT_BGR555LE; // AV_PIX_FMT_NE(BGR555BE, BGR555LE) + AV_PIX_FMT_BGR444: AVPixelFormat = AV_PIX_FMT_BGR444LE; // AV_PIX_FMT_NE(BGR444BE, BGR444LE) + AV_PIX_FMT_BGRA64: AVPixelFormat = AV_PIX_FMT_BGRA64LE; // AV_PIX_FMT_NE(BGRA64BE, BGRA64LE) + + AV_PIX_FMT_YUV420P9: AVPixelFormat = AV_PIX_FMT_YUV420P9LE; // AV_PIX_FMT_NE(YUV420P9BE , YUV420P9LE ) + AV_PIX_FMT_YUV422P9: AVPixelFormat = AV_PIX_FMT_YUV422P9LE; // AV_PIX_FMT_NE(YUV422P9BE , YUV422P9LE ) + AV_PIX_FMT_YUV444P9: AVPixelFormat = AV_PIX_FMT_YUV444P9LE; // AV_PIX_FMT_NE(YUV444P9BE , YUV444P9LE ) + AV_PIX_FMT_YUV420P10: AVPixelFormat = AV_PIX_FMT_YUV420P10LE; // AV_PIX_FMT_NE(YUV420P10BE, YUV420P10LE) + AV_PIX_FMT_YUV422P10: AVPixelFormat = AV_PIX_FMT_YUV422P10LE; // 
AV_PIX_FMT_NE(YUV422P10BE, YUV422P10LE) + AV_PIX_FMT_YUV440P10: AVPixelFormat = AV_PIX_FMT_YUV440P10LE; // AV_PIX_FMT_NE(YUV440P10BE, YUV440P10LE) + AV_PIX_FMT_YUV444P10: AVPixelFormat = AV_PIX_FMT_YUV444P10LE; // AV_PIX_FMT_NE(YUV444P10BE, YUV444P10LE) + AV_PIX_FMT_YUV420P12: AVPixelFormat = AV_PIX_FMT_YUV420P12LE; // AV_PIX_FMT_NE(YUV420P12BE, YUV420P12LE) + AV_PIX_FMT_YUV422P12: AVPixelFormat = AV_PIX_FMT_YUV422P12LE; // AV_PIX_FMT_NE(YUV422P12BE, YUV422P12LE) + AV_PIX_FMT_YUV440P12: AVPixelFormat = AV_PIX_FMT_YUV440P12LE; // AV_PIX_FMT_NE(YUV440P12BE, YUV440P12LE) + AV_PIX_FMT_YUV444P12: AVPixelFormat = AV_PIX_FMT_YUV444P12LE; // AV_PIX_FMT_NE(YUV444P12BE, YUV444P12LE) + AV_PIX_FMT_YUV420P14: AVPixelFormat = AV_PIX_FMT_YUV420P14LE; // AV_PIX_FMT_NE(YUV420P14BE, YUV420P14LE) + AV_PIX_FMT_YUV422P14: AVPixelFormat = AV_PIX_FMT_YUV422P14LE; // AV_PIX_FMT_NE(YUV422P14BE, YUV422P14LE) + AV_PIX_FMT_YUV444P14: AVPixelFormat = AV_PIX_FMT_YUV444P14LE; // AV_PIX_FMT_NE(YUV444P14BE, YUV444P14LE) + AV_PIX_FMT_YUV420P16: AVPixelFormat = AV_PIX_FMT_YUV420P16LE; // AV_PIX_FMT_NE(YUV420P16BE, YUV420P16LE) + AV_PIX_FMT_YUV422P16: AVPixelFormat = AV_PIX_FMT_YUV422P16LE; // AV_PIX_FMT_NE(YUV422P16BE, YUV422P16LE) + AV_PIX_FMT_YUV444P16: AVPixelFormat = AV_PIX_FMT_YUV444P16LE; // AV_PIX_FMT_NE(YUV444P16BE, YUV444P16LE) + + AV_PIX_FMT_GBRP9: AVPixelFormat = AV_PIX_FMT_GBRP9LE; // AV_PIX_FMT_NE(GBRP9BE , GBRP9LE ) + AV_PIX_FMT_GBRP10: AVPixelFormat = AV_PIX_FMT_GBRP10LE; // AV_PIX_FMT_NE(GBRP10BE, GBRP10LE ) + AV_PIX_FMT_GBRP12: AVPixelFormat = AV_PIX_FMT_GBRP12LE; // AV_PIX_FMT_NE(GBRP12BE, GBRP12LE ) + AV_PIX_FMT_GBRP14: AVPixelFormat = AV_PIX_FMT_GBRP14LE; // AV_PIX_FMT_NE(GBRP14BE, GBRP14LE ) + AV_PIX_FMT_GBRP16: AVPixelFormat = AV_PIX_FMT_GBRP16LE; // AV_PIX_FMT_NE(GBRP16BE, GBRP16LE ) + AV_PIX_FMT_GBRAP10: AVPixelFormat = AV_PIX_FMT_GBRAP10LE; // AV_PIX_FMT_NE(GBRAP10BE, GBRAP10LE) + AV_PIX_FMT_GBRAP12: AVPixelFormat = AV_PIX_FMT_GBRAP12LE; // AV_PIX_FMT_NE(GBRAP12BE, GBRAP12LE) + AV_PIX_FMT_GBRAP16: AVPixelFormat = AV_PIX_FMT_GBRAP16LE; // AV_PIX_FMT_NE(GBRAP16BE, GBRAP16LE) + + AV_PIX_FMT_BAYER_BGGR16: AVPixelFormat = AV_PIX_FMT_BAYER_BGGR16LE; // AV_PIX_FMT_NE(BAYER_BGGR16BE, BAYER_BGGR16LE) + AV_PIX_FMT_BAYER_RGGB16: AVPixelFormat = AV_PIX_FMT_BAYER_RGGB16LE; // AV_PIX_FMT_NE(BAYER_RGGB16BE, BAYER_RGGB16LE) + AV_PIX_FMT_BAYER_GBRG16: AVPixelFormat = AV_PIX_FMT_BAYER_GBRG16LE; // AV_PIX_FMT_NE(BAYER_GBRG16BE, BAYER_GBRG16LE) + AV_PIX_FMT_BAYER_GRBG16: AVPixelFormat = AV_PIX_FMT_BAYER_GRBG16LE; // AV_PIX_FMT_NE(BAYER_GRBG16BE, BAYER_GRBG16LE) + + AV_PIX_FMT_YUVA420P9: AVPixelFormat = AV_PIX_FMT_YUVA420P9LE; // AV_PIX_FMT_NE(YUVA420P9BE , YUVA420P9LE ) + AV_PIX_FMT_YUVA422P9: AVPixelFormat = AV_PIX_FMT_YUVA422P9LE; // AV_PIX_FMT_NE(YUVA422P9BE , YUVA422P9LE ) + AV_PIX_FMT_YUVA444P9: AVPixelFormat = AV_PIX_FMT_YUVA444P9LE; // AV_PIX_FMT_NE(YUVA444P9BE , YUVA444P9LE ) + AV_PIX_FMT_YUVA420P10: AVPixelFormat = AV_PIX_FMT_YUVA420P10LE; // AV_PIX_FMT_NE(YUVA420P10BE, YUVA420P10LE) + AV_PIX_FMT_YUVA422P10: AVPixelFormat = AV_PIX_FMT_YUVA422P10LE; // AV_PIX_FMT_NE(YUVA422P10BE, YUVA422P10LE) + AV_PIX_FMT_YUVA444P10: AVPixelFormat = AV_PIX_FMT_YUVA444P10LE; // AV_PIX_FMT_NE(YUVA444P10BE, YUVA444P10LE) + AV_PIX_FMT_YUVA422P12: AVPixelFormat = AV_PIX_FMT_YUVA422P12LE; // AV_PIX_FMT_NE(YUVA422P12BE, YUVA422P12LE); + AV_PIX_FMT_YUVA444P12: AVPixelFormat = AV_PIX_FMT_YUVA444P12LE; // AV_PIX_FMT_NE(YUVA444P12BE, YUVA444P12LE); + AV_PIX_FMT_YUVA420P16: AVPixelFormat = AV_PIX_FMT_YUVA420P16LE; // 
AV_PIX_FMT_NE(YUVA420P16BE, YUVA420P16LE) + AV_PIX_FMT_YUVA422P16: AVPixelFormat = AV_PIX_FMT_YUVA422P16LE; // AV_PIX_FMT_NE(YUVA422P16BE, YUVA422P16LE) + AV_PIX_FMT_YUVA444P16: AVPixelFormat = AV_PIX_FMT_YUVA444P16LE; // AV_PIX_FMT_NE(YUVA444P16BE, YUVA444P16LE) + + AV_PIX_FMT_XYZ12: AVPixelFormat = AV_PIX_FMT_XYZ12LE; // AV_PIX_FMT_NE(XYZ12BE, XYZ12LE ) + AV_PIX_FMT_NV20: AVPixelFormat = AV_PIX_FMT_NV20LE; // AV_PIX_FMT_NE(NV20BE, NV20LE ) + AV_PIX_FMT_AYUV64: AVPixelFormat = AV_PIX_FMT_AYUV64LE; // AV_PIX_FMT_NE(AYUV64BE,AYUV64LE) + AV_PIX_FMT_P010: AVPixelFormat = AV_PIX_FMT_P010LE; // AV_PIX_FMT_NE(P010BE, P010LE ) + AV_PIX_FMT_P016: AVPixelFormat = AV_PIX_FMT_P016LE; // AV_PIX_FMT_NE(P016BE, P016LE ) + + (* * + * Chromaticity coordinates of the source primaries. + *) +Type + AVColorPrimaries = ( // + AVCOL_PRI_RESERVED0 = 0, AVCOL_PRI_BT709 = 1, + // < also ITU-R BT1361 / IEC 61966-2-4 / SMPTE RP177 Annex B + AVCOL_PRI_UNSPECIFIED = 2, AVCOL_PRI_RESERVED = 3, AVCOL_PRI_BT470M = 4, + // < also FCC Title 47 Code of Federal Regulations 73.682 (a)(20) + + AVCOL_PRI_BT470BG = 5, + // < also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM + AVCOL_PRI_SMPTE170M = 6, + // < also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC + AVCOL_PRI_SMPTE240M = 7, + // < functionally identical to above + AVCOL_PRI_FILM = 8, + // < colour filters using Illuminant C + AVCOL_PRI_BT2020 = 9, + // < ITU-R BT2020 + AVCOL_PRI_SMPTE428 = 10, + // < SMPTE ST 428-1 (CIE 1931 XYZ) + AVCOL_PRI_SMPTEST428_1 = AVCOL_PRI_SMPTE428, // + AVCOL_PRI_SMPTE431 = 11, // < SMPTE ST 431-2 (2011) / DCI P3 + AVCOL_PRI_SMPTE432 = 12, // < SMPTE ST 432-1 (2010) / P3 D65 / Display P3 + AVCOL_PRI_JEDEC_P22 = 22, + /// < JEDEC P22 phosphors + AVCOL_PRI_NB + /// < Not part of ABI + ); + + (* * + * Color Transfer Characteristic. + *) + AVColorTransferCharacteristic = ( // + AVCOL_TRC_RESERVED0 = 0, AVCOL_TRC_BT709 = 1, + // < also ITU-R BT1361 + AVCOL_TRC_UNSPECIFIED = 2, // + AVCOL_TRC_RESERVED = 3, // + AVCOL_TRC_GAMMA22 = 4, // < also ITU-R BT470M / ITU-R BT1700 625 PAL & SECAM + AVCOL_TRC_GAMMA28 = 5, // < also ITU-R BT470BG + AVCOL_TRC_SMPTE170M = 6, // < also ITU-R BT601-6 525 or 625 / ITU-R BT1358 525 or 625 / ITU-R BT1700 NTSC + AVCOL_TRC_SMPTE240M = 7, // + AVCOL_TRC_LINEAR = 8, // < "Linear transfer characteristics" + AVCOL_TRC_LOG = 9, // < "Logarithmic transfer characteristic (100:1 range)" + AVCOL_TRC_LOG_SQRT = 10, // < "Logarithmic transfer characteristic (100 * Sqrt(10) : 1 range)" + AVCOL_TRC_IEC61966_2_4 = 11, // < IEC 61966-2-4 + AVCOL_TRC_BT1361_ECG = 12, // < ITU-R BT1361 Extended Colour Gamut + AVCOL_TRC_IEC61966_2_1 = 13, // < IEC 61966-2-1 (sRGB or sYCC) + AVCOL_TRC_BT2020_10 = 14, // < ITU-R BT2020 for 10-bit system + AVCOL_TRC_BT2020_12 = 15, // < ITU-R BT2020 for 12-bit system + AVCOL_TRC_SMPTE2084 = 16, // < SMPTE ST 2084 for 10-, 12-, 14- and 16-bit systems + AVCOL_TRC_SMPTEST2084 = AVCOL_TRC_SMPTE2084, // + AVCOL_TRC_SMPTE428 = 17, // < SMPTE ST 428-1 + AVCOL_TRC_SMPTEST428_1 = AVCOL_TRC_SMPTE428, // + AVCOL_TRC_ARIB_STD_B67 = 18, // < ARIB STD-B67, known as "Hybrid log-gamma" + AVCOL_TRC_NB // < Not part of ABI + ); + + (* * + * YUV colorspace type. 
+ *) + AVColorSpace = ( // + AVCOL_SPC_RGB = 0, + // < order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB) + AVCOL_SPC_BT709 = 1, + // < also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / SMPTE RP177 Annex B + AVCOL_SPC_UNSPECIFIED = 2, AVCOL_SPC_RESERVED = 3, AVCOL_SPC_FCC = 4, + // < FCC Title 47 Code of Federal Regulations 73.682 (a)(20) + AVCOL_SPC_BT470BG = 5, + // < also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM / IEC 61966-2-4 xvYCC601 + AVCOL_SPC_SMPTE170M = 6, + // < also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC + AVCOL_SPC_SMPTE240M = 7, + // < functionally identical to above + AVCOL_SPC_YCGCO = 8, + // < Used by Dirac / VC-2 and H.264 FRext, see ITU-T SG16 + AVCOL_SPC_YCOCG = AVCOL_SPC_YCGCO, AVCOL_SPC_BT2020_NCL = 9, + // < ITU-R BT2020 non-constant luminance system + AVCOL_SPC_BT2020_CL = 10, + // < ITU-R BT2020 constant luminance system + AVCOL_SPC_SMPTE2085 = 11, + // < SMPTE 2085, Y'D'zD'x + AVCOL_SPC_NB + // < Not part of ABI + ); + + // #define AVCOL_SPC_YCGCO AVCOL_SPC_YCOCG + + (* * + * MPEG vs JPEG YUV range. + *) + AVColorRange = ( // + AVCOL_RANGE_UNSPECIFIED = 0, AVCOL_RANGE_MPEG = 1, + // < the normal 219*2^(n-8) "MPEG" YUV ranges + AVCOL_RANGE_JPEG = 2, + // < the normal 2^n-1 "JPEG" YUV ranges + AVCOL_RANGE_NB + // < Not part of ABI + ); + + (* * + * Location of chroma samples. + * + * Illustration showing the location of the first (top left) chroma sample of the + * image, the left shows only luma, the right + * shows the location of the chroma sample, the 2 could be imagined to overlay + * each other but are drawn separately due to limitations of ASCII + * + *----------------1st 2nd 1st 2nd horizontal luma sample positions + *-----------------v v v v + * ______ ______ + *1st luma line > |X X ... |3 4 X ... X are luma samples, + *----------------| |1 2 1-6 are possible chroma positions + *2nd luma line > |X X ... |5 6 X ... 0 is undefined/unknown position + *) + AVChromaLocation = ( // + AVCHROMA_LOC_UNSPECIFIED = 0, AVCHROMA_LOC_LEFT = 1, + // < MPEG-2/4 4:2:0, H.264 default for 4:2:0 + AVCHROMA_LOC_CENTER = 2, + // < MPEG-1 4:2:0, JPEG 4:2:0, H.263 4:2:0 + AVCHROMA_LOC_TOPLEFT = 3, + // < ITU-R 601, SMPTE 274M 296M S314M(DV 4:1:1), mpeg2 4:2:2 + AVCHROMA_LOC_TOP = 4, AVCHROMA_LOC_BOTTOMLEFT = 5, AVCHROMA_LOC_BOTTOM = 6, AVCHROMA_LOC_NB + // < Not part of ABI + ); +{$ENDREGION} +{$REGION 'frame.h'} + +const + AV_NUM_DATA_POINTERS = 8; + +Type + TAVNDPArray = array [0 .. AV_NUM_DATA_POINTERS - 1] of int; + pAVNDPArray = ^TAVNDPArray; + TAVNDPArray_int = TAVNDPArray; + pAVNDPArray_int = ^TAVNDPArray_int; + + TAVNDPArray_puint8_t = array [0 .. AV_NUM_DATA_POINTERS - 1] of puint8_t; + pAVNDPArray_puint8_t = ^TAVNDPArray_puint8_t; + + TAVNDPArray_uint64_t = array [0 .. AV_NUM_DATA_POINTERS - 1] of uint64_t; + + TAVNDPArray_pAVBufferRef = array [0 .. AV_NUM_DATA_POINTERS - 1] of pAVBufferRef; + pAVNDPArray_pAVBufferRef = ^TAVNDPArray_pAVBufferRef; + + // uint8_t * data[4]; + Tuint8_t_array_4 = array [0 .. 3] of uint8_t; + puint8_t_array_4 = ^Tuint8_t_array_4; + // int linesize[4]; + Tint_array_4 = array [0 .. 3] of int; + + AVFrameSideDataType = ( + (* * + * The data is the AVPanScan struct defined in libavcodec. + *) + AV_FRAME_DATA_PANSCAN, + (* * + * ATSC A53 Part 4 Closed Captions. + * A53 CC bitstream is stored as uint8_t in AVFrameSideData.data. + * The number of bytes of CC data is AVFrameSideData.size. + *) + AV_FRAME_DATA_A53_CC, + (* * + * Stereoscopic 3d metadata. 
+ * The data is the AVStereo3D struct defined in libavutil/stereo3d.h. + *) + AV_FRAME_DATA_STEREO3D, + (* * + * The data is the AVMatrixEncoding enum defined in libavutil/channel_layout.h. + *) + AV_FRAME_DATA_MATRIXENCODING, + (* * + * Metadata relevant to a downmix procedure. + * The data is the AVDownmixInfo struct defined in libavutil/downmix_info.h. + *) + AV_FRAME_DATA_DOWNMIX_INFO, + (* * + * ReplayGain information in the form of the AVReplayGain struct. + *) + AV_FRAME_DATA_REPLAYGAIN, + (* * + * This side data contains a 3x3 transformation matrix describing an affine + * transformation that needs to be applied to the frame for correct + * presentation. + * + * See libavutil/display.h for a detailed description of the data. + *) + AV_FRAME_DATA_DISPLAYMATRIX, + (* * + * Active Format Description data consisting of a single byte as specified + * in ETSI TS 101 154 using AVActiveFormatDescription enum. + *) + AV_FRAME_DATA_AFD, + (* * + * Motion vectors exported by some codecs (on demand through the export_mvs + * flag set in the libavcodec AVCodecContext flags2 option). + * The data is the AVMotionVector struct defined in + * libavutil/motion_vector.h. + *) + AV_FRAME_DATA_MOTION_VECTORS, + (* * + * Recommmends skipping the specified number of samples. This is exported + * only if the "skip_manual" AVOption is set in libavcodec. + * This has the same format as AV_PKT_DATA_SKIP_SAMPLES. + * @code + * u32le number of samples to skip from start of this packet + * u32le number of samples to skip from end of this packet + * u8 reason for start skip + * u8 reason for end skip (0=padding silence, 1=convergence) + * @endcode + *) + AV_FRAME_DATA_SKIP_SAMPLES, + (* * + * This side data must be associated with an audio frame and corresponds to + * enum AVAudioServiceType defined in avcodec.h. + *) + AV_FRAME_DATA_AUDIO_SERVICE_TYPE, + (* * + * Mastering display metadata associated with a video frame. The payload is + * an AVMasteringDisplayMetadata type and contains information about the + * mastering display color volume. + *) + AV_FRAME_DATA_MASTERING_DISPLAY_METADATA, + (* * + * The GOP timecode in 25 bit timecode format. Data format is 64-bit integer. + * This is set on the first frame of a GOP that has a temporal reference of 0. + *) + AV_FRAME_DATA_GOP_TIMECODE, + + (* * + * The data represents the AVSphericalMapping structure defined in + * libavutil/spherical.h. + *) + AV_FRAME_DATA_SPHERICAL, + + (* * + * Content light level (based on CTA-861.3). This payload contains data in + * the form of the AVContentLightMetadata struct. + *) + AV_FRAME_DATA_CONTENT_LIGHT_LEVEL, + + (* * + * The data contains an ICC profile as an opaque octet buffer following the + * format described by ISO 15076-1 with an optional name defined in the + * metadata key entry "name". + *) + AV_FRAME_DATA_ICC_PROFILE, + +{$IFDEF FF_API_FRAME_QP} + (* * + * Implementation-specific description of the format of AV_FRAME_QP_TABLE_DATA. + * The contents of this side data are undocumented and internal; use + * av_frame_set_qp_table() and av_frame_get_qp_table() to access this in a + * meaningful way instead. + *) + AV_FRAME_DATA_QP_TABLE_PROPERTIES, + + (* * + * Raw QP table data. Its format is described by + * AV_FRAME_DATA_QP_TABLE_PROPERTIES. Use av_frame_set_qp_table() and + * av_frame_get_qp_table() to access this instead. + *) + AV_FRAME_DATA_QP_TABLE_DATA, +{$ENDIF} + (* + * Timecode which conforms to SMPTE ST 12-1. 
The data is an array of 4 uint32_t + * where the first uint32_t describes how many (1-3) of the other timecodes are used. + * The timecode format is described in the av_timecode_get_smpte_from_framenum() + * function in libavutil/timecode.c. + *) + AV_FRAME_DATA_S12M_TIMECODE, // + (* + * HDR dynamic metadata associated with a video frame. The payload is + * an AVDynamicHDRPlus type and contains information for color + * volume transform - application 4 of SMPTE 2094-40:2016 standard. + *) + AV_FRAME_DATA_DYNAMIC_HDR_PLUS, + + (* + * Regions Of Interest, the data is an array of AVRegionOfInterest type, the number of + * array element is implied by AVFrameSideData.size / AVRegionOfInterest.self_size. + *) + AV_FRAME_DATA_REGIONS_OF_INTEREST // + ); + + AVActiveFormatDescription = ( // + AV_AFD_SAME = 8, AV_AFD_4_3 = 9, AV_AFD_16_9 = 10, AV_AFD_14_9 = 11, AV_AFD_4_3_SP_14_9 = 13, AV_AFD_16_9_SP_14_9 = 14, AV_AFD_SP_4_3 = 15); + + (* * + * Structure to hold side data for an AVFrame. + * + * sizeof(AVFrameSideData) is not a part of the public ABI, so new fields may be added + * to the end with a minor bump. + *) + AVFrameSideData = record + _type: AVFrameSideDataType; + data: puint8_t; + size: int; + metadata: pAVDictionary; + buf: pAVBufferRef; + end; + + pAVFrameSideData = ^AVFrameSideData; + ppAVFrameSideData = ^pAVFrameSideData; + + (* + * Structure describing a single Region Of Interest. + * + * When multiple regions are defined in a single side-data block, they + * should be ordered from most to least important - some encoders are only + * capable of supporting a limited number of distinct regions, so will have + * to truncate the list. + * + * When overlapping regions are defined, the first region containing a given + * area of the frame applies. + *) + AVRegionOfInterest = record + (* + * Must be set to the size of this data structure (that is, + * sizeof(AVRegionOfInterest)). + *) + self_size: uint32_t; + (* + * Distance in pixels from the top edge of the frame to the top and + * bottom edges and from the left edge of the frame to the left and + * right edges of the rectangle defining this region of interest. + * + * The constraints on a region are encoder dependent, so the region + * actually affected may be slightly larger for alignment or other + * reasons. + *) + top: int; + bottom: int; + left: int; + right: int; + (* + * Quantisation offset. + * + * Must be in the range -1 to +1. A value of zero indicates no quality + * change. A negative value asks for better quality (less quantisation), + * while a positive value asks for worse quality (greater quantisation). + * + * The range is calibrated so that the extreme values indicate the + * largest possible offset - if the rest of the frame is encoded with the + * worst possible quality, an offset of -1 indicates that this region + * should be encoded with the best possible quality anyway. Intermediate + * values are then interpolated in some codec-dependent way. + * + * For example, in 10-bit H.264 the quantisation parameter varies between + * -12 and 51. A typical qoffset value of -1/10 therefore indicates that + * this region should be encoded with a QP around one-tenth of the full + * range better than the rest of the frame. So, if most of the frame + * were to be encoded with a QP of around 30, this region would get a QP + * of around 24 (an offset of approximately -1/10 * (51 - -12) = -6.3). 
+ * An extreme value of -1 would indicate that this region should be + * encoded with the best possible quality regardless of the treatment of + * the rest of the frame - that is, should be encoded at a QP of -12. + *) + qoffset: AVRational; + end; + + pAVRegionOfInterest = ^AVRegionOfInterest; + + (* * + * This structure describes decoded (raw) audio or video data. + * + * AVFrame must be allocated using av_frame_alloc(). Note that this only + * allocates the AVFrame itself, the buffers for the data must be managed + * through other means (see below). + * AVFrame must be freed with av_frame_free(). + * + * AVFrame is typically allocated once and then reused multiple times to hold + * different data (e.g. a single AVFrame to hold frames received from a + * decoder). In such a case, av_frame_unref() will free any references held by + * the frame and reset it to its original clean state before it + * is reused again. + * + * The data described by an AVFrame is usually reference counted through the + * AVBuffer API. The underlying buffer references are stored in AVFrame.buf / + * AVFrame.extended_buf. An AVFrame is considered to be reference counted if at + * least one reference is set, i.e. if AVFrame.buf[0] != NULL. In such a case, + * every single data plane must be contained in one of the buffers in + * AVFrame.buf or AVFrame.extended_buf. + * There may be a single buffer for all the data, or one separate buffer for + * each plane, or anything in between. + * + * sizeof(AVFrame) is not a part of the public ABI, so new fields may be added + * to the end with a minor bump. + * + * Fields can be accessed through AVOptions, the name string used, matches the + * C structure field name for fields accessible through AVOptions. The AVClass + * for AVFrame can be obtained from avcodec_get_frame_class() + *) + (* * + * @defgroup lavu_frame_flags AV_FRAME_FLAGS + * @ingroup lavu_frame + * Flags describing additional frame properties. + *) +const + (* * + * The frame data may be corrupted, e.g. due to decoding errors. + *) + AV_FRAME_FLAG_CORRUPT = (1 shl 0); + (* * + * A flag to mark the frames which need to be decoded, but shouldn't be output. + *) + AV_FRAME_FLAG_DISCARD = (1 shl 2); + + // AVFrame -> decode_error_flags:int; + FF_DECODE_ERROR_INVALID_BITSTREAM = 1; + FF_DECODE_ERROR_MISSING_REFERENCE = 2; + FF_DECODE_ERROR_CONCEALMENT_ACTIVE = 4; + FF_DECODE_ERROR_DECODE_SLICES = 8; + +type + + pAVFrame = ^AVFrame; + + AVFrame = record + (* * + * pointer to the picture/channel planes. + * This might be different from the first allocated byte + * + * Some decoders access areas outside 0,0 - width,height, please + * see avcodec_align_dimensions2(). Some filters and swscale can read + * up to 16 bytes beyond the planes, if these filters are to be used, + * then 16 extra bytes must be allocated. + * + * NOTE: Except for hwaccel formats, pointers not needed by the format + * MUST be set to NULL. + *) + data: TAVNDPArray_puint8_t; + + (* * + * For video, size in bytes of each picture line. + * For audio, size in bytes of each plane. + * + * For audio, only linesize[0] may be set. For planar audio, each channel + * plane must be the same size. + * + * For video the linesizes should be multiples of the CPUs alignment + * preference, this is 16 or 32 for modern desktop CPUs. + * Some code requires such alignment other code can be slower without + * correct alignment, for yet other it makes no difference. 
+ * + * @note The linesize may be larger than the size of usable data -- there + * may be extra padding present for performance reasons. + *) + linesize: TAVNDPArray_int; + + (* * + * pointers to the data planes/channels. + * + * For video, this should simply point to data[]. + * + * For planar audio, each channel has a separate data pointer, and + * linesize[0] contains the size of each channel buffer. + * For packed audio, there is just one data pointer, and linesize[0] + * contains the total size of the buffer for all channels. + * + * Note: Both data and extended_data should always be set in a valid frame, + * but for planar audio with more channels that can fit in data, + * extended_data must be used in order to access all channels. + *) + extended_data: ppuint8_t; + + (* * + * @name Video dimensions + * Video frames only. The coded dimensions (in pixels) of the video frame, + * i.e. the size of the rectangle that contains some well-defined values. + * + * @note The part of the frame intended for display/presentation is further + * restricted by the @ref cropping "Cropping rectangle". + * @{ + *) + width, height: int; + (* * + * @} + *) + + (* * + * number of audio samples (per channel) described by this frame + *) + nb_samples: int; + + (* * + * format of the frame, -1 if unknown or unset + * Values correspond to enum AVPixelFormat for video frames, + * enum AVSampleFormat for audio) + *) + format: int; + + (* * + * 1 -> keyframe, 0-> not + *) + key_frame: int; + + (* * + * Picture type of the frame. + *) + pict_type: AVPictureType; + + (* * + * Sample aspect ratio for the video frame, 0/1 if unknown/unspecified. + *) + sample_aspect_ratio: AVRational; + + (* * + * Presentation timestamp in time_base units (time when frame should be shown to user). + *) + pts: int64_t; + +{$IFDEF FF_API_PKT_PTS} + (* * + * PTS copied from the AVPacket that was decoded to produce this frame. + * @deprecated use the pts field instead + *) + // attribute_deprecated + pkt_pts: int64_t; +{$ENDIF} + (* * + * DTS copied from the AVPacket that triggered returning this frame. (if frame threading isn't used) + * This is also the Presentation time of this AVFrame calculated from + * only AVPacket.dts values without pts values. + *) + pkt_dts: int64_t; + + (* * + * picture number in bitstream order + *) + coded_picture_number: int; + (* * + * picture number in display order + *) + display_picture_number: int; + + (* * + * quality (between 1 (good) and FF_LAMBDA_MAX (bad)) + *) + quality: int; + + (* * + * for some private data of the user + *) + opaque: Pointer; + +{$IFDEF FF_API_ERROR_FRAME} + (* * + * @deprecated unused + *) + // attribute_deprecated + error: TAVNDPArray_puint8_t; +{$ENDIF} + (* * + * When decoding, this signals how much the picture must be delayed. + * extra_delay = repeat_pict / (2*fps) + *) + repeat_pict: int; + + (* * + * The content of the picture is interlaced. + *) + interlaced_frame: int; + + (* * + * If the content is interlaced, is top field displayed first. + *) + top_field_first: int; + + (* * + * Tell user application that palette has changed from previous frame. + *) + palette_has_changed: int; + + (* * + * reordered opaque 64 bits (generally an integer or a double precision float + * PTS but can be anything). 
+ * The user sets AVCodecContext.reordered_opaque to represent the input at + * that time, + * the decoder reorders values as needed and sets AVFrame.reordered_opaque + * to exactly one of the values provided by the user through AVCodecContext.reordered_opaque + *) + reordered_opaque: int64_t; + + (* * + * Sample rate of the audio data. + *) + sample_rate: int; + + (* * + * Channel layout of the audio data. + *) + channel_layout: uint64_t; + + (* * + * AVBuffer references backing the data for this frame. If all elements of + * this array are NULL, then this frame is not reference counted. This array + * must be filled contiguously -- if buf[i] is non-NULL then buf[j] must + * also be non-NULL for all j < i. + * + * There may be at most one AVBuffer per data plane, so for video this array + * always contains all the references. For planar audio with more than + * AV_NUM_DATA_POINTERS channels, there may be more buffers than can fit in + * this array. Then the extra AVBufferRef pointers are stored in the + * extended_buf array. + *) + buf: TAVNDPArray_pAVBufferRef; + + (* * + * For planar audio which requires more than AV_NUM_DATA_POINTERS + * AVBufferRef pointers, this array will hold all the references which + * cannot fit into AVFrame.buf. + * + * Note that this is different from AVFrame.extended_data, which always + * contains all the pointers. This array only contains the extra pointers, + * which cannot fit into AVFrame.buf. + * + * This array is always allocated using av_malloc() by whoever constructs + * the frame. It is freed in av_frame_unref(). + *) + extended_buf: ppAVBufferRef; + (* * + * Number of elements in extended_buf. + *) + nb_extended_buf: int; + + side_data: ppAVFrameSideData; + nb_side_data: int; + + (* * + * Frame flags, a combination of @ref lavu_frame_flags + *) + flags: int; + + (* * + * MPEG vs JPEG YUV range. + * - encoding: Set by user + * - decoding: Set by libavcodec + *) + color_range: AVColorRange; + + color_primaries: AVColorPrimaries; + + color_trc: AVColorTransferCharacteristic; + + (* * + * YUV colorspace type. + * - encoding: Set by user + * - decoding: Set by libavcodec + *) + colorspace: AVColorSpace; + + chroma_location: AVChromaLocation; + + (* * + * frame timestamp estimated using various heuristics, in stream time base + * - encoding: unused + * - decoding: set by libavcodec, read by user. + *) + best_effort_timestamp: int64_t; + + (* * + * reordered pos from the last AVPacket that has been input into the decoder + * - encoding: unused + * - decoding: Read by user. + *) + pkt_pos: int64_t; + + (* * + * duration of the corresponding packet, expressed in + * AVStream->time_base units, 0 if unknown. + * - encoding: unused + * - decoding: Read by user. + *) + pkt_duration: int64_t; + + (* * + * metadata. + * - encoding: Set by user. + * - decoding: Set by libavcodec. + *) + metadata: pAVDictionary; + + (* * + * decode error flags of the frame, set to a combination of + * FF_DECODE_ERROR_xxx flags if the decoder produced a frame, but there + * were errors during the decoding. + * - encoding: unused + * - decoding: set by libavcodec, read by user. + *) + decode_error_flags: int; + + (* * + * number of audio channels, only used for audio. + * - encoding: unused + * - decoding: Read by user. + *) + channels: int; + + (* * + * size of the corresponding packet containing the compressed + * frame. + * It is set to a negative value if unknown. + * - encoding: unused + * - decoding: set by libavcodec, read by user. 
+ *) + pkt_size: int; + +{$IFDEF FF_API_FRAME_QP} + (* * + * QP table + *) + // attribute_deprecated + qscale_table: pint8_t; + (* * + * QP store stride + *) + // attribute_deprecated + qstride: int; + + // attribute_deprecated + qscale_type: int; + + // attribute_deprecated + qp_table_buf: pAVBufferRef; +{$ENDIF} + (* * + * For hwaccel-format frames, this should be a reference to the + * AVHWFramesContext describing the frame. + *) + hw_frames_ctx: pAVBufferRef; + + (* * + * AVBufferRef for free use by the API user. FFmpeg will never check the + * contents of the buffer ref. FFmpeg calls av_buffer_unref() on it when + * the frame is unreferenced. av_frame_copy_props() calls create a new + * reference with av_buffer_ref() for the target frame's opaque_ref field. + * + * This is unrelated to the opaque field, although it serves a similar + * purpose. + *) + opaque_ref: pAVBufferRef; + + (* * + * @anchor cropping + * @name Cropping + * Video frames only. The number of pixels to discard from the the + * top/bottom/left/right border of the frame to obtain the sub-rectangle of + * the frame intended for presentation. + * @{ + *) + crop_top: size_t; + crop_bottom: size_t; + crop_left: size_t; + crop_right: size_t; + (* * + * @} + *) + + (* * + * AVBufferRef for internal use by a single libav* library. + * Must not be used to transfer data between libraries. + * Has to be NULL when ownership of the frame leaves the respective library. + * + * Code outside the FFmpeg libs should never check or change the contents of the buffer ref. + * + * FFmpeg calls av_buffer_unref() on it when the frame is unreferenced. + * av_frame_copy_props() calls create a new reference with av_buffer_ref() + * for the target frame's private_ref field. + *) + private_ref: pAVBufferRef; + end; + +{$IFDEF FF_API_FRAME_GET_SET} + + (* * + * Accessors for some AVFrame fields. These used to be provided for ABI + * compatibility, and do not need to be used anymore. 
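+ *
+ * Translation note (not from the upstream FFmpeg header): with the AVFrame
+ * record declared above, these values can normally be read straight from the
+ * record instead of calling the deprecated wrappers below, e.g.
+ *   pts      := frame^.best_effort_timestamp;
+ *   rate     := frame^.sample_rate;
+ *   channels := frame^.channels;
+ * where frame is assumed to be a valid pAVFrame obtained from the decoder.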
+ *)
+ // attribute_deprecated
+ // int64_t av_frame_get_best_effort_timestamp(const AVFrame *frame);
+function av_frame_get_best_effort_timestamp(const frame: pAVFrame): int64_t; cdecl; external avutil_dll;
+// attribute_deprecated
+// void av_frame_set_best_effort_timestamp(AVFrame *frame, int64_t val);
+procedure av_frame_set_best_effort_timestamp(frame: pAVFrame; val: int64_t); cdecl; external avutil_dll;
+// attribute_deprecated
+// int64_t av_frame_get_pkt_duration (const AVFrame *frame);
+function av_frame_get_pkt_duration(const frame: pAVFrame): int64_t; cdecl; external avutil_dll;
+// attribute_deprecated
+// void av_frame_set_pkt_duration (AVFrame *frame, int64_t val);
+procedure av_frame_set_pkt_duration(frame: pAVFrame; val: int64_t); cdecl; external avutil_dll;
+// attribute_deprecated
+// int64_t av_frame_get_pkt_pos (const AVFrame *frame);
+function av_frame_get_pkt_pos(const frame: pAVFrame): int64_t; cdecl; external avutil_dll;
+// attribute_deprecated
+// void av_frame_set_pkt_pos (AVFrame *frame, int64_t val);
+procedure av_frame_set_pkt_pos(frame: pAVFrame; val: int64_t); cdecl; external avutil_dll;
+// attribute_deprecated
+// int64_t av_frame_get_channel_layout (const AVFrame *frame);
+function av_frame_get_channel_layout(const frame: pAVFrame): int64_t; cdecl; external avutil_dll;
+// attribute_deprecated
+// void av_frame_set_channel_layout (AVFrame *frame, int64_t val);
+procedure av_frame_set_channel_layout(frame: pAVFrame; val: int64_t); cdecl; external avutil_dll;
+// attribute_deprecated
+// int av_frame_get_channels (const AVFrame *frame);
+function av_frame_get_channels(const frame: pAVFrame): int; cdecl; external avutil_dll;
+// attribute_deprecated
+// void av_frame_set_channels (AVFrame *frame, int val);
+procedure av_frame_set_channels(frame: pAVFrame; val: int); cdecl; external avutil_dll;
+// attribute_deprecated
+// int av_frame_get_sample_rate (const AVFrame *frame);
+function av_frame_get_sample_rate(const frame: pAVFrame): int; cdecl; external avutil_dll;
+// attribute_deprecated
+// void av_frame_set_sample_rate (AVFrame *frame, int val);
+procedure av_frame_set_sample_rate(frame: pAVFrame; val: int); cdecl; external avutil_dll;
+// attribute_deprecated
+// AVDictionary *av_frame_get_metadata (const AVFrame *frame);
+function av_frame_get_metadata(const frame: pAVFrame): pAVDictionary; cdecl; external avutil_dll;
+// attribute_deprecated
+// void av_frame_set_metadata (AVFrame *frame, AVDictionary *val);
+procedure av_frame_set_metadata(frame: pAVFrame; val: pAVDictionary); cdecl; external avutil_dll;
+// attribute_deprecated
+// int av_frame_get_decode_error_flags (const AVFrame *frame);
+function av_frame_get_decode_error_flags(const frame: pAVFrame): int; cdecl; external avutil_dll;
+// attribute_deprecated
+// void av_frame_set_decode_error_flags (AVFrame *frame, int val);
+procedure av_frame_set_decode_error_flags(frame: pAVFrame; val: int); cdecl; external avutil_dll;
+// attribute_deprecated
+// int av_frame_get_pkt_size(const AVFrame *frame);
+function av_frame_get_pkt_size(const frame: pAVFrame): int; cdecl; external avutil_dll;
+// attribute_deprecated
+// void av_frame_set_pkt_size(AVFrame *frame, int val);
+procedure av_frame_set_pkt_size(frame: pAVFrame; val: int); cdecl; external avutil_dll;
+{$IFDEF FF_API_FRAME_QP}
+// attribute_deprecated
+// int8_t *av_frame_get_qp_table(AVFrame *f, int *stride, int *type);
+function av_frame_get_qp_table(f: pAVFrame; stride: pint; _type: pint): pint8_t; cdecl; external avutil_dll;
+// 
attribute_deprecated +// int av_frame_set_qp_table(AVFrame *f, AVBufferRef *buf, int stride, int type); +function av_frame_set_qp_table(f: pAVFrame; buf: pAVBufferRef; stride: int; _type: int): int; cdecl; external avutil_dll; +{$ENDIF} +// attribute_deprecated +// enum AVColorSpace av_frame_get_colorspace(const AVFrame *frame); +function av_frame_get_colorspace(const frame: pAVFrame): AVColorSpace; cdecl; external avutil_dll; +// attribute_deprecated +// void av_frame_set_colorspace(AVFrame *frame, enum AVColorSpace val); +procedure av_frame_set_colorspace(frame: pAVFrame; val: AVColorSpace); cdecl; external avutil_dll; +// attribute_deprecated +// enum AVColorRange av_frame_get_color_range(const AVFrame *frame); +function av_frame_get_color_range(const frame: pAVFrame): AVColorRange; cdecl; external avutil_dll; +// attribute_deprecated +// void av_frame_set_color_range(AVFrame *frame, enum AVColorRange val); +procedure av_frame_set_color_range(frame: pAVFrame; val: AVColorRange); cdecl; external avutil_dll; +{$ENDIF} +(* * + * Get the name of a colorspace. + * @return a static string identifying the colorspace; can be NULL. +*) +// const char *av_get_colorspace_name(enum AVColorSpace val); +function av_get_colorspace_name(val: AVColorSpace): PAnsiChar; cdecl; external avutil_dll; + +(* * + * Allocate an AVFrame and set its fields to default values. The resulting + * struct must be freed using av_frame_free(). + * + * @return An AVFrame filled with default values or NULL on failure. + * + * @note this only allocates the AVFrame itself, not the data buffers. Those + * must be allocated through other means, e.g. with av_frame_get_buffer() or + * manually. +*) +// AVFrame *av_frame_alloc(void); +function av_frame_alloc(): pAVFrame; cdecl; external avutil_dll; + +(* * + * Free the frame and any dynamically allocated objects in it, + * e.g. extended_data. If the frame is reference counted, it will be + * unreferenced first. + * + * @param frame frame to be freed. The pointer will be set to NULL. +*) +// void av_frame_free(AVFrame **frame); +procedure av_frame_free(Var frame: pAVFrame); cdecl; external avutil_dll; + +(* * + * Set up a new reference to the data described by the source frame. + * + * Copy frame properties from src to dst and create a new reference for each + * AVBufferRef from src. + * + * If src is not reference counted, new buffers are allocated and the data is + * copied. + * + * @warning: dst MUST have been either unreferenced with av_frame_unref(dst), + * or newly allocated with av_frame_alloc() before calling this + * function, or undefined behavior will occur. + * + * @return 0 on success, a negative AVERROR on error +*) +// int av_frame_ref(AVFrame *dst, const AVFrame *src); +function av_frame_ref(dst: pAVFrame; const src: pAVFrame): int; cdecl; external avutil_dll; + +(* * + * Create a new frame that references the same data as src. + * + * This is a shortcut for av_frame_alloc()+av_frame_ref(). + * + * @return newly created AVFrame on success, NULL on error. +*) +// AVFrame *av_frame_clone(const AVFrame *src); +function av_frame_clone(const src: pAVFrame): pAVFrame; cdecl; external avutil_dll; + +(* * + * Unreference all the buffers referenced by frame and reset the frame fields. +*) +// void av_frame_unref(AVFrame *frame); +procedure av_frame_unref(frame: pAVFrame); cdecl; external avutil_dll; + +(* * + * Move everything contained in src to dst and reset src. 
+ * + * @warning: dst is not unreferenced, but directly overwritten without reading + * or deallocating its contents. Call av_frame_unref(dst) manually + * before calling this function to ensure that no memory is leaked. +*) +// void av_frame_move_ref(AVFrame *dst, AVFrame *src); +procedure av_frame_move_ref(dst: pAVFrame; src: pAVFrame); cdecl; external avutil_dll; + +(* * + * Allocate new buffer(s) for audio or video data. + * + * The following fields must be set on frame before calling this function: + * - format (pixel format for video, sample format for audio) + * - width and height for video + * - nb_samples and channel_layout for audio + * + * This function will fill AVFrame.data and AVFrame.buf arrays and, if + * necessary, allocate and fill AVFrame.extended_data and AVFrame.extended_buf. + * For planar formats, one buffer will be allocated for each plane. + * + * @warning: if frame already has been allocated, calling this function will + * leak memory. In addition, undefined behavior can occur in certain + * cases. + * + * @param frame frame in which to store the new buffers. + * @param align Required buffer size alignment. If equal to 0, alignment will be + * chosen automatically for the current CPU. It is highly + * recommended to pass 0 here unless you know what you are doing. + * + * @return 0 on success, a negative AVERROR on error. +*) +// int av_frame_get_buffer(AVFrame *frame, int align); +function av_frame_get_buffer(frame: pAVFrame; align: int): int; cdecl; external avutil_dll; + +(* * + * Check if the frame data is writable. + * + * @return A positive value if the frame data is writable (which is true if and + * only if each of the underlying buffers has only one reference, namely the one + * stored in this frame). Return 0 otherwise. + * + * If 1 is returned the answer is valid until av_buffer_ref() is called on any + * of the underlying AVBufferRefs (e.g. through av_frame_ref() or directly). + * + * @see av_frame_make_writable(), av_buffer_is_writable() +*) +// int av_frame_is_writable(AVFrame *frame); +function av_frame_is_writable(frame: pAVFrame): int; cdecl; external avutil_dll; + +(* * + * Ensure that the frame data is writable, avoiding data copy if possible. + * + * Do nothing if the frame is writable, allocate new buffers and copy the data + * if it is not. + * + * @return 0 on success, a negative AVERROR on error. + * + * @see av_frame_is_writable(), av_buffer_is_writable(), + * av_buffer_make_writable() +*) +// int av_frame_make_writable(AVFrame *frame); +function av_frame_make_writable(frame: pAVFrame): int; cdecl; external avutil_dll; + +(* * + * Copy the frame data from src to dst. + * + * This function does not allocate anything, dst must be already initialized and + * allocated with the same parameters as src. + * + * This function only copies the frame data (i.e. the contents of the data / + * extended data arrays), not any other properties. + * + * @return >= 0 on success, a negative AVERROR on error. +*) +// int av_frame_copy(AVFrame *dst, const AVFrame *src); +function av_frame_copy(dst: pAVFrame; const src: pAVFrame): int; cdecl; external avutil_dll; + +(* * + * Copy only "metadata" fields from src to dst. + * + * Metadata for the purpose of this function are those fields that do not affect + * the data layout in the buffers. E.g. pts, sample rate (for audio) or sample + * aspect ratio (for video), but not width/height or channel layout. + * Side data is also copied. 
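+ *
+ * Translation note (not from the upstream header): a minimal sketch of a full
+ * frame copy with these bindings, assuming src is a valid decoded pAVFrame and
+ * dst was allocated with av_frame_alloc() and given buffers with the same
+ * parameters via av_frame_get_buffer():
+ *   if av_frame_copy(dst, src) >= 0 then
+ *     av_frame_copy_props(dst, src); // also bring over pts, metadata, etc.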
+*) +// int av_frame_copy_props(AVFrame *dst, const AVFrame *src); +function av_frame_copy_props(dst: pAVFrame; const src: pAVFrame): int; cdecl; external avutil_dll; + +(* * + * Get the buffer reference a given data plane is stored in. + * + * @param plane index of the data plane of interest in frame->extended_data. + * + * @return the buffer reference that contains the plane or NULL if the input + * frame is not valid. +*) +// AVBufferRef *av_frame_get_plane_buffer(AVFrame *frame, int plane); +function av_frame_get_plane_buffer(frame: pAVFrame; plane: int): pAVBufferRef; cdecl; external avutil_dll; + +(* * + * Add a new side data to a frame. + * + * @param frame a frame to which the side data should be added + * @param type type of the added side data + * @param size size of the side data + * + * @return newly added side data on success, NULL on error +*) +// AVFrameSideData *av_frame_new_side_data(AVFrame *frame, +// enum AVFrameSideDataType type, +// int size); +function av_frame_new_side_data(frame: AVFrame; _type: AVFrameSideDataType; size: int): pAVFrameSideData; cdecl; external avutil_dll; + +(* * + * Add a new side data to a frame from an existing AVBufferRef + * + * @param frame a frame to which the side data should be added + * @param type the type of the added side data + * @param buf an AVBufferRef to add as side data. The ownership of + * the reference is transferred to the frame. + * + * @return newly added side data on success, NULL on error. On failure + * the frame is unchanged and the AVBufferRef remains owned by + * the caller. +*) +// AVFrameSideData *av_frame_new_side_data_from_buf(AVFrame *frame, +// enum AVFrameSideDataType type, +// AVBufferRef *buf); +function av_frame_new_side_data_from_buf(frame: pAVFrame; _type: AVFrameSideDataType; buf: pAVBufferRef): pAVFrameSideData; cdecl; external avutil_dll; + +(* * + * @return a pointer to the side data of a given type on success, NULL if there + * is no side data with such type in this frame. +*) +// AVFrameSideData *av_frame_get_side_data(const AVFrame *frame, +// enum AVFrameSideDataType type); +function av_frame_get_side_data(const frame: pAVFrame; _type: AVFrameSideDataType): pAVFrameSideData; cdecl; external avutil_dll; + +(* * + * If side data of the supplied type exists in the frame, free it and remove it + * from the frame. +*) +// void av_frame_remove_side_data(AVFrame *frame, enum AVFrameSideDataType type); +procedure av_frame_remove_side_data(frame: pAVFrame; _type: AVFrameSideDataType); cdecl; external avutil_dll; + +const + (* * + * Flags for frame cropping. + *) + + (* * + * Apply the maximum possible cropping, even if it requires setting the + * AVFrame.data[] entries to unaligned pointers. Passing unaligned data + * to FFmpeg API is generally not allowed, and causes undefined behavior + * (such as crashes). You can pass unaligned data only to FFmpeg APIs that + * are explicitly documented to accept it. Use this flag only if you + * absolutely know what you are doing. + *) + AV_FRAME_CROP_UNALIGNED = 1 shl 0; + + (* * + * Crop the given video AVFrame according to its crop_left/crop_top/crop_right/ + * crop_bottom fields. If cropping is successful, the function will adjust the + * data pointers and the width/height fields, and set the crop fields to 0. + * + * In all cases, the cropping boundaries will be rounded to the inherent + * alignment of the pixel format. In some cases, such as for opaque hwaccel + * formats, the left/top cropping is ignored. 
The crop fields are set to 0 even + * if the cropping was rounded or ignored. + * + * @param frame the frame which should be cropped + * @param flags Some combination of AV_FRAME_CROP_* flags, or 0. + * + * @return >= 0 on success, a negative AVERROR on error. If the cropping fields + * were invalid, AVERROR(ERANGE) is returned, and nothing is changed. + *) + // int av_frame_apply_cropping(AVFrame *frame, int flags); +function av_frame_apply_cropping(frame: pAVFrame; flags: int): int; cdecl; external avutil_dll; + +(* * + * @return a string identifying the side data type +*) +// const char *av_frame_side_data_name(enum AVFrameSideDataType type); +function av_frame_side_data_name(_type: AVFrameSideDataType): PAnsiChar; cdecl; external avutil_dll; + +{$ENDREGION} +{$REGION 'framequeue.h'} + +type + pFFFrameBucket = ^FFFrameBucket; + + FFFrameBucket = record + frame: pAVFrame; + end; + + (* * + * Structure to hold global options and statistics for frame queues. + * + * This structure is intended to allow implementing global control of the + * frame queues, including memory consumption caps. + * + * It is currently empty. + *) + pFFFrameQueueGlobal = ^FFFrameQueueGlobal; + + FFFrameQueueGlobal = record + dummy: AnsiChar; (* C does not allow empty structs *) + end; + + (* * + * Queue of AVFrame pointers. + *) + pFFFrameQueue = ^FFFrameQueue; + + FFFrameQueue = record + + (* * + * Array of allocated buckets, used as a circular buffer. + *) + queue: pFFFrameBucket; + + (* * + * Size of the array of buckets. + *) + allocated: size_t; + + (* * + * Tail of the queue. + * It is the index in the array of the next frame to take. + *) + tail: size_t; + + (* * + * Number of currently queued frames. + *) + queued: size_t; + + (* * + * Pre-allocated bucket for queues of size 1. + *) + first_bucket: FFFrameBucket; + + (* * + * Total number of frames entered in the queue. + *) + total_frames_head: uint64_t; + + (* * + * Total number of frames dequeued from the queue. + * queued = total_frames_head - total_frames_tail + *) + total_frames_tail: uint64_t; + + (* * + * Total number of samples entered in the queue. + *) + total_samples_head: uint64_t; + + (* * + * Total number of samples dequeued from the queue. + * queued_samples = total_samples_head - total_samples_tail + *) + total_samples_tail: uint64_t; + + (* * + * Indicate that samples are skipped + *) + samples_skipped: int; + end; + +{$ENDREGION} +{$REGION 'opt.h'} + +Type + AVOptionType = ( // + AV_OPT_TYPE_FLAGS, AV_OPT_TYPE_INT, AV_OPT_TYPE_INT64, AV_OPT_TYPE_DOUBLE, AV_OPT_TYPE_FLOAT, AV_OPT_TYPE_STRING, AV_OPT_TYPE_RATIONAL, AV_OPT_TYPE_BINARY, + // < offset must point to a pointer immediately followed by an int for the length + AV_OPT_TYPE_DICT, AV_OPT_TYPE_UINT64, AV_OPT_TYPE_CONST, AV_OPT_TYPE_IMAGE_SIZE, + // < offset must point to two consecutive integers + AV_OPT_TYPE_PIXEL_FMT, AV_OPT_TYPE_SAMPLE_FMT, AV_OPT_TYPE_VIDEO_RATE, + // < offset must point to AVRational + AV_OPT_TYPE_DURATION, AV_OPT_TYPE_COLOR, AV_OPT_TYPE_CHANNEL_LAYOUT, AV_OPT_TYPE_BOOL); + + (* * + * AVOption + *) + + Tdefault_val = record + case int of + 0: + (i64: int64_t); + 1: + (dbl: double); + 2: + (str: PAnsiChar); + (* TODO those are unused now *) + 3: + (q: AVRational); + end; + + AVOption = record + // const char *name; + name: PAnsiChar; + + (* * + * short English help text + * @todo What about other languages? + *) + // const char *help; + help: PAnsiChar; + + (* * + * The offset relative to the context structure where the option + * value is stored. 
It should be 0 for named constants. + *) + // int offset; + offset: int; + // enum AVOptionType type; + _type: AVOptionType; + + (* * + * the default value for scalar options + *) + default_val: Tdefault_val; + + min: double; + // < minimum valid value for the option + max: double; + // < maximum valid value for the option + + flags: int; + + (* * + * The logical unit to which the option belongs. Non-constant + * options and corresponding named constants share the same + * unit. May be NULL. + *) + // const char *unit; + _unit: PAnsiChar; + end; + + pAVOption = ^AVOption; + + (* * + * A single allowed range of values, or a single allowed value. + *) + AVOptionRange = record + // const char *str; + str: PAnsiChar; + (* * + * Value range. + * For string ranges this represents the min/max length. + * For dimensions this represents the min/max pixel count or width/height in multi-component case. + *) + value_min, value_max: double; + (* * + * Value's component range. + * For string this represents the unicode range for chars, 0-127 limits to ASCII. + *) + component_min, component_max: double; + (* * + * Range flag. + * If set to 1 the struct encodes a range, if set to 0 a single value. + *) + is_range: int; + end; + + pAVOptionRange = ^AVOptionRange; + ppAVOptionRange = ^pAVOptionRange; + + (* * + * List of AVOptionRange structs. + *) + AVOptionRanges = record + (* * + * Array of option ranges. + * + * Most of option types use just one component. + * Following describes multi-component option types: + * + * AV_OPT_TYPE_IMAGE_SIZE: + * component index 0: range of pixel count (width * height). + * component index 1: range of width. + * component index 2: range of height. + * + * @note To obtain multi-component version of this structure, user must + * provide AV_OPT_MULTI_COMPONENT_RANGE to av_opt_query_ranges or + * av_opt_query_ranges_default function. + * + * Multi-component range can be read as in following example: + * + * @code + * int range_index, component_index; + * AVOptionRanges *ranges; + * AVOptionRange *range[3]; //may require more than 3 in the future. + * av_opt_query_ranges(&ranges, obj, key, AV_OPT_MULTI_COMPONENT_RANGE); + * for (range_index = 0; range_index < ranges->nb_ranges; range_index++) { + * for (component_index = 0; component_index < ranges->nb_components; component_index++) + * range[component_index] = ranges->range[ranges->nb_ranges * component_index + range_index]; + * //do something with range here. + * } + * av_opt_freep_ranges(&ranges); + * @endcode + *) + // AVOptionRange **range; + range: ppAVOptionRange; + (* * + * Number of ranges per component. + *) + nb_ranges: int; + (* * + * Number of componentes. + *) + nb_components: int; + end; + + pAVOptionRanges = ^AVOptionRanges; + +const + AV_OPT_FLAG_ENCODING_PARAM = 1; + // < a generic parameter which can be set by the user for muxing or encoding + AV_OPT_FLAG_DECODING_PARAM = 2; + // < a generic parameter which can be set by the user for demuxing or decoding + AV_OPT_FLAG_AUDIO_PARAM = 8; + AV_OPT_FLAG_VIDEO_PARAM = 16; + AV_OPT_FLAG_SUBTITLE_PARAM = 32; + (* * + * The option is intended for exporting values to the caller. + *) + AV_OPT_FLAG_EXPORT = 64; + (* * + * The option may not be set through the AVOptions API, only read. + * This flag only makes sense when AV_OPT_FLAG_EXPORT is also set. 
+ *) + AV_OPT_FLAG_READONLY = 128; + AV_OPT_FLAG_BSF_PARAM = (1 shl 8); + // < a generic parameter which can be set by the user for bit stream filtering + AV_OPT_FLAG_FILTERING_PARAM = (1 shl 16); + +{$ENDREGION} +{$REGION 'log.h'} + +type + AVClassCategory = ( // + AV_CLASS_CATEGORY_NA = 0, // + AV_CLASS_CATEGORY_INPUT, // + AV_CLASS_CATEGORY_OUTPUT, // + AV_CLASS_CATEGORY_MUXER, // + AV_CLASS_CATEGORY_DEMUXER, // + AV_CLASS_CATEGORY_ENCODER, // + AV_CLASS_CATEGORY_DECODER, // + AV_CLASS_CATEGORY_FILTER, // + AV_CLASS_CATEGORY_BITSTREAM_FILTER, // + AV_CLASS_CATEGORY_SWSCALER, // + AV_CLASS_CATEGORY_SWRESAMPLER, // + AV_CLASS_CATEGORY_DEVICE_VIDEO_OUTPUT = 40, // + AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT, // + AV_CLASS_CATEGORY_DEVICE_AUDIO_OUTPUT, // + AV_CLASS_CATEGORY_DEVICE_AUDIO_INPUT, // + AV_CLASS_CATEGORY_DEVICE_OUTPUT, // + AV_CLASS_CATEGORY_DEVICE_INPUT, // + AV_CLASS_CATEGORY_NB + // < not part of ABI/API + ); + pAVClassCategory = ^AVClassCategory; + + // #define AV_IS_INPUT_DEVICE(category) \ + // (((category) == AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT) || \ + // ((category) == AV_CLASS_CATEGORY_DEVICE_AUDIO_INPUT) || \ + // ((category) == AV_CLASS_CATEGORY_DEVICE_INPUT)) + // + // #define AV_IS_OUTPUT_DEVICE(category) \ + // (((category) == AV_CLASS_CATEGORY_DEVICE_VIDEO_OUTPUT) || \ + // ((category) == AV_CLASS_CATEGORY_DEVICE_AUDIO_OUTPUT) || \ + // ((category) == AV_CLASS_CATEGORY_DEVICE_OUTPUT)) + + // struct AVOptionRanges; + // AVOptionRanges = record + // end; + // pAVOptionRanges = ^AVOptionRanges; + + (* * + * Describe the class of an AVClass context structure. That is an + * arbitrary struct of which the first field is a pointer to an + * AVClass struct (e.g. AVCodecContext, AVFormatContext etc.). + *) + pAVClass = ^avclass; + + avclass = record + (* * + * The name of the class; usually it is the same name as the + * context structure type to which the AVClass is associated. + *) + // const char* class_name; + class_name: PAnsiChar; + + (* * + * A pointer to a function which returns the name of a context + * instance ctx associated with the class. + *) + // const char* (*item_name)(void* ctx); + item_name: function(ctx: Pointer): PAnsiChar; cdecl; + + (* * + * a pointer to the first option specified in the class if any or NULL + * + * @see av_set_default_options() + *) + // const struct AVOption *option; + option: pAVOption; + + (* * + * LIBAVUTIL_VERSION with which this structure was created. + * This is used to allow fields to be added without requiring major + * version bumps everywhere. + *) + + version: int; + + (* * + * Offset in the structure where log_level_offset is stored. + * 0 means there is no such variable + *) + log_level_offset_offset: int; + + (* * + * Offset in the structure where a pointer to the parent context for + * logging is stored. For example a decoder could pass its AVCodecContext + * to eval as such a parent context, which an av_log() implementation + * could then leverage to display the parent context. + * The offset can be NULL. + *) + parent_log_context_offset: int; + + (* * + * Return next AVOptions-enabled child or NULL + *) + // void * (* child_next)(void *obj, void *prev); + child_next: function(obj: Pointer; prev: Pointer): Pointer; cdecl; + + (* * + * Return an AVClass corresponding to the next potential + * AVOptions-enabled child. + * + * The difference between child_next and this is that + * child_next iterates over _already existing_ objects, while + * child_class_next iterates over _all possible_ children. 
+ *) + // const struct AVClass* (*child_class_next)(const struct AVClass *prev); + child_class_next: function(const prev: pAVClass): pAVClass; cdecl; + + (* * + * Category used for visualization (like color) + * This is only set if the category is equal for all objects using this class. + * available since version (51 shl 16 | 56 shl 8 | 100) + *) + category: AVClassCategory; + + (* * + * Callback to return the category. + * available since version (51 shl 16 | 59 shl 8 | 100) + *) + // AVClassCategory (*get_category)(void* ctx); + get_category: function(ctx: Pointer): pAVClassCategory; cdecl; + (* * + * Callback to return the supported/allowed ranges. + * available since version (52.12) + *) + // int (*query_ranges)(struct AVOptionRanges **, void *obj, const char *key, int flags); + query_ranges: function(var ranges: pAVOptionRanges; obj: Pointer; const key: PAnsiChar; flags: int): int; cdecl; + end; + + PVA_LIST = ^VA_LIST; + VA_LIST = array [0 .. 0] of Pointer; + +const + (* * + * Print no output. + *) + + AV_LOG_QUIET = -8; + + (* * + * Something went really wrong and we will crash now. + *) + AV_LOG_PANIC = 0; + + (* * + * Something went wrong and recovery is not possible. + * For example, no header was found for a format which depends + * on headers or an illegal combination of parameters is used. + *) + AV_LOG_FATAL = 8; + + (* * + * Something went wrong and cannot losslessly be recovered. + * However, not all future data is affected. + *) + AV_LOG_ERROR = 16; + + (* * + * Something somehow does not look correct. This may or may not + * lead to problems. An example would be the use of '-vstrict -2'. + *) + AV_LOG_WARNING = 24; + + (* * + * Standard information. + *) + AV_LOG_INFO = 32; + + (* * + * Detailed information. + *) + AV_LOG_VERBOSE = 40; + + (* * + * Stuff which is only useful for libav* developers. + *) + AV_LOG_DEBUG = 48; + + (* * + * Extremely verbose debugging, useful for libav* development. + *) + AV_LOG_TRACE = 56; + + AV_LOG_MAX_OFFSET = (AV_LOG_TRACE - AV_LOG_QUIET); +{$ENDREGION} +{$REGION 'samplefmt.h'} + +type + pAVSampleFormat = ^AVSampleFormat; + AVSampleFormat = ( // + AV_SAMPLE_FMT_NONE = -1, // + AV_SAMPLE_FMT_U8, + // < unsigned 8 bits + AV_SAMPLE_FMT_S16, + // < signed 16 bits + AV_SAMPLE_FMT_S32, + // < signed 32 bits + AV_SAMPLE_FMT_FLT, + // < float + AV_SAMPLE_FMT_DBL, + // < double + + AV_SAMPLE_FMT_U8P, + // < unsigned 8 bits, planar + AV_SAMPLE_FMT_S16P, + // < signed 16 bits, planar + AV_SAMPLE_FMT_S32P, + // < signed 32 bits, planar + AV_SAMPLE_FMT_FLTP, + // < float, planar + AV_SAMPLE_FMT_DBLP, + // < double, planar + AV_SAMPLE_FMT_S64, + // < signed 64 bits + AV_SAMPLE_FMT_S64P, + // < signed 64 bits, planar + + AV_SAMPLE_FMT_NB + // < Number of sample formats. DO NOT USE if linking dynamically + ); + + (* * + * Return the name of sample_fmt, or NULL if sample_fmt is not + * recognized. + *) + // const char *av_get_sample_fmt_name(enum AVSampleFormat sample_fmt); +function av_get_sample_fmt_name(sample_fmt: AVSampleFormat): PAnsiChar; cdecl; external avutil_dll; + +(* * + * Return a sample format corresponding to name, or AV_SAMPLE_FMT_NONE + * on error. +*) +// enum AVSampleFormat av_get_sample_fmt(const char *name); +function av_get_sample_fmt(const name: PAnsiChar): AVSampleFormat; cdecl; external avutil_dll; + +(* * + * Return the planar<->packed alternative form of the given sample format, or + * AV_SAMPLE_FMT_NONE on error. 
If the passed sample_fmt is already in the + * requested planar/packed format, the format returned is the same as the + * input. +*) +// enum AVSampleFormat av_get_alt_sample_fmt(enum AVSampleFormat sample_fmt, int planar); +function av_get_alt_sample_fmt(sample_fmt: AVSampleFormat; planar: int): AVSampleFormat; cdecl; external avutil_dll; + +(* * + * Get the packed alternative form of the given sample format. + * + * If the passed sample_fmt is already in packed format, the format returned is + * the same as the input. + * + * @return the packed alternative form of the given sample format or + AV_SAMPLE_FMT_NONE on error. +*) +// enum AVSampleFormat av_get_packed_sample_fmt(enum AVSampleFormat sample_fmt); +function av_get_packed_sample_fmt(sample_fmt: AVSampleFormat): AVSampleFormat; cdecl; external avutil_dll; + +(* * + * Get the planar alternative form of the given sample format. + * + * If the passed sample_fmt is already in planar format, the format returned is + * the same as the input. + * + * @return the planar alternative form of the given sample format or + AV_SAMPLE_FMT_NONE on error. +*) +// enum AVSampleFormat av_get_planar_sample_fmt(enum AVSampleFormat sample_fmt); +function av_get_planar_sample_fmt(sample_fmt: AVSampleFormat): AVSampleFormat; cdecl; external avutil_dll; + +(* * + * Generate a string corresponding to the sample format with + * sample_fmt, or a header if sample_fmt is negative. + * + * @param buf the buffer where to write the string + * @param buf_size the size of buf + * @param sample_fmt the number of the sample format to print the + * corresponding info string, or a negative value to print the + * corresponding header. + * @return the pointer to the filled buffer or NULL if sample_fmt is + * unknown or in case of other errors +*) +// char *av_get_sample_fmt_string(char *buf, int buf_size, enum AVSampleFormat sample_fmt); +function av_get_sample_fmt_string(buf: PAnsiChar; buf_size: int; sample_fmt: AVSampleFormat): PAnsiChar; cdecl; external avutil_dll; + +(* * + * Return number of bytes per sample. + * + * @param sample_fmt the sample format + * @return number of bytes per sample or zero if unknown for the given + * sample format +*) +// int av_get_bytes_per_sample(enum AVSampleFormat sample_fmt); +function av_get_bytes_per_sample(sample_fmt: AVSampleFormat): int; cdecl; external avutil_dll; + +(* * + * Check if the sample format is planar. + * + * @param sample_fmt the sample format to inspect + * @return 1 if the sample format is planar, 0 if it is interleaved +*) +// int av_sample_fmt_is_planar(enum AVSampleFormat sample_fmt); +function av_sample_fmt_is_planar(sample_fmt: AVSampleFormat): int; cdecl; external avutil_dll; + +(* * + * Get the required buffer size for the given audio parameters. 
+ * + * @param[out] linesize calculated linesize, may be NULL + * @param nb_channels the number of channels + * @param nb_samples the number of samples in a single channel + * @param sample_fmt the sample format + * @param align buffer size alignment (0 = default, 1 = no alignment) + * @return required buffer size, or negative error code on failure +*) +// int av_samples_get_buffer_size(int *linesize, int nb_channels, int nb_samples, +// enum AVSampleFormat sample_fmt, int align); +function av_samples_get_buffer_size(var linesize: int; nb_channels: int; nb_samples: int; sample_fmt: AVSampleFormat; align: int): int; cdecl; overload; + external avutil_dll; +function av_samples_get_buffer_size(linesize: pint; nb_channels: int; nb_samples: int; sample_fmt: AVSampleFormat; align: int): int; cdecl; overload; + external avutil_dll; +(* * + * @} + * + * @defgroup lavu_sampmanip Samples manipulation + * + * Functions that manipulate audio samples + * @{ +*) + +(* * + * Fill plane data pointers and linesize for samples with sample + * format sample_fmt. + * + * The audio_data array is filled with the pointers to the samples data planes: + * for planar, set the start point of each channel's data within the buffer, + * for packed, set the start point of the entire buffer only. + * + * The value pointed to by linesize is set to the aligned size of each + * channel's data buffer for planar layout, or to the aligned size of the + * buffer for all channels for packed layout. + * + * The buffer in buf must be big enough to contain all the samples + * (use av_samples_get_buffer_size() to compute its minimum size), + * otherwise the audio_data pointers will point to invalid data. + * + * @see enum AVSampleFormat + * The documentation for AVSampleFormat describes the data layout. + * + * @param[out] audio_data array to be filled with the pointer for each channel + * @param[out] linesize calculated linesize, may be NULL + * @param buf the pointer to a buffer containing the samples + * @param nb_channels the number of channels + * @param nb_samples the number of samples in a single channel + * @param sample_fmt the sample format + * @param align buffer size alignment (0 = default, 1 = no alignment) + * @return >=0 on success or a negative error code on failure + * @todo return minimum size in bytes required for the buffer in case + * of success at the next bump +*) +// int av_samples_fill_arrays(uint8_t **audio_data, int *linesize, +// const uint8_t *buf, +// int nb_channels, int nb_samples, +// enum AVSampleFormat sample_fmt, int align); +function av_samples_fill_arrays(var audio_data: puint8_t; var linesize: int; const buf: puint8_t; nb_channels: int; nb_samples: int; sample_fmt: AVSampleFormat; + align: int): int; cdecl; external avutil_dll; + +(* * + * Allocate a samples buffer for nb_samples samples, and fill data pointers and + * linesize accordingly. + * The allocated samples buffer can be freed by using av_freep(&audio_data[0]) + * Allocated data will be initialized to silence. + * + * @see enum AVSampleFormat + * The documentation for AVSampleFormat describes the data layout. 
+ * + * @param[out] audio_data array to be filled with the pointer for each channel + * @param[out] linesize aligned size for audio buffer(s), may be NULL + * @param nb_channels number of audio channels + * @param nb_samples number of samples per channel + * @param align buffer size alignment (0 = default, 1 = no alignment) + * @return >=0 on success or a negative error code on failure + * @todo return the size of the allocated buffer in case of success at the next bump + * @see av_samples_fill_arrays() + * @see av_samples_alloc_array_and_samples() +*) +// int av_samples_alloc(uint8_t **audio_data, int *linesize, int nb_channels, +// int nb_samples, enum AVSampleFormat sample_fmt, int align); +function av_samples_alloc(var audio_data: puint8_t; linesize: pint; nb_channels: int; nb_samples: int; sample_fmt: AVSampleFormat; align: int): int; cdecl; + external avutil_dll; + +(* * + * Allocate a data pointers array, samples buffer for nb_samples + * samples, and fill data pointers and linesize accordingly. + * + * This is the same as av_samples_alloc(), but also allocates the data + * pointers array. + * + * @see av_samples_alloc() +*) +// int av_samples_alloc_array_and_samples(uint8_t ***audio_data, int *linesize, int nb_channels, +// int nb_samples, enum AVSampleFormat sample_fmt, int align); +function av_samples_alloc_array_and_samples(Var audio_data: ppuint8_t; var linesize: int; nb_channels: int; nb_samples: int; sample_fmt: AVSampleFormat; + align: int): int; cdecl; external avutil_dll; + +(* * + * Copy samples from src to dst. + * + * @param dst destination array of pointers to data planes + * @param src source array of pointers to data planes + * @param dst_offset offset in samples at which the data will be written to dst + * @param src_offset offset in samples at which the data will be read from src + * @param nb_samples number of samples to be copied + * @param nb_channels number of audio channels + * @param sample_fmt audio sample format +*) +// int av_samples_copy(uint8_t **dst, uint8_t * const *src, int dst_offset, +// int src_offset, int nb_samples, int nb_channels, +// enum AVSampleFormat sample_fmt); +function av_samples_copy(var dst: puint8_t; const src: ppuint8_t; dst_offset: int; src_offset: int; nb_samples: int; nb_channels: int; + sample_fmt: AVSampleFormat): int; cdecl; external avutil_dll; + +(* * + * Fill an audio buffer with silence. + * + * @param audio_data array of pointers to data planes + * @param offset offset in samples at which to start filling + * @param nb_samples number of samples to fill + * @param nb_channels number of audio channels + * @param sample_fmt audio sample format +*) +// int av_samples_set_silence(uint8_t **audio_data, int offset, int nb_samples, +// int nb_channels, enum AVSampleFormat sample_fmt); +function av_samples_set_silence(var audio_data: puint8_t; offset: int; nb_samples: int; nb_channels: int; sample_fmt: AVSampleFormat): int; cdecl; + external avutil_dll; + +{$ENDREGION} +{$REGION 'opt.h'} +(* * + * Show the obj options. + * + * @param req_flags requested flags for the options to show. Show only the + * options for which it is opt->flags & req_flags. + * @param rej_flags rejected flags for the options to show. Show only the + * options for which it is !(opt->flags & req_flags). 
+ * @param av_log_obj log context to use for showing the options +*) +// int av_opt_show2(void *obj, void *av_log_obj, int req_flags, int rej_flags); +function av_opt_show2(obj, av_log_obj: Pointer; req_flags, rej_flags: int): int; cdecl; external avutil_dll; + +(* * + * Set the values of all AVOption fields to their default values. + * + * @param s an AVOption-enabled struct (its first member must be a pointer to AVClass) +*) +// void av_opt_set_defaults(void *s); +procedure av_opt_set_defaults(s: Pointer); cdecl; external avutil_dll; + +(* * + * Set the values of all AVOption fields to their default values. Only these + * AVOption fields for which (opt->flags & mask) == flags will have their + * default applied to s. + * + * @param s an AVOption-enabled struct (its first member must be a pointer to AVClass) + * @param mask combination of AV_OPT_FLAG_* + * @param flags combination of AV_OPT_FLAG_* +*) +// void av_opt_set_defaults2(void *s, int mask, int flags); +procedure av_opt_set_defaults2(s: Pointer; mask, flags: int); cdecl; external avutil_dll; + +(* * + * Parse the key/value pairs list in opts. For each key/value pair + * found, stores the value in the field in ctx that is named like the + * key. ctx must be an AVClass context, storing is done using + * AVOptions. + * + * @param opts options string to parse, may be NULL + * @param key_val_sep a 0-terminated list of characters used to + * separate key from value + * @param pairs_sep a 0-terminated list of characters used to separate + * two pairs from each other + * @return the number of successfully set key/value pairs, or a negative + * value corresponding to an AVERROR code in case of error: + * AVERROR(EINVAL) if opts cannot be parsed, + * the error code issued by av_opt_set() if a key/value pair + * cannot be set +*) +// int av_set_options_string(void *ctx, const char *opts, +// const char *key_val_sep, const char *pairs_sep); +function av_set_options_string(ctx: Pointer; const opts: PAnsiChar; const key_val_sep: PAnsiChar; const pairs_sep: PAnsiChar): int; cdecl; external avutil_dll; + +(* * + * Parse the key-value pairs list in opts. For each key=value pair found, + * set the value of the corresponding option in ctx. + * + * @param ctx the AVClass object to set options on + * @param opts the options string, key-value pairs separated by a + * delimiter + * @param shorthand a NULL-terminated array of options names for shorthand + * notation: if the first field in opts has no key part, + * the key is taken from the first element of shorthand; + * then again for the second, etc., until either opts is + * finished, shorthand is finished or a named option is + * found; after that, all options must be named + * @param key_val_sep a 0-terminated list of characters used to separate + * key from value, for example '=' + * @param pairs_sep a 0-terminated list of characters used to separate + * two pairs from each other, for example ':' or ',' + * @return the number of successfully set key=value pairs, or a negative + * value corresponding to an AVERROR code in case of error: + * AVERROR(EINVAL) if opts cannot be parsed, + * the error code issued by av_set_string3() if a key/value pair + * cannot be set + * + * Options names must use only the following characters: a-z A-Z 0-9 - . / _ + * Separators must use characters distinct from option names and from each + * other. 
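+ *
+ * Translation note (not from the upstream header): a minimal Pascal sketch,
+ * using hypothetical option names and passing nil for the shorthand list;
+ * ctx is assumed to be an AVOptions-enabled struct such as a codec context:
+ *   if av_opt_set_from_string(ctx, 'threads=4:refs=3', nil, '=', ':') < 0 then
+ *     Writeln('could not parse the option string');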
+*) +// int av_opt_set_from_string(void *ctx, const char *opts, +// const char *const *shorthand, +// const char *key_val_sep, const char *pairs_sep); +function av_opt_set_from_string(ctx: Pointer; const opts: PAnsiChar; const shorthand: ppAnsiChar; const key_val_sep: PAnsiChar; const pairs_sep: PAnsiChar) + : int; cdecl; external avutil_dll; +(* * + * Free all allocated objects in obj. +*) +// void av_opt_free(void *obj); +procedure av_opt_free(obj: Pointer); cdecl; external avutil_dll; + +(* * + * Check whether a particular flag is set in a flags field. + * + * @param field_name the name of the flag field option + * @param flag_name the name of the flag to check + * @return non-zero if the flag is set, zero if the flag isn't set, + * isn't of the right type, or the flags field doesn't exist. +*) +// int av_opt_flag_is_set(void *obj, const char *field_name, const char *flag_name); +function av_opt_flag_is_set(obj: Pointer; const field_name: PAnsiChar; const flag_name: PAnsiChar): int; cdecl; external avutil_dll; + +(* * + * Set all the options from a given dictionary on an object. + * + * @param obj a struct whose first element is a pointer to AVClass + * @param options options to process. This dictionary will be freed and replaced + * by a new one containing all options not found in obj. + * Of course this new dictionary needs to be freed by caller + * with av_dict_free(). + * + * @return 0 on success, a negative AVERROR if some option was found in obj, + * but could not be set. + * + * @see av_dict_copy() +*) +// int av_opt_set_dict(void *obj, struct AVDictionary **options); +function av_opt_set_dict(obj: Pointer; var options: pAVDictionary): int; cdecl; external avutil_dll; + +(* * + * Set all the options from a given dictionary on an object. + * + * @param obj a struct whose first element is a pointer to AVClass + * @param options options to process. This dictionary will be freed and replaced + * by a new one containing all options not found in obj. + * Of course this new dictionary needs to be freed by caller + * with av_dict_free(). + * @param search_flags A combination of AV_OPT_SEARCH_*. + * + * @return 0 on success, a negative AVERROR if some option was found in obj, + * but could not be set. + * + * @see av_dict_copy() +*) +// int av_opt_set_dict2(void *obj, struct AVDictionary **options, int search_flags); +function av_opt_set_dict2(obj: Pointer; Var options: pAVDictionary; search_flags: int): int; cdecl; external avutil_dll; + +(* * + * Extract a key-value pair from the beginning of a string. 
+ * + * @param ropts pointer to the options string, will be updated to + * point to the rest of the string (one of the pairs_sep + * or the final NUL) + * @param key_val_sep a 0-terminated list of characters used to separate + * key from value, for example '=' + * @param pairs_sep a 0-terminated list of characters used to separate + * two pairs from each other, for example ':' or ',' + * @param flags flags; see the AV_OPT_FLAG_* values below + * @param rkey parsed key; must be freed using av_free() + * @param rval parsed value; must be freed using av_free() + * + * @return >=0 for success, or a negative value corresponding to an + * AVERROR code in case of error; in particular: + * AVERROR(EINVAL) if no key is present + * +*) +// int av_opt_get_key_value(const char **ropts, +// const char *key_val_sep, const char *pairs_sep, +// unsigned flags, +// char **rkey, char **rval); +function av_opt_get_key_value(const ropts: ppAnsiChar; const key_val_sep: PAnsiChar; const pairs_sep: PAnsiChar; flags: unsigned; rkey: ppAnsiChar; + rval: ppAnsiChar): int; cdecl; external avutil_dll; + +(* * + * Accept to parse a value without a key; the key will then be returned + * as NULL. +*) +const + AV_OPT_FLAG_IMPLICIT_KEY = 1; + + (* * + * @defgroup opt_eval_funcs Evaluating option strings + * @{ + * This group of functions can be used to evaluate option strings + * and get numbers out of them. They do the same thing as av_opt_set(), + * except the result is written into the caller-supplied pointer. + * + * @param obj a struct whose first element is a pointer to AVClass. + * @param o an option for which the string is to be evaluated. + * @param val string to be evaluated. + * @param *_out value of the string will be written here. + * + * @return 0 on success, a negative number on failure. + *) + // int av_opt_eval_flags (void *obj, const AVOption *o, const char *val, int *flags_out); +function av_opt_eval_flags(obj: Pointer; const o: pAVOption; const val: PAnsiChar; var flags_out: int): int; cdecl; external avutil_dll; + +// int av_opt_eval_int (void *obj, const AVOption *o, const char *val, int *int_out); +// int av_opt_eval_int64 (void *obj, const AVOption *o, const char *val, int64_t *int64_out); +// int av_opt_eval_float (void *obj, const AVOption *o, const char *val, float *float_out); +// int av_opt_eval_double(void *obj, const AVOption *o, const char *val, double *double_out); +// int av_opt_eval_q (void *obj, const AVOption *o, const char *val, AVRational *q_out); +(* * + * @} +*) +const + AV_OPT_SEARCH_CHILDREN = (1 shl 0); (* *< Search in possible children of the + given object first. *) + (* * + * The obj passed to av_opt_find() is fake -- only a double pointer to AVClass + * instead of a required pointer to a struct containing AVClass. This is + * useful for searching for options without needing to allocate the corresponding + * object. + *) + AV_OPT_SEARCH_FAKE_OBJ = (1 shl 1); + + (* * + * In av_opt_get, return NULL if the option has a pointer type and is set to NULL, + * rather than returning an empty string. + *) + AV_OPT_ALLOW_NULL = (1 shl 2); + + (* * + * Allows av_opt_query_ranges and av_opt_query_ranges_default to return more than + * one component for certain option types. + * @see AVOptionRanges for details. + *) + AV_OPT_MULTI_COMPONENT_RANGE = (1 shl 12); + + (* * + * Look for an option in an object. Consider only options which + * have all the specified flags set. + * + * @param[in] obj A pointer to a struct whose first element is a + * pointer to an AVClass. 
+ * Alternatively a double pointer to an AVClass, if + * AV_OPT_SEARCH_FAKE_OBJ search flag is set. + * @param[in] name The name of the option to look for. + * @param[in] unit When searching for named constants, name of the unit + * it belongs to. + * @param opt_flags Find only options with all the specified flags set (AV_OPT_FLAG). + * @param search_flags A combination of AV_OPT_SEARCH_*. + * + * @return A pointer to the option found, or NULL if no option + * was found. + * + * @note Options found with AV_OPT_SEARCH_CHILDREN flag may not be settable + * directly with av_opt_set(). Use special calls which take an options + * AVDictionary (e.g. avformat_open_input()) to set options found with this + * flag. + *) + // const AVOption *av_opt_find(void *obj, const char *name, const char *unit, + // int opt_flags, int search_flags); +function av_opt_find(obj: Pointer; const name: PAnsiChar; const _unit: PAnsiChar; opt_flags: int; search_flags: int): pAVOption; cdecl; external avutil_dll; + +(* * + * Look for an option in an object. Consider only options which + * have all the specified flags set. + * + * @param[in] obj A pointer to a struct whose first element is a + * pointer to an AVClass. + * Alternatively a double pointer to an AVClass, if + * AV_OPT_SEARCH_FAKE_OBJ search flag is set. + * @param[in] name The name of the option to look for. + * @param[in] unit When searching for named constants, name of the unit + * it belongs to. + * @param opt_flags Find only options with all the specified flags set (AV_OPT_FLAG). + * @param search_flags A combination of AV_OPT_SEARCH_*. + * @param[out] target_obj if non-NULL, an object to which the option belongs will be + * written here. It may be different from obj if AV_OPT_SEARCH_CHILDREN is present + * in search_flags. This parameter is ignored if search_flags contain + * AV_OPT_SEARCH_FAKE_OBJ. + * + * @return A pointer to the option found, or NULL if no option + * was found. +*) +// const AVOption *av_opt_find2(void *obj, const char *name, const char *unit, +// int opt_flags, int search_flags, void **target_obj); +function av_opt_find2(obj: Pointer; const name: PAnsiChar; const _unit: PAnsiChar; opt_flags: int; search_flags: int; Var target_obj: Pointer): pAVOption; + cdecl; external avutil_dll; + +(* * + * Iterate over all AVOptions belonging to obj. + * + * @param obj an AVOptions-enabled struct or a double pointer to an + * AVClass describing it. + * @param prev result of the previous call to av_opt_next() on this object + * or NULL + * @return next AVOption or NULL +*) +// const AVOption *av_opt_next(const void *obj, const AVOption *prev); +function av_opt_next(const obj: Pointer; const prev: pAVOption): pAVOption; cdecl; external avutil_dll; + +(* * + * Iterate over AVOptions-enabled children of obj. + * + * @param prev result of a previous call to this function or NULL + * @return next AVOptions-enabled child or NULL +*) +// void *av_opt_child_next(void *obj, void *prev); +function av_opt_child_next(obj: Pointer; prev: Pointer): Pointer; cdecl; external avutil_dll; + +(* * + * Iterate over potential AVOptions-enabled children of parent. 
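+ *
+ * A minimal iteration sketch; parent is assumed to be some AVClass pointer
+ * obtained elsewhere, e.g. from one of the *_get_class() imports if present
+ * in this binding:
+ * @code
+ * var
+ *   cls: pAVClass;
+ * begin
+ *   cls := av_opt_child_class_next(parent, nil);
+ *   while cls <> nil do
+ *   begin
+ *     // inspect cls^ here
+ *     cls := av_opt_child_class_next(parent, cls);
+ *   end;
+ * end;
+ * @endcode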
+ * + * @param prev result of a previous call to this function or NULL + * @return AVClass corresponding to next potential child or NULL +*) +// const AVClass *av_opt_child_class_next(const AVClass *parent, const AVClass *prev); +function av_opt_child_class_next(const parent: pAVClass; const prev: pAVClass): pAVClass; cdecl; external avutil_dll; + +(* * + * @defgroup opt_set_funcs Option setting functions + * @{ + * Those functions set the field of obj with the given name to value. + * + * @param[in] obj A struct whose first element is a pointer to an AVClass. + * @param[in] name the name of the field to set + * @param[in] val The value to set. In case of av_opt_set() if the field is not + * of a string type, then the given string is parsed. + * SI postfixes and some named scalars are supported. + * If the field is of a numeric type, it has to be a numeric or named + * scalar. Behavior with more than one scalar and +- infix operators + * is undefined. + * If the field is of a flags type, it has to be a sequence of numeric + * scalars or named flags separated by '+' or '-'. Prefixing a flag + * with '+' causes it to be set without affecting the other flags; + * similarly, '-' unsets a flag. + * @param search_flags flags passed to av_opt_find2. I.e. if AV_OPT_SEARCH_CHILDREN + * is passed here, then the option may be set on a child of obj. + * + * @return 0 if the value has been set, or an AVERROR code in case of + * error: + * AVERROR_OPTION_NOT_FOUND if no matching option exists + * AVERROR(ERANGE) if the value is out of range + * AVERROR(EINVAL) if the value is not valid +*) + +// int av_opt_set (void *obj, const char *name, const char *val, int search_flags); +function av_opt_set(obj: Pointer; const name: PAnsiChar; const val: PAnsiChar; search_flags: int): int; cdecl; external avutil_dll; +// int av_opt_set_int (void *obj, const char *name, int64_t val, int search_flags); +function av_opt_set_int(obj: Pointer; const name: PAnsiChar; val: int64_t; search_flags: int): int; cdecl; external avutil_dll; +// int av_opt_set_double (void *obj, const char *name, double val, int search_flags); +function av_opt_set_double(obj: Pointer; const name: PAnsiChar; val: double; search_flags: int): int; cdecl; external avutil_dll; +// int av_opt_set_q (void *obj, const char *name, AVRational val, int search_flags); +function av_opt_set_q(obj: Pointer; const name: PAnsiChar; val: AVRational; search_flags: int): int; cdecl; external avutil_dll; +// int av_opt_set_bin (void *obj, const char *name, const uint8_t *val, int size, int search_flags); +function av_opt_set_bin(obj: Pointer; const name: PAnsiChar; const val: puint8_t; size: int; search_flags: int): int; cdecl; external avutil_dll; +// int av_opt_set_image_size(void *obj, const char *name, int w, int h, int search_flags); +function av_opt_set_image_size(obj: Pointer; const name: PAnsiChar; w, h, search_flags: int): int; cdecl; external avutil_dll; +// int av_opt_set_pixel_fmt (void *obj, const char *name, enum AVPixelFormat fmt, int search_flags); +function av_opt_set_pixel_fmt(obj: Pointer; const name: PAnsiChar; fmt: AVPixelFormat; search_flags: int): int; cdecl; external avutil_dll; +// int av_opt_set_sample_fmt(void *obj, const char *name, enum AVSampleFormat fmt, int search_flags); +function av_opt_set_sample_fmt(obj: Pointer; const name: PAnsiChar; fmt: AVSampleFormat; search_flags: int): int; cdecl; external avutil_dll; +// int av_opt_set_video_rate(void *obj, const char *name, AVRational val, int search_flags); +function 
av_opt_set_video_rate(obj: Pointer; const name: PAnsiChar; val: AVRational; search_flags: int): int; cdecl; external avutil_dll;
+// int av_opt_set_channel_layout(void *obj, const char *name, int64_t ch_layout, int search_flags);
+function av_opt_set_channel_layout(obj: Pointer; const name: PAnsiChar; ch_layout: int64_t; search_flags: int): int; cdecl; external avutil_dll;
+(* *
+ * @note Any old dictionary present is discarded and replaced with a copy of the new one. The
+ * caller still owns val and is responsible for freeing it.
+*)
+// int av_opt_set_dict_val(void *obj, const char *name, const AVDictionary *val, int search_flags);
+function av_opt_set_dict_val(obj: Pointer; const name: PAnsiChar; const val: pAVDictionary; search_flags: int): int; cdecl; external avutil_dll;
+
+(* *
+ * Set a binary option to an integer list.
+ *
+ * @param obj AVClass object to set options on
+ * @param name name of the binary option
+ * @param val pointer to an integer list (must have the correct type with
+ * regard to the contents of the list)
+ * @param term list terminator (usually 0 or -1)
+ * @param flags search flags
+*)
+// #define av_opt_set_int_list(obj, name, val, term, flags) \
+// (av_int_list_length(val, term) > INT_MAX / sizeof(*(val)) ? \
+// AVERROR(EINVAL) : \
+// av_opt_set_bin(obj, name, (const uint8_t *)(val), \
+// av_int_list_length(val, term) * sizeof(*(val)), flags))
+function av_opt_set_int_list(obj: Pointer; name: PAnsiChar; list: Pointer; item_size: int; term: int64_t; flags: int): Integer; inline;
+
+(* *
+ * @}
+*)
+
+(* *
+ * @defgroup opt_get_funcs Option getting functions
+ * @{
+ * Those functions get a value of the option with the given name from an object.
+ *
+ * @param[in] obj a struct whose first element is a pointer to an AVClass.
+ * @param[in] name name of the option to get.
+ * @param[in] search_flags flags passed to av_opt_find2. I.e. if AV_OPT_SEARCH_CHILDREN
+ * is passed here, then the option may be found in a child of obj.
+ * @param[out] out_val value of the option will be written here
+ * @return >=0 on success, a negative error code otherwise
+*)
+(* *
+ * @note the returned string will be av_malloc()ed and must be av_free()ed by the caller
+ *
+ * @note if AV_OPT_ALLOW_NULL is set in search_flags in av_opt_get, and the option has
+ * AV_OPT_TYPE_STRING or AV_OPT_TYPE_BINARY and is set to NULL, *out_val will be set
+ * to NULL instead of an allocated empty string.
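+ *
+ * A minimal set/get round-trip sketch; obj is assumed to be any
+ * AVOptions-enabled struct, 'threads'/'auto' are just example values, and
+ * av_free() is assumed to be imported elsewhere in this binding:
+ * @code
+ * var
+ *   v: puint8_t;
+ * begin
+ *   if av_opt_set(obj, 'threads', 'auto', AV_OPT_SEARCH_CHILDREN) = 0 then
+ *     if av_opt_get(obj, 'threads', AV_OPT_SEARCH_CHILDREN, v) >= 0 then
+ *     begin
+ *       WriteLn('threads=', PAnsiChar(v));
+ *       av_free(v); // the returned string is av_malloc()ed
+ *     end;
+ * end;
+ * @endcode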
+*)
+// int av_opt_get (void *obj, const char *name, int search_flags, uint8_t **out_val);
+function av_opt_get(obj: Pointer; const name: PAnsiChar; search_flags: int; Var out_val: puint8_t): int; cdecl; external avutil_dll;
+// int av_opt_get_int (void *obj, const char *name, int search_flags, int64_t *out_val);
+function av_opt_get_int(obj: Pointer; const name: PAnsiChar; search_flags: int; var out_val: int64_t): int; cdecl; external avutil_dll;
+// int av_opt_get_double (void *obj, const char *name, int search_flags, double *out_val);
+function av_opt_get_double(obj: Pointer; const name: PAnsiChar; search_flags: int; var out_val: double): int; cdecl; external avutil_dll;
+// int av_opt_get_q (void *obj, const char *name, int search_flags, AVRational *out_val);
+function av_opt_get_q(obj: Pointer; const name: PAnsiChar; search_flags: int; var out_val: AVRational): int; cdecl; external avutil_dll;
+// int av_opt_get_image_size(void *obj, const char *name, int search_flags, int *w_out, int *h_out);
+function av_opt_get_image_size(obj: Pointer; const name: PAnsiChar; search_flags: int; var w_out, h_out: int): int; cdecl; external avutil_dll;
+// int av_opt_get_pixel_fmt (void *obj, const char *name, int search_flags, enum AVPixelFormat *out_fmt);
+function av_opt_get_pixel_fmt(obj: Pointer; const name: PAnsiChar; search_flags: int; var out_fmt: AVPixelFormat): int; cdecl; external avutil_dll;
+// int av_opt_get_sample_fmt(void *obj, const char *name, int search_flags, enum AVSampleFormat *out_fmt);
+function av_opt_get_sample_fmt(obj: Pointer; const name: PAnsiChar; search_flags: int; var out_fmt: AVSampleFormat): int; cdecl; external avutil_dll;
+// int av_opt_get_video_rate(void *obj, const char *name, int search_flags, AVRational *out_val);
+function av_opt_get_video_rate(obj: Pointer; const name: PAnsiChar; search_flags: int; var out_val: AVRational): int; cdecl; external avutil_dll;
+// int av_opt_get_channel_layout(void *obj, const char *name, int search_flags, int64_t *ch_layout);
+function av_opt_get_channel_layout(obj: Pointer; const name: PAnsiChar; search_flags: int; var ch_layout: int64_t): int; cdecl; external avutil_dll;
+(* *
+ * @param[out] out_val The returned dictionary is a copy of the actual value and must
+ * be freed with av_dict_free() by the caller
+*)
+// int av_opt_get_dict_val(void *obj, const char *name, int search_flags, AVDictionary **out_val);
+function av_opt_get_dict_val(obj: Pointer; const name: PAnsiChar; search_flags: int; var out_val: pAVDictionary): int; cdecl; external avutil_dll;
+(* *
+ * @}
+*)
+(* *
+ * Gets a pointer to the requested field in a struct.
+ * This function allows accessing a struct even when its fields are moved or
+ * renamed since the application making the access has been compiled.
+ *
+ * @returns a pointer to the field; it can be cast to the correct type and read
+ * or written to.
+*)
+// void *av_opt_ptr(const AVClass *avclass, void *obj, const char *name);
+function av_opt_ptr(const avclass: pAVClass; obj: Pointer; const name: PAnsiChar): Pointer; cdecl; external avutil_dll;
+
+(* *
+ * Free an AVOptionRanges struct and set it to NULL.
+*)
+// void av_opt_freep_ranges(AVOptionRanges **ranges);
+procedure av_opt_freep_ranges(var ranges: pAVOptionRanges); cdecl; external avutil_dll;
+
+(* *
+ * Get a list of allowed ranges for the given option.
+ *
+ * The returned list may depend on other fields in obj like for example profile.
+ *
+ * @param flags is a bitmask of flags, undefined flags should not be set and should be ignored
+ * AV_OPT_SEARCH_FAKE_OBJ indicates that the obj is a double pointer to an AVClass instead of a full instance
+ * AV_OPT_MULTI_COMPONENT_RANGE indicates that function may return more than one component, @see AVOptionRanges
+ *
+ * The result must be freed with av_opt_freep_ranges.
+ *
+ * @return number of components returned on success, a negative error code otherwise
+*)
+// int av_opt_query_ranges(AVOptionRanges **, void *obj, const char *key, int flags);
+function av_opt_query_ranges(Var ranges: pAVOptionRanges; obj: Pointer; const key: PAnsiChar; flags: int): int; cdecl; external avutil_dll;
+
+(* *
+ * Copy options from src object into dest object.
+ *
+ * Options that require memory allocation (e.g. string or binary) are malloc'ed in dest object.
+ * Original memory allocated for such options is freed unless both src and dest options point to the same memory.
+ *
+ * @param dest Object to copy into
+ * @param src Object to copy from
+ * @return 0 on success, negative on error
+*)
+// int av_opt_copy(void *dest, const void *src);
+function av_opt_copy(dest: Pointer; const src: Pointer): int; cdecl; external avutil_dll;
+
+(* *
+ * Get a default list of allowed ranges for the given option.
+ *
+ * This list is constructed without using the AVClass.query_ranges() callback
+ * and can be used as fallback from within the callback.
+ *
+ * @param flags is a bitmask of flags, undefined flags should not be set and should be ignored
+ * AV_OPT_SEARCH_FAKE_OBJ indicates that the obj is a double pointer to an AVClass instead of a full instance
+ * AV_OPT_MULTI_COMPONENT_RANGE indicates that function may return more than one component, @see AVOptionRanges
+ *
+ * The result must be freed with av_opt_freep_ranges.
+ *
+ * @return number of components returned on success, a negative error code otherwise
+*)
+// int av_opt_query_ranges_default(AVOptionRanges **, void *obj, const char *key, int flags);
+function av_opt_query_ranges_default(var ranges: pAVOptionRanges; obj: Pointer; const key: PAnsiChar; flags: int): int; cdecl; external avutil_dll;
+
+(* *
+ * Check if given option is set to its default value.
+ *
+ * The option o must belong to obj. This function must not be used to check the state of a child's options.
+ * @see av_opt_is_set_to_default_by_name().
+ *
+ * @param obj AVClass object to check option on
+ * @param o option to be checked
+ * @return >0 when option is set to its default,
+ * 0 when option is not set to its default,
+ * <0 on error
+*)
+// int av_opt_is_set_to_default(void *obj, const AVOption *o);
+function av_opt_is_set_to_default(obj: Pointer; const o: pAVOption): int; cdecl; external avutil_dll;
+
+(* *
+ * Check if given option is set to its default value.
+ *
+ * @param obj AVClass object to check option on
+ * @param name option name
+ * @param search_flags combination of AV_OPT_SEARCH_*
+ * @return >0 when option is set to its default,
+ * 0 when option is not set to its default,
+ * <0 on error
+*)
+// int av_opt_is_set_to_default_by_name(void *obj, const char *name, int search_flags);
+function av_opt_is_set_to_default_by_name(obj: Pointer; const name: PAnsiChar; search_flags: int): int; cdecl; external avutil_dll;
+
+const
+  AV_OPT_SERIALIZE_SKIP_DEFAULTS = $00000001;
+  // < Serialize options that are not set to default values only.
+  AV_OPT_SERIALIZE_OPT_FLAGS_EXACT = $00000002;
+  // < Serialize options that exactly match opt_flags only.
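+
+  (* *
+   * A minimal sketch of the query/free pairing declared above; obj is assumed
+   * to be an AVOptions-enabled struct and 'threads' just an example option name:
+   * @code
+   * var
+   *   ranges: pAVOptionRanges;
+   * begin
+   *   ranges := nil;
+   *   if av_opt_query_ranges(ranges, obj, 'threads', 0) >= 0 then
+   *   begin
+   *     // inspect ranges^ here
+   *     av_opt_freep_ranges(ranges); // frees the list and sets ranges to nil
+   *   end;
+   * end;
+   * @endcode
+   *)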
+
+  (* *
+   * Serialize object's options.
+   *
+   * Create a string containing object's serialized options.
+   * Such string may be passed back to av_opt_set_from_string() in order to restore option values.
+   * A key/value or pairs separator occurring in the serialized value or
+   * name string is escaped through the av_escape() function.
+   *
+   * @param[in] obj AVClass object to serialize
+   * @param[in] opt_flags serialize options with all the specified flags set (AV_OPT_FLAG)
+   * @param[in] flags combination of AV_OPT_SERIALIZE_* flags
+   * @param[out] buffer Pointer to buffer that will be allocated with string containing serialized options.
+   * Buffer must be freed by the caller when it is no longer needed.
+   * @param[in] key_val_sep character used to separate key from value
+   * @param[in] pairs_sep character used to separate two pairs from each other
+   * @return >= 0 on success, negative on error
+   * @warning Separators cannot be either '\\' or '\0', and they cannot be the same.
+   *)
+  // int av_opt_serialize(void *obj, int opt_flags, int flags, char **buffer,
+  // const char key_val_sep, const char pairs_sep);
+
+function av_opt_serialize(obj: Pointer; opt_flags: int; flags: int; Var buffer: PAnsiChar; const key_val_sep: AnsiChar; const pairs_sep: AnsiChar): int; cdecl;
+ external avutil_dll;
+
+{$ENDREGION}
+{$REGION 'log.h'}
+(* *
+ * Sets additional colors for extended debugging sessions.
+ * @code
+ av_log(ctx, AV_LOG_DEBUG|AV_LOG_C(134), "Message in purple\n");
+ @endcode
+ * Requires 256-color terminal support. Use outside of debugging sessions is not
+ * recommended.
+*)
+// #define AV_LOG_C(x) ((x) shl 8)
+
+(* *
+ * Send the specified message to the log if the level is less than or equal
+ * to the current av_log_level. By default, all logging messages are sent to
+ * stderr. This behavior can be altered by setting a different logging callback
+ * function.
+ * @see av_log_set_callback
+ *
+ * @param avcl A pointer to an arbitrary struct of which the first field is a
+ * pointer to an AVClass struct or NULL if general log.
+ * @param level The importance level of the message expressed using a @ref
+ * lavu_log_constants "Logging Constant".
+ * @param fmt The format string (printf-compatible) that specifies how
+ * subsequent arguments are converted to output.
+*)
+// void av_log(void *avcl, int level, const char *fmt, ...) av_printf_format(3, 4);
+procedure av_log(avcl: Pointer; level: int; const fmt: PAnsiChar);
+cdecl varargs;
+external avutil_dll;
+
+(* *
+ * Send the specified message to the log if the level is less than or equal
+ * to the current av_log_level. By default, all logging messages are sent to
+ * stderr. This behavior can be altered by setting a different logging callback
+ * function.
+ * @see av_log_set_callback
+ *
+ * @param avcl A pointer to an arbitrary struct of which the first field is a
+ * pointer to an AVClass struct.
+ * @param level The importance level of the message expressed using a @ref
+ * lavu_log_constants "Logging Constant".
+ * @param fmt The format string (printf-compatible) that specifies how
+ * subsequent arguments are converted to output.
+ * @param vl The arguments referenced by the format string.
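+ *
+ * Note: av_vlog takes an explicit va_list; for ad-hoc messages the varargs
+ * av_log import above is simpler to call from Pascal, e.g. (assuming the
+ * AV_LOG_* level constants are declared elsewhere in this unit):
+ * @code
+ * av_log(nil, AV_LOG_INFO, 'opened %s'#10, PAnsiChar('movie.mp4'));
+ * @endcode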
+*) +// void av_vlog(void *avcl, int level, const char *fmt, va_list vl); +procedure av_vlog(avcl: Pointer; level: int; const fmt: PAnsiChar; vl: PVA_LIST); cdecl; external avutil_dll; + +(* * + * Get the current log level + * + * @see lavu_log_constants + * + * @return Current log level +*) +// int av_log_get_level(void); +function av_log_get_level(): int; cdecl; external avutil_dll; + +(* * + * Set the log level + * + * @see lavu_log_constants + * + * @param level Logging level +*) +// void av_log_set_level(int level); +procedure av_log_set_level(level: int); cdecl; external avutil_dll; + +(* * + * Set the logging callback + * + * @note The callback must be thread safe, even if the application does not use + * threads itself as some codecs are multithreaded. + * + * @see av_log_default_callback + * + * @param callback A logging function with a compatible signature. +*) +// void av_log_set_callback(void (*callback)(void*, int, const char*, va_list)); +Type + Tav_log_callback = procedure(p: Pointer; lvl: Integer; fmt: PAnsiChar; vl: PVA_LIST); +cdecl varargs; + +procedure av_log_set_callback(callbackproc: Tav_log_callback); cdecl; external avutil_dll; + +(* * + * Default logging callback + * + * It prints the message to stderr, optionally colorizing it. + * + * @param avcl A pointer to an arbitrary struct of which the first field is a + * pointer to an AVClass struct. + * @param level The importance level of the message expressed using a @ref + * lavu_log_constants "Logging Constant". + * @param fmt The format string (printf-compatible) that specifies how + * subsequent arguments are converted to output. + * @param vl The arguments referenced by the format string. +*) +// void av_log_default_callback(void *avcl, int level, const char *fmt, va_list vl); +procedure av_log_default_callback(avcl: Pointer; level: int; const fmt: PAnsiChar; vl: PVA_LIST); cdecl; external avutil_dll; + +(* * + * Return the context name + * + * @param ctx The AVClass context + * + * @return The AVClass class_name +*) +// const char* av_default_item_name(void* ctx); +function av_default_item_name(ctx: Pointer): PAnsiChar; cdecl; external avutil_dll; + +// AVClassCategory av_default_get_category(void *ptr); +function av_default_get_category(ptr: Pointer): AVClassCategory; cdecl; external avutil_dll; + +(* * + * Format a line of log the same way as the default callback. + * @param line buffer to receive the formatted line + * @param line_size size of the buffer + * @param print_prefix used to store whether the prefix must be printed; + * must point to a persistent integer initially set to 1 +*) +// void av_log_format_line(void *ptr, int level, const char *fmt, va_list vl, +// char *line, int line_size, int *print_prefix); +procedure av_log_format_line(ptr: Pointer; level: int; const fmt: PAnsiChar; vl: PVA_LIST; line: PAnsiChar; line_size: int; Var print_prefix: int); cdecl; + external avutil_dll; + +(* * + * Format a line of log the same way as the default callback. 
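+ *
+ * A sketch of a log callback built on this helper; print_prefix must keep its
+ * value between calls, and registering the routine with av_log_set_callback
+ * may need a cast to Tav_log_callback:
+ * @code
+ * var
+ *   print_prefix: int = 1;
+ *
+ * procedure MyLogCb(avcl: Pointer; level: Integer; fmt: PAnsiChar; vl: PVA_LIST); cdecl;
+ * var
+ *   line: array [0 .. 1023] of AnsiChar;
+ * begin
+ *   if av_log_format_line2(avcl, level, fmt, vl, @line[0], Length(line), print_prefix) >= 0 then
+ *     Write(PAnsiChar(@line[0]));
+ * end;
+ * @endcode
+ *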
+ * @param line buffer to receive the formatted line; + * may be NULL if line_size is 0 + * @param line_size size of the buffer; at most line_size-1 characters will + * be written to the buffer, plus one null terminator + * @param print_prefix used to store whether the prefix must be printed; + * must point to a persistent integer initially set to 1 + * @return Returns a negative value if an error occurred, otherwise returns + * the number of characters that would have been written for a + * sufficiently large buffer, not including the terminating null + * character. If the return value is not less than line_size, it means + * that the log message was truncated to fit the buffer. +*) +// int av_log_format_line2(void *ptr, int level, const char *fmt, va_list vl, +// char *line, int line_size, int *print_prefix); +function av_log_format_line2(ptr: Pointer; level: int; const fmt: PAnsiChar; vl: PVA_LIST; line: PAnsiChar; line_size: int; Var print_prefix: int): int; cdecl; + external avutil_dll; + +const + (* * + * Skip repeated messages, this requires the user app to use av_log() instead of + * (f)printf as the 2 would otherwise interfere and lead to + * "Last message repeated x times" messages below (f)printf messages with some + * bad luck. + * Also to receive the last, "last repeated" line if any, the user app must + * call av_log(NULL, AV_LOG_QUIET, "%s", ""); at the end + *) + AV_LOG_SKIP_REPEATED = 1; + + (* * + * Include the log severity in messages originating from codecs. + * + * Results in messages such as: + * [rawvideo @ $DEADBEEF] [error] encode did not produce valid pts + *) + AV_LOG_PRINT_LEVEL = 2; + + // void av_log_set_flags(int arg); +procedure av_log_set_flags(arg: int); cdecl; external avutil_dll; + +// int av_log_get_flags(void); +function av_log_get_flags(): int; cdecl; external avutil_dll; +{$ENDREGION} +{$REGION 'avutil.h'} +(* * + * Return the LIBAVUTIL_VERSION_INT constant. +*) +// unsigned avutil_version(void); +function avutil_version(): unsigned; cdecl; external avutil_dll; + +(* * + * Return an informative version string. This usually is the actual release + * version number or a git commit description. This string has no fixed format + * and can change any time. It should never be parsed by code. +*) +// const char *av_version_info(void); +function av_version_info(): PAnsiChar; cdecl; external avutil_dll; + +(* * + * Return the libavutil build-time configuration. +*) +// const char *avutil_configuration(void); +function avutil_configuration(): PAnsiChar; cdecl; external avutil_dll; + +(* * + * Return the libavutil license. +*) +// const char *avutil_license(void); +function avutil_license(): PAnsiChar; cdecl; external avutil_dll; + +(* * + * Return a string describing the media_type enum, NULL if media_type + * is unknown. +*) +// const char *av_get_media_type_string(enum AVMediaType media_type); +function av_get_media_type_string(media_type: AVMediaType): PAnsiChar; cdecl; external avutil_dll; + +(* * + * Return a single letter to describe the given picture type + * pict_type. + * + * @param[in] pict_type the picture type @return a single character + * representing the picture type, '?' if pict_type is unknown +*) +// char av_get_picture_type_char(enum AVPictureType pict_type); +function av_get_picture_type_char(pict_type: AVPictureType): AnsiChar; cdecl; external avutil_dll; + +(* * + * Return x default pointer in case p is NULL. +*) +// static inline void *av_x_if_null(const void *p, const void *x) +// { +// return (void *)(intptr_t)(p ? 
p : x); +// } +function av_x_if_null(const p: Pointer; const x: Pointer): Pointer; inline; + +(* * + * Compute the length of an integer list. + * + * @param elsize size in bytes of each list element (only 1, 2, 4 or 8) + * @param term list terminator (usually 0 or -1) + * @param list pointer to the list + * @return length of the list, in elements, not counting the terminator +*) +// unsigned av_int_list_length_for_size(unsigned elsize, +// const void *list, uint64_t term) av_pure; +function av_int_list_length_for_size(elsize: unsigned; const list: Pointer; term: uint64_t): unsigned; cdecl; external avutil_dll; + +(* * + * Compute the length of an integer list. + * + * @param term list terminator (usually 0 or -1) + * @param list pointer to the list + * @return length of the list, in elements, not counting the terminator +*) +// #define av_int_list_length(list, term) \ +// av_int_list_length_for_size(sizeof(*(list)), list, term) +function av_int_list_length(list: Pointer; item_size: int; term: int64_t): int; inline; + +(* * + * Open a file using a UTF-8 filename. + * The API of this function matches POSIX fopen(), errors are returned through + * errno. +*) +// FILE *av_fopen_utf8(const char *path, const char *mode); +function av_fopen_utf8(const path: PAnsiChar; const mode: PAnsiChar): pFile; cdecl; external avutil_dll; + +(* * + * Return the fractional representation of the internal time base. +*) +// AVRational av_get_time_base_q(void); +function av_get_time_base_q(): AVRational; cdecl; external avutil_dll; + +const + AV_FOURCC_MAX_STRING_SIZE = 32; + + // #define av_fourcc2str(fourcc) av_fourcc_make_string((char[AV_FOURCC_MAX_STRING_SIZE]){0}, fourcc) + + (* * + * Fill the provided buffer with a string containing a FourCC (four-character + * code) representation. + * + * @param buf a buffer with size in bytes of at least AV_FOURCC_MAX_STRING_SIZE + * @param fourcc the fourcc to represent + * @return the buffer in input + *) + // char *av_fourcc_make_string(char *buf, uint32_t fourcc); +function av_fourcc_make_string(buf: PAnsiChar; fourcc: uint32_t): PAnsiChar; cdecl; external avutil_dll; +{$ENDREGION} +{$REGION 'file.h'} +(* * + * Read the file with name filename, and put its content in a newly + * allocated buffer or map it with mmap() when available. + * In case of success set *bufptr to the read or mmapped buffer, and + * *size to the size in bytes of the buffer in *bufptr. + * The returned buffer must be released with av_file_unmap(). + * + * @param log_offset loglevel offset used for logging + * @param log_ctx context used for logging + * @return a non negative number in case of success, a negative value + * corresponding to an AVERROR error code in case of failure +*) +// av_warn_unused_result +// int av_file_map(const char *filename, uint8_t **bufptr, size_t *size, +// int log_offset, void *log_ctx); +function av_file_map(const filename: PAnsiChar; var bufptr: puint8_t; var size: size_t; log_offset: int; log_ctx: Pointer): int; cdecl; external avutil_dll; +(* * + * Unmap or free the buffer bufptr created by av_file_map(). + * + * @param size size in bytes of bufptr, must be the same as returned + * by av_file_map() +*) +// void av_file_unmap(uint8_t *bufptr, size_t size); +procedure av_file_unmap(bufptr: puint8_t; size: size_t); cdecl; external avutil_dll; +(* * + * Wrapper to work around the lack of mkstemp() on mingw. + * Also, tries to create file in /tmp first, if possible. + * *prefix can be a character constant; *filename will be allocated internally. 
+ * @return file descriptor of opened file (or negative value corresponding to an + * AVERROR code on error) + * and opened file name in **filename. + * @note On very old libcs it is necessary to set a secure umask before + * calling this, av_tempfile() can't call umask itself as it is used in + * libraries and could interfere with the calling application. + * @deprecated as fd numbers cannot be passed saftely between libs on some platforms +*) +// int av_tempfile(const char *prefix, char **filename, int log_offset, void *log_ctx); +function av_tempfile(const prefix: PAnsiChar; var filename: PAnsiChar; log_offset: int; log_ctx: Pointer): int; cdecl; external avutil_dll; +{$ENDREGION} +{$REGION 'error.h'} + +const + // #define AVERROR_BSF_NOT_FOUND FFERRTAG(0xF8,'B','S','F') //< Bitstream filter not found + AVERROR_BSF_NOT_FOUND = -($F8 or (Ord('B') shl 8) or (Ord('S') shl 16) or (Ord('F') shl 24)); + // #define AVERROR_BUG FFERRTAG( 'B','U','G','!') //< Internal bug, also see AVERROR_BUG2 + AVERROR_BUG = -(Ord('B') or (Ord('U') shl 8) or (Ord('G') shl 16) or (Ord('!') shl 24)); + // #define AVERROR_BUFFER_TOO_SMALL FFERRTAG( 'B','U','F','S') //< Buffer too small + AVERROR_BUFFER_TOO_SMALL = -(Ord('B') or (Ord('U') shl 8) or (Ord('F') shl 16) or (Ord('S') shl 24)); + // #define AVERROR_DECODER_NOT_FOUND FFERRTAG(0xF8,'D','E','C') //< Decoder not found + AVERROR_DECODER_NOT_FOUND = -($F8 or (Ord('D') shl 8) or (Ord('E') shl 16) or (Ord('C') shl 24)); + // #define AVERROR_DEMUXER_NOT_FOUND FFERRTAG(0xF8,'D','E','M') //< Demuxer not found + AVERROR_DEMUXER_NOT_FOUND = -($F8 or (Ord('D') shl 8) or (Ord('E') shl 16) or (Ord('M') shl 24)); + // #define AVERROR_ENCODER_NOT_FOUND FFERRTAG(0xF8,'E','N','C') //< Encoder not found + AVERROR_ENCODER_NOT_FOUND = -($F8 or (Ord('E') shl 8) or (Ord('N') shl 16) or (Ord('C') shl 24)); + // #define AVERROR_EOF FFERRTAG( 'E','O','F',' ') //< End of file + AVERROR_EOF = -(Ord('E') or (Ord('O') shl 8) or (Ord('F') shl 16) or (Ord(' ') shl 24)); + // #define AVERROR_EXIT FFERRTAG( 'E','X','I','T') //< Immediate exit was requested; the called function should not be restarted + AVERROR_EXIT = -(Ord('E') or (Ord('X') shl 8) or (Ord('I') shl 16) or (Ord('T') shl 24)); + // #define AVERROR_EXTERNAL FFERRTAG( 'E','X','T',' ') //< Generic error in an external library + AVERROR_EXTERNAL = -(Ord('E') or (Ord('X') shl 8) or (Ord('T') shl 16) or (Ord(' ') shl 24)); + // #define AVERROR_FILTER_NOT_FOUND FFERRTAG(0xF8,'F','I','L') //< Filter not found + AVERROR_FILTER_NOT_FOUND = -($F8 or (Ord('F') shl 8) or (Ord('I') shl 16) or (Ord('L') shl 24)); + // #define AVERROR_INVALIDDATA FFERRTAG( 'I','N','D','A') //< Invalid data found when processing input + AVERROR_INVALIDDATA = -(Ord('I') or (Ord('N') shl 8) or (Ord('D') shl 16) or (Ord('A') shl 24)); + // #define AVERROR_MUXER_NOT_FOUND FFERRTAG(0xF8,'M','U','X') //< Muxer not found + AVERROR_MUXER_NOT_FOUND = -($F8 or (Ord('M') shl 8) or (Ord('U') shl 16) or (Ord('X') shl 24)); + // #define AVERROR_OPTION_NOT_FOUND FFERRTAG(0xF8,'O','P','T') //< Option not found + AVERROR_OPTION_NOT_FOUND = -($F8 or (Ord('O') shl 8) or (Ord('P') shl 16) or (Ord('T') shl 24)); + // #define AVERROR_PATCHWELCOME FFERRTAG( 'P','A','W','E') //< Not yet implemented in FFmpeg, patches welcome + AVERROR_PATCHWELCOME = -(Ord('P') or (Ord('A') shl 8) or (Ord('W') shl 16) or (Ord('E') shl 24)); + // #define AVERROR_PROTOCOL_NOT_FOUND FFERRTAG(0xF8,'P','R','O') //< Protocol not found + AVERROR_PROTOCOL_NOT_FOUND = -($F8 or (Ord('P') shl 8) or (Ord('R') 
shl 16) or (Ord('O') shl 24)); + + // #define AVERROR_STREAM_NOT_FOUND FFERRTAG(0xF8,'S','T','R') //< Stream not found + AVERROR_STREAM_NOT_FOUND = -($F8 or (Ord('S') shl 8) or (Ord('T') shl 16) or (Ord('R') shl 24)); + + (* * + * This is semantically identical to AVERROR_BUG + * it has been introduced in Libav after our AVERROR_BUG and with a modified value. + *) + // #define AVERROR_BUG2 FFERRTAG( 'B','U','G',' ') + AVERROR_BUG2 = -(Ord('B') or (Ord('U') shl 8) or (Ord('G') shl 16) or (Ord(' ') shl 24)); + // #define AVERROR_UNKNOWN FFERRTAG( 'U','N','K','N') //< Unknown error, typically from an external library + AVERROR_UNKNOWN = -(Ord('U') or (Ord('N') shl 8) or (Ord('K') shl 16) or (Ord('N') shl 24)); + // #define AVERROR_EXPERIMENTAL (-0x2bb2afa8) //< Requested feature is flagged experimental. Set strict_std_compliance if you really want to use it. + AVERROR_EXPERIMENTAL = -$2BB2AFA8; + // #define AVERROR_INPUT_CHANGED (-0x636e6701) //< Input changed between calls. Reconfiguration is required. (can be OR-ed with AVERROR_OUTPUT_CHANGED) + AVERROR_INPUT_CHANGED = -$636E6701; + // #define AVERROR_OUTPUT_CHANGED (-0x636e6702) //< Output changed between calls. Reconfiguration is required. (can be OR-ed with AVERROR_INPUT_CHANGED) + AVERROR_OUTPUT_CHANGED = -$636E6702; + + // * HTTP & RTSP errors */ + // #define AVERROR_HTTP_BAD_REQUEST FFERRTAG(0xF8,'4','0','0') + AVERROR_HTTP_BAD_REQUEST = -($F8 or (Ord('4') shl 8) or (Ord('0') shl 16) or (Ord('0') shl 24)); + // #define AVERROR_HTTP_UNAUTHORIZED FFERRTAG(0xF8,'4','0','1') + AVERROR_HTTP_UNAUTHORIZED = -($F8 or (Ord('4') shl 8) or (Ord('0') shl 16) or (Ord('1') shl 24)); + // #define AVERROR_HTTP_FORBIDDEN FFERRTAG(0xF8,'4','0','3') + AVERROR_HTTP_FORBIDDEN = -($F8 or (Ord('4') shl 8) or (Ord('0') shl 16) or (Ord('3') shl 24)); + // #define AVERROR_HTTP_NOT_FOUND FFERRTAG(0xF8,'4','0','4') + AVERROR_HTTP_NOT_FOUND = -($F8 or (Ord('4') shl 8) or (Ord('0') shl 16) or (Ord('4') shl 24)); + // #define AVERROR_HTTP_OTHER_4XX FFERRTAG(0xF8,'4','X','X') + AVERROR_HTTP_OTHER_4XX = -($F8 or (Ord('4') shl 8) or (Ord('X') shl 16) or (Ord('X') shl 24)); + // #define AVERROR_HTTP_SERVER_ERROR FFERRTAG(0xF8,'5','X','X') + AVERROR_HTTP_SERVER_ERROR = -($F8 or (Ord('5') shl 8) or (Ord('X') shl 16) or (Ord('X') shl 24)); + + AV_ERROR_MAX_STRING_SIZE = 64; + + // errno.h + AVERROR_EPERM = -1; // < Operation not permitted + AVERROR_ENOENT = -2; // < No such file or directory + AVERROR_ESRCH = -3; // < No such process + AVERROR_EINTR = -4; // < Interrupted function call + AVERROR_EIO = -5; // < I/O error + AVERROR_ENXIO = -6; // < No such device or address + AVERROR_E2BIG = -7; // < Argument list too long + AVERROR_ENOEXEC = -8; // < Exec format error + AVERROR_EBADF = -9; // < Bad file number + AVERROR_ECHILD = -10; // < No child processes + AVERROR_EAGAIN = -11; // < Resource temporarily unavailable / Try again + AVERROR_ENOMEM = -12; // < Not enough space / Out of memory + AVERROR_EACCES = -13; // < Permission denied + AVERROR_EFAULT = -14; // < Bad address + AVERROR_ENOTBLK = -15; // < Block device required (WIN: Unknown error) + AVERROR_EBUSY = -16; // < Device or resource busy + AVERROR_EEXIST = -17; // < File exists + AVERROR_EXDEV = -18; // < Cross-device link + AVERROR_ENODEV = -19; // < No such device + AVERROR_ENOTDIR = -20; // < Not a directory + AVERROR_EISDIR = -21; // < Is a directory + AVERROR_EINVAL = -22; // < Invalid argument + AVERROR_ENFILE = -23; // < Too many open files in system / File table overflow + AVERROR_EMFILE = -24; // < Too 
many open files + AVERROR_ENOTTY = -25; // < Inappropriate I/O control operation / Not a typewriter + AVERROR_ETXTBSY = -26; // < Text file busy (WIN: Unknown error) + AVERROR_EFBIG = -27; // < File too large + AVERROR_ENOSPC = -28; // < No space left on device + AVERROR_ESPIPE = -29; // < Illegal seek + AVERROR_EROFS = -30; // < Read-only file system + AVERROR_EMLINK = -31; // < Too many links + AVERROR_EPIPE = -32; // < Broken pipe + AVERROR_EDOM = -33; // < Math argument out of domain of func + AVERROR_ERANGE = -34; // < Math result not representable + AVERROR_EDEADLK = -36; // < Resource deadlock avoided + AVERROR_ENAMETOOLONG = -38; // < File name too long + AVERROR_ENOLCK = -39; // < No locks available + AVERROR_ENOSYS = -40; // < Function not implemented + AVERROR_ENOTEMPTY = -41; // < Directory not empty + AVERROR_ELOOP = -114; // < Too many symbolic links encountered + AVERROR_ENOMSG = -91; // < No message of desired type (WIN: Unknown error) + AVERROR_EIDRM = -90; // < Identifier removed (WIN: Unknown error) + AVERROR_ENOSTR = -99; // < Device not a stream + AVERROR_ENODATA = -96; // < No data available + AVERROR_ETIME = -101; // < Timer expired + AVERROR_ENOSR = -98; // < Out of streams resources + AVERROR_EREMOTE = -71; // < Too many levels of remote in path + AVERROR_ENOLINK = -97; // < Link has been severed + AVERROR_EMULTIHOP = -95; // < Multihop attempted + AVERROR_EBADMSG = -94; // < Not a data message + AVERROR_EPROTO = -134; // < Protocol error + AVERROR_EOVERFLOW = -132; // < Value too large for defined data type + AVERROR_EILSEQ = -42; // < Illegal byte sequence + AVERROR_EUSERS = -68; // < Too many users + AVERROR_ENOTSOCK = -128; // < Socket operation on non-socket + AVERROR_EDESTADDRREQ = -109; // < Destination address required + AVERROR_EMSGSIZE = -115; // < Message too long + AVERROR_EPROTOTYPE = -136; // < Protocol wrong type for socket + AVERROR_ENOPROTOOPT = -123; // < Protocol not available + AVERROR_EPROTONOSUPPORT = -135; // < Protocol not supported + AVERROR_ESOCKTNOSUPPORT = -44; // < Socket type not supported + AVERROR_EOPNOTSUPP = -130; // < Operation not supported on transport endpoint + AVERROR_EPFNOSUPPORT = -46; // < Protocol family not supported + AVERROR_EAFNOSUPPORT = -102; // < Address family not supported by protocol + AVERROR_EADDRINUSE = -100; // < Address already in use + AVERROR_EADDRNOTAVAIL = -101; // < Cannot assign requested address + AVERROR_ENETDOWN = -116; // < Network is down + AVERROR_ENETUNREACH = -118; // < Network is unreachable + AVERROR_ENETRESET = -117; // < Network dropped connection because of reset + AVERROR_ECONNABORTED = -106; // < Software caused connection abort + AVERROR_ECONNRESET = -108; // < Connection reset by peer + AVERROR_ENOBUFS = -119; // < No buffer space available + AVERROR_EISCONN = -113; // < Transport endpoint is already connected + AVERROR_ENOTCONN = -126; // < Transport endpoint is not connected + AVERROR_ESHUTDOWN = -58; // < Cannot send after transport endpoint shutdown + AVERROR_ETOOMANYREFS = -59; // < Too many references: cannot splice + AVERROR_ETIMEDOUT = -138; // < Connection timed out + AVERROR_ECONNREFUSED = -107; // < Connection refused + AVERROR_EHOSTDOWN = -64; // < Host is down + AVERROR_EHOSTUNREACH = -110; // < No route to host + AVERROR_EALREADY = -103; // < Operation already in progress + AVERROR_EINPROGRESS = -112; // < Operation now in progress + AVERROR_ESTALE = -70; // < Stale NFS file handle + AVERROR_ECANCELED = -105; // < Operation Canceled + AVERROR_EOWNERDEAD = -133; // < 
Owner died + AVERROR_ENOTRECOVERABLE = -44; // < State not recoverable + + WSABASEERR = -10000; +{$EXTERNALSYM WSABASEERR} + WSAEINTR = WSABASEERR - 4; +{$EXTERNALSYM WSAEINTR} + WSAEBADF = WSABASEERR - 9; +{$EXTERNALSYM WSAEBADF} + WSAEACCES = WSABASEERR - 13; +{$EXTERNALSYM WSAEACCES} + WSAEFAULT = WSABASEERR - 14; +{$EXTERNALSYM WSAEFAULT} + WSAEINVAL = WSABASEERR - 22; +{$EXTERNALSYM WSAEINVAL} + WSAEMFILE = WSABASEERR - 24; +{$EXTERNALSYM WSAEMFILE} + WSAEWOULDBLOCK = WSABASEERR - 35; +{$EXTERNALSYM WSAEWOULDBLOCK} + WSAEINPROGRESS = WSABASEERR - 36; (* deprecated on WinSock2 *) +{$EXTERNALSYM WSAEINPROGRESS} + WSAEALREADY = WSABASEERR - 37; +{$EXTERNALSYM WSAEALREADY} + WSAENOTSOCK = WSABASEERR - 38; +{$EXTERNALSYM WSAENOTSOCK} + WSAEDESTADDRREQ = WSABASEERR - 39; +{$EXTERNALSYM WSAEDESTADDRREQ} + WSAEMSGSIZE = WSABASEERR - 40; +{$EXTERNALSYM WSAEMSGSIZE} + WSAEPROTOTYPE = WSABASEERR - 41; +{$EXTERNALSYM WSAEPROTOTYPE} + WSAENOPROTOOPT = WSABASEERR - 42; +{$EXTERNALSYM WSAENOPROTOOPT} + WSAEPROTONOSUPPORT = WSABASEERR - 43; +{$EXTERNALSYM WSAEPROTONOSUPPORT} + WSAESOCKTNOSUPPORT = WSABASEERR - 44; +{$EXTERNALSYM WSAESOCKTNOSUPPORT} + WSAEOPNOTSUPP = WSABASEERR - 45; +{$EXTERNALSYM WSAEOPNOTSUPP} + WSAEPFNOSUPPORT = WSABASEERR - 46; +{$EXTERNALSYM WSAEPFNOSUPPORT} + WSAEAFNOSUPPORT = WSABASEERR - 47; +{$EXTERNALSYM WSAEAFNOSUPPORT} + WSAEADDRINUSE = WSABASEERR - 48; +{$EXTERNALSYM WSAEADDRINUSE} + WSAEADDRNOTAVAIL = WSABASEERR - 49; +{$EXTERNALSYM WSAEADDRNOTAVAIL} + WSAENETDOWN = WSABASEERR - 50; +{$EXTERNALSYM WSAENETDOWN} + WSAENETUNREACH = WSABASEERR - 51; +{$EXTERNALSYM WSAENETUNREACH} + WSAENETRESET = WSABASEERR - 52; +{$EXTERNALSYM WSAENETRESET} + WSAECONNABORTED = WSABASEERR - 53; +{$EXTERNALSYM WSAECONNABORTED} + WSAECONNRESET = WSABASEERR - 54; +{$EXTERNALSYM WSAECONNRESET} + WSAENOBUFS = WSABASEERR - 55; +{$EXTERNALSYM WSAENOBUFS} + WSAEISCONN = WSABASEERR - 56; +{$EXTERNALSYM WSAEISCONN} + WSAENOTCONN = WSABASEERR - 57; +{$EXTERNALSYM WSAENOTCONN} + WSAESHUTDOWN = WSABASEERR - 58; +{$EXTERNALSYM WSAESHUTDOWN} + WSAETOOMANYREFS = WSABASEERR - 59; +{$EXTERNALSYM WSAETOOMANYREFS} + WSAETIMEDOUT = WSABASEERR - 60; +{$EXTERNALSYM WSAETIMEDOUT} + WSAECONNREFUSED = WSABASEERR - 61; +{$EXTERNALSYM WSAECONNREFUSED} + WSAELOOP = WSABASEERR - 62; +{$EXTERNALSYM WSAELOOP} + WSAENAMETOOLONG = WSABASEERR - 63; +{$EXTERNALSYM WSAENAMETOOLONG} + WSAEHOSTDOWN = WSABASEERR - 64; +{$EXTERNALSYM WSAEHOSTDOWN} + WSAEHOSTUNREACH = WSABASEERR - 65; +{$EXTERNALSYM WSAEHOSTUNREACH} + WSAENOTEMPTY = WSABASEERR - 66; +{$EXTERNALSYM WSAENOTEMPTY} + WSAEPROCLIM = WSABASEERR - 67; +{$EXTERNALSYM WSAEPROCLIM} + WSAEUSERS = WSABASEERR - 68; +{$EXTERNALSYM WSAEUSERS} + WSAEDQUOT = WSABASEERR - 69; +{$EXTERNALSYM WSAEDQUOT} + WSAESTALE = WSABASEERR - 70; +{$EXTERNALSYM WSAESTALE} + WSAEREMOTE = WSABASEERR - 71; +{$EXTERNALSYM WSAEREMOTE} + WSAEDISCON = WSABASEERR - 101; +{$EXTERNALSYM WSAEDISCON} + WSASYSNOTREADY = WSABASEERR - 91; +{$EXTERNALSYM WSASYSNOTREADY} + WSAVERNOTSUPPORTED = WSABASEERR - 92; +{$EXTERNALSYM WSAVERNOTSUPPORTED} + WSANOTINITIALISED = WSABASEERR - 93; +{$EXTERNALSYM WSANOTINITIALISED} + WSAHOST_NOT_FOUND = WSABASEERR - 1001; +{$EXTERNALSYM WSAHOST_NOT_FOUND} + WSATRY_AGAIN = WSABASEERR - 1002; +{$EXTERNALSYM WSATRY_AGAIN} + WSANO_RECOVERY = WSABASEERR - 1003; +{$EXTERNALSYM WSANO_RECOVERY} + WSANO_DATA = WSABASEERR - 1004; +{$EXTERNALSYM WSANO_DATA} + (* WinSock2 specific error codes *) + WSAENOMORE = WSABASEERR - 102; +{$EXTERNALSYM WSAENOMORE} + WSAECANCELLED = WSABASEERR - 103; 
+{$EXTERNALSYM WSAECANCELLED} + WSAEINVALIDPROCTABLE = WSABASEERR - 104; +{$EXTERNALSYM WSAEINVALIDPROCTABLE} + WSAEINVALIDPROVIDER = WSABASEERR - 105; +{$EXTERNALSYM WSAEINVALIDPROVIDER} + WSAEPROVIDERFAILEDINIT = WSABASEERR - 106; +{$EXTERNALSYM WSAEPROVIDERFAILEDINIT} + WSASYSCALLFAILURE = WSABASEERR - 107; +{$EXTERNALSYM WSASYSCALLFAILURE} + WSASERVICE_NOT_FOUND = WSABASEERR - 108; +{$EXTERNALSYM WSASERVICE_NOT_FOUND} + WSATYPE_NOT_FOUND = WSABASEERR - 109; +{$EXTERNALSYM WSATYPE_NOT_FOUND} + WSA_E_NO_MORE = WSABASEERR - 110; +{$EXTERNALSYM WSA_E_NO_MORE} + WSA_E_CANCELLED = WSABASEERR - 111; +{$EXTERNALSYM WSA_E_CANCELLED} + WSAEREFUSED = WSABASEERR - 112; +{$EXTERNALSYM WSAEREFUSED} + (* WS QualityofService errors *) + WSA_QOS_RECEIVERS = WSABASEERR - 1005; +{$EXTERNALSYM WSA_QOS_RECEIVERS} + WSA_QOS_SENDERS = WSABASEERR - 1006; +{$EXTERNALSYM WSA_QOS_SENDERS} + WSA_QOS_NO_SENDERS = WSABASEERR - 1007; +{$EXTERNALSYM WSA_QOS_NO_SENDERS} + WSA_QOS_NO_RECEIVERS = WSABASEERR - 1008; +{$EXTERNALSYM WSA_QOS_NO_RECEIVERS} + WSA_QOS_REQUEST_CONFIRMED = WSABASEERR - 1009; +{$EXTERNALSYM WSA_QOS_REQUEST_CONFIRMED} + WSA_QOS_ADMISSION_FAILURE = WSABASEERR - 1010; +{$EXTERNALSYM WSA_QOS_ADMISSION_FAILURE} + WSA_QOS_POLICY_FAILURE = WSABASEERR - 1011; +{$EXTERNALSYM WSA_QOS_POLICY_FAILURE} + WSA_QOS_BAD_STYLE = WSABASEERR - 1012; +{$EXTERNALSYM WSA_QOS_BAD_STYLE} + WSA_QOS_BAD_OBJECT = WSABASEERR - 1013; +{$EXTERNALSYM WSA_QOS_BAD_OBJECT} + WSA_QOS_TRAFFIC_CTRL_ERROR = WSABASEERR - 1014; +{$EXTERNALSYM WSA_QOS_TRAFFIC_CTRL_ERROR} + WSA_QOS_GENERIC_ERROR = WSABASEERR - 1015; +{$EXTERNALSYM WSA_QOS_GENERIC_ERROR} + WSA_QOS_ESERVICETYPE = WSABASEERR - 1016; +{$EXTERNALSYM WSA_QOS_ESERVICETYPE} + WSA_QOS_EFLOWSPEC = WSABASEERR - 1017; +{$EXTERNALSYM WSA_QOS_EFLOWSPEC} + WSA_QOS_EPROVSPECBUF = WSABASEERR - 1018; +{$EXTERNALSYM WSA_QOS_EPROVSPECBUF} + WSA_QOS_EFILTERSTYLE = WSABASEERR - 1019; +{$EXTERNALSYM WSA_QOS_EFILTERSTYLE} + WSA_QOS_EFILTERTYPE = WSABASEERR - 1020; +{$EXTERNALSYM WSA_QOS_EFILTERTYPE} + WSA_QOS_EFILTERCOUNT = WSABASEERR - 1021; +{$EXTERNALSYM WSA_QOS_EFILTERCOUNT} + WSA_QOS_EOBJLENGTH = WSABASEERR - 1022; +{$EXTERNALSYM WSA_QOS_EOBJLENGTH} + WSA_QOS_EFLOWCOUNT = WSABASEERR - 1023; +{$EXTERNALSYM WSA_QOS_EFLOWCOUNT} + WSA_QOS_EUNKOWNPSOBJ = WSABASEERR - 1024; +{$EXTERNALSYM WSA_QOS_EUNKOWNPSOBJ} + WSA_QOS_EPOLICYOBJ = WSABASEERR - 1025; +{$EXTERNALSYM WSA_QOS_EPOLICYOBJ} + WSA_QOS_EFLOWDESC = WSABASEERR - 1026; +{$EXTERNALSYM WSA_QOS_EFLOWDESC} + WSA_QOS_EPSFLOWSPEC = WSABASEERR - 1027; +{$EXTERNALSYM WSA_QOS_EPSFLOWSPEC} + WSA_QOS_EPSFILTERSPEC = WSABASEERR - 1028; +{$EXTERNALSYM WSA_QOS_EPSFILTERSPEC} + WSA_QOS_ESDMODEOBJ = WSABASEERR - 1029; +{$EXTERNALSYM WSA_QOS_ESDMODEOBJ} + WSA_QOS_ESHAPERATEOBJ = WSABASEERR - 1030; +{$EXTERNALSYM WSA_QOS_ESHAPERATEOBJ} + WSA_QOS_RESERVED_PETYPE = WSABASEERR - 1031; +{$EXTERNALSYM WSA_QOS_RESERVED_PETYPE} + +type + TErrorItem = record + err: Integer; + msg: string; + end; + +const + CErrorList: array [0 .. 
173] of TErrorItem = ((err: WSAEINTR; msg: 'Interrupted function call'), (err: WSAEBADF; msg: 'Bad file number'), (err: WSAEACCES; + msg: 'Permission denied'), (err: WSAEFAULT; msg: 'Bad address'), (err: WSAEINVAL; msg: 'Invalid argument / Invalid data found when processing input'), + (err: WSAEMFILE; msg: 'Too many open files'), (err: WSAENAMETOOLONG; msg: 'File name too long'), (err: WSAENOTEMPTY; msg: 'Directory not empty'), + (err: WSAELOOP; msg: 'Too many symbolic links encountered'), (err: WSAEREMOTE; msg: 'Too many levels of remote in path'), (err: WSAEUSERS; + msg: 'Too many users'), (err: WSAENOTSOCK; msg: 'Socket operation on non-socket'), (err: WSAEDESTADDRREQ; msg: 'Destination address required'), + (err: WSAEMSGSIZE; msg: 'Message too long'), (err: WSAEPROTOTYPE; msg: 'Protocol wrong type for socket'), (err: WSAENOPROTOOPT; + msg: 'Protocol not available'), (err: WSAEPROTONOSUPPORT; msg: 'Protocol not supported'), (err: WSAESOCKTNOSUPPORT; msg: 'Socket type not supported'), + (err: WSAEOPNOTSUPP; msg: 'Operation not supported on transport endpoint'), (err: WSAEPFNOSUPPORT; msg: 'Protocol family not supported'), + (err: WSAEAFNOSUPPORT; msg: 'Address family not supported by protocol'), (err: WSAEADDRINUSE; msg: 'Address already in use'), (err: WSAEADDRNOTAVAIL; + msg: 'Cannot assign requested address'), (err: WSAENETDOWN; msg: 'Network is down'), (err: WSAENETUNREACH; msg: 'Network is unreachable'), + (err: WSAENETRESET; msg: 'Network dropped connection because of reset'), (err: WSAECONNABORTED; msg: 'Software caused connection abort'), + (err: WSAECONNRESET; msg: 'Connection reset by peer'), (err: WSAENOBUFS; msg: 'No buffer space available'), (err: WSAEISCONN; + msg: 'Transport endpoint is already connected'), (err: WSAENOTCONN; msg: 'Transport endpoint is not connected'), (err: WSAESHUTDOWN; + msg: 'Cannot send after transport endpoint shutdown'), (err: WSAETOOMANYREFS; msg: 'Too many references: cannot splice'), (err: WSAETIMEDOUT; + msg: 'Connection timed out'), (err: WSAECONNREFUSED; msg: 'Connection refused'), (err: WSAEHOSTDOWN; msg: 'Host is down'), (err: WSAEHOSTUNREACH; + msg: 'No route to host'), (err: WSAEALREADY; msg: 'Operation already in progress'), (err: WSAEINPROGRESS; msg: 'Operation now in progress'), + (err: WSAESTALE; msg: 'Stale NFS file handle'), (err: WSAEDQUOT; msg: 'Quota exceeded'), (err: WSAEWOULDBLOCK; msg: 'WSAEWOULDBLOCK'), (err: WSAEPROCLIM; + msg: 'WSAEPROCLIM'), (err: WSAEDISCON; msg: 'WSAEDISCON'), (err: WSASYSNOTREADY; msg: 'WSASYSNOTREADY'), (err: WSAVERNOTSUPPORTED; + msg: 'WSAVERNOTSUPPORTED'), (err: WSANOTINITIALISED; msg: 'WSANOTINITIALISED'), (err: WSAHOST_NOT_FOUND; msg: 'WSAHOST_NOT_FOUND'), (err: WSATRY_AGAIN; + msg: 'WSATRY_AGAIN'), (err: WSANO_RECOVERY; msg: 'WSANO_RECOVERY'), (err: WSANO_DATA; msg: 'WSANO_DATA'), (err: WSAENOMORE; msg: 'WSAENOMORE'), + (err: WSAECANCELLED; msg: 'WSAECANCELLED'), (err: WSAEINVALIDPROCTABLE; msg: 'WSAEINVALIDPROCTABLE'), (err: WSAEINVALIDPROVIDER; + msg: 'WSAEINVALIDPROVIDER'), (err: WSAEPROVIDERFAILEDINIT; msg: 'WSAEPROVIDERFAILEDINIT'), (err: WSASYSCALLFAILURE; msg: 'WSASYSCALLFAILURE'), + (err: WSASERVICE_NOT_FOUND; msg: 'WSASERVICE_NOT_FOUND'), (err: WSATYPE_NOT_FOUND; msg: 'WSATYPE_NOT_FOUND'), (err: WSA_E_NO_MORE; msg: 'WSA_E_NO_MORE'), + (err: WSA_E_CANCELLED; msg: 'WSA_E_CANCELLED'), (err: WSAEREFUSED; msg: 'WSAEREFUSED'), // + (err: AVERROR_BSF_NOT_FOUND; msg: 'Bitstream filter not found'), (err: AVERROR_BUG; msg: 'Internal bug, should not have happened'), (err: AVERROR_BUG2; + msg: 
'Internal bug, should not have happened'), (err: AVERROR_BUFFER_TOO_SMALL; msg: 'Buffer too small'), (err: AVERROR_DECODER_NOT_FOUND; + msg: 'Decoder not found'), (err: AVERROR_DEMUXER_NOT_FOUND; msg: 'Demuxer not found'), (err: AVERROR_ENCODER_NOT_FOUND; msg: 'Encoder not found'), + (err: AVERROR_EOF; msg: 'End of file'), (err: AVERROR_EXIT; msg: 'Immediate exit requested'), (err: AVERROR_EXTERNAL; + msg: 'Generic error in an external library'), (err: AVERROR_FILTER_NOT_FOUND; msg: 'Filter not found'), (err: AVERROR_INVALIDDATA; + msg: 'Invalid data found when processing input'), (err: AVERROR_MUXER_NOT_FOUND; msg: 'Muxer not found'), (err: AVERROR_OPTION_NOT_FOUND; + msg: 'Option not found'), (err: AVERROR_PATCHWELCOME; msg: 'Not yet implemented in FFmpeg, patches welcome'), (err: AVERROR_PROTOCOL_NOT_FOUND; + msg: 'Protocol not found'), (err: AVERROR_STREAM_NOT_FOUND; msg: 'Stream not found'), (err: AVERROR_UNKNOWN; msg: 'Unknown error occurred'), + (err: AVERROR_EXPERIMENTAL; msg: 'Requested feature is flagged experimental. Set strict_std_compliance if you really want to use it.'), + (err: AVERROR_INPUT_CHANGED; msg: 'Input changed between calls. Reconfiguration is required. (can be OR-ed with AVERROR_OUTPUT_CHANGED)'), + (err: AVERROR_OUTPUT_CHANGED; msg: 'Output changed between calls. Reconfiguration is required. (can be OR-ed with AVERROR_INPUT_CHANGED)'), + (err: AVERROR_HTTP_BAD_REQUEST; msg: 'HTTP or RTSP error: bad request(400)'), (err: AVERROR_HTTP_UNAUTHORIZED; + msg: 'HTTP or RTSP error: unauthorized(401)'), (err: AVERROR_HTTP_FORBIDDEN; msg: 'HTTP or RTSP error: forbidden(403)'), (err: AVERROR_HTTP_NOT_FOUND; + msg: 'HTTP or RTSP error: not found(404)'), (err: AVERROR_HTTP_OTHER_4XX; msg: 'HTTP or RTSP error: other error(4xx)'), (err: AVERROR_HTTP_SERVER_ERROR; + msg: 'HTTP or RTSP error: server error(5xx)'), (err: AVERROR_ENOENT; msg: 'No such file or directory'), (err: AVERROR_ESRCH; msg: 'No such process'), + (err: AVERROR_EINTR; msg: 'Interrupted function call'), (err: AVERROR_EIO; msg: 'I/O error'), (err: AVERROR_ENXIO; msg: 'No such device or address'), + (err: AVERROR_E2BIG; msg: 'Argument list too long'), (err: AVERROR_ENOEXEC; msg: 'Exec format error'), (err: AVERROR_EBADF; msg: 'Bad file number'), + (err: AVERROR_ECHILD; msg: 'No child processes'), (err: AVERROR_EAGAIN; msg: 'Resource temporarily unavailable / Try again'), (err: AVERROR_ENOMEM; + msg: 'Not enough space / Out of memory'), (err: AVERROR_EACCES; msg: 'Permission denied'), (err: AVERROR_EFAULT; msg: 'Bad address'), (err: AVERROR_ENOTBLK; + msg: 'Unknown error'), (err: AVERROR_EBUSY; msg: 'Device or resource busy'), (err: AVERROR_EEXIST; msg: 'File exists'), (err: AVERROR_EXDEV; + msg: 'Cross-device link'), (err: AVERROR_ENODEV; msg: 'No such device'), (err: AVERROR_ENOTDIR; msg: 'Not a directory'), (err: AVERROR_EISDIR; + msg: 'Is a directory'), (err: AVERROR_EINVAL; msg: 'Invalid argument / Invalid data found when processing input'), (err: AVERROR_ENFILE; + msg: 'Too many open files in system / File table overflow'), (err: AVERROR_EMFILE; msg: 'Too many open files'), (err: AVERROR_ENOTTY; + msg: 'Inappropriate I/O control operation / Not a typewriter'), (err: AVERROR_ETXTBSY; msg: 'Unknown error'), (err: AVERROR_EFBIG; msg: 'File too large'), + (err: AVERROR_ENOSPC; msg: 'No space left on device'), (err: AVERROR_ESPIPE; msg: 'Illegal seek'), (err: AVERROR_EROFS; msg: 'Read-only file system'), + (err: AVERROR_EMLINK; msg: 'Too many links'), (err: AVERROR_EPIPE; msg: 'Broken pipe'), (err: AVERROR_EDOM; 
msg: 'Math argument out of domain of func'), + (err: AVERROR_ERANGE; msg: 'Math result not representable'), (err: AVERROR_EDEADLK; msg: 'Resource deadlock avoided'), (err: AVERROR_ENAMETOOLONG; + msg: 'File name too long'), (err: AVERROR_ENOLCK; msg: 'No locks available'), (err: AVERROR_ENOSYS; msg: 'Function not implemented'), + (err: AVERROR_ENOTEMPTY; msg: 'Directory not empty'), (err: AVERROR_ELOOP; msg: 'Too many symbolic links encountered'), (err: AVERROR_ENOMSG; + msg: 'Unknown error'), (err: AVERROR_EIDRM; msg: 'Unknown error'), (err: AVERROR_ENOSTR; msg: 'Unknown error'), (err: AVERROR_ENODATA; + msg: 'Unknown error'), (err: AVERROR_ETIME; msg: 'Unknown error'), (err: AVERROR_ENOSR; msg: 'Unknown error'), (err: AVERROR_EREMOTE; msg: 'Unknown error'), + (err: AVERROR_ENOLINK; msg: 'Unknown error'), (err: AVERROR_EPROTO; msg: 'Protocol error'), (err: AVERROR_EMULTIHOP; msg: 'Unknown error'), + (err: AVERROR_EBADMSG; msg: 'Unknown error'), (err: AVERROR_EOVERFLOW; msg: 'Value too large for defined data type'), (err: AVERROR_EILSEQ; + msg: 'Illegal byte sequence'), (err: AVERROR_EUSERS; msg: 'Unknown error'), (err: AVERROR_ENOTSOCK; msg: 'Socket operation on non-socket'), + (err: AVERROR_EDESTADDRREQ; msg: 'Destination address required'), (err: AVERROR_EMSGSIZE; msg: 'Message too long'), (err: AVERROR_EPROTOTYPE; + msg: 'Protocol wrong type for socket'), (err: AVERROR_ENOPROTOOPT; msg: 'Protocol not available'), (err: AVERROR_EPROTONOSUPPORT; + msg: 'Protocol not supported'), (err: AVERROR_ESOCKTNOSUPPORT; msg: 'Unknown error'), (err: AVERROR_EOPNOTSUPP; + msg: 'Operation not supported on transport endpoint'), (err: AVERROR_EPFNOSUPPORT; msg: 'Unknown error'), (err: AVERROR_EAFNOSUPPORT; + msg: 'Address family not supported by protocol'), (err: AVERROR_EADDRINUSE; msg: 'Address already in use'), (err: AVERROR_EADDRNOTAVAIL; + msg: 'Cannot assign requested address'), (err: AVERROR_ENETDOWN; msg: 'Network is down'), (err: AVERROR_ENETUNREACH; msg: 'Network is unreachable'), + (err: AVERROR_ENETRESET; msg: 'Network dropped connection because of reset'), (err: AVERROR_ECONNABORTED; msg: 'Software caused connection abort'), + (err: AVERROR_ECONNRESET; msg: 'Connection reset by peer'), (err: AVERROR_ENOBUFS; msg: 'No buffer space available'), (err: AVERROR_EISCONN; + msg: 'Transport endpoint is already connected'), (err: AVERROR_ENOTCONN; msg: 'Transport endpoint is not connected'), (err: AVERROR_ESHUTDOWN; + msg: 'Unknown error'), (err: AVERROR_ETOOMANYREFS; msg: 'Unknown error'), (err: AVERROR_ETIMEDOUT; msg: 'Connection timed out'), (err: AVERROR_ECONNREFUSED; + msg: 'Connection refused'), (err: AVERROR_EHOSTDOWN; msg: 'Unknown error'), (err: AVERROR_EHOSTUNREACH; msg: 'No route to host'), (err: AVERROR_EALREADY; + msg: 'Operation already in progress'), (err: AVERROR_EINPROGRESS; msg: 'Operation now in progress'), (err: AVERROR_ESTALE; msg: 'Unknown error'), + (err: AVERROR_ECANCELED; msg: 'Operation Canceled'), (err: AVERROR_EOWNERDEAD; msg: 'Owner died'), (err: AVERROR_ENOTRECOVERABLE; + msg: 'State not recoverable')); + + (* * + * Put a description of the AVERROR code errnum in errbuf. + * In case of failure the global variable errno is set to indicate the + * error. Even in case of failure av_strerror() will print a generic + * error message indicating the errnum provided to errbuf. 
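+   *
+   * A minimal Pascal sketch (err is assumed to hold an AVERROR code):
+   * @code
+   * var
+   *   buf: array [0 .. AV_ERROR_MAX_STRING_SIZE - 1] of AnsiChar;
+   * begin
+   *   if av_strerror(err, @buf[0], SizeOf(buf)) = 0 then
+   *     WriteLn(PAnsiChar(@buf[0]))
+   *   else
+   *     WriteLn('unknown error ', err);
+   * end;
+   * @endcode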
+ * + * @param errnum error code to describe + * @param errbuf buffer to which description is written + * @param errbuf_size the size in bytes of errbuf + * @return 0 on success, a negative value if a description for errnum + * cannot be found + *) + // int av_strerror(int errnum, char *errbuf, size_t errbuf_size); +function av_strerror(errnum: int; errbuf: PAnsiChar; errbuf_size: size_t): int; cdecl; external avutil_dll; +(* * + * Fill the provided buffer with a string containing an error string + * corresponding to the AVERROR code errnum. + * + * @param errbuf a buffer + * @param errbuf_size size in bytes of errbuf + * @param errnum error code to describe + * @return the buffer in input, filled with the error description + * @see av_strerror() +*) +// static inline char *av_make_error_string(char *errbuf, size_t errbuf_size, int errnum) +function av_make_error_string(errbuf: PAnsiChar; errbuf_size: size_t; errnum: int): PAnsiChar; inline; + +(* * + * Convenience macro, the return value should be used only directly in + * function arguments but never stand-alone. +*) +function av_err2str(errnum: int): PAnsiChar; + +{$ENDREGION} +{$REGION 'cpu.h'} + +const + AV_CPU_FLAG_FORCE = $80000000; (* force usage of selected flags (OR) *) + + (* lower 16 bits - CPU features *) + AV_CPU_FLAG_MMX = $0001; // < standard MMX + AV_CPU_FLAG_MMXEXT = $0002; // < SSE integer functions or AMD MMX ext + AV_CPU_FLAG_MMX2 = $0002; // < SSE integer functions or AMD MMX ext + AV_CPU_FLAG_3DNOW = $0004; // < AMD 3DNOW + AV_CPU_FLAG_SSE = $0008; // < SSE functions + AV_CPU_FLAG_SSE2 = $0010; // < PIV SSE2 functions + AV_CPU_FLAG_SSE2SLOW = $40000000; // < SSE2 supported, but usually not faster + // < than regular MMX/SSE (e.g. Core1) + AV_CPU_FLAG_3DNOWEXT = $0020; // < AMD 3DNowExt + AV_CPU_FLAG_SSE3 = $0040; // < Prescott SSE3 functions + AV_CPU_FLAG_SSE3SLOW = $20000000; // < SSE3 supported, but usually not faster + // < than regular MMX/SSE (e.g. Core1) + AV_CPU_FLAG_SSSE3 = $0080; // < Conroe SSSE3 functions + AV_CPU_FLAG_SSSE3SLOW = $4000000; // < SSSE3 supported, but usually not faster + AV_CPU_FLAG_ATOM = $10000000; // < Atom processor, some SSSE3 instructions are slower + AV_CPU_FLAG_SSE4 = $0100; // < Penryn SSE4.1 functions + AV_CPU_FLAG_SSE42 = $0200; // < Nehalem SSE4.2 functions + AV_CPU_FLAG_AESNI = $80000; // < Advanced Encryption Standard functions + AV_CPU_FLAG_AVX = $4000; // < AVX functions: requires OS support even if YMM registers aren't used + AV_CPU_FLAG_AVXSLOW = $8000000; // < AVX supported, but slow when using YMM registers (e.g. 
Bulldozer) + AV_CPU_FLAG_XOP = $0400; // < Bulldozer XOP functions + AV_CPU_FLAG_FMA4 = $0800; // < Bulldozer FMA4 functions + AV_CPU_FLAG_CMOV = $1000; // < supports cmov instruction + AV_CPU_FLAG_AVX2 = $8000; // < AVX2 functions: requires OS support even if YMM registers aren't used + AV_CPU_FLAG_FMA3 = $10000; // < Haswell FMA3 functions + AV_CPU_FLAG_BMI1 = $20000; // < Bit Manipulation Instruction Set 1 + AV_CPU_FLAG_BMI2 = $40000; // < Bit Manipulation Instruction Set 2 + AV_CPU_FLAG_AVX512 = $100000; // < AVX-512 functions: requires OS support even if YMM/ZMM registers aren't used + + AV_CPU_FLAG_ALTIVEC = $0001; // < standard + AV_CPU_FLAG_VSX = $0002; // < ISA 2.06 + AV_CPU_FLAG_POWER8 = $0004; // < ISA 2.07 + + AV_CPU_FLAG_ARMV5TE = (1 shl 0); + AV_CPU_FLAG_ARMV6 = (1 shl 1); + AV_CPU_FLAG_ARMV6T2 = (1 shl 2); + AV_CPU_FLAG_VFP = (1 shl 3); + AV_CPU_FLAG_VFPV3 = (1 shl 4); + AV_CPU_FLAG_NEON = (1 shl 5); + AV_CPU_FLAG_ARMV8 = (1 shl 6); + AV_CPU_FLAG_VFP_VM = (1 shl 7); // < VFPv2 vector mode, deprecated in ARMv7-A and unavailable in various CPUs implementations + AV_CPU_FLAG_SETEND = (1 shl 16); + + (* * + * Return the flags which specify extensions supported by the CPU. + * The returned value is affected by av_force_cpu_flags() if that was used + * before. So av_get_cpu_flags() can easily be used in an application to + * detect the enabled cpu flags. + *) + // int av_get_cpu_flags(void); +function av_get_cpu_flags(): int; cdecl; external avutil_dll; +(* * + * Disables cpu detection and forces the specified flags. + * -1 is a special case that disables forcing of specific flags. +*) +// void av_force_cpu_flags(int flags); +procedure av_force_cpu_flags(flags: int); cdecl; external avutil_dll; +(* * + * Set a mask on flags returned by av_get_cpu_flags(). + * This function is mainly useful for testing. + * Please use av_force_cpu_flags() and av_get_cpu_flags() instead which are more flexible +*) +// attribute_deprecated void av_set_cpu_flags_mask(int mask); +procedure av_set_cpu_flags_mask(mask: int); cdecl; external avutil_dll; + deprecated 'Please use av_force_cpu_flags() and av_get_cpu_flags() instead which are more flexible'; +(* * + * Parse CPU flags from a string. + * + * The returned flags contain the specified flags as well as related unspecified flags. + * + * This function exists only for compatibility with libav. + * Please use av_parse_cpu_caps() when possible. + * @return a combination of AV_CPU_* flags, negative on error. +*) +// attribute_deprecated int av_parse_cpu_flags(const char *s); +function av_parse_cpu_flags(const s: PAnsiChar): int; cdecl; external avutil_dll; deprecated 'Please use av_parse_cpu_caps() when possible'; +(* * + * Parse CPU caps from a string and update the given AV_CPU_* flags based on that. + * + * @return negative on error. +*) +// int av_parse_cpu_caps(unsigned *flags, const char *s); +function av_parse_cpu_caps(var flags: unsigned; const s: PAnsiChar): int; cdecl; external avutil_dll; +(* * + * @return the number of logical CPU cores present. +*) +// int av_cpu_count(void); +function av_cpu_count(): int; cdecl; external avutil_dll; +(* * + * Get the maximum data alignment that may be required by FFmpeg. + * + * Note that this is affected by the build configuration and the CPU flags mask, + * so e.g. if the CPU supports AVX, but libavutil has been built with + * --disable-avx or the AV_CPU_FLAG_AVX flag has been disabled through + * av_set_cpu_flags_mask(), then this function will behave as if AVX is not + * present. 
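+ *
+ * A small sketch of how the CPU helpers above might be combined (the variable
+ * names are illustrative, nothing here is part of the binding itself):
+ *
+ *   var
+ *     flags: int;
+ *   begin
+ *     flags := av_get_cpu_flags();
+ *     if (flags and AV_CPU_FLAG_AVX2) <> 0 then
+ *       Writeln('AVX2 available');
+ *     Writeln('logical cores : ', av_cpu_count());
+ *     Writeln('max alignment : ', av_cpu_max_align());
+ *   end;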
+*) +// size_t av_cpu_max_align(void); +function av_cpu_max_align(): size_t; cdecl; external avutil_dll; +{$ENDREGION} +{$REGION 'audio_fifo.h'} + +(* * + * Context for an Audio FIFO Buffer. + * + * - Operates at the sample level rather than the byte level. + * - Supports multiple channels with either planar or packed sample format. + * - Automatic reallocation when writing to a full buffer. +*) +type + pAVAudioFifo = ^AVAudioFifo; + + AVAudioFifo = record + end; + + (* * + * Free an AVAudioFifo. + * + * @param af AVAudioFifo to free + *) + // void av_audio_fifo_free(AVAudioFifo *af); +procedure av_audio_fifo_free(af: pAVAudioFifo); cdecl; external avutil_dll; +(* * + * Allocate an AVAudioFifo. + * + * @param sample_fmt sample format + * @param channels number of channels + * @param nb_samples initial allocation size, in samples + * @return newly allocated AVAudioFifo, or NULL on error +*) +// AVAudioFifo *av_audio_fifo_alloc(enum AVSampleFormat sample_fmt, int channels, +// int nb_samples); +function av_audio_fifo_alloc(sample_fmt: AVSampleFormat; channels: int; nb_samples: int): pAVAudioFifo; cdecl; external avutil_dll; +(* * + * Reallocate an AVAudioFifo. + * + * @param af AVAudioFifo to reallocate + * @param nb_samples new allocation size, in samples + * @return 0 if OK, or negative AVERROR code on failure +*) +// av_warn_unused_result +// int av_audio_fifo_realloc(AVAudioFifo *af, int nb_samples); +function av_audio_fifo_realloc(af: pAVAudioFifo; nb_samples: int): int; cdecl; external avutil_dll; +(* * + * Write data to an AVAudioFifo. + * + * The AVAudioFifo will be reallocated automatically if the available space + * is less than nb_samples. + * + * @see enum AVSampleFormat + * The documentation for AVSampleFormat describes the data layout. + * + * @param af AVAudioFifo to write to + * @param data audio data plane pointers + * @param nb_samples number of samples to write + * @return number of samples actually written, or negative AVERROR + * code on failure. If successful, the number of samples + * actually written will always be nb_samples. +*) +// int av_audio_fifo_write(AVAudioFifo *af, void **data, int nb_samples); +function av_audio_fifo_write(af: pAVAudioFifo; var data: puint8_t; nb_samples: int): int; cdecl; external avutil_dll; +(* * + * Peek data from an AVAudioFifo. + * + * @see enum AVSampleFormat + * The documentation for AVSampleFormat describes the data layout. + * + * @param af AVAudioFifo to read from + * @param data audio data plane pointers + * @param nb_samples number of samples to peek + * @return number of samples actually peek, or negative AVERROR code + * on failure. The number of samples actually peek will not + * be greater than nb_samples, and will only be less than + * nb_samples if av_audio_fifo_size is less than nb_samples. +*) +// int av_audio_fifo_peek(AVAudioFifo *af, void **data, int nb_samples); +function av_audio_fifo_peek(af: pAVAudioFifo; var data: puint8_t; nb_samples: int): int; cdecl; external avutil_dll; +(* * + * Peek data from an AVAudioFifo. + * + * @see enum AVSampleFormat + * The documentation for AVSampleFormat describes the data layout. + * + * @param af AVAudioFifo to read from + * @param data audio data plane pointers + * @param nb_samples number of samples to peek + * @param offset offset from current read position + * @return number of samples actually peek, or negative AVERROR code + * on failure. 
The number of samples actually peek will not + * be greater than nb_samples, and will only be less than + * nb_samples if av_audio_fifo_size is less than nb_samples. +*) +// int av_audio_fifo_peek_at(AVAudioFifo *af, void **data, int nb_samples, int offset); +function av_audio_fifo_peek_at(af: pAVAudioFifo; var data: Pointer; nb_samples: int; offset: int): int; cdecl; external avutil_dll; +(* * + * Read data from an AVAudioFifo. + * + * @see enum AVSampleFormat + * The documentation for AVSampleFormat describes the data layout. + * + * @param af AVAudioFifo to read from + * @param data audio data plane pointers + * @param nb_samples number of samples to read + * @return number of samples actually read, or negative AVERROR code + * on failure. The number of samples actually read will not + * be greater than nb_samples, and will only be less than + * nb_samples if av_audio_fifo_size is less than nb_samples. +*) +// int av_audio_fifo_read(AVAudioFifo *af, void **data, int nb_samples); +function av_audio_fifo_read(af: pAVAudioFifo; var data: Pointer; nb_samples: int): int; cdecl; external avutil_dll; +(* * + * Drain data from an AVAudioFifo. + * + * Removes the data without reading it. + * + * @param af AVAudioFifo to drain + * @param nb_samples number of samples to drain + * @return 0 if OK, or negative AVERROR code on failure +*) +// int av_audio_fifo_drain(AVAudioFifo *af, int nb_samples); +function av_audio_fifo_drain(af: pAVAudioFifo; nb_samples: int): int; cdecl; external avutil_dll; +(* * + * Reset the AVAudioFifo buffer. + * + * This empties all data in the buffer. + * + * @param af AVAudioFifo to reset +*) +// void av_audio_fifo_reset(AVAudioFifo *af); +procedure av_audio_fifo_reset(af: pAVAudioFifo); cdecl; external avutil_dll; +(* * + * Get the current number of samples in the AVAudioFifo available for reading. + * + * @param af the AVAudioFifo to query + * @return number of samples available for reading +*) +// int av_audio_fifo_size(AVAudioFifo *af); +function av_audio_fifo_size(af: pAVAudioFifo): int; cdecl; external avutil_dll; +(* * + * Get the current number of samples in the AVAudioFifo available for writing. + * + * @param af the AVAudioFifo to query + * @return number of samples available for writing +*) +// int av_audio_fifo_space(AVAudioFifo *af); +function av_audio_fifo_space(af: pAVAudioFifo): int; cdecl; external avutil_dll; +{$ENDREGION} +{$REGION 'avstring.h'} +(* * + * Return non-zero if pfx is a prefix of str. If it is, *ptr is set to + * the address of the first character in str after the prefix. + * + * @param str input string + * @param pfx prefix to test + * @param ptr updated if the prefix is matched inside str + * @return non-zero if the prefix matches, zero otherwise +*) +// int av_strstart(const char *str, const char *pfx, const char **ptr); +function av_strstart(const str: PAnsiChar; const pfx: PAnsiChar; const ptr: ppAnsiChar): int; cdecl; external avutil_dll; +(* * + * Return non-zero if pfx is a prefix of str independent of case. If + * it is, *ptr is set to the address of the first character in str + * after the prefix. 
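+ *
+ * For example (sketch; 'rest' is just a local variable of this example):
+ *
+ *   var
+ *     rest: PAnsiChar;
+ *   begin
+ *     if av_stristart('RTSP://server/stream', 'rtsp://', @rest) <> 0 then
+ *       Writeln('path part: ', rest);   // prints 'server/stream'
+ *   end;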
+ * + * @param str input string + * @param pfx prefix to test + * @param ptr updated if the prefix is matched inside str + * @return non-zero if the prefix matches, zero otherwise +*) +// int av_stristart(const char *str, const char *pfx, const char **ptr); +function av_stristart(const str: PAnsiChar; const pfx: PAnsiChar; const ptr: ppAnsiChar): int; cdecl; external avutil_dll; +(* * + * Locate the first case-independent occurrence in the string haystack + * of the string needle. A zero-length string needle is considered to + * match at the start of haystack. + * + * This function is a case-insensitive version of the standard strstr(). + * + * @param haystack string to search in + * @param needle string to search for + * @return pointer to the located match within haystack + * or a null pointer if no match +*) +// char *av_stristr(const char *haystack, const char *needle); +function av_stristr(const haystack: PAnsiChar; const needle: PAnsiChar): PAnsiChar; cdecl; external avutil_dll; +(* * + * Locate the first occurrence of the string needle in the string haystack + * where not more than hay_length characters are searched. A zero-length + * string needle is considered to match at the start of haystack. + * + * This function is a length-limited version of the standard strstr(). + * + * @param haystack string to search in + * @param needle string to search for + * @param hay_length length of string to search in + * @return pointer to the located match within haystack + * or a null pointer if no match +*) +// char *av_strnstr(const char *haystack, const char *needle, size_t hay_length); +function av_strnstr(const haystack: PAnsiChar; const needle: PAnsiChar; hay_length: size_t): PAnsiChar; cdecl; external avutil_dll; +(* * + * Copy the string src to dst, but no more than size - 1 bytes, and + * null-terminate dst. + * + * This function is the same as BSD strlcpy(). + * + * @param dst destination buffer + * @param src source string + * @param size size of destination buffer + * @return the length of src + * + * @warning since the return value is the length of src, src absolutely + * _must_ be a properly 0-terminated string, otherwise this will read beyond + * the end of the buffer and possibly crash. +*) +// size_t av_strlcpy(char *dst, const char *src, size_t size); +function av_strlcpy(dst: PAnsiChar; const src: PAnsiChar; size: size_t): size_t; cdecl; external avutil_dll; +(* * + * Append the string src to the string dst, but to a total length of + * no more than size - 1 bytes, and null-terminate dst. + * + * This function is similar to BSD strlcat(), but differs when + * size <= strlen(dst). + * + * @param dst destination buffer + * @param src source string + * @param size size of destination buffer + * @return the total length of src and dst + * + * @warning since the return value use the length of src and dst, these + * absolutely _must_ be a properly 0-terminated strings, otherwise this + * will read beyond the end of the buffer and possibly crash. +*) +// size_t av_strlcat(char *dst, const char *src, size_t size); +function av_strlcat(dst: PAnsiChar; const src: PAnsiChar; size: size_t): size_t; cdecl; external avutil_dll; +(* * + * Append output to a string, according to a format. Never write out of + * the destination buffer, and always put a terminating 0 within + * the buffer. 
+ * @param dst destination buffer (string to which the output is + * appended) + * @param size total size of the destination buffer + * @param fmt printf-compatible format string, specifying how the + * following parameters are used + * @return the length of the string that would have been generated + * if enough space had been available +*) +// size_t av_strlcatf(char *dst, size_t size, const char *fmt, ...) av_printf_format(3, 4); + +(* * + * Get the count of continuous non zero chars starting from the beginning. + * + * @param len maximum number of characters to check in the string, that + * is the maximum value which is returned by the function +*) +// static inline size_t av_strnlen(const char *s, size_t len) +function av_strnlen(const s: PAnsiChar; len: size_t): size_t; inline; + +(* * + * Print arguments following specified format into a large enough auto + * allocated buffer. It is similar to GNU asprintf(). + * @param fmt printf-compatible format string, specifying how the + * following parameters are used. + * @return the allocated string + * @note You have to free the string yourself with av_free(). +*) +// char *av_asprintf(const char *fmt, ...) av_printf_format(1, 2); + +(* * + * Convert a number to an av_malloced string. +*) +// char *av_d2str(double d); +function av_d2str(d: double): PAnsiChar; cdecl; external avutil_dll; +(* * + * Unescape the given string until a non escaped terminating char, + * and return the token corresponding to the unescaped string. + * + * The normal \ and ' escaping is supported. Leading and trailing + * whitespaces are removed, unless they are escaped with '\' or are + * enclosed between ''. + * + * @param buf the buffer to parse, buf will be updated to point to the + * terminating char + * @param term a 0-terminated list of terminating chars + * @return the malloced unescaped string, which must be av_freed by + * the user, NULL in case of allocation failure +*) +// char *av_get_token(const char **buf, const char *term); +function av_get_token(const buf: ppAnsiChar; const term: PAnsiChar): PAnsiChar; cdecl; external avutil_dll; +(* * + * Split the string into several tokens which can be accessed by + * successive calls to av_strtok(). + * + * A token is defined as a sequence of characters not belonging to the + * set specified in delim. + * + * On the first call to av_strtok(), s should point to the string to + * parse, and the value of saveptr is ignored. In subsequent calls, s + * should be NULL, and saveptr should be unchanged since the previous + * call. + * + * This function is similar to strtok_r() defined in POSIX.1. + * + * @param s the string to parse, may be NULL + * @param delim 0-terminated list of token delimiters, must be non-NULL + * @param saveptr user-provided pointer which points to stored + * information necessary for av_strtok() to continue scanning the same + * string. saveptr is updated to point to the next character after the + * first delimiter found, or to NULL if the string was terminated + * @return the found token, or NULL when no token is found +*) +// char *av_strtok(char *s, const char *delim, char **saveptr); +function av_strtok(s: PAnsiChar; const delim: PAnsiChar; saveptr: ppAnsiChar): PAnsiChar; cdecl; external avutil_dll; +(* * + * Locale-independent conversion of ASCII isdigit. +*) +// static inline av_const int av_isdigit(int c) +function av_isdigit(c: int): Boolean; inline; + +(* * + * Locale-independent conversion of ASCII isgraph. 
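+ *
+ * These helpers mirror the C ctype functions but ignore the current locale;
+ * e.g. a digit counter (sketch, 's' and 'digits' are illustrative locals):
+ *
+ *   digits := 0;
+ *   while s^ <> #0 do
+ *   begin
+ *     if av_isdigit(Ord(s^)) then Inc(digits);
+ *     Inc(s);
+ *   end;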
+*) +// static inline av_const int av_isgraph(int c) +function av_isgraph(c: int): Boolean; inline; + +(* * + * Locale-independent conversion of ASCII isspace. +*) +// static inline av_const int av_isspace(int c) +function av_isspace(c1: int): Boolean; inline; + +(* * + * Locale-independent conversion of ASCII characters to uppercase. +*) +// static inline av_const int av_toupper(int c) +function av_toupper(c1: int): int; inline; + +(* * + * Locale-independent conversion of ASCII characters to lowercase. +*) +// static inline av_const int av_tolower(int c) +function av_tolower(c1: int): int; inline; + +(* * + * Locale-independent conversion of ASCII isxdigit. +*) +// static inline av_const int av_isxdigit(int c) +function av_isxdigit(c1: int): Boolean; inline; + +(* * + * Locale-independent case-insensitive compare. + * @note This means only ASCII-range characters are case-insensitive +*) +// int av_strcasecmp(const char *a, const char *b); +function av_strcasecmp(const a: PAnsiChar; const b: PAnsiChar): int; cdecl; external avutil_dll; +(* * + * Locale-independent case-insensitive compare. + * @note This means only ASCII-range characters are case-insensitive +*) +// int av_strncasecmp(const char *a, const char *b, size_t n); +function av_strncasecmp(const a: PAnsiChar; const b: PAnsiChar; n: size_t): int; cdecl; external avutil_dll; +(* * + * Locale-independent strings replace. + * @note This means only ASCII-range characters are replace +*) +// char *av_strireplace(const char *str, const char *from, const char *to); +function av_strireplace(const str: PAnsiChar; const from: PAnsiChar; const _to: PAnsiChar): PAnsiChar; cdecl; external avutil_dll; +(* * + * Thread safe basename. + * @param path the path, on DOS both \ and / are considered separators. + * @return pointer to the basename substring. +*) +// const char *av_basename(const char *path); +function av_basename(const path: PAnsiChar): PAnsiChar; cdecl; external avutil_dll; +(* * + * Thread safe dirname. + * @param path the path, on DOS both \ and / are considered separators. + * @return the path with the separator replaced by the string terminator or ".". + * @note the function may change the input string. +*) +// const char *av_dirname(char *path); +function av_dirname(path: PAnsiChar): PAnsiChar; cdecl; external avutil_dll; +(* * + * Match instances of a name in a comma-separated list of names. + * List entries are checked from the start to the end of the names list, + * the first match ends further processing. If an entry prefixed with '-' + * matches, then 0 is returned. The "ALL" list entry is considered to + * match all names. + * + * @param name Name to look for. + * @param names List of names. + * @return 1 on match, 0 otherwise. +*) +// int av_match_name(const char *name, const char *names); +function av_match_name(const name: PAnsiChar; const names: PAnsiChar): int; cdecl; external avutil_dll; +(* * + * Append path component to the existing path. + * Path separator '/' is placed between when needed. + * Resulting string have to be freed with av_free(). + * @param path base path + * @param component component to be appended + * @return new path or NULL on error. +*) +// char *av_append_path_component(const char *path, const char *component); +function av_append_path_component(const path: PAnsiChar; const component: PAnsiChar): PAnsiChar; cdecl; external avutil_dll; + +type + AVEscapeMode = ( // + AV_ESCAPE_MODE_AUTO, + // < Use auto-selected escaping mode. + AV_ESCAPE_MODE_BACKSLASH, + // < Use backslash escaping. 
+ AV_ESCAPE_MODE_QUOTE + // < Use single-quote escaping. + ); + +const + (* * + * Consider spaces special and escape them even in the middle of the + * string. + * + * This is equivalent to adding the whitespace characters to the special + * characters lists, except it is guaranteed to use the exact same list + * of whitespace characters as the rest of libavutil. + *) + AV_ESCAPE_FLAG_WHITESPACE = (1 shl 0); + + (* * + * Escape only specified special characters. + * Without this flag, escape also any characters that may be considered + * special by av_get_token(), such as the single quote. + *) + AV_ESCAPE_FLAG_STRICT = (1 shl 1); + + (* * + * Escape string in src, and put the escaped string in an allocated + * string in *dst, which must be freed with av_free(). + * + * @param dst pointer where an allocated string is put + * @param src string to escape, must be non-NULL + * @param special_chars string containing the special characters which + * need to be escaped, can be NULL + * @param mode escape mode to employ, see AV_ESCAPE_MODE_* macros. + * Any unknown value for mode will be considered equivalent to + * AV_ESCAPE_MODE_BACKSLASH, but this behaviour can change without + * notice. + * @param flags flags which control how to escape, see AV_ESCAPE_FLAG_ macros + * @return the length of the allocated string, or a negative error code in case of error + * @see av_bprint_escape() + *) + // av_warn_unused_result + // int av_escape(char **dst, const char *src, const char *special_chars, + // enum AVEscapeMode mode, int flags); +function av_escape(var dst: PAnsiChar; const src: PAnsiChar; const special_chars: PAnsiChar; mode: AVEscapeMode; flags: int): int; cdecl; external avutil_dll; + +const + AV_UTF8_FLAG_ACCEPT_INVALID_BIG_CODES = 1; + // < accept codepoints over 0x10FFFF + AV_UTF8_FLAG_ACCEPT_NON_CHARACTERS = 2; + // < accept non-characters - 0xFFFE and 0xFFFF + AV_UTF8_FLAG_ACCEPT_SURROGATES = 4; + // < accept UTF-16 surrogates codes + AV_UTF8_FLAG_EXCLUDE_XML_INVALID_CONTROL_CODES = 8; + // < exclude control codes not accepted by XML + + AV_UTF8_FLAG_ACCEPT_ALL = AV_UTF8_FLAG_ACCEPT_INVALID_BIG_CODES or AV_UTF8_FLAG_ACCEPT_NON_CHARACTERS or AV_UTF8_FLAG_ACCEPT_SURROGATES; + + (* * + * Read and decode a single UTF-8 code point (character) from the + * buffer in *buf, and update *buf to point to the next byte to + * decode. + * + * In case of an invalid byte sequence, the pointer will be updated to + * the next byte after the invalid sequence and the function will + * return an error code. + * + * Depending on the specified flags, the function will also fail in + * case the decoded code point does not belong to a valid range. + * + * @note For speed-relevant code a carefully implemented use of + * GET_UTF8() may be preferred. + * + * @param codep pointer used to return the parsed code in case of success. + * The value in *codep is set even in case the range check fails. + * @param bufp pointer to the address the first byte of the sequence + * to decode, updated by the function to point to the + * byte next after the decoded sequence + * @param buf_end pointer to the end of the buffer, points to the next + * byte past the last in the buffer. This is used to + * avoid buffer overreads (in case of an unfinished + * UTF-8 sequence towards the end of the buffer). 
+ * @param flags a collection of AV_UTF8_FLAG_* flags + * @return >= 0 in case a sequence was successfully read, a negative + * value in case of invalid sequence + *) + // av_warn_unused_result + // int av_utf8_decode(int32_t *codep, const uint8_t **bufp, const uint8_t *buf_end, + // unsigned int flags); +function av_utf8_decode(var codep: int32_t; const bufp: ppuint8_t; const buf_end: puint8_t; flags: unsigned_int): int; cdecl; external avutil_dll; +(* * + * Check if a name is in a list. + * @returns 0 if not found, or the 1 based index where it has been found in the + * list. +*) +// int av_match_list(const char *name, const char *list, char separator); +function av_match_list(const name: PAnsiChar; const list: PAnsiChar; separator: AnsiChar): int; cdecl; external avutil_dll; + +(* + * See libc sscanf manual for more information. + * Locale-independent sscanf implementation. +*) +// int av_sscanf(const char *string, const char *format, ...); + +{$ENDREGION} +{$REGION 'bprint.h'} + +const + (* * + * Convenience macros for special values for av_bprint_init() size_max + * parameter. + *) + AV_BPRINT_SIZE_UNLIMITED = ((max_unsigned) - 1); + AV_BPRINT_SIZE_AUTOMATIC = 1; + AV_BPRINT_SIZE_COUNT_ONLY = 0; + + (* * + * Init a print buffer. + * + * @param buf buffer to init + * @param size_init initial size (including the final 0) + * @param size_max maximum size; + * 0 means do not write anything, just count the length; + * 1 is replaced by the maximum value for automatic storage; + * any large value means that the internal buffer will be + * reallocated as needed up to that limit; -1 is converted to + * UINT_MAX, the largest limit possible. + * Check also AV_BPRINT_SIZE_* macros. + *) + // void av_bprint_init(AVBPrint *buf, unsigned size_init, unsigned size_max); +procedure av_bprint_init(buf: pAVBPrint; size_init: unsigned; size_max: unsigned); cdecl; external avutil_dll; +(* * + * Init a print buffer using a pre-existing buffer. + * + * The buffer will not be reallocated. + * + * @param buf buffer structure to init + * @param buffer byte buffer to use for the string data + * @param size size of buffer +*) +// void av_bprint_init_for_buffer(AVBPrint *buf, char *buffer, unsigned size); +procedure av_bprint_init_for_buffer(buf: pAVBPrint; buffer: PAnsiChar; size: unsigned); cdecl; external avutil_dll; +(* * + * Append a formatted string to a print buffer. +*) +// void av_bprintf(AVBPrint *buf, const char *fmt, ...) av_printf_format(2, 3); + +(* * + * Append a formatted string to a print buffer. +*) +// void av_vbprintf(AVBPrint *buf, const char *fmt, va_list vl_arg); +procedure av_vbprintf(buf: pAVBPrint; const fmt: PAnsiChar; vl_arg: PVA_LIST); cdecl; external avutil_dll; +(* * + * Append char c n times to a print buffer. +*) +// void av_bprint_chars(AVBPrint *buf, char c, unsigned n); +procedure av_bprint_chars(buf: pAVBPrint; c: AnsiChar; n: unsigned); cdecl; external avutil_dll; +(* * + * Append data to a print buffer. + * + * param buf bprint buffer to use + * param data pointer to data + * param size size of data +*) +// void av_bprint_append_data(AVBPrint *buf, const char *data, unsigned size); +procedure av_bprint_append_data(buf: pAVBPrint; const data: PAnsiChar; size: unsigned); cdecl; external avutil_dll; + +type + ptm = ^tm; + + tm = record + end; + + (* * + * Append a formatted date and time to a print buffer. 
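+ *
+ * Like the other av_bprint_* helpers above, this operates on an AVBPrint that
+ * was set up with av_bprint_init(); a typical round trip might look like the
+ * sketch below (it assumes the AVBPrint record is declared earlier in this
+ * unit and that av_free() is bound elsewhere in it):
+ *
+ *   var
+ *     bp: AVBPrint;
+ *     s: PAnsiChar;
+ *   begin
+ *     av_bprint_init(@bp, 0, AV_BPRINT_SIZE_UNLIMITED);
+ *     av_bprint_append_data(@bp, 'hello', 5);
+ *     av_bprint_chars(@bp, '!', 3);
+ *     if av_bprint_finalize(@bp, s) = 0 then   // s now owns 'hello!!!'
+ *     begin
+ *       Writeln(s);
+ *       av_free(s);
+ *     end;
+ *   end;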
+ * + * param buf bprint buffer to use + * param fmt date and time format string, see strftime() + * param tm broken-down time structure to translate + * + * @note due to poor design of the standard strftime function, it may + * produce poor results if the format string expands to a very long text and + * the bprint buffer is near the limit stated by the size_max option. + *) + // void av_bprint_strftime(AVBPrint *buf, const char *fmt, const struct tm *tm); +procedure av_bprint_strftime(buf: pAVBPrint; const fmt: PAnsiChar; const tm: ptm); cdecl; external avutil_dll; +(* * + * Allocate bytes in the buffer for external use. + * + * @param[in] buf buffer structure + * @param[in] size required size + * @param[out] mem pointer to the memory area + * @param[out] actual_size size of the memory area after allocation; + * can be larger or smaller than size +*) +// void av_bprint_get_buffer(AVBPrint *buf, unsigned size, +// unsigned char **mem, unsigned *actual_size); +procedure av_bprint_get_buffer(buf: pAVBPrint; size: unsigned; var mem: punsigned_char; var actual_size: unsigned); cdecl; external avutil_dll; +(* * + * Reset the string to "" but keep internal allocated data. +*) +// void av_bprint_clear(AVBPrint *buf); +procedure av_bprint_clear(buf: pAVBPrint); cdecl; external avutil_dll; +(* * + * Test if the print buffer is complete (not truncated). + * + * It may have been truncated due to a memory allocation failure + * or the size_max limit (compare size and size_max if necessary). +*) +// static inline int av_bprint_is_complete(const AVBPrint *buf) +function av_bprint_is_complete(const buf: pAVBPrint): Boolean; inline; + +(* * + * Finalize a print buffer. + * + * The print buffer can no longer be used afterwards, + * but the len and size fields are still valid. + * + * @arg[out] ret_str if not NULL, used to return a permanent copy of the + * buffer contents, or NULL if memory allocation fails; + * if NULL, the buffer is discarded and freed + * @return 0 for success or error code (probably AVERROR(ENOMEM)) +*) +// int av_bprint_finalize(AVBPrint *buf, char **ret_str); +function av_bprint_finalize(buf: pAVBPrint; var ret_str: PAnsiChar): int; cdecl; external avutil_dll; +(* * + * Escape the content in src and append it to dstbuf. + * + * @param dstbuf already inited destination bprint buffer + * @param src string containing the text to escape + * @param special_chars string containing the special characters which + * need to be escaped, can be NULL + * @param mode escape mode to employ, see AV_ESCAPE_MODE_* macros. + * Any unknown value for mode will be considered equivalent to + * AV_ESCAPE_MODE_BACKSLASH, but this behaviour can change without + * notice. + * @param flags flags which control how to escape, see AV_ESCAPE_FLAG_* macros +*) +// void av_bprint_escape(AVBPrint *dstbuf, const char *src, const char *special_chars, +// enum AVEscapeMode mode, int flags); +procedure av_bprint_escape(dstbuf: pAVBPrint; const src: PAnsiChar; const special_chars: PAnsiChar; mode: AVEscapeMode; flags: int); cdecl; external avutil_dll; +{$ENDREGION} +{$REGION 'display.h'} +(* * + * @addtogroup lavu_video_display + * The display transformation matrix specifies an affine transformation that + * should be applied to video frames for correct presentation. It is compatible + * with the matrices stored in the ISO/IEC 14496-12 container format. 
+ * + * The data is a 3x3 matrix represented as a 9-element array: + * + * @code{.unparsed} + * | a b u | + * (a, b, u, c, d, v, x, y, w) -> | c d v | + * | x y w | + * @endcode + * + * All numbers are stored in native endianness, as 16.16 fixed-point values, + * except for u, v and w, which are stored as 2.30 fixed-point values. + * + * The transformation maps a point (p, q) in the source (pre-transformation) + * frame to the point (p', q') in the destination (post-transformation) frame as + * follows: + * + * @code{.unparsed} + * | a b u | + * (p, q, 1) . | c d v | = z * (p', q', 1) + * | x y w | + * @endcode + * + * The transformation can also be more explicitly written in components as + * follows: + * + * @code{.unparsed} + * p' = (a * p + c * q + x) / z; + * q' = (b * p + d * q + y) / z; + * z = u * p + v * q + w + * @endcode +*) + +type + Tav_display_matrix = array [0 .. 8] of int32_t; + + (* * + * Extract the rotation component of the transformation matrix. + * + * @param matrix the transformation matrix + * @return the angle (in degrees) by which the transformation rotates the frame + * counterclockwise. The angle will be in range [-180.0, 180.0], + * or NaN if the matrix is singular. + * + * @note floating point numbers are inherently inexact, so callers are + * recommended to round the return value to nearest integer before use. + *) + // double av_display_rotation_get(const int32_t matrix[9]); +function av_display_rotation_get(const matrix: Tav_display_matrix): double; cdecl; external avutil_dll; +(* * + * Initialize a transformation matrix describing a pure counterclockwise + * rotation by the specified angle (in degrees). + * + * @param matrix an allocated transformation matrix (will be fully overwritten + * by this function) + * @param angle rotation angle in degrees. +*) +// void av_display_rotation_set(int32_t matrix[9], double angle); +procedure av_display_rotation_set(matrix: Tav_display_matrix; angle: double); cdecl; external avutil_dll; +(* * + * Flip the input matrix horizontally and/or vertically. + * + * @param matrix an allocated transformation matrix + * @param hflip whether the matrix should be flipped horizontally + * @param vflip whether the matrix should be flipped vertically +*) +// void av_display_matrix_flip(int32_t matrix[9], int hflip, int vflip); +procedure av_display_matrix_flip(matrix: Tav_display_matrix; hflip: int; vflip: int); cdecl; external avutil_dll; +{$ENDREGION} +{$REGION 'eval.h'} + +type + pAVExpr = ^AVExpr; + + AVExpr = record + end; + + // double (* const *funcs1)(void *, double) + Tav_expr_funcs1 = function(p1: Pointer; p2: double): ppDouble; cdecl; + // double (* const *funcs2)(void *, double, double) + Tav_expr_funcs2 = function(p1: Pointer; p2: double; p3: double): ppDouble; cdecl; + + (* * + * Parse and evaluate an expression. + * Note, this is significantly slower than av_expr_eval(). 
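+ *
+ * A one-shot evaluation with no user-supplied constants or functions might
+ * look like this (sketch only):
+ *
+ *   var
+ *     v: double;
+ *   begin
+ *     if av_expr_parse_and_eval(v, '1+2^3+5*5+sin(2/3)',
+ *          nil, nil, nil, nil, nil, nil, nil, 0, nil) >= 0 then
+ *       Writeln('value = ', v:0:6);
+ *   end;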
+ * + * @param res a pointer to a double where is put the result value of + * the expression, or NAN in case of error + * @param s expression as a zero terminated string, for example "1+2^3+5*5+sin(2/3)" + * @param const_names NULL terminated array of zero terminated strings of constant identifiers, for example {"PI", "E", 0} + * @param const_values a zero terminated array of values for the identifiers from const_names + * @param func1_names NULL terminated array of zero terminated strings of funcs1 identifiers + * @param funcs1 NULL terminated array of function pointers for functions which take 1 argument + * @param func2_names NULL terminated array of zero terminated strings of funcs2 identifiers + * @param funcs2 NULL terminated array of function pointers for functions which take 2 arguments + * @param opaque a pointer which will be passed to all functions from funcs1 and funcs2 + * @param log_ctx parent logging context + * @return >= 0 in case of success, a negative value corresponding to an + * AVERROR code otherwise + *) + // int av_expr_parse_and_eval(double *res, const char *s, + // const char * const *const_names, const double *const_values, + // const char * const *func1_names, double (* const *funcs1)(void *, double), + // const char * const *func2_names, double (* const *funcs2)(void *, double, double), + // void *opaque, int log_offset, void *log_ctx); + +function av_expr_parse_and_eval(var res: double; const s: PAnsiChar; const_names: ppAnsiChar; const const_values: pdouble; func1_names: ppAnsiChar; + funcs1: Tav_expr_funcs1; func2_names: ppAnsiChar; funcs2: Tav_expr_funcs2; opaque: Pointer; log_offset: int; log_ctx: Pointer): int; cdecl; + external avutil_dll; +(* * + * Parse an expression. + * + * @param expr a pointer where is put an AVExpr containing the parsed + * value in case of successful parsing, or NULL otherwise. + * The pointed to AVExpr must be freed with av_expr_free() by the user + * when it is not needed anymore. + * @param s expression as a zero terminated string, for example "1+2^3+5*5+sin(2/3)" + * @param const_names NULL terminated array of zero terminated strings of constant identifiers, for example {"PI", "E", 0} + * @param func1_names NULL terminated array of zero terminated strings of funcs1 identifiers + * @param funcs1 NULL terminated array of function pointers for functions which take 1 argument + * @param func2_names NULL terminated array of zero terminated strings of funcs2 identifiers + * @param funcs2 NULL terminated array of function pointers for functions which take 2 arguments + * @param log_ctx parent logging context + * @return >= 0 in case of success, a negative value corresponding to an + * AVERROR code otherwise +*) +// int av_expr_parse(AVExpr **expr, const char *s, +// const char * const *const_names, +// const char * const *func1_names, double (* const *funcs1)(void *, double), +// const char * const *func2_names, double (* const *funcs2)(void *, double, double), +// int log_offset, void *log_ctx); + +function av_expr_parse(var expr: pAVExpr; const s: PAnsiChar; const_names: ppAnsiChar; func1_names: ppAnsiChar; funcs1: Tav_expr_funcs1; + func2_names: ppAnsiChar; funcs2: Tav_expr_funcs2; log_offset: int; log_ctx: Pointer): int; cdecl; external avutil_dll; +(* * + * Evaluate a previously parsed expression. 
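+ *
+ * When the same expression is evaluated many times it is cheaper to parse it
+ * once with av_expr_parse() above and reuse the AVExpr (sketch; 'names' and
+ * 'vals' are illustrative locals):
+ *
+ *   const
+ *     names: array[0..1] of PAnsiChar = ('t', nil);
+ *   var
+ *     e: pAVExpr;
+ *     vals: array[0..0] of double;
+ *     i: Integer;
+ *   begin
+ *     if av_expr_parse(e, 'sin(t)*t', @names[0], nil, nil, nil, nil, 0, nil) >= 0 then
+ *     begin
+ *       for i := 0 to 9 do
+ *       begin
+ *         vals[0] := i;
+ *         Writeln(av_expr_eval(e, @vals[0], nil):0:4);
+ *       end;
+ *       av_expr_free(e);
+ *     end;
+ *   end;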
+ * + * @param const_values a zero terminated array of values for the identifiers from av_expr_parse() const_names + * @param opaque a pointer which will be passed to all functions from funcs1 and funcs2 + * @return the value of the expression +*) +// double av_expr_eval(AVExpr *e, const double *const_values, void *opaque); +function av_expr_eval(e: pAVExpr; const const_values: pdouble; opaque: Pointer): double; cdecl; external avutil_dll; +(* * + * Free a parsed expression previously created with av_expr_parse(). +*) +// void av_expr_free(AVExpr *e); +procedure av_expr_free(e: pAVExpr); cdecl; external avutil_dll; +(* * + * Parse the string in numstr and return its value as a double. If + * the string is empty, contains only whitespaces, or does not contain + * an initial substring that has the expected syntax for a + * floating-point number, no conversion is performed. In this case, + * returns a value of zero and the value returned in tail is the value + * of numstr. + * + * @param numstr a string representing a number, may contain one of + * the International System number postfixes, for example 'K', 'M', + * 'G'. If 'i' is appended after the postfix, powers of 2 are used + * instead of powers of 10. The 'B' postfix multiplies the value by + * 8, and can be appended after another postfix or used alone. This + * allows using for example 'KB', 'MiB', 'G' and 'B' as postfix. + * @param tail if non-NULL puts here the pointer to the char next + * after the last parsed character +*) +// double av_strtod(const char *numstr, char **tail); +function av_strtod(const numstr: PAnsiChar; var tail: PAnsiChar): double; cdecl; external avutil_dll; +{$ENDREGION} +{$REGION 'fifo.h'} + +type + pAVFifoBuffer = ^AVFifoBuffer; + + AVFifoBuffer = record + buffer: puint8_t; + rptr, wptr, _end: puint8_t; + rndx, wndx: uint32_t; + end; + + (* * + * Initialize an AVFifoBuffer. + * @param size of FIFO + * @return AVFifoBuffer or NULL in case of memory allocation failure + *) + // AVFifoBuffer *av_fifo_alloc(unsigned int size); +function av_fifo_alloc(size: unsigned_int): pAVFifoBuffer; cdecl; external avutil_dll; +(* * + * Initialize an AVFifoBuffer. + * @param nmemb number of elements + * @param size size of the single element + * @return AVFifoBuffer or NULL in case of memory allocation failure +*) +// AVFifoBuffer *av_fifo_alloc_array(size_t nmemb, size_t size); +function av_fifo_alloc_array(nmemb: size_t; size: size_t): pAVFifoBuffer; cdecl; external avutil_dll; +(* * + * Free an AVFifoBuffer. + * @param f AVFifoBuffer to free +*) +// void av_fifo_free(AVFifoBuffer *f); +procedure av_fifo_free(f: pAVFifoBuffer); cdecl; external avutil_dll; +(* * + * Free an AVFifoBuffer and reset pointer to NULL. + * @param f AVFifoBuffer to free +*) +// void av_fifo_freep(AVFifoBuffer **f); +procedure av_fifo_freep(var f: pAVFifoBuffer); cdecl; external avutil_dll; +(* * + * Reset the AVFifoBuffer to the state right after av_fifo_alloc, in particular it is emptied. + * @param f AVFifoBuffer to reset +*) +// void av_fifo_reset(AVFifoBuffer *f); +procedure av_fifo_reset(f: pAVFifoBuffer); cdecl; external avutil_dll; +(* * + * Return the amount of data in bytes in the AVFifoBuffer, that is the + * amount of data you can read from it. 
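+ *
+ * A byte-oriented round trip using the FIFO helpers declared above and below
+ * (sketch; passing nil as the callback makes the generic read/write helpers
+ * copy the data as a plain byte buffer):
+ *
+ *   var
+ *     f: pAVFifoBuffer;
+ *     src, dst: array[0..15] of Byte;
+ *   begin
+ *     f := av_fifo_alloc(1024);
+ *     if f = nil then Exit;
+ *     av_fifo_generic_write(f, @src[0], SizeOf(src), nil);
+ *     Writeln('bytes queued: ', av_fifo_size(f));
+ *     av_fifo_generic_read(f, @dst[0], SizeOf(dst), nil);
+ *     av_fifo_free(f);
+ *   end;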
+ * @param f AVFifoBuffer to read from + * @return size +*) +// int av_fifo_size(const AVFifoBuffer *f); +function av_fifo_size(const f: pAVFifoBuffer): int; cdecl; external avutil_dll; +(* * + * Return the amount of space in bytes in the AVFifoBuffer, that is the + * amount of data you can write into it. + * @param f AVFifoBuffer to write into + * @return size +*) +// int av_fifo_space(const AVFifoBuffer *f); +function av_fifo_space(const f: pAVFifoBuffer): int; cdecl; external avutil_dll; + +(* * + * Feed data at specific position from an AVFifoBuffer to a user-supplied callback. + * Similar as av_fifo_gereric_read but without discarding data. + * @param f AVFifoBuffer to read from + * @param offset offset from current read position + * @param buf_size number of bytes to read + * @param func generic read function + * @param dest data destination +*) +// int av_fifo_generic_peek_at(AVFifoBuffer *f, void *dest, int offset, int buf_size, void (*func)(void*, void*, int)); +type + Tav_fifo_proc = procedure(p1: Pointer; p2: Pointer; p3: int); cdecl; + Tav_fifo_func = function(p1: Pointer; p2: Pointer; p3: int): int; cdecl; + +function av_fifo_generic_peek_at(f: pAVFifoBuffer; dest: Pointer; offset: int; buf_size: int; func: Tav_fifo_proc): int; cdecl; external avutil_dll; +(* * + * Feed data from an AVFifoBuffer to a user-supplied callback. + * Similar as av_fifo_gereric_read but without discarding data. + * @param f AVFifoBuffer to read from + * @param buf_size number of bytes to read + * @param func generic read function + * @param dest data destination +*) +// int av_fifo_generic_peek(AVFifoBuffer *f, void *dest, int buf_size, void (*func)(void*, void*, int)); +function av_fifo_generic_peek(f: pAVFifoBuffer; dest: Pointer; buf_size: int; func: Tav_fifo_proc): int; cdecl; external avutil_dll; +(* * + * Feed data from an AVFifoBuffer to a user-supplied callback. + * @param f AVFifoBuffer to read from + * @param buf_size number of bytes to read + * @param func generic read function + * @param dest data destination +*) +// int av_fifo_generic_read(AVFifoBuffer *f, void *dest, int buf_size, void (*func)(void*, void*, int)); +function av_fifo_generic_read(f: pAVFifoBuffer; dest: Pointer; buf_size: int; func: Tav_fifo_proc): int; cdecl; external avutil_dll; +(* * + * Feed data from a user-supplied callback to an AVFifoBuffer. + * @param f AVFifoBuffer to write to + * @param src data source; non-const since it may be used as a + * modifiable context by the function defined in func + * @param size number of bytes to write + * @param func generic write function; the first parameter is src, + * the second is dest_buf, the third is dest_buf_size. + * func must return the number of bytes written to dest_buf, or <= 0 to + * indicate no more data available to write. + * If func is NULL, src is interpreted as a simple byte array for source data. + * @return the number of bytes written to the FIFO +*) +// int av_fifo_generic_write(AVFifoBuffer *f, void *src, int size, int (*func)(void*, void*, int)); +function av_fifo_generic_write(f: pAVFifoBuffer; src: Pointer; size: int; func: Tav_fifo_func): int; cdecl; external avutil_dll; +(* * + * Resize an AVFifoBuffer. + * In case of reallocation failure, the old FIFO is kept unchanged. 
+ * + * @param f AVFifoBuffer to resize + * @param size new AVFifoBuffer size in bytes + * @return <0 for failure, >=0 otherwise +*) +// int av_fifo_realloc2(AVFifoBuffer *f, unsigned int size); +function av_fifo_realloc2(f: pAVFifoBuffer; size: unsigned_int): int; cdecl; external avutil_dll; +(* * + * Enlarge an AVFifoBuffer. + * In case of reallocation failure, the old FIFO is kept unchanged. + * The new fifo size may be larger than the requested size. + * + * @param f AVFifoBuffer to resize + * @param additional_space the amount of space in bytes to allocate in addition to av_fifo_size() + * @return <0 for failure, >=0 otherwise +*) +// int av_fifo_grow(AVFifoBuffer *f, unsigned int additional_space); +function av_fifo_grow(f: pAVFifoBuffer; additional_space: unsigned_int): int; cdecl; external avutil_dll; +(* * + * Read and discard the specified amount of data from an AVFifoBuffer. + * @param f AVFifoBuffer to read from + * @param size amount of data to read in bytes +*) +// void av_fifo_drain(AVFifoBuffer *f, int size); +procedure av_fifo_drain(f: pAVFifoBuffer; size: int); cdecl; external avutil_dll; +(* * + * Return a pointer to the data stored in a FIFO buffer at a certain offset. + * The FIFO buffer is not modified. + * + * @param f AVFifoBuffer to peek at, f must be non-NULL + * @param offs an offset in bytes, its absolute value must be less + * than the used buffer size or the returned pointer will + * point outside to the buffer data. + * The used buffer size can be checked with av_fifo_size(). +*) +// static inline uint8_t *av_fifo_peek2(const AVFifoBuffer *f, int offs) +function av_fifo_peek2(const f: pAVFifoBuffer; offs: int): puint8_t; inline; + +{$ENDREGION} +{$REGION 'hwcontext.h'} + +type + AVHWDeviceType = (AV_HWDEVICE_TYPE_NONE, AV_HWDEVICE_TYPE_VDPAU, AV_HWDEVICE_TYPE_CUDA, AV_HWDEVICE_TYPE_VAAPI, AV_HWDEVICE_TYPE_DXVA2, AV_HWDEVICE_TYPE_QSV, + AV_HWDEVICE_TYPE_VIDEOTOOLBOX, AV_HWDEVICE_TYPE_D3D11VA, AV_HWDEVICE_TYPE_DRM, AV_HWDEVICE_TYPE_OPENCL, AV_HWDEVICE_TYPE_MEDIACODEC); + + pAVHWDeviceInternal = ^AVHWDeviceInternal; + + AVHWDeviceInternal = record + end; + + (* * + * This struct aggregates all the (hardware/vendor-specific) "high-level" state, + * i.e. state that is not tied to a concrete processing configuration. + * E.g., in an API that supports hardware-accelerated encoding and decoding, + * this struct will (if possible) wrap the state that is common to both encoding + * and decoding and from which specific instances of encoders or decoders can be + * derived. + * + * This struct is reference-counted with the AVBuffer mechanism. The + * av_hwdevice_ctx_alloc() constructor yields a reference, whose data field + * points to the actual AVHWDeviceContext. Further objects derived from + * AVHWDeviceContext (such as AVHWFramesContext, describing a frame pool with + * specific properties) will hold an internal reference to it. After all the + * references are released, the AVHWDeviceContext itself will be freed, + * optionally invoking a user-specified callback for uninitializing the hardware + * state. + *) + pAVHWDeviceContext = ^AVHWDeviceContext; + + AVHWDeviceContext = record + (* * + * A class for logging. Set by av_hwdevice_ctx_alloc(). + *) + av_class: pAVClass; + + (* * + * Private data used internally by libavutil. Must not be accessed in any + * way by the caller. + *) + internal: pAVHWDeviceInternal; + + (* * + * This field identifies the underlying API used for hardware access. 
+ * + * This field is set when this struct is allocated and never changed + * afterwards. + *) + _type: AVHWDeviceType; + + (* * + * The format-specific data, allocated and freed by libavutil along with + * this context. + * + * Should be cast by the user to the format-specific context defined in the + * corresponding header (hwcontext_*.h) and filled as described in the + * documentation before calling av_hwdevice_ctx_init(). + * + * After calling av_hwdevice_ctx_init() this struct should not be modified + * by the caller. + *) + hwctx: Pointer; + + (* * + * This field may be set by the caller before calling av_hwdevice_ctx_init(). + * + * If non-NULL, this callback will be called when the last reference to + * this context is unreferenced, immediately before it is freed. + * + * @note when other objects (e.g an AVHWFramesContext) are derived from this + * struct, this callback will be invoked after all such child objects + * are fully uninitialized and their respective destructors invoked. + *) + // void (*free)(struct AVHWDeviceContext *ctx); + free: procedure(ctx: pAVHWDeviceContext); cdecl; + (* * + * Arbitrary user data, to be used e.g. by the free() callback. + *) + user_opaque: Pointer; + end; + + pAVHWFramesInternal = ^AVHWFramesInternal; + + AVHWFramesInternal = record + end; + + (* * + * This struct describes a set or pool of "hardware" frames (i.e. those with + * data not located in normal system memory). All the frames in the pool are + * assumed to be allocated in the same way and interchangeable. + * + * This struct is reference-counted with the AVBuffer mechanism and tied to a + * given AVHWDeviceContext instance. The av_hwframe_ctx_alloc() constructor + * yields a reference, whose data field points to the actual AVHWFramesContext + * struct. + *) + pAVHWFramesContext = ^AVHWFramesContext; + + AVHWFramesContext = record + (* * + * A class for logging. + *) + av_class: pAVClass; + + (* * + * Private data used internally by libavutil. Must not be accessed in any + * way by the caller. + *) + internal: pAVHWFramesInternal; + + (* * + * A reference to the parent AVHWDeviceContext. This reference is owned and + * managed by the enclosing AVHWFramesContext, but the caller may derive + * additional references from it. + *) + device_ref: pAVBufferRef; + + (* * + * The parent AVHWDeviceContext. This is simply a pointer to + * device_ref->data provided for convenience. + * + * Set by libavutil in av_hwframe_ctx_init(). + *) + device_ctx: pAVHWDeviceContext; + + (* * + * The format-specific data, allocated and freed automatically along with + * this context. + * + * Should be cast by the user to the format-specific context defined in the + * corresponding header (hwframe_*.h) and filled as described in the + * documentation before calling av_hwframe_ctx_init(). + * + * After any frames using this context are created, the contents of this + * struct should not be modified by the caller. + *) + hwctx: Pointer; + + (* * + * This field may be set by the caller before calling av_hwframe_ctx_init(). + * + * If non-NULL, this callback will be called when the last reference to + * this context is unreferenced, immediately before it is freed. + *) + // void (*free)(struct AVHWFramesContext *ctx); + free: procedure(ctx: pAVHWFramesContext); cdecl; + (* * + * Arbitrary user data, to be used e.g. by the free() callback. + *) + user_opaque: Pointer; + + (* * + * A pool from which the frames are allocated by av_hwframe_get_buffer(). 
+ * This field may be set by the caller before calling av_hwframe_ctx_init(). + * The buffers returned by calling av_buffer_pool_get() on this pool must + * have the properties described in the documentation in the corresponding hw + * type's header (hwcontext_*.h). The pool will be freed strictly before + * this struct's free() callback is invoked. + * + * This field may be NULL, then libavutil will attempt to allocate a pool + * internally. Note that certain device types enforce pools allocated at + * fixed size (frame count), which cannot be extended dynamically. In such a + * case, initial_pool_size must be set appropriately. + *) + pool: pAVBufferPool; + + (* * + * Initial size of the frame pool. If a device type does not support + * dynamically resizing the pool, then this is also the maximum pool size. + * + * May be set by the caller before calling av_hwframe_ctx_init(). Must be + * set if pool is NULL and the device type does not support dynamic pools. + *) + initial_pool_size: int; + + (* * + * The pixel format identifying the underlying HW surface type. + * + * Must be a hwaccel format, i.e. the corresponding descriptor must have the + * AV_PIX_FMT_FLAG_HWACCEL flag set. + * + * Must be set by the user before calling av_hwframe_ctx_init(). + *) + format: AVPixelFormat; + + (* * + * The pixel format identifying the actual data layout of the hardware + * frames. + * + * Must be set by the caller before calling av_hwframe_ctx_init(). + * + * @note when the underlying API does not provide the exact data layout, but + * only the colorspace/bit depth, this field should be set to the fully + * planar version of that format (e.g. for 8-bit 420 YUV it should be + * AV_PIX_FMT_YUV420P, not AV_PIX_FMT_NV12 or anything else). + *) + sw_format: AVPixelFormat; + + (* * + * The allocated dimensions of the frames in this pool. + * + * Must be set by the user before calling av_hwframe_ctx_init(). + *) + width, height: int; + end; + + (* * + * Look up an AVHWDeviceType by name. + * + * @param name String name of the device type (case-insensitive). + * @return The type from enum AVHWDeviceType, or AV_HWDEVICE_TYPE_NONE if + * not found. + *) + // enum AVHWDeviceType av_hwdevice_find_type_by_name(const char *name); +function av_hwdevice_find_type_by_name(const name: PAnsiChar): AVHWDeviceType; cdecl; external avutil_dll; +(* * Get the string name of an AVHWDeviceType. + * + * @param type Type from enum AVHWDeviceType. + * @return Pointer to a static string containing the name, or NULL if the type + * is not valid. +*) +// const char *av_hwdevice_get_type_name(enum AVHWDeviceType type); +function av_hwdevice_get_type_name(_type: AVHWDeviceType): PAnsiChar; cdecl; external avutil_dll; +(* * + * Iterate over supported device types. + * + * @param type AV_HWDEVICE_TYPE_NONE initially, then the previous type + * returned by this function in subsequent iterations. + * @return The next usable device type from enum AVHWDeviceType, or + * AV_HWDEVICE_TYPE_NONE if there are no more. +*) +// enum AVHWDeviceType av_hwdevice_iterate_types(enum AVHWDeviceType prev); +function av_hwdevice_iterate_types(prev: AVHWDeviceType): AVHWDeviceType; cdecl; external avutil_dll; +(* * + * Allocate an AVHWDeviceContext for a given hardware type. + * + * @param type the type of the hardware device to allocate. + * @return a reference to the newly created AVHWDeviceContext on success or NULL + * on failure. 
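+ *
+ * Before allocating a device context it can be useful to list what this build
+ * of libavutil supports, using the iteration helpers declared above (sketch):
+ *
+ *   var
+ *     t: AVHWDeviceType;
+ *   begin
+ *     t := av_hwdevice_iterate_types(AV_HWDEVICE_TYPE_NONE);
+ *     while t <> AV_HWDEVICE_TYPE_NONE do
+ *     begin
+ *       Writeln('hwdevice: ', av_hwdevice_get_type_name(t));
+ *       t := av_hwdevice_iterate_types(t);
+ *     end;
+ *   end;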
+*) +// AVBufferRef *av_hwdevice_ctx_alloc(enum AVHWDeviceType type); +function av_hwdevice_ctx_alloc(_type: AVHWDeviceType): pAVBufferRef; cdecl; external avutil_dll; +(* * + * Finalize the device context before use. This function must be called after + * the context is filled with all the required information and before it is + * used in any way. + * + * @param ref a reference to the AVHWDeviceContext + * @return 0 on success, a negative AVERROR code on failure +*) +// int av_hwdevice_ctx_init(AVBufferRef *ref); +function av_hwdevice_ctx_init(ref: pAVBufferRef): int; cdecl; external avutil_dll; +(* * + * Open a device of the specified type and create an AVHWDeviceContext for it. + * + * This is a convenience function intended to cover the simple cases. Callers + * who need to fine-tune device creation/management should open the device + * manually and then wrap it in an AVHWDeviceContext using + * av_hwdevice_ctx_alloc()/av_hwdevice_ctx_init(). + * + * The returned context is already initialized and ready for use, the caller + * should not call av_hwdevice_ctx_init() on it. The user_opaque/free fields of + * the created AVHWDeviceContext are set by this function and should not be + * touched by the caller. + * + * @param device_ctx On success, a reference to the newly-created device context + * will be written here. The reference is owned by the caller + * and must be released with av_buffer_unref() when no longer + * needed. On failure, NULL will be written to this pointer. + * @param type The type of the device to create. + * @param device A type-specific string identifying the device to open. + * @param opts A dictionary of additional (type-specific) options to use in + * opening the device. The dictionary remains owned by the caller. + * @param flags currently unused + * + * @return 0 on success, a negative AVERROR code on failure. +*) +// int av_hwdevice_ctx_create(AVBufferRef **device_ctx, enum AVHWDeviceType type, +// const char *device, AVDictionary *opts, int flags); +function av_hwdevice_ctx_create(var device_ctx: pAVBufferRef; _type: AVHWDeviceType; const device: PAnsiChar; opts: pAVDictionary; flags: int): int; cdecl; + external avutil_dll; +(* * + * Create a new device of the specified type from an existing device. + * + * If the source device is a device of the target type or was originally + * derived from such a device (possibly through one or more intermediate + * devices of other types), then this will return a reference to the + * existing device of the same type as is requested. + * + * Otherwise, it will attempt to derive a new device from the given source + * device. If direct derivation to the new type is not implemented, it will + * attempt the same derivation from each ancestor of the source device in + * turn looking for an implemented derivation method. + * + * @param dst_ctx On success, a reference to the newly-created + * AVHWDeviceContext. + * @param type The type of the new device to create. + * @param src_ctx A reference to an existing AVHWDeviceContext which will be + * used to create the new device. + * @param flags Currently unused; should be set to zero. + * @return Zero on success, a negative AVERROR code on failure. 
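+ *
+ * For the common case the convenience constructor av_hwdevice_ctx_create()
+ * declared above is enough (sketch; D3D11VA is only an example type and 'dev'
+ * an illustrative local):
+ *
+ *   var
+ *     dev: pAVBufferRef;
+ *   begin
+ *     dev := nil;
+ *     if av_hwdevice_ctx_create(dev, AV_HWDEVICE_TYPE_D3D11VA, nil, nil, 0) = 0 then
+ *     begin
+ *       // ... use dev, e.g. attach it to a decoder ...
+ *       // release it with av_buffer_unref() when no longer needed
+ *     end;
+ *   end;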
+*) +// int av_hwdevice_ctx_create_derived(AVBufferRef **dst_ctx, +// enum AVHWDeviceType type, +// AVBufferRef *src_ctx, int flags); +function av_hwdevice_ctx_create_derived(var dst_ctx: pAVBufferRef; _type: AVHWDeviceType; src_ctx: pAVBufferRef; flags: int): int; cdecl; external avutil_dll; +(* * + * Allocate an AVHWFramesContext tied to a given device context. + * + * @param device_ctx a reference to a AVHWDeviceContext. This function will make + * a new reference for internal use, the one passed to the + * function remains owned by the caller. + * @return a reference to the newly created AVHWFramesContext on success or NULL + * on failure. +*) +// AVBufferRef *av_hwframe_ctx_alloc(AVBufferRef *device_ctx); +function av_hwframe_ctx_alloc(device_ctx: pAVBufferRef): pAVBufferRef; cdecl; external avutil_dll; +(* * + * Finalize the context before use. This function must be called after the + * context is filled with all the required information and before it is attached + * to any frames. + * + * @param ref a reference to the AVHWFramesContext + * @return 0 on success, a negative AVERROR code on failure +*) +// int av_hwframe_ctx_init(AVBufferRef *ref); +function av_hwframe_ctx_init(ref: pAVBufferRef): int; cdecl; external avutil_dll; +(* * + * Allocate a new frame attached to the given AVHWFramesContext. + * + * @param hwframe_ctx a reference to an AVHWFramesContext + * @param frame an empty (freshly allocated or unreffed) frame to be filled with + * newly allocated buffers. + * @param flags currently unused, should be set to zero + * @return 0 on success, a negative AVERROR code on failure +*) +// int av_hwframe_get_buffer(AVBufferRef *hwframe_ctx, AVFrame *frame, int flags); +function av_hwframe_get_buffer(hwframe_ctx: pAVBufferRef; frame: pAVFrame; flags: int): int; cdecl; external avutil_dll; +(* * + * Copy data to or from a hw surface. At least one of dst/src must have an + * AVHWFramesContext attached. + * + * If src has an AVHWFramesContext attached, then the format of dst (if set) + * must use one of the formats returned by av_hwframe_transfer_get_formats(src, + * AV_HWFRAME_TRANSFER_DIRECTION_FROM). + * If dst has an AVHWFramesContext attached, then the format of src must use one + * of the formats returned by av_hwframe_transfer_get_formats(dst, + * AV_HWFRAME_TRANSFER_DIRECTION_TO) + * + * dst may be "clean" (i.e. with data/buf pointers unset), in which case the + * data buffers will be allocated by this function using av_frame_get_buffer(). + * If dst->format is set, then this format will be used, otherwise (when + * dst->format is AV_PIX_FMT_NONE) the first acceptable format will be chosen. + * + * The two frames must have matching allocated dimensions (i.e. equal to + * AVHWFramesContext.width/height), since not all device types support + * transferring a sub-rectangle of the whole surface. The display dimensions + * (i.e. AVFrame.width/height) may be smaller than the allocated dimensions, but + * also have to be equal for both frames. When the display dimensions are + * smaller than the allocated dimensions, the content of the padding in the + * destination frame is unspecified. + * + * @param dst the destination frame. dst is not touched on failure. + * @param src the source frame. + * @param flags currently unused, should be set to zero + * @return 0 on success, a negative AVERROR error code on failure. 
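+ *
+ * Typical "download to system memory" call, as an illustrative Pascal sketch
+ * (hw_frame/sw_frame are AVFrame pointers obtained elsewhere, e.g. from a
+ * hwaccel decoder and av_frame_alloc(); sw_frame is left "clean" so the first
+ * acceptable software format is chosen):
+ * @code
+ *   if av_hwframe_transfer_data(sw_frame,hw_frame,0)<0 then
+ *    Writeln('av_hwframe_transfer_data failed');
+ * @endcode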
+*) +// int av_hwframe_transfer_data(AVFrame *dst, const AVFrame *src, int flags); +function av_hwframe_transfer_data(dst: pAVFrame; const src: pAVFrame; flags: int): int; cdecl; external avutil_dll; + +type + AVHWFrameTransferDirection = ( + (* * + * Transfer the data from the queried hw frame. + *) + AV_HWFRAME_TRANSFER_DIRECTION_FROM, + (* * + * Transfer the data to the queried hw frame. + *) + AV_HWFRAME_TRANSFER_DIRECTION_TO); + + (* * + * Get a list of possible source or target formats usable in + * av_hwframe_transfer_data(). + * + * @param hwframe_ctx the frame context to obtain the information for + * @param dir the direction of the transfer + * @param formats the pointer to the output format list will be written here. + * The list is terminated with AV_PIX_FMT_NONE and must be freed + * by the caller when no longer needed using av_free(). + * If this function returns successfully, the format list will + * have at least one item (not counting the terminator). + * On failure, the contents of this pointer are unspecified. + * @param flags currently unused, should be set to zero + * @return 0 on success, a negative AVERROR code on failure. + *) + // int av_hwframe_transfer_get_formats(AVBufferRef *hwframe_ctx, + // enum AVHWFrameTransferDirection dir, + // enum AVPixelFormat **formats, int flags); +function av_hwframe_transfer_get_formats(hwframe_ctx: pAVBufferRef; dir: AVHWFrameTransferDirection; var formats: pAVPixelFormat; flags: int): int; cdecl; + external avutil_dll; + +type + (* * + * This struct describes the constraints on hardware frames attached to + * a given device with a hardware-specific configuration. This is returned + * by av_hwdevice_get_hwframe_constraints() and must be freed by + * av_hwframe_constraints_free() after use. + *) + pAVHWFramesConstraints = ^AVHWFramesConstraints; + + AVHWFramesConstraints = record + (* * + * A list of possible values for format in the hw_frames_ctx, + * terminated by AV_PIX_FMT_NONE. This member will always be filled. + *) + valid_hw_formats: pAVPixelFormat; + + (* * + * A list of possible values for sw_format in the hw_frames_ctx, + * terminated by AV_PIX_FMT_NONE. Can be NULL if this information is + * not known. + *) + valid_sw_formats: pAVPixelFormat; + + (* * + * The minimum size of frames in this hw_frames_ctx. + * (Zero if not known.) + *) + min_width: int; + min_height: int; + + (* * + * The maximum size of frames in this hw_frames_ctx. + * (INT_MAX if not known / no limit.) + *) + max_width: int; + max_height: int; + end; + + (* * + * Allocate a HW-specific configuration structure for a given HW device. + * After use, the user must free all members as required by the specific + * hardware structure being used, then free the structure itself with + * av_free(). + * + * @param device_ctx a reference to the associated AVHWDeviceContext. + * @return The newly created HW-specific configuration structure on + * success or NULL on failure. + *) + // void *av_hwdevice_hwconfig_alloc(AVBufferRef *device_ctx); +function av_hwdevice_hwconfig_alloc(device_ctx: pAVBufferRef): Pointer; cdecl; external avutil_dll; +(* * + * Get the constraints on HW frames given a device and the HW-specific + * configuration to be used with that device. If no HW-specific + * configuration is provided, returns the maximum possible capabilities + * of the device. + * + * @param ref a reference to the associated AVHWDeviceContext. 
+ * @param hwconfig a filled HW-specific configuration structure, or NULL + * to return the maximum possible capabilities of the device. + * @return AVHWFramesConstraints structure describing the constraints + * on the device, or NULL if not available. +*) +// AVHWFramesConstraints *av_hwdevice_get_hwframe_constraints(AVBufferRef *ref,const void *hwconfig); +function av_hwdevice_get_hwframe_constraints(ref: pAVBufferRef; const hwconfig: Pointer): pAVHWFramesConstraints; cdecl; external avutil_dll; +(* * + * Free an AVHWFrameConstraints structure. + * + * @param constraints The (filled or unfilled) AVHWFrameConstraints structure. +*) +// void av_hwframe_constraints_free(AVHWFramesConstraints **constraints); +procedure av_hwframe_constraints_free(var constraints: pAVHWFramesConstraints); cdecl; external avutil_dll; + +const + (* * + * Flags to apply to frame mappings. + *) + + (* * + * The mapping must be readable. + *) + AV_HWFRAME_MAP_READ = 1 shl 0; + (* * + * The mapping must be writeable. + *) + AV_HWFRAME_MAP_WRITE = 1 shl 1; + (* * + * The mapped frame will be overwritten completely in subsequent + * operations, so the current frame data need not be loaded. Any values + * which are not overwritten are unspecified. + *) + AV_HWFRAME_MAP_OVERWRITE = 1 shl 2; + (* * + * The mapping must be direct. That is, there must not be any copying in + * the map or unmap steps. Note that performance of direct mappings may + * be much lower than normal memory. + *) + AV_HWFRAME_MAP_DIRECT = 1 shl 3; + + (* * + * Map a hardware frame. + * + * This has a number of different possible effects, depending on the format + * and origin of the src and dst frames. On input, src should be a usable + * frame with valid buffers and dst should be blank (typically as just created + * by av_frame_alloc()). src should have an associated hwframe context, and + * dst may optionally have a format and associated hwframe context. + * + * If src was created by mapping a frame from the hwframe context of dst, + * then this function undoes the mapping - dst is replaced by a reference to + * the frame that src was originally mapped from. + * + * If both src and dst have an associated hwframe context, then this function + * attempts to map the src frame from its hardware context to that of dst and + * then fill dst with appropriate data to be usable there. This will only be + * possible if the hwframe contexts and associated devices are compatible - + * given compatible devices, av_hwframe_ctx_create_derived() can be used to + * create a hwframe context for dst in which mapping should be possible. + * + * If src has a hwframe context but dst does not, then the src frame is + * mapped to normal memory and should thereafter be usable as a normal frame. + * If the format is set on dst, then the mapping will attempt to create dst + * with that format and fail if it is not possible. If format is unset (is + * AV_PIX_FMT_NONE) then dst will be mapped with whatever the most appropriate + * format to use is (probably the sw_format of the src hwframe context). + * + * A return value of AVERROR(ENOSYS) indicates that the mapping is not + * possible with the given arguments and hwframe setup, while other return + * values indicate that it failed somehow. + * + * @param dst Destination frame, to contain the mapping. + * @param src Source frame, to be mapped. + * @param flags Some combination of AV_HWFRAME_MAP_* flags. + * @return Zero on success, negative AVERROR code on failure. 
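+ *
+ * Illustrative Pascal sketch of a zero-copy read mapping (the frame pointers
+ * are assumed to come from av_frame_alloc() and a hwaccel decoder; on
+ * success the mapped frame is unreferenced later like any other AVFrame):
+ * @code
+ *   if av_hwframe_map(mapped_frame,hw_frame,AV_HWFRAME_MAP_READ)<0 then
+ *    Writeln('no direct mapping, fall back to av_hwframe_transfer_data()');
+ * @endcode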
+ *) + // int av_hwframe_map(AVFrame *dst, const AVFrame *src, int flags); +function av_hwframe_map(dst: pAVFrame; const src: pAVFrame; flags: int): int; cdecl; external avutil_dll; +(* * + * Create and initialise an AVHWFramesContext as a mapping of another existing + * AVHWFramesContext on a different device. + * + * av_hwframe_ctx_init() should not be called after this. + * + * @param derived_frame_ctx On success, a reference to the newly created + * AVHWFramesContext. + * @param derived_device_ctx A reference to the device to create the new + * AVHWFramesContext on. + * @param source_frame_ctx A reference to an existing AVHWFramesContext + * which will be mapped to the derived context. + * @param flags Some combination of AV_HWFRAME_MAP_* flags, defining the + * mapping parameters to apply to frames which are allocated + * in the derived device. + * @return Zero on success, negative AVERROR code on failure. +*) +// int av_hwframe_ctx_create_derived(AVBufferRef **derived_frame_ctx, +// enum AVPixelFormat format, +// AVBufferRef *derived_device_ctx, +// AVBufferRef *source_frame_ctx, +// int flags); +function av_hwframe_ctx_create_derived(var derived_frame_ctx: pAVBufferRef; format: AVPixelFormat; derived_device_ctx: pAVBufferRef; + source_frame_ctx: pAVBufferRef; flags: int): int; cdecl; external avutil_dll; +{$ENDREGION} +{$REGION 'hwcontext_mediacodec.h'} + +(* * + * MediaCodec details. + * + * Allocated as AVHWDeviceContext.hwctx +*) +type + pAVMediaCodecDeviceContext = ^AVMediaCodecDeviceContext; + + AVMediaCodecDeviceContext = record + (* * + * android/view/Surface handle, to be filled by the user. + * + * This is the default surface used by decoders on this device. + *) + surface: Pointer; + end; +{$ENDREGION} +{$REGION 'hwcontext_drm.h'} + (* * + * @file + * API-specific header for AV_HWDEVICE_TYPE_DRM. + * + * Internal frame allocation is not currently supported - all frames + * must be allocated by the user. Thus AVHWFramesContext is always + * NULL, though this may change if support for frame allocation is + * added in future. + *) + +const + (* * + * The maximum number of layers/planes in a DRM frame. + *) + AV_DRM_MAX_PLANES = 4; + + (* * + * DRM object descriptor. + * + * Describes a single DRM object, addressing it as a PRIME file + * descriptor. + *) +type + pAVDRMObjectDescriptor = ^AVDRMObjectDescriptor; + + AVDRMObjectDescriptor = record + (* * + * DRM PRIME fd for the object. + *) + fd: int; + (* * + * Total size of the object. + * + * (This includes any parts not which do not contain image data.) + *) + size: size_t; + (* * + * Format modifier applied to the object (DRM_FORMAT_MOD_* ). + * + * If the format modifier is unknown then this should be set to + * DRM_FORMAT_MOD_INVALID. + *) + format_modifier: uint64_t; + end; + + (* * + * DRM plane descriptor. + * + * Describes a single plane of a layer, which is contained within + * a single object. + *) + pAVDRMPlaneDescriptor = ^AVDRMPlaneDescriptor; + + AVDRMPlaneDescriptor = record + (* * + * Index of the object containing this plane in the objects + * array of the enclosing frame descriptor. + *) + object_index: int; + (* * + * Offset within that object of this plane. + *) + offset: ptrdiff_t; + (* * + * Pitch (linesize) of this plane. + *) + pitch: ptrdiff_t; + end; + + (* * + * DRM layer descriptor. + * + * Describes a single layer within a frame. This has the structure + * defined by its format, and will contain one or more planes. 
+ *) + pAVDRMLayerDescriptor = ^AVDRMLayerDescriptor; + + AVDRMLayerDescriptor = record + (* * + * Format of the layer (DRM_FORMAT_* ). + *) + format: uint32_t; + (* * + * Number of planes in the layer. + * + * This must match the number of planes required by format. + *) + nb_planes: int; + (* * + * Array of planes in this layer. + *) + planes: array [0 .. AV_DRM_MAX_PLANES - 1] of AVDRMPlaneDescriptor; + end; + + (* * + * DRM frame descriptor. + * + * This is used as the data pointer for AV_PIX_FMT_DRM_PRIME frames. + * It is also used by user-allocated frame pools - allocating in + * AVHWFramesContext.pool must return AVBufferRefs which contain + * an object of this type. + * + * The fields of this structure should be set such it can be + * imported directly by EGL using the EGL_EXT_image_dma_buf_import + * and EGL_EXT_image_dma_buf_import_modifiers extensions. + * (Note that the exact layout of a particular format may vary between + * platforms - we only specify that the same platform should be able + * to import it.) + * + * The total number of planes must not exceed AV_DRM_MAX_PLANES, and + * the order of the planes by increasing layer index followed by + * increasing plane index must be the same as the order which would + * be used for the data pointers in the equivalent software format. + *) + pAVDRMFrameDescriptor = ^AVDRMFrameDescriptor; + + AVDRMFrameDescriptor = record + (* * + * Number of DRM objects making up this frame. + *) + nb_objects: int; + (* * + * Array of objects making up the frame. + *) + objects: array [0 .. AV_DRM_MAX_PLANES - 1] of AVDRMObjectDescriptor; + (* * + * Number of layers in the frame. + *) + nb_layers: int; + (* * + * Array of layers in the frame. + *) + layers: array [0 .. AV_DRM_MAX_PLANES - 1] of AVDRMLayerDescriptor; + end; + + (* * + * DRM device. + * + * Allocated as AVHWDeviceContext.hwctx. + *) + pAVDRMDeviceContext = ^AVDRMDeviceContext; + + AVDRMDeviceContext = record + (* * + * File descriptor of DRM device. + * + * This is used as the device to create frames on, and may also be + * used in some derivation and mapping operations. + * + * If no device is required, set to -1. + *) + fd: int; + end; + +{$ENDREGION} +{$REGION 'pixdesc.h'} + +type + pAVComponentDescriptor = ^AVComponentDescriptor; + + AVComponentDescriptor = record + (* * + * Which of the 4 planes contains the component. + *) + plane: int; + + (* * + * Number of elements between 2 horizontally consecutive pixels. + * Elements are bits for bitstream formats, bytes otherwise. + *) + step: int; + + (* * + * Number of elements before the component of the first pixel. + * Elements are bits for bitstream formats, bytes otherwise. + *) + offset: int; + + (* * + * Number of least significant bits that must be shifted away + * to get the value. + *) + shift: int; + + (* * + * Number of bits in the component. + *) + depth: int; + +{$IFDEF FF_API_PLUS1_MINUS1} + (* * deprecated, use step instead *) + // attribute_deprecated int step_minus1; + step_minus1: int deprecated; + + (* * deprecated, use depth instead *) + // attribute_deprecated int depth_minus1; + depth_minus1: int deprecated; + (* * deprecated, use offset instead *) + // attribute_deprecated int offset_plus1; + offset_plus1: int deprecated; +{$ENDIF} + end; + + (* * + * Descriptor that unambiguously describes how the bits of a pixel are + * stored in the up to 4 data planes of an image. It also stores the + * subsampling factors and number of components. 
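+ *
+ * For example (illustrative Pascal sketch; av_pix_fmt_desc_get() is declared
+ * further down in this region and AV_PIX_FMT_YUV420P is assumed to be bound
+ * in the AVPixelFormat enum):
+ * @code
+ *   var
+ *    desc:pAVPixFmtDescriptor;
+ *    chroma_w:int;
+ *   begin
+ *    desc:=av_pix_fmt_desc_get(AV_PIX_FMT_YUV420P);
+ *    // for YUV420P: nb_components=3, log2_chroma_w=1, log2_chroma_h=1
+ *    chroma_w:=(1920+(1 shl desc^.log2_chroma_w)-1) shr desc^.log2_chroma_w; // 960
+ *   end;
+ * @endcode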
+ * + * @note This is separate of the colorspace (RGB, YCbCr, YPbPr, JPEG-style YUV + * and all the YUV variants) AVPixFmtDescriptor just stores how values + * are stored not what these values represent. + *) + pAVPixFmtDescriptor = ^AVPixFmtDescriptor; + + AVPixFmtDescriptor = record + name: PAnsiChar; + nb_components: uint8_t; + /// < The number of components each pixel has, (1-4) + + (* * + * Amount to shift the luma width right to find the chroma width. + * For YV12 this is 1 for example. + * chroma_width = AV_CEIL_RSHIFT(luma_width, log2_chroma_w) + * The note above is needed to ensure rounding up. + * This value only refers to the chroma components. + *) + log2_chroma_w: uint8_t; + + (* * + * Amount to shift the luma height right to find the chroma height. + * For YV12 this is 1 for example. + * chroma_height= AV_CEIL_RSHIFT(luma_height, log2_chroma_h) + * The note above is needed to ensure rounding up. + * This value only refers to the chroma components. + *) + log2_chroma_h: uint8_t; + + (* * + * Combination of AV_PIX_FMT_FLAG_... flags. + *) + flags: uint64_t; + + (* * + * Parameters that describe how pixels are packed. + * If the format has 1 or 2 components, then luma is 0. + * If the format has 3 or 4 components: + * if the RGB flag is set then 0 is red, 1 is green and 2 is blue; + * otherwise 0 is luma, 1 is chroma-U and 2 is chroma-V. + * + * If present, the Alpha channel is always the last component. + *) + comp: array [0 .. 3] of AVComponentDescriptor; + + (* * + * Alternative comma-separated names. + *) + alias: PAnsiChar; + end; + +const + (* * + * Pixel format is big-endian. + *) + AV_PIX_FMT_FLAG_BE = (1 shl 0); + (* * + * Pixel format has a palette in data[1], values are indexes in this palette. + *) + AV_PIX_FMT_FLAG_PAL = (1 shl 1); + (* * + * All values of a component are bit-wise packed end to end. + *) + AV_PIX_FMT_FLAG_BITSTREAM = (1 shl 2); + (* * + * Pixel format is an HW accelerated format. + *) + AV_PIX_FMT_FLAG_HWACCEL = (1 shl 3); + (* * + * At least one pixel component is not in the first data plane. + *) + AV_PIX_FMT_FLAG_PLANAR = (1 shl 4); + (* * + * The pixel format contains RGB-like data (as opposed to YUV/grayscale). + *) + AV_PIX_FMT_FLAG_RGB = (1 shl 5); + + (* * + * The pixel format is "pseudo-paletted". This means that it contains a + * fixed palette in the 2nd plane but the palette is fixed/constant for each + * PIX_FMT. This allows interpreting the data as if it was PAL8, which can + * in some cases be simpler. Or the data can be interpreted purely based on + * the pixel format without using the palette. + * An example of a pseudo-paletted format is AV_PIX_FMT_GRAY8 + * + * @deprecated This flag is deprecated, and will be removed. When it is removed, + * the extra palette allocation in AVFrame.data[1] is removed as well. Only + * actual paletted formats (as indicated by AV_PIX_FMT_FLAG_PAL) will have a + * palette. Starting with FFmpeg versions which have this flag deprecated, the + * extra "pseudo" palette is already ignored, and API users are not required to + * allocate a palette for AV_PIX_FMT_FLAG_PSEUDOPAL formats (it was required + * before the deprecation, though). + *) + AV_PIX_FMT_FLAG_PSEUDOPAL = (1 shl 6); + + (* * + * The pixel format has an alpha channel. This is set on all formats that + * support alpha in some way. The exception is AV_PIX_FMT_PAL8, which can + * carry alpha as part of the palette. Details are explained in the + * AVPixelFormat enum, and are also encoded in the corresponding + * AVPixFmtDescriptor. 
+ * + * The alpha is always straight, never pre-multiplied. + * + * If a codec or a filter does not support alpha, it should set all alpha to + * opaque, or use the equivalent pixel formats without alpha component, e.g. + * AV_PIX_FMT_RGB0 (or AV_PIX_FMT_RGB24 etc.) instead of AV_PIX_FMT_RGBA. + *) + AV_PIX_FMT_FLAG_ALPHA = (1 shl 7); + + (* * + * The pixel format is following a Bayer pattern + *) + AV_PIX_FMT_FLAG_BAYER = (1 shl 8); + + (* * + * The pixel format contains IEEE-754 floating point values. Precision (double, + * single, or half) should be determined by the pixel size (64, 32, or 16 bits). + *) + AV_PIX_FMT_FLAG_FLOAT = (1 shl 9); + + (* * + * Return the number of bits per pixel used by the pixel format + * described by pixdesc. Note that this is not the same as the number + * of bits per sample. + * + * The returned number of bits refers to the number of bits actually + * used for storing the pixel information, that is padding bits are + * not counted. + *) + // int av_get_bits_per_pixel(const AVPixFmtDescriptor *pixdesc); +function av_get_bits_per_pixel(const pixdesc: pAVPixFmtDescriptor): int; cdecl; external avutil_dll; +(* * + * Return the number of bits per pixel for the pixel format + * described by pixdesc, including any padding or unused bits. +*) +// int av_get_padded_bits_per_pixel(const AVPixFmtDescriptor *pixdesc); +function av_get_padded_bits_per_pixel(const pixdesc: pAVPixFmtDescriptor): int; cdecl; external avutil_dll; +(* * + * @return a pixel format descriptor for provided pixel format or NULL if + * this pixel format is unknown. +*) +// const AVPixFmtDescriptor *av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt); +function av_pix_fmt_desc_get(pix_fmt: AVPixelFormat): pAVPixFmtDescriptor; cdecl; external avutil_dll; +(* * + * Iterate over all pixel format descriptors known to libavutil. + * + * @param prev previous descriptor. NULL to get the first descriptor. + * + * @return next descriptor or NULL after the last descriptor +*) +// const AVPixFmtDescriptor *av_pix_fmt_desc_next(const AVPixFmtDescriptor *prev); +function av_pix_fmt_desc_next(const prev: pAVPixFmtDescriptor): pAVPixFmtDescriptor; cdecl; external avutil_dll; +(* * + * @return an AVPixelFormat id described by desc, or AV_PIX_FMT_NONE if desc + * is not a valid pointer to a pixel format descriptor. +*) +// enum AVPixelFormat av_pix_fmt_desc_get_id(const AVPixFmtDescriptor *desc); +function av_pix_fmt_desc_get_id(const desc: pAVPixFmtDescriptor): AVPixelFormat; cdecl; external avutil_dll; +(* * + * Utility function to access log2_chroma_w log2_chroma_h from + * the pixel format AVPixFmtDescriptor. + * + * @param[in] pix_fmt the pixel format + * @param[out] h_shift store log2_chroma_w (horizontal/width shift) + * @param[out] v_shift store log2_chroma_h (vertical/height shift) + * + * @return 0 on success, AVERROR(ENOSYS) on invalid or unknown pixel format +*) +// int av_pix_fmt_get_chroma_sub_sample(enum AVPixelFormat pix_fmt,int *h_shift, int *v_shift); +function av_pix_fmt_get_chroma_sub_sample(pix_fmt: AVPixelFormat; var h_shift: int; var v_shift: int): int; cdecl; external avutil_dll; +(* * + * @return number of planes in pix_fmt, a negative AVERROR if pix_fmt is not a + * valid pixel format. +*) +// int av_pix_fmt_count_planes(enum AVPixelFormat pix_fmt); +function av_pix_fmt_count_planes(pix_fmt: AVPixelFormat): int; cdecl; external avutil_dll; +(* * + * @return the name for provided color range or NULL if unknown. 
+*) +// const char *av_color_range_name(enum AVColorRange range); +function av_color_range_name(range: AVColorRange): PAnsiChar; cdecl; external avutil_dll; +(* * + * @return the AVColorRange value for name or an AVError if not found. +*) +// int av_color_range_from_name(const char *name); +function av_color_range_from_name(const name: PAnsiChar): int; cdecl; external avutil_dll; +(* * + * @return the name for provided color primaries or NULL if unknown. +*) +// const char *av_color_primaries_name(enum AVColorPrimaries primaries); +function av_color_primaries_name(primaries: AVColorPrimaries): PAnsiChar; cdecl; external avutil_dll; +(* * + * @return the AVColorPrimaries value for name or an AVError if not found. +*) +// int av_color_primaries_from_name(const char *name); +function av_color_primaries_from_name(const name: PAnsiChar): int; cdecl; external avutil_dll; +(* * + * @return the name for provided color transfer or NULL if unknown. +*) +// const char *av_color_transfer_name(enum AVColorTransferCharacteristic transfer); +function av_color_transfer_name(transfer: AVColorTransferCharacteristic): PAnsiChar; cdecl; external avutil_dll; +(* * + * @return the AVColorTransferCharacteristic value for name or an AVError if not found. +*) +// int av_color_transfer_from_name(const char *name); +function av_color_transfer_from_name(const name: PAnsiChar): int; cdecl; external avutil_dll; +(* * + * @return the name for provided color space or NULL if unknown. +*) +// const char *av_color_space_name(enum AVColorSpace space); +function av_color_space_name(space: AVColorSpace): PAnsiChar; cdecl; external avutil_dll; +(* * + * @return the AVColorSpace value for name or an AVError if not found. +*) +// int av_color_space_from_name(const char *name); +function av_color_space_from_name(const name: PAnsiChar): int; cdecl; external avutil_dll; +(* * + * @return the name for provided chroma location or NULL if unknown. +*) +// const char *av_chroma_location_name(enum AVChromaLocation location); +function av_chroma_location_name(location: AVChromaLocation): PAnsiChar; cdecl; external avutil_dll; +(* * + * @return the AVChromaLocation value for name or an AVError if not found. +*) +// int av_chroma_location_from_name(const char *name); +function av_chroma_location_from_name(const name: PAnsiChar): int; cdecl; external avutil_dll; +(* * + * Return the pixel format corresponding to name. + * + * If there is no pixel format with name name, then looks for a + * pixel format with the name corresponding to the native endian + * format of name. + * For example in a little-endian system, first looks for "gray16", + * then for "gray16le". + * + * Finally if no pixel format has been found, returns AV_PIX_FMT_NONE. +*) +// enum AVPixelFormat av_get_pix_fmt(const char *name); +function av_get_pix_fmt(const name: PAnsiChar): AVPixelFormat; cdecl; external avutil_dll; +(* * + * Return the short name for a pixel format, NULL in case pix_fmt is + * unknown. + * + * @see av_get_pix_fmt(), av_get_pix_fmt_string() +*) +// const char *av_get_pix_fmt_name(enum AVPixelFormat pix_fmt); +function av_get_pix_fmt_name(pix_fmt: AVPixelFormat): PAnsiChar; cdecl; external avutil_dll; +(* * + * Print in buf the string corresponding to the pixel format with + * number pix_fmt, or a header if pix_fmt is negative. 
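+ *
+ * Illustrative Pascal sketch (the buffer size and the AV_PIX_FMT_NV12 value
+ * are assumptions of this example):
+ * @code
+ *   var
+ *    buf:array[0..255] of AnsiChar;
+ *   begin
+ *    av_get_pix_fmt_string(@buf[0],Length(buf),AV_PIX_FMT_NV12);
+ *    Writeln(PAnsiChar(@buf[0]));
+ *   end;
+ * @endcode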
+ * + * @param buf the buffer where to write the string + * @param buf_size the size of buf + * @param pix_fmt the number of the pixel format to print the + * corresponding info string, or a negative value to print the + * corresponding header. +*) + +// char *av_get_pix_fmt_string(char *buf, int buf_size, enum AVPixelFormat pix_fmt); +function av_get_pix_fmt_string(buf: PAnsiChar; buf_size: int; pix_fmt: AVPixelFormat): PAnsiChar; cdecl; external avutil_dll; + +(* * + * Read a line from an image, and write the values of the + * pixel format component c to dst. + * + * @param data the array containing the pointers to the planes of the image + * @param linesize the array containing the linesizes of the image + * @param desc the pixel format descriptor for the image + * @param x the horizontal coordinate of the first pixel to read + * @param y the vertical coordinate of the first pixel to read + * @param w the width of the line to read, that is the number of + * values to write to dst + * @param read_pal_component if not zero and the format is a paletted + * format writes the values corresponding to the palette + * component c in data[1] to dst, rather than the palette indexes in + * data[0]. The behavior is undefined if the format is not paletted. + * @param dst_element_size size of elements in dst array (2 or 4 byte) +*) + +Type + Tav_read_array4_puint8_t = record +{$IFDEF REALISE} + array4_puint8_t; +{$ENDIF} + end; + + pav_read_array4_puint8_t = ^Tav_read_array4_puint8_t; + + Tav_read_array4_int = record +{$IFDEF REALISE} + array4_int; +{$ENDIF} + end; + + pav_read_array4_int = ^Tav_read_array4_int; + + // void av_read_image_line2(void *dst, const uint8_t *data[4], + // const int linesize[4], const AVPixFmtDescriptor *desc, + // int x, int y, int c, int w, int read_pal_component, + // int dst_element_size); + +procedure av_read_image_line2(dst: Pointer; const data: pav_read_array4_puint8_t; const linesize: pav_read_array4_int; const desc: pAVPixFmtDescriptor; + x, y, c, w, read_pal_component, dst_element_size: int); cdecl; external avutil_dll; + +// void av_read_image_line(uint16_t *dst, const uint8_t *data[4], +// const int linesize[4], const AVPixFmtDescriptor *desc, +// int x, int y, int c, int w, int read_pal_component); +procedure av_read_image_line(dst: puint16_t; const data: pav_read_array4_puint8_t; const linesize: pav_read_array4_int; const desc: pAVPixFmtDescriptor; + x, y, c, w: int; read_pal_component: int); cdecl; external avutil_dll; +(* * + * Write the values from src to the pixel format component c of an + * image line. + * + * @param src array containing the values to write + * @param data the array containing the pointers to the planes of the + * image to write into. It is supposed to be zeroed. 
+ * @param linesize the array containing the linesizes of the image + * @param desc the pixel format descriptor for the image + * @param x the horizontal coordinate of the first pixel to write + * @param y the vertical coordinate of the first pixel to write + * @param w the width of the line to write, that is the number of + * values to write to the image line + * @param src_element_size size of elements in src array (2 or 4 byte) +*) + +// void av_write_image_line2(const void *src, uint8_t *data[4], +// const int linesize[4], const AVPixFmtDescriptor *desc, +// int x, int y, int c, int w, int src_element_size); +procedure av_write_image_line2(const src: puint16_t; data: pav_read_array4_puint8_t; const linesize: pav_read_array4_int; const desc: pAVPixFmtDescriptor; + x: int; y: int; c: int; w: int; src_element_size: int); cdecl; external avutil_dll; + +// void av_write_image_line(const uint16_t *src, uint8_t *data[4], +// const int linesize[4], const AVPixFmtDescriptor *desc, +// int x, int y, int c, int w); +procedure av_write_image_line(const src: puint16_t; data: pav_read_array4_puint8_t; const linesize: pav_read_array4_int; const desc: pAVPixFmtDescriptor; + x: int; y: int; c: int; w: int); cdecl; external avutil_dll; +(* * + * Utility function to swap the endianness of a pixel format. + * + * @param[in] pix_fmt the pixel format + * + * @return pixel format with swapped endianness if it exists, + * otherwise AV_PIX_FMT_NONE +*) +// enum AVPixelFormat av_pix_fmt_swap_endianness(enum AVPixelFormat pix_fmt); +function av_pix_fmt_swap_endianness(pix_fmt: AVPixelFormat): AVPixelFormat; cdecl; external avutil_dll; + +const + FF_LOSS_RESOLUTION = $0001; (* *< loss due to resolution change *) + FF_LOSS_DEPTH = $0002; (* *< loss due to color depth change *) + FF_LOSS_COLORSPACE = $0004; (* *< loss due to color space conversion *) + FF_LOSS_ALPHA = $0008; (* *< loss of alpha bits *) + FF_LOSS_COLORQUANT = $0010; (* *< loss due to color quantization *) + FF_LOSS_CHROMA = $0020; (* *< loss of chroma (e.g. RGB to gray conversion) *) + + (* * + * Compute what kind of losses will occur when converting from one specific + * pixel format to another. + * When converting from one pixel format to another, information loss may occur. + * For example, when converting from RGB24 to GRAY, the color information will + * be lost. Similarly, other losses occur when converting from some formats to + * other formats. These losses can involve loss of chroma, but also loss of + * resolution, loss of color depth, loss due to the color space conversion, loss + * of the alpha bits or loss due to color quantization. + * av_get_fix_fmt_loss() informs you about the various types of losses + * which will occur when converting from one pixel format to another. + * + * @param[in] dst_pix_fmt destination pixel format + * @param[in] src_pix_fmt source pixel format + * @param[in] has_alpha Whether the source pixel format alpha channel is used. + * @return Combination of flags informing you what kind of losses will occur + * (maximum loss for an invalid dst_pix_fmt). + *) + // int av_get_pix_fmt_loss(enum AVPixelFormat dst_pix_fmt, + // enum AVPixelFormat src_pix_fmt, + // int has_alpha); +function av_get_pix_fmt_loss(dst_pix_fmt: AVPixelFormat; src_pix_fmt: AVPixelFormat; has_alpha: int): int; cdecl; external avutil_dll; +(* * + * Compute what kind of losses will occur when converting from one specific + * pixel format to another. + * When converting from one pixel format to another, information loss may occur. 
+ * For example, when converting from RGB24 to GRAY, the color information will + * be lost. Similarly, other losses occur when converting from some formats to + * other formats. These losses can involve loss of chroma, but also loss of + * resolution, loss of color depth, loss due to the color space conversion, loss + * of the alpha bits or loss due to color quantization. + * av_get_fix_fmt_loss() informs you about the various types of losses + * which will occur when converting from one pixel format to another. + * + * @param[in] dst_pix_fmt destination pixel format + * @param[in] src_pix_fmt source pixel format + * @param[in] has_alpha Whether the source pixel format alpha channel is used. + * @return Combination of flags informing you what kind of losses will occur + * (maximum loss for an invalid dst_pix_fmt). +*) +// enum AVPixelFormat av_find_best_pix_fmt_of_2(enum AVPixelFormat dst_pix_fmt1, enum AVPixelFormat dst_pix_fmt2, +// enum AVPixelFormat src_pix_fmt, int has_alpha, int *loss_ptr); +function av_find_best_pix_fmt_of_2(dst_pix_fmt1: AVPixelFormat; dst_pix_fmt2: AVPixelFormat; src_pix_fmt: AVPixelFormat; has_alpha: int; var loss_ptr: int) + : AVPixelFormat; cdecl; external avutil_dll; +{$ENDREGION} +{$REGION 'imgutils.h'} + +type + Tav_image_array4_int = array4_int; + pav_image_array4_int = ^Tav_image_array4_int; + + Tav_image_array4_puint8_t = array4_puint8_t; + pav_image_array4_puint8_t = ^Tav_image_array4_puint8_t; + + Tav_image_array4_ptrdiff_t = array4_ptrdiff_t; + pav_image_array4_ptrdiff_t = ^Tav_image_array4_ptrdiff_t; + + (* * + * Compute the max pixel step for each plane of an image with a + * format described by pixdesc. + * + * The pixel step is the distance in bytes between the first byte of + * the group of bytes which describe a pixel component and the first + * byte of the successive group in the same plane for the same + * component. + * + * @param max_pixsteps an array which is filled with the max pixel step + * for each plane. Since a plane may contain different pixel + * components, the computed max_pixsteps[plane] is relative to the + * component in the plane with the max pixel step. + * @param max_pixstep_comps an array which is filled with the component + * for each plane which has the max pixel step. May be NULL. + *) + // void av_image_fill_max_pixsteps(int max_pixsteps[4], int max_pixstep_comps[4], + // const AVPixFmtDescriptor *pixdesc); +procedure av_image_fill_max_pixsteps(max_pixsteps: pav_image_array4_int; max_pixstep_comps: pav_image_array4_int; const pixdesc: pAVPixFmtDescriptor); cdecl; + external avutil_dll; +(* * + * Compute the size of an image line with format pix_fmt and width + * width for the plane plane. + * + * @return the computed size in bytes +*) +// int av_image_get_linesize(enum AVPixelFormat pix_fmt, int width, int plane); +function av_image_get_linesize(pix_fmt: AVPixelFormat; width: int; plane: int): int; cdecl; external avutil_dll; +(* * + * Fill plane linesizes for an image with pixel format pix_fmt and + * width width. + * + * @param linesizes array to be filled with the linesize for each plane + * @return >= 0 in case of success, a negative error code otherwise +*) +// int av_image_fill_linesizes(int linesizes[4], enum AVPixelFormat pix_fmt, int width); +function av_image_fill_linesizes(linesizes: pav_image_array4_int; pix_fmt: AVPixelFormat; width: int): int; cdecl; external avutil_dll; +(* * + * Fill plane data pointers for an image with pixel format pix_fmt and + * height height. 
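+ *
+ * Illustrative Pascal sketch of the usual fill_linesizes/fill_pointers pair
+ * (Tav_image_array4_* come from the type aliases above; AV_PIX_FMT_YUV420P
+ * and the 1280x720 size are assumptions of this example):
+ * @code
+ *   var
+ *    linesizes:Tav_image_array4_int;
+ *    data:Tav_image_array4_puint8_t;
+ *    size:int;
+ *    buf:puint8_t;
+ *   begin
+ *    av_image_fill_linesizes(@linesizes,AV_PIX_FMT_YUV420P,1280);
+ *    size:=av_image_fill_pointers(@data,AV_PIX_FMT_YUV420P,720,nil,@linesizes); // query only
+ *    buf:=av_malloc(size);
+ *    av_image_fill_pointers(@data,AV_PIX_FMT_YUV420P,720,buf,@linesizes);
+ *   end;
+ * @endcode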
+ * + * @param data pointers array to be filled with the pointer for each image plane + * @param ptr the pointer to a buffer which will contain the image + * @param linesizes the array containing the linesize for each + * plane, should be filled by av_image_fill_linesizes() + * @return the size in bytes required for the image buffer, a negative + * error code in case of failure +*) +// int av_image_fill_pointers(uint8_t *data[4], enum AVPixelFormat pix_fmt, int height, +// uint8_t *ptr, const int linesizes[4]); +function av_image_fill_pointers(data: pav_image_array4_puint8_t; pix_fmt: AVPixelFormat; height: int; ptr: puint8_t; const linesizes: pav_image_array4_int) + : int; cdecl; external avutil_dll; +(* * + * Allocate an image with size w and h and pixel format pix_fmt, and + * fill pointers and linesizes accordingly. + * The allocated image buffer has to be freed by using + * av_freep(&pointers[0]). + * + * @param align the value to use for buffer size alignment + * @return the size in bytes required for the image buffer, a negative + * error code in case of failure +*) +// int av_image_alloc(uint8_t *pointers[4], int linesizes[4], +// int w, int h, enum AVPixelFormat pix_fmt, int align); +function av_image_alloc( // + pointers: pav_image_array4_puint8_t; // + linesizes: pav_image_array4_int; // + w: int; // + h: int; // + pix_fmt: AVPixelFormat; // + align: int): // + int; cdecl; overload; external avutil_dll; + +function av_image_alloc( // + pointers: Pointer; // + linesizes: Pointer; // + w: int; // + h: int; // + pix_fmt: AVPixelFormat; // + align: int): // + int; cdecl; overload; external avutil_dll; + +(* * + * Copy image plane from src to dst. + * That is, copy "height" number of lines of "bytewidth" bytes each. + * The first byte of each successive line is separated by *_linesize + * bytes. + * + * bytewidth must be contained by both absolute values of dst_linesize + * and src_linesize, otherwise the function behavior is undefined. + * + * @param dst_linesize linesize for the image plane in dst + * @param src_linesize linesize for the image plane in src +*) +// void av_image_copy_plane(uint8_t *dst, int dst_linesize, +// const uint8_t *src, int src_linesize, +// int bytewidth, int height); +procedure av_image_copy_plane(dst: puint8_t; dst_linesize: int; const src: puint8_t; src_linesize: int; bytewidth: int; height: int); cdecl; + external avutil_dll; +(* * + * Copy image in src_data to dst_data. + * + * @param dst_linesizes linesizes for the image in dst_data + * @param src_linesizes linesizes for the image in src_data +*) +// void av_image_copy(uint8_t *dst_data[4], int dst_linesizes[4], +// const uint8_t *src_data[4], const int src_linesizes[4], +// enum AVPixelFormat pix_fmt, int width, int height); +procedure av_image_copy(dst_data: pav_image_array4_puint8_t; dst_linesizes: pav_image_array4_int; const src_data: pav_image_array4_puint8_t; + const src_linesizes: pav_image_array4_int; pix_fmt: AVPixelFormat; width: int; height: int); cdecl; external avutil_dll; +(* * + * Copy image data located in uncacheable (e.g. GPU mapped) memory. Where + * available, this function will use special functionality for reading from such + * memory, which may result in greatly improved performance compared to plain + * av_image_copy(). + * + * The data pointers and the linesizes must be aligned to the maximum required + * by the CPU architecture. + * + * @note The linesize parameters have the type ptrdiff_t here, while they are + * int for av_image_copy(). 
+ * @note On x86, the linesizes currently need to be aligned to the cacheline + * size (i.e. 64) to get improved performance. +*) +// void av_image_copy_uc_from(uint8_t *dst_data[4], const ptrdiff_t dst_linesizes[4], +// const uint8_t *src_data[4], const ptrdiff_t src_linesizes[4], +// enum AVPixelFormat pix_fmt, int width, int height); +procedure av_image_copy_uc_from(dst_data: pav_image_array4_puint8_t; const dst_linesizes: pav_image_array4_ptrdiff_t; const src_data: pav_image_array4_puint8_t; + const src_linesizes: pav_image_array4_ptrdiff_t; pix_fmt: AVPixelFormat; width: int; height: int); cdecl; external avutil_dll; +(* * + * Setup the data pointers and linesizes based on the specified image + * parameters and the provided array. + * + * The fields of the given image are filled in by using the src + * address which points to the image data buffer. Depending on the + * specified pixel format, one or multiple image data pointers and + * line sizes will be set. If a planar format is specified, several + * pointers will be set pointing to the different picture planes and + * the line sizes of the different planes will be stored in the + * lines_sizes array. Call with src == NULL to get the required + * size for the src buffer. + * + * To allocate the buffer and fill in the dst_data and dst_linesize in + * one call, use av_image_alloc(). + * + * @param dst_data data pointers to be filled in + * @param dst_linesize linesizes for the image in dst_data to be filled in + * @param src buffer which will contain or contains the actual image data, can be NULL + * @param pix_fmt the pixel format of the image + * @param width the width of the image in pixels + * @param height the height of the image in pixels + * @param align the value used in src for linesize alignment + * @return the size in bytes required for src, a negative error code + * in case of failure +*) +// int av_image_fill_arrays(uint8_t *dst_data[4], int dst_linesize[4], +// const uint8_t *src, +// enum AVPixelFormat pix_fmt, int width, int height, int align); +function av_image_fill_arrays(dst_data: pav_image_array4_puint8_t; dst_linesize: pav_image_array4_int; const src: puint8_t; pix_fmt: AVPixelFormat; width: int; + height: int; align: int): int; cdecl; external avutil_dll; +(* * + * Return the size in bytes of the amount of data required to store an + * image with the given parameters. + * + * @param pix_fmt the pixel format of the image + * @param width the width of the image in pixels + * @param height the height of the image in pixels + * @param align the assumed linesize alignment + * @return the buffer size in bytes, a negative error code in case of failure +*) +// int av_image_get_buffer_size(enum AVPixelFormat pix_fmt, int width, int height, int align); +function av_image_get_buffer_size(pix_fmt: AVPixelFormat; width: int; height: int; align: int): int; cdecl; external avutil_dll; +(* * + * Copy image data from an image into a buffer. + * + * av_image_get_buffer_size() can be used to compute the required size + * for the buffer to fill. 
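+ *
+ * Illustrative Pascal sketch (frame^.data/linesize follow the AVFrame
+ * binding; pixel format, size and alignment are assumptions of this example):
+ * @code
+ *   var
+ *    size:int;
+ *    buf:puint8_t;
+ *   begin
+ *    size:=av_image_get_buffer_size(AV_PIX_FMT_YUV420P,1280,720,1);
+ *    buf:=av_malloc(size);
+ *    av_image_copy_to_buffer(buf,size,@frame^.data,@frame^.linesize,
+ *                            AV_PIX_FMT_YUV420P,1280,720,1);
+ *   end;
+ * @endcode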
+ * + * @param dst a buffer into which picture data will be copied + * @param dst_size the size in bytes of dst + * @param src_data pointers containing the source image data + * @param src_linesize linesizes for the image in src_data + * @param pix_fmt the pixel format of the source image + * @param width the width of the source image in pixels + * @param height the height of the source image in pixels + * @param align the assumed linesize alignment for dst + * @return the number of bytes written to dst, or a negative value + * (error code) on error +*) +// int av_image_copy_to_buffer(uint8_t *dst, int dst_size, +// const uint8_t * const src_data[4], const int src_linesize[4], +// enum AVPixelFormat pix_fmt, int width, int height, int align); +function av_image_copy_to_buffer(dst: puint8_t; dst_size: int; const src_data: pav_image_array4_puint8_t; const src_linesize: pav_image_array4_int; + pix_fmt: AVPixelFormat; width: int; height: int; align: int): int; cdecl; external avutil_dll; +(* * + * Check if the given dimension of an image is valid, meaning that all + * bytes of the image can be addressed with a signed int. + * + * @param w the width of the picture + * @param h the height of the picture + * @param log_offset the offset to sum to the log level for logging with log_ctx + * @param log_ctx the parent logging context, it may be NULL + * @return >= 0 if valid, a negative error code otherwise +*) +// int av_image_check_size(unsigned int w, unsigned int h, int log_offset, void *log_ctx); +function av_image_check_size(w: unsigned_int; h: unsigned_int; log_offset: int; log_ctx: Pointer): int; cdecl; external avutil_dll; +(* * + * Check if the given dimension of an image is valid, meaning that all + * bytes of a plane of an image with the specified pix_fmt can be addressed + * with a signed int. + * + * @param w the width of the picture + * @param h the height of the picture + * @param max_pixels the maximum number of pixels the user wants to accept + * @param pix_fmt the pixel format, can be AV_PIX_FMT_NONE if unknown. + * @param log_offset the offset to sum to the log level for logging with log_ctx + * @param log_ctx the parent logging context, it may be NULL + * @return >= 0 if valid, a negative error code otherwise +*) +// int av_image_check_size2(unsigned int w, unsigned int h, int64_t max_pixels, enum AVPixelFormat pix_fmt, int log_offset, void *log_ctx); +function av_image_check_size2(w: unsigned_int; h: unsigned_int; max_pixels: int64_t; pix_fmt: AVPixelFormat; log_offset: int; log_ctx: Pointer): int; cdecl; + external avutil_dll; +(* * + * Check if the given sample aspect ratio of an image is valid. + * + * It is considered invalid if the denominator is 0 or if applying the ratio + * to the image size would make the smaller dimension less than 1. If the + * sar numerator is 0, it is considered unknown and will return as valid. + * + * @param w width of the image + * @param h height of the image + * @param sar sample aspect ratio of the image + * @return 0 if valid, a negative AVERROR code otherwise +*) +// int av_image_check_sar(unsigned int w, unsigned int h, AVRational sar); +function av_image_check_sar(w: unsigned_int; h: unsigned_int; sar: AVRational): int; cdecl; external avutil_dll; +(* * + * Overwrite the image data with black. This is suitable for filling a + * sub-rectangle of an image, meaning the padding between the right most pixel + * and the left most pixel on the next line will not be overwritten. 
For some + * formats, the image size might be rounded up due to inherent alignment. + * + * If the pixel format has alpha, the alpha is cleared to opaque. + * + * This can return an error if the pixel format is not supported. Normally, all + * non-hwaccel pixel formats should be supported. + * + * Passing NULL for dst_data is allowed. Then the function returns whether the + * operation would have succeeded. (It can return an error if the pix_fmt is + * not supported.) + * + * @param dst_data data pointers to destination image + * @param dst_linesize linesizes for the destination image + * @param pix_fmt the pixel format of the image + * @param range the color range of the image (important for colorspaces such as YUV) + * @param width the width of the image in pixels + * @param height the height of the image in pixels + * @return 0 if the image data was cleared, a negative AVERROR code otherwise +*) +// int av_image_fill_black(uint8_t *dst_data[4], const ptrdiff_t dst_linesize[4], +// enum AVPixelFormat pix_fmt, enum AVColorRange range, +// int width, int height); +function av_image_fill_black(dst_data: pav_image_array4_puint8_t; const dst_linesize: pav_image_array4_ptrdiff_t; pix_fmt: AVPixelFormat; range: AVColorRange; + width: int; height: int): int; cdecl; external avutil_dll; +{$ENDREGION} +{$REGION 'time.h'} +(* * + * Get the current time in microseconds. +*) +// int64_t av_gettime(void); +function av_gettime(): int64_t; cdecl; external avutil_dll; +(* * + * Get the current time in microseconds since some unspecified starting point. + * On platforms that support it, the time comes from a monotonic clock + * This property makes this time source ideal for measuring relative time. + * The returned values may not be monotonic on platforms where a monotonic + * clock is not available. +*) +// int64_t av_gettime_relative(void); +function av_gettime_relative(): int64_t; cdecl; external avutil_dll; +(* * + * Indicates with a boolean result if the av_gettime_relative() time source + * is monotonic. +*) +// int av_gettime_relative_is_monotonic(void); +function av_gettime_relative_is_monotonic(): int; cdecl; external avutil_dll; +(* * + * Sleep for a period of time. Although the duration is expressed in + * microseconds, the actual delay may be rounded to the precision of the + * system timer. + * + * @param usec Number of microseconds to sleep. + * @return zero on success or (negative) error code. +*) +// int av_usleep(unsigned usec); +function av_usleep(usec: unsigned): int; cdecl; external avutil_dll; +{$ENDREGION} +{$REGION 'timestamp.h'} + +const + AV_TS_MAX_STRING_SIZE = 32; + + (* * + * Fill the provided buffer with a string containing a timestamp + * representation. + * + * @param buf a buffer with size in bytes of at least AV_TS_MAX_STRING_SIZE + * @param ts the timestamp to represent + * @return the buffer in input + *) + // static inline char *av_ts_make_string(char *buf, int64_t ts) +function av_ts_make_string(buf: PAnsiChar; ts: int64_t): PAnsiChar; + +(* * + * Convenience macro, the return value should be used only directly in + * function arguments but never stand-alone. +*) +// #define av_ts2str(ts) av_ts_make_string((char[AV_TS_MAX_STRING_SIZE]){0}, ts) +function av_ts2str(ts: int64_t): PAnsiChar; + +(* * + * Fill the provided buffer with a string containing a timestamp time + * representation. 
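+ *
+ * Illustrative Pascal sketch (AVRational.num/den follow the binding of that
+ * record; the 1/90000 time base and the sample timestamp are assumptions):
+ * @code
+ *   var
+ *    buf:array[0..AV_TS_MAX_STRING_SIZE-1] of AnsiChar;
+ *    tb:AVRational;
+ *   begin
+ *    tb.num:=1; tb.den:=90000;
+ *    Writeln(av_ts_make_time_string(@buf[0],123456,@tb));
+ *   end;
+ * @endcode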
+ * + * @param buf a buffer with size in bytes of at least AV_TS_MAX_STRING_SIZE + * @param ts the timestamp to represent + * @param tb the timebase of the timestamp + * @return the buffer in input +*) +// static inline char *av_ts_make_time_string(char *buf, int64_t ts, AVRational *tb) +function av_ts_make_time_string(buf: PAnsiChar; ts: int64_t; tb: pAVRational): PAnsiChar; + +(* * + * Convenience macro, the return value should be used only directly in + * function arguments but never stand-alone. +*) +// #define av_ts2timestr(ts, tb) av_ts_make_time_string((char[AV_TS_MAX_STRING_SIZE]){0}, ts, tb) +function av_ts2timestr(ts: int64_t; tb: pAVRational): PAnsiChar; + +{$ENDREGION} +{$REGION 'mem.h'} +(* * + * Allocate a memory block with alignment suitable for all memory accesses + * (including vectors if available on the CPU). + * + * @param size Size in bytes for the memory block to be allocated + * @return Pointer to the allocated block, or `NULL` if the block cannot + * be allocated + * @see av_mallocz() +*) +// void *av_malloc(size_t size) av_malloc_attrib av_alloc_size(1); +function av_malloc(size: size_t): Pointer; cdecl; external avutil_dll; + +(* * + * Allocate a memory block with alignment suitable for all memory accesses + * (including vectors if available on the CPU) and zero all the bytes of the + * block. + * + * @param size Size in bytes for the memory block to be allocated + * @return Pointer to the allocated block, or `NULL` if it cannot be allocated + * @see av_malloc() +*) +// void *av_mallocz(size_t size) av_malloc_attrib av_alloc_size(1); +function av_mallocz(size: size_t): Pointer; cdecl; external avutil_dll; +(* * + * Allocate a memory block for an array with av_malloc(). + * + * The allocated memory will have size `size * nmemb` bytes. + * + * @param nmemb Number of element + * @param size Size of a single element + * @return Pointer to the allocated block, or `NULL` if the block cannot + * be allocated + * @see av_malloc() +*) +// av_alloc_size(1, 2) void *av_malloc_array(size_t nmemb, size_t size); +function av_malloc_array(nmemb: size_t; size: size_t): Pointer; cdecl; external avutil_dll; +(* * + * Allocate a memory block for an array with av_mallocz(). + * + * The allocated memory will have size `size * nmemb` bytes. + * + * @param nmemb Number of elements + * @param size Size of the single element + * @return Pointer to the allocated block, or `NULL` if the block cannot + * be allocated + * + * @see av_mallocz() + * @see av_malloc_array() +*) +// av_alloc_size(1, 2) void *av_mallocz_array(size_t nmemb, size_t size); +function av_mallocz_array(nmemb: size_t; size: size_t): Pointer; cdecl; external avutil_dll; +(* * + * Non-inlined equivalent of av_mallocz_array(). + * + * Created for symmetry with the calloc() C function. +*) +// void *av_calloc(size_t nmemb, size_t size) av_malloc_attrib; +function av_calloc(nmemb: size_t; size: size_t): Pointer; cdecl; external avutil_dll; +(* * + * Allocate, reallocate, or free a block of memory. + * + * If `ptr` is `NULL` and `size` > 0, allocate a new block. If `size` is + * zero, free the memory block pointed to by `ptr`. Otherwise, expand or + * shrink that block of memory according to `size`. 
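+ *
+ * Illustrative Pascal sketch of a simple grow (sizes are arbitrary):
+ * @code
+ *   var
+ *    p:Pointer;
+ *   begin
+ *    p:=av_malloc(64);
+ *    p:=av_realloc(p,128); // NB: on failure nil is returned and the old block leaks
+ *    av_free(p);
+ *   end;
+ * @endcode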
+ * + * @param ptr Pointer to a memory block already allocated with + * av_realloc() or `NULL` + * @param size Size in bytes of the memory block to be allocated or + * reallocated + * + * @return Pointer to a newly-reallocated block or `NULL` if the block + * cannot be reallocated or the function is used to free the memory block + * + * @warning Unlike av_malloc(), the returned pointer is not guaranteed to be + * correctly aligned. + * @see av_fast_realloc() + * @see av_reallocp() +*) +// void *av_realloc(void *ptr, size_t size) av_alloc_size(2); +function av_realloc(ptr: Pointer; size: size_t): Pointer; cdecl; external avutil_dll; +(* * + * Allocate, reallocate, or free a block of memory through a pointer to a + * pointer. + * + * If `*ptr` is `NULL` and `size` > 0, allocate a new block. If `size` is + * zero, free the memory block pointed to by `*ptr`. Otherwise, expand or + * shrink that block of memory according to `size`. + * + * @param[in,out] ptr Pointer to a pointer to a memory block already allocated + * with av_realloc(), or a pointer to `NULL`. The pointer + * is updated on success, or freed on failure. + * @param[in] size Size in bytes for the memory block to be allocated or + * reallocated + * + * @return Zero on success, an AVERROR error code on failure + * + * @warning Unlike av_malloc(), the allocated memory is not guaranteed to be + * correctly aligned. +*) +// av_warn_unused_result +// int av_reallocp(void *ptr, size_t size); +function av_reallocp(ptr: Pointer; size: size_t): int; cdecl; external avutil_dll; +(* * + * Allocate, reallocate, or free a block of memory. + * + * This function does the same thing as av_realloc(), except: + * - It takes two size arguments and allocates `nelem * elsize` bytes, + * after checking the result of the multiplication for integer overflow. + * - It frees the input block in case of failure, thus avoiding the memory + * leak with the classic + * @code{.c} + * buf = realloc(buf); + * if (!buf) + * return -1; + * @endcode + * pattern. +*) +// void *av_realloc_f(void *ptr, size_t nelem, size_t elsize); +function av_realloc_f(ptr: Pointer; nelem: size_t; elsize: size_t): Pointer; cdecl; external avutil_dll; +(* * + * Allocate, reallocate, or free an array. + * + * If `ptr` is `NULL` and `nmemb` > 0, allocate a new block. If + * `nmemb` is zero, free the memory block pointed to by `ptr`. + * + * @param ptr Pointer to a memory block already allocated with + * av_realloc() or `NULL` + * @param nmemb Number of elements in the array + * @param size Size of the single element of the array + * + * @return Pointer to a newly-reallocated block or NULL if the block + * cannot be reallocated or the function is used to free the memory block + * + * @warning Unlike av_malloc(), the allocated memory is not guaranteed to be + * correctly aligned. + * @see av_reallocp_array() +*) +// av_alloc_size(2, 3) void *av_realloc_array(void *ptr, size_t nmemb, size_t size); +function av_realloc_array(ptr: Pointer; nmemb: size_t; size: size_t): Pointer; cdecl; external avutil_dll; +(* * + * Allocate, reallocate, or free an array through a pointer to a pointer. + * + * If `*ptr` is `NULL` and `nmemb` > 0, allocate a new block. If `nmemb` is + * zero, free the memory block pointed to by `*ptr`. + * + * @param[in,out] ptr Pointer to a pointer to a memory block already + * allocated with av_realloc(), or a pointer to `NULL`. + * The pointer is updated on success, or freed on failure. 
+ * @param[in] nmemb Number of elements + * @param[in] size Size of the single element + * + * @return Zero on success, an AVERROR error code on failure + * + * @warning Unlike av_malloc(), the allocated memory is not guaranteed to be + * correctly aligned. +*) +// av_alloc_size(2, 3) int av_reallocp_array(void *ptr, size_t nmemb, size_t size); +function av_reallocp_array(ptr: Pointer; nmemb: size_t; size: size_t): int; cdecl; external avutil_dll; +(* * + * Reallocate the given buffer if it is not large enough, otherwise do nothing. + * + * If the given buffer is `NULL`, then a new uninitialized buffer is allocated. + * + * If the given buffer is not large enough, and reallocation fails, `NULL` is + * returned and `*size` is set to 0, but the original buffer is not changed or + * freed. + * + * A typical use pattern follows: + * + * @code{.c} + * uint8_t *buf = ...; + * uint8_t *new_buf = av_fast_realloc(buf, ¤t_size, size_needed); + * if (!new_buf) { + * // Allocation failed; clean up original buffer + * av_freep(&buf); + * return AVERROR(ENOMEM); + * } + * @endcode + * + * @param[in,out] ptr Already allocated buffer, or `NULL` + * @param[in,out] size Pointer to the size of buffer `ptr`. `*size` is + * updated to the new allocated size, in particular 0 + * in case of failure. + * @param[in] min_size Desired minimal size of buffer `ptr` + * @return `ptr` if the buffer is large enough, a pointer to newly reallocated + * buffer if the buffer was not large enough, or `NULL` in case of + * error + * @see av_realloc() + * @see av_fast_malloc() +*) +// void *av_fast_realloc(void *ptr, unsigned int *size, size_t min_size); +function av_fast_realloc(ptr: Pointer; var size: unsigned_int; min_size: size_t): Pointer; cdecl; external avutil_dll; +(* * + * Allocate a buffer, reusing the given one if large enough. + * + * Contrary to av_fast_realloc(), the current buffer contents might not be + * preserved and on error the old buffer is freed, thus no special handling to + * avoid memleaks is necessary. + * + * `*ptr` is allowed to be `NULL`, in which case allocation always happens if + * `size_needed` is greater than 0. + * + * @code{.c} + * uint8_t *buf = ...; + * av_fast_malloc(&buf, ¤t_size, size_needed); + * if (!buf) { + * // Allocation failed; buf already freed + * return AVERROR(ENOMEM); + * } + * @endcode + * + * @param[in,out] ptr Pointer to pointer to an already allocated buffer. + * `*ptr` will be overwritten with pointer to new + * buffer on success or `NULL` on failure + * @param[in,out] size Pointer to the size of buffer `*ptr`. `*size` is + * updated to the new allocated size, in particular 0 + * in case of failure. + * @param[in] min_size Desired minimal size of buffer `*ptr` + * @see av_realloc() + * @see av_fast_mallocz() +*) +// void av_fast_malloc(void *ptr, unsigned int *size, size_t min_size); +procedure av_fast_malloc(ptr: Pointer; var size: unsigned_int; min_size: size_t); cdecl; external avutil_dll; +(* * + * Allocate and clear a buffer, reusing the given one if large enough. + * + * Like av_fast_malloc(), but all newly allocated space is initially cleared. + * Reused buffer is not cleared. + * + * `*ptr` is allowed to be `NULL`, in which case allocation always happens if + * `size_needed` is greater than 0. + * + * @param[in,out] ptr Pointer to pointer to an already allocated buffer. + * `*ptr` will be overwritten with pointer to new + * buffer on success or `NULL` on failure + * @param[in,out] size Pointer to the size of buffer `*ptr`. 
`*size` is + * updated to the new allocated size, in particular 0 + * in case of failure. + * @param[in] min_size Desired minimal size of buffer `*ptr` + * @see av_fast_malloc() +*) +// void av_fast_mallocz(void *ptr, unsigned int *size, size_t min_size); +procedure av_fast_mallocz(ptr: Pointer; var size: unsigned_int; min_size: size_t); cdecl; external avutil_dll; +(* * + * Free a memory block which has been allocated with a function of av_malloc() + * or av_realloc() family. + * + * @param ptr Pointer to the memory block which should be freed. + * + * @note `ptr = NULL` is explicitly allowed. + * @note It is recommended that you use av_freep() instead, to prevent leaving + * behind dangling pointers. + * @see av_freep() +*) +// void av_free(void *ptr); +procedure av_free(ptr: Pointer); cdecl; external avutil_dll; +(* * + * Free a memory block which has been allocated with a function of av_malloc() + * or av_realloc() family, and set the pointer pointing to it to `NULL`. + * + * @code{.c} + * uint8_t *buf = av_malloc(16); + * av_free(buf); + * // buf now contains a dangling pointer to freed memory, and accidental + * // dereference of buf will result in a use-after-free, which may be a + * // security risk. + * + * uint8_t *buf = av_malloc(16); + * av_freep(&buf); + * // buf is now NULL, and accidental dereference will only result in a + * // NULL-pointer dereference. + * @endcode + * + * @param ptr Pointer to the pointer to the memory block which should be freed + * @note `*ptr = NULL` is safe and leads to no action. + * @see av_free() +*) +// void av_freep(void *ptr); +procedure av_freep(ptr: Pointer); cdecl; external avutil_dll; +(* * + * Duplicate a string. + * + * @param s String to be duplicated + * @return Pointer to a newly-allocated string containing a + * copy of `s` or `NULL` if the string cannot be allocated + * @see av_strndup() +*) +// char *av_strdup(const char *s) av_malloc_attrib; +function av_strdup(const s: PAnsiChar): PAnsiChar; cdecl; external avutil_dll; +(* * + * Duplicate a substring of a string. + * + * @param s String to be duplicated + * @param len Maximum length of the resulting string (not counting the + * terminating byte) + * @return Pointer to a newly-allocated string containing a + * substring of `s` or `NULL` if the string cannot be allocated +*) +// char *av_strndup(const char *s, size_t len) av_malloc_attrib; +function av_strndup(const s: PAnsiChar; len: size_t): PAnsiChar; cdecl; external avutil_dll; +(* * + * Duplicate a buffer with av_malloc(). + * + * @param p Buffer to be duplicated + * @param size Size in bytes of the buffer copied + * @return Pointer to a newly allocated buffer containing a + * copy of `p` or `NULL` if the buffer cannot be allocated +*) +// void *av_memdup(const void *p, size_t size); +function av_memdup(const p: Pointer; size: size_t): Pointer; cdecl; external avutil_dll; +(* * + * Overlapping memcpy() implementation. + * + * @param dst Destination buffer + * @param back Number of bytes back to start copying (i.e. the initial size of + * the overlapping window); must be > 0 + * @param cnt Number of bytes to copy; must be >= 0 + * + * @note `cnt > back` is valid, this will copy the bytes we just copied, + * thus creating a repeating pattern with a period length of `back`. 
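+ *
+ * Pascal sketch of tiling a 4-byte pattern across a buffer (dst, pattern and
+ * bufSize are illustrative; puint8_t pointer arithmetic is used):
+ * @code
+ *   Move(pattern, dst^, 4);                     // seed the first period
+ *   av_memcpy_backptr(dst + 4, 4, bufSize - 4); // repeat it over the rest
+ * @endcode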
+*) +// void av_memcpy_backptr(uint8_t *dst, int back, int cnt); +procedure av_memcpy_backptr(dst: puint8_t; back: int; cnt: int); cdecl; external avutil_dll; + +(* * + * @defgroup lavu_mem_dynarray Dynamic Array + * + * Utilities to make an array grow when needed. + * + * Sometimes, the programmer would want to have an array that can grow when + * needed. The libavutil dynamic array utilities fill that need. + * + * libavutil supports two systems of appending elements onto a dynamically + * allocated array, the first one storing the pointer to the value in the + * array, and the second storing the value directly. In both systems, the + * caller is responsible for maintaining a variable containing the length of + * the array, as well as freeing of the array after use. + * + * The first system stores pointers to values in a block of dynamically + * allocated memory. Since only pointers are stored, the function does not need + * to know the size of the type. Both av_dynarray_add() and + * av_dynarray_add_nofree() implement this system. + * + * @code + * type **array = NULL; //< an array of pointers to values + * int nb = 0; //< a variable to keep track of the length of the array + * + * type to_be_added = ...; + * type to_be_added2 = ...; + * + * av_dynarray_add(&array, &nb, &to_be_added); + * if (nb == 0) + * return AVERROR(ENOMEM); + * + * av_dynarray_add(&array, &nb, &to_be_added2); + * if (nb == 0) + * return AVERROR(ENOMEM); + * + * // Now: + * // nb == 2 + * // &to_be_added == array[0] + * // &to_be_added2 == array[1] + * + * av_freep(&array); + * @endcode + * + * The second system stores the value directly in a block of memory. As a + * result, the function has to know the size of the type. av_dynarray2_add() + * implements this mechanism. + * + * @code + * type *array = NULL; //< an array of values + * int nb = 0; //< a variable to keep track of the length of the array + * + * type to_be_added = ...; + * type to_be_added2 = ...; + * + * type *addr = av_dynarray2_add((void ** )&array, &nb, sizeof(*array), NULL); + * if (!addr) + * return AVERROR(ENOMEM); + * memcpy(addr, &to_be_added, sizeof(to_be_added)); + * + * // Shortcut of the above. + * type *addr = av_dynarray2_add((void ** )&array, &nb, sizeof( *array), + * (const void * )&to_be_added2); + * if (!addr) + * return AVERROR(ENOMEM); + * + * // Now: + * // nb == 2 + * // to_be_added == array[0] + * // to_be_added2 == array[1] + * + * av_freep(&array); + * @endcode + * + * @{ +*) + +(* * + * Add the pointer to an element to a dynamic array. + * + * The array to grow is supposed to be an array of pointers to + * structures, and the element to add must be a pointer to an already + * allocated structure. + * + * The array is reallocated when its size reaches powers of 2. + * Therefore, the amortized cost of adding an element is constant. + * + * In case of success, the pointer to the array is updated in order to + * point to the new grown array, and the number pointed to by `nb_ptr` + * is incremented. + * In case of failure, the array is freed, `*tab_ptr` is set to `NULL` and + * `*nb_ptr` is set to 0. 
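+ *
+ * In Pascal the failure check mirrors the C example above; a rough sketch
+ * (arr, nb and item are illustrative, and item must outlive the array):
+ * @code
+ *   av_dynarray_add(@arr, nb, @item);
+ *   if nb = 0 then
+ *     Exit; // the array was freed on failure
+ * @endcode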
+ * + * @param[in,out] tab_ptr Pointer to the array to grow + * @param[in,out] nb_ptr Pointer to the number of elements in the array + * @param[in] elem Element to add + * @see av_dynarray_add_nofree(), av_dynarray2_add() +*) +// void av_dynarray_add(void *tab_ptr, int *nb_ptr, void *elem); +procedure av_dynarray_add(tab_ptr: Pointer; var nb_ptr: int; elem: Pointer); cdecl; external avutil_dll; +(* * + * Add an element to a dynamic array. + * + * Function has the same functionality as av_dynarray_add(), + * but it doesn't free memory on fails. It returns error code + * instead and leave current buffer untouched. + * + * @return >=0 on success, negative otherwise + * @see av_dynarray_add(), av_dynarray2_add() +*) +// av_warn_unused_result +// int av_dynarray_add_nofree(void *tab_ptr, int *nb_ptr, void *elem); +function av_dynarray_add_nofree(tab_ptr: Pointer; var nb_ptr: int; elem: Pointer): int; cdecl; external avutil_dll; +(* * + * Add an element of size `elem_size` to a dynamic array. + * + * The array is reallocated when its number of elements reaches powers of 2. + * Therefore, the amortized cost of adding an element is constant. + * + * In case of success, the pointer to the array is updated in order to + * point to the new grown array, and the number pointed to by `nb_ptr` + * is incremented. + * In case of failure, the array is freed, `*tab_ptr` is set to `NULL` and + * `*nb_ptr` is set to 0. + * + * @param[in,out] tab_ptr Pointer to the array to grow + * @param[in,out] nb_ptr Pointer to the number of elements in the array + * @param[in] elem_size Size in bytes of an element in the array + * @param[in] elem_data Pointer to the data of the element to add. If + * `NULL`, the space of the newly added element is + * allocated but left uninitialized. + * + * @return Pointer to the data of the element to copy in the newly allocated + * space + * @see av_dynarray_add(), av_dynarray_add_nofree() +*) +// void *av_dynarray2_add(void **tab_ptr, int *nb_ptr, size_t elem_size, +// const uint8_t *elem_data); +function av_dynarray2_add(var tab_ptr: Pointer; var nb_ptr: int; elem_size: size_t; const elem_data: puint8_t): Pointer; cdecl; external avutil_dll; + +(* * + * @defgroup lavu_mem_misc Miscellaneous Functions + * + * Other functions related to memory allocation. + * + * @{ +*) + +(* * + * Multiply two `size_t` values checking for overflow. + * + * @param[in] a,b Operands of multiplication + * @param[out] r Pointer to the result of the operation + * @return 0 on success, AVERROR(EINVAL) on overflow +*) +// static inline int av_size_mult(size_t a, size_t b, size_t *r) +function av_size_mult(a: size_t; b: size_t; var r: size_t): int; inline; + +(* * + * Set the maximum size that may be allocated in one block. + * + * The value specified with this function is effective for all libavutil's @ref + * lavu_mem_funcs "heap management functions." + * + * By default, the max value is defined as `INT_MAX`. + * + * @param max Value to be set as the new maximum size + * + * @warning Exercise extreme caution when using this function. Don't touch + * this if you do not understand the full consequence of doing so. 
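+ *
+ * Pascal sketch (the 512 MiB cap is purely illustrative):
+ * @code
+ *   av_max_alloc(512 * 1024 * 1024);
+ * @endcode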
+*) + +// void av_max_alloc(size_t max); +procedure av_max_alloc(max: size_t); cdecl; external avutil_dll; +{$ENDREGION} +{$REGION 'timecode.h'} + +const + AV_TIMECODE_STR_SIZE = 23; + +type + AVTimecodeFlag = ( // + AV_TIMECODE_FLAG_DROPFRAME = 1 shl 0, + /// < timecode is drop frame + AV_TIMECODE_FLAG_24HOURSMAX = 1 shl 1, + /// < timecode wraps after 24 hours + AV_TIMECODE_FLAG_ALLOWNEGATIVE = 1 shl 2 + /// < negative time values are allowed + ); + + pAVTimecode = ^AVTimecode; + + AVTimecode = record + start: int; + /// < timecode frame start (first base frame number) + flags: uint32_t; + /// < flags such as drop frame, +24 hours support, ... + rate: AVRational; + /// < frame rate in rational form + fps: unsigned; + /// < frame per second; must be consistent with the rate field + end; + + (* * + * Adjust frame number for NTSC drop frame time code. + * + * @param framenum frame number to adjust + * @param fps frame per second, 30 or 60 + * @return adjusted frame number + * @warning adjustment is only valid in NTSC 29.97 and 59.94 + *) + // int av_timecode_adjust_ntsc_framenum2(int framenum, int fps); +function av_timecode_adjust_ntsc_framenum2(framenum: int; fps: int): int; cdecl; external avutil_dll; +(* * + * Convert frame number to SMPTE 12M binary representation. + * + * @param tc timecode data correctly initialized + * @param framenum frame number + * @return the SMPTE binary representation + * + * @note Frame number adjustment is automatically done in case of drop timecode, + * you do NOT have to call av_timecode_adjust_ntsc_framenum2(). + * @note The frame number is relative to tc->start. + * @note Color frame (CF), binary group flags (BGF) and biphase mark polarity + * correction (PC) bits are set to zero. +*) +// uint32_t av_timecode_get_smpte_from_framenum(const AVTimecode *tc, int framenum); +function av_timecode_get_smpte_from_framenum(const tc: pAVTimecode; framenum: int): uint32_t; cdecl; external avutil_dll; +(* * + * Load timecode string in buf. + * + * @param buf destination buffer, must be at least AV_TIMECODE_STR_SIZE long + * @param tc timecode data correctly initialized + * @param framenum frame number + * @return the buf parameter + * + * @note Timecode representation can be a negative timecode and have more than + * 24 hours, but will only be honored if the flags are correctly set. + * @note The frame number is relative to tc->start. +*) +// char *av_timecode_make_string(const AVTimecode *tc, char *buf, int framenum); +function av_timecode_make_string(const tc: pAVTimecode; buf: PAnsiChar; framenum: int): PAnsiChar; cdecl; external avutil_dll; +(* * + * Get the timecode string from the SMPTE timecode format. + * + * @param buf destination buffer, must be at least AV_TIMECODE_STR_SIZE long + * @param tcsmpte the 32-bit SMPTE timecode + * @param prevent_df prevent the use of a drop flag when it is known the DF bit + * is arbitrary + * @return the buf parameter +*) +// char *av_timecode_make_smpte_tc_string(char *buf, uint32_t tcsmpte, int prevent_df); +function av_timecode_make_smpte_tc_string(buf: PAnsiChar; tcsmpte: uint32_t; prevent_df: int): PAnsiChar; cdecl; external avutil_dll; +(* * + * Get the timecode string from the 25-bit timecode format (MPEG GOP format). 
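+ *
+ * The timecode helpers above are typically combined as in this Pascal sketch
+ * (29.97 fps drop-frame; the frame number and the buffer are illustrative):
+ * @code
+ *   if av_timecode_init(@tc, av_make_q(30000, 1001),
+ *                       ord(AV_TIMECODE_FLAG_DROPFRAME), 0, nil) = 0 then
+ *     Writeln(av_timecode_make_string(@tc, @buf[0], 1800));
+ * @endcode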
+ * + * @param buf destination buffer, must be at least AV_TIMECODE_STR_SIZE long + * @param tc25bit the 25-bits timecode + * @return the buf parameter +*) +// char *av_timecode_make_mpeg_tc_string(char *buf, uint32_t tc25bit); +function av_timecode_make_mpeg_tc_string(buf: PAnsiChar; tc25bit: uint32_t): PAnsiChar; cdecl; external avutil_dll; +(* * + * Init a timecode struct with the passed parameters. + * + * @param log_ctx a pointer to an arbitrary struct of which the first field + * is a pointer to an AVClass struct (used for av_log) + * @param tc pointer to an allocated AVTimecode + * @param rate frame rate in rational form + * @param flags miscellaneous flags such as drop frame, +24 hours, ... + * (see AVTimecodeFlag) + * @param frame_start the first frame number + * @return 0 on success, AVERROR otherwise +*) +// int av_timecode_init(AVTimecode *tc, AVRational rate, int flags, int frame_start, void *log_ctx); +function av_timecode_init(tc: pAVTimecode; rate: AVRational; flags: int; rame_start: int; log_ctx: Pointer): int; cdecl; external avutil_dll; +(* * + * Parse timecode representation (hh:mm:ss[:;.]ff). + * + * @param log_ctx a pointer to an arbitrary struct of which the first field is a + * pointer to an AVClass struct (used for av_log). + * @param tc pointer to an allocated AVTimecode + * @param rate frame rate in rational form + * @param str timecode string which will determine the frame start + * @return 0 on success, AVERROR otherwise +*) +// int av_timecode_init_from_string(AVTimecode *tc, AVRational rate, const char *str, void *log_ctx); +function av_timecode_init_from_string(tc: pAVTimecode; rate: AVRational; const str: PAnsiChar; log_ctx: Pointer): int; cdecl; external avutil_dll; +(* * + * Check if the timecode feature is available for the given frame rate + * + * @return 0 if supported, <0 otherwise +*) +// int av_timecode_check_frame_rate(AVRational rate); +function av_timecode_check_frame_rate(rate: AVRational): int; cdecl; external avutil_dll; +{$ENDREGION} +{$REGION 'mathematics.h'} + +const + + M_E = 2.7182818284590452354; (* e *) + M_LN2 = 0.69314718055994530942; (* log_e 2 *) + M_LN10 = 2.30258509299404568402; (* log_e 10 *) + M_LOG2_10 = 3.32192809488736234787; (* log_2 10 *) + M_PHI = 1.61803398874989484820; (* phi / golden ratio *) + M_PI = 3.14159265358979323846; (* pi *) + M_PI_2 = 1.57079632679489661923; (* pi/2 *) + M_SQRT1_2 = 0.70710678118654752440; (* 1/sqrt(2) *) + M_SQRT2 = 1.41421356237309504880; (* sqrt(2) *) + // NAN = av_int2float(0x7fc00000); + // INFINITY = av_int2float(0x7f800000); + +type + AVRounding = int; + + (* * + * Rounding methods. + *) +const + // AVRounding = ( // + AV_ROUND_ZERO = 0; + /// < Round toward zero. + AV_ROUND_INF = 1; + /// < Round away from zero. + AV_ROUND_DOWN = 2; + /// < Round toward -infinity. + AV_ROUND_UP = 3; + /// < Round toward +infinity. + AV_ROUND_NEAR_INF = 5; + /// < Round to nearest and halfway cases away from zero. + (* * + * Flag telling rescaling functions to pass `INT64_MIN`/`MAX` through + * unchanged, avoiding special cases for #AV_NOPTS_VALUE. + * + * Unlike other values of the enumeration AVRounding, this value is a + * bitmask that must be used in conjunction with another value of the + * enumeration through a bitwise OR, in order to set behavior for normal + * cases. 
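+ *
+ * Since this binding declares the rounding values as plain integer constants,
+ * the flag is simply OR-ed in; Pascal sketch for a timestamp that may be
+ * AV_NOPTS_VALUE:
+ * @code
+ *   pts := av_rescale_rnd(pts, 1, 2, AV_ROUND_NEAR_INF or AV_ROUND_PASS_MINMAX);
+ * @endcode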
+ * + * @code{.c} + * av_rescale_rnd(3, 1, 2, AV_ROUND_UP | AV_ROUND_PASS_MINMAX); + * // Rescaling 3: + * // Calculating 3 * 1 / 2 + * // 3 / 2 is rounded up to 2 + * // => 2 + * + * av_rescale_rnd(AV_NOPTS_VALUE, 1, 2, AV_ROUND_UP | AV_ROUND_PASS_MINMAX); + * // Rescaling AV_NOPTS_VALUE: + * // AV_NOPTS_VALUE == INT64_MIN + * // AV_NOPTS_VALUE is passed through + * // => AV_NOPTS_VALUE + * @endcode + *) + AV_ROUND_PASS_MINMAX = 8192; + // ); + + (* * + * Compute the greatest common divisor of two integer operands. + * + * @param a,b Operands + * @return GCD of a and b up to sign; if a >= 0 and b >= 0, return value is >= 0; + * if a == 0 and b == 0, returns 0. + *) + // int64_t av_const av_gcd(int64_t a, int64_t b); +function av_gcd(a, b: int64_t): int64_t; cdecl; external avutil_dll; +(* * + * Rescale a 64-bit integer with rounding to nearest. + * + * The operation is mathematically equivalent to `a * b / c`, but writing that + * directly can overflow. + * + * This function is equivalent to av_rescale_rnd() with #AV_ROUND_NEAR_INF. + * + * @see av_rescale_rnd(), av_rescale_q(), av_rescale_q_rnd() +*) +// int64_t av_rescale(int64_t a, int64_t b, int64_t c) av_const; +function av_rescale(a, b, c: int64_t): int64_t; cdecl; external avutil_dll; +(* * + * Rescale a 64-bit integer with specified rounding. + * + * The operation is mathematically equivalent to `a * b / c`, but writing that + * directly can overflow, and does not support different rounding methods. + * + * @see av_rescale(), av_rescale_q(), av_rescale_q_rnd() +*) +// int64_t av_rescale_rnd(int64_t a, int64_t b, int64_t c, enum AVRounding rnd) av_const; +function av_rescale_rnd(a, b, c: int64_t; rnd: AVRounding): int64_t; cdecl; external avutil_dll; +(* * + * Rescale a 64-bit integer by 2 rational numbers. + * + * The operation is mathematically equivalent to `a * bq / cq`. + * + * This function is equivalent to av_rescale_q_rnd() with #AV_ROUND_NEAR_INF. + * + * @see av_rescale(), av_rescale_rnd(), av_rescale_q_rnd() +*) +// int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq) av_const; +function av_rescale_q(a: int64_t; bq: AVRational; cq: AVRational): int64_t; cdecl; external avutil_dll; +(* * + * Rescale a 64-bit integer by 2 rational numbers with specified rounding. + * + * The operation is mathematically equivalent to `a * bq / cq`. + * + * @see av_rescale(), av_rescale_rnd(), av_rescale_q() +*) +// int64_t av_rescale_q_rnd(int64_t a, AVRational bq, AVRational cq, enum AVRounding rnd) av_const; +function av_rescale_q_rnd(a: int64_t; bq: AVRational; cq: AVRational; rnd: AVRounding): int64_t; cdecl; external avutil_dll; +(* * + * Compare two timestamps each in its own time base. + * + * @return One of the following values: + * - -1 if `ts_a` is before `ts_b` + * - 1 if `ts_a` is after `ts_b` + * - 0 if they represent the same position + * + * @warning + * The result of the function is undefined if one of the timestamps is outside + * the `int64_t` range when represented in the other's timebase. +*) +// int av_compare_ts(int64_t ts_a, AVRational tb_a, int64_t ts_b, AVRational tb_b); +function av_compare_ts(ts_a: int64_t; tb_a: AVRational; ts_b: int64_t; tb_b: AVRational): int; cdecl; external avutil_dll; +(* * + * Compare the remainders of two integer operands divided by a common divisor. + * + * In other words, compare the least significant `log2(mod)` bits of integers + * `a` and `b`. 
+ * + * @code{.c} + * av_compare_mod(0x11, 0x02, 0x10) < 0 // since 0x11 % 0x10 (0x1) < 0x02 % 0x10 (0x2) + * av_compare_mod(0x11, 0x02, 0x20) > 0 // since 0x11 % 0x20 (0x11) > 0x02 % 0x20 (0x02) + * @endcode + * + * @param a,b Operands + * @param mod Divisor; must be a power of 2 + * @return + * - a negative value if `a % mod < b % mod` + * - a positive value if `a % mod > b % mod` + * - zero if `a % mod == b % mod` +*) +// int64_t av_compare_mod(uint64_t a, uint64_t b, uint64_t mod); +function av_compare_mod(a: uint64_t; b: uint64_t; _mod: uint64_t): int64_t; cdecl; external avutil_dll; +(* * + * Rescale a timestamp while preserving known durations. + * + * This function is designed to be called per audio packet to scale the input + * timestamp to a different time base. Compared to a simple av_rescale_q() + * call, this function is robust against possible inconsistent frame durations. + * + * The `last` parameter is a state variable that must be preserved for all + * subsequent calls for the same stream. For the first call, `*last` should be + * initialized to #AV_NOPTS_VALUE. + * + * @param[in] in_tb Input time base + * @param[in] in_ts Input timestamp + * @param[in] fs_tb Duration time base; typically this is finer-grained + * (greater) than `in_tb` and `out_tb` + * @param[in] duration Duration till the next call to this function (i.e. + * duration of the current packet/frame) + * @param[in,out] last Pointer to a timestamp expressed in terms of + * `fs_tb`, acting as a state variable + * @param[in] out_tb Output timebase + * @return Timestamp expressed in terms of `out_tb` + * + * @note In the context of this function, "duration" is in term of samples, not + * seconds. +*) +// int64_t av_rescale_delta(AVRational in_tb, int64_t in_ts, AVRational fs_tb, int duration, int64_t *last, AVRational out_tb); +function av_rescale_delta(in_tb: AVRational; in_ts: int64_t; fs_tb: AVRational; duration: int; var last: int64_t; out_tb: AVRational): int64_t; cdecl; + external avutil_dll; +(* * + * Add a value to a timestamp. + * + * This function guarantees that when the same value is repeatly added that + * no accumulation of rounding errors occurs. + * + * @param[in] ts Input timestamp + * @param[in] ts_tb Input timestamp time base + * @param[in] inc Value to be added + * @param[in] inc_tb Time base of `inc` +*) +// int64_t av_add_stable(AVRational ts_tb, int64_t ts, AVRational inc_tb, int64_t inc); +function av_add_stable(ts_tb: AVRational; ts: int64_t; inc_tb: AVRational; inc: int64_t): int64_t; cdecl; external avutil_dll; +{$ENDREGION} +{$REGION 'parseutils.h'} +(* * + * Parse str and store the parsed ratio in q. + * + * Note that a ratio with infinite (1/0) or negative value is + * considered valid, so you should check on the returned value if you + * want to exclude those values. + * + * The undefined value can be expressed using the "0:0" string. 
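+ *
+ * Pascal sketch (aspect is an illustrative AVRational variable):
+ * @code
+ *   if av_parse_ratio(@aspect, '16:9', 65535, 0, nil) >= 0 then
+ *     Writeln(aspect.num, ':', aspect.den); // 16:9
+ * @endcode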
+ * + * @param[in,out] q pointer to the AVRational which will contain the ratio + * @param[in] str the string to parse: it has to be a string in the format + * num:den, a float number or an expression + * @param[in] max the maximum allowed numerator and denominator + * @param[in] log_offset log level offset which is applied to the log + * level of log_ctx + * @param[in] log_ctx parent logging context + * @return >= 0 on success, a negative error code otherwise +*) +// int av_parse_ratio(AVRational *q, const char *str, int max, int log_offset, void *log_ctx); +function av_parse_ratio(q: pAVRational; const str: PAnsiChar; max, log_offset: int; log_ctx: Pointer): int; cdecl; external avutil_dll; + +// #define av_parse_ratio_quiet(rate, str, max) av_parse_ratio(rate, str, max, AV_LOG_MAX_OFFSET, NULL) +function av_parse_ratio_quiet(q: pAVRational; const str: PAnsiChar; max: int): int; inline; + +(* * + * Parse str and put in width_ptr and height_ptr the detected values. + * + * @param[in,out] width_ptr pointer to the variable which will contain the detected + * width value + * @param[in,out] height_ptr pointer to the variable which will contain the detected + * height value + * @param[in] str the string to parse: it has to be a string in the format + * width x height or a valid video size abbreviation. + * @return >= 0 on success, a negative error code otherwise +*) +// int av_parse_video_size(int *width_ptr, int *height_ptr, const char *str); +function av_parse_video_size(var width_ptr: int; var height_ptr: int; const str: PAnsiChar): int; cdecl; external avutil_dll; +(* * + * Parse str and store the detected values in *rate. + * + * @param[in,out] rate pointer to the AVRational which will contain the detected + * frame rate + * @param[in] str the string to parse: it has to be a string in the format + * rate_num / rate_den, a float number or a valid video rate abbreviation + * @return >= 0 on success, a negative error code otherwise +*) +// int av_parse_video_rate(AVRational *rate, const char *str); +function av_parse_video_rate(rate: pAVRational; const str: PAnsiChar): int; cdecl; external avutil_dll; +(* * + * Put the RGBA values that correspond to color_string in rgba_color. + * + * @param color_string a string specifying a color. It can be the name of + * a color (case insensitive match) or a [0x|#]RRGGBB[AA] sequence, + * possibly followed by "@" and a string representing the alpha + * component. + * The alpha component may be a string composed by "0x" followed by an + * hexadecimal number or a decimal number between 0.0 and 1.0, which + * represents the opacity value (0x00/0.0 means completely transparent, + * 0xff/1.0 completely opaque). + * If the alpha component is not specified then 0xff is assumed. + * The string "random" will result in a random color. + * @param slen length of the initial part of color_string containing the + * color. It can be set to -1 if color_string is a null terminated string + * containing nothing else than the color. + * @return >= 0 in case of success, a negative value in case of + * failure (for example if color_string cannot be parsed). +*) +// int av_parse_color(uint8_t *rgba_color, const char *color_string, int slen, void *log_ctx); +function av_parse_color(rgba_color: puint8_t; const color_string: PAnsiChar; slen: int; log_ctx: Pointer): int; cdecl; external avutil_dll; +(* * + * Get the name of a color from the internal table of hard-coded named + * colors. 
+ * + * This function is meant to enumerate the color names recognized by + * av_parse_color(). + * + * @param color_idx index of the requested color, starting from 0 + * @param rgbp if not NULL, will point to a 3-elements array with the color value in RGB + * @return the color name string or NULL if color_idx is not in the array +*) +// const char *av_get_known_color_name(int color_idx, const uint8_t **rgb); +function av_get_known_color_name(color_idx: int; const rgb: ppuint8_t): PAnsiChar; cdecl; external avutil_dll; +(* * + * Parse timestr and return in *time a corresponding number of + * microseconds. + * + * @param timeval puts here the number of microseconds corresponding + * to the string in timestr. If the string represents a duration, it + * is the number of microseconds contained in the time interval. If + * the string is a date, is the number of microseconds since 1st of + * January, 1970 up to the time of the parsed date. If timestr cannot + * be successfully parsed, set *time to INT64_MIN. + + * @param timestr a string representing a date or a duration. + * - If a date the syntax is: + * @code + * [{YYYY-MM-DD|YYYYMMDD}[T|t| ]]{{HH:MM:SS[.m...]]]}|{HHMMSS[.m...]]]}}[Z] + * now + * @endcode + * If the value is "now" it takes the current time. + * Time is local time unless Z is appended, in which case it is + * interpreted as UTC. + * If the year-month-day part is not specified it takes the current + * year-month-day. + * - If a duration the syntax is: + * @code + * [-][HH:]MM:SS[.m...] + * [-]S+[.m...] + * @endcode + * @param duration flag which tells how to interpret timestr, if not + * zero timestr is interpreted as a duration, otherwise as a date + * @return >= 0 in case of success, a negative value corresponding to an + * AVERROR code otherwise +*) +// int av_parse_time(int64_t *timeval, const char *timestr, int duration); +function av_parse_time(timeval: pint64_t; const timestr: PAnsiChar; duration: int): int; cdecl; external avutil_dll; +(* * + * Attempt to find a specific tag in a URL. + * + * syntax: '?tag1=val1&tag2=val2...'. Little URL decoding is done. + * Return 1 if found. +*) +// int av_find_info_tag(char *arg, int arg_size, const char *tag1, const char *info); +function av_find_info_tag(arg: PAnsiChar; arg_size: int; const tag1: PAnsiChar; const info: PAnsiChar): int; cdecl; external avutil_dll; +(* * + * Simplified version of strptime + * + * Parse the input string p according to the format string fmt and + * store its results in the structure dt. + * This implementation supports only a subset of the formats supported + * by the standard strptime(). + * + * The supported input field descriptors are listed below. + * - %H: the hour as a decimal number, using a 24-hour clock, in the + * range '00' through '23' + * - %J: hours as a decimal number, in the range '0' through INT_MAX + * - %M: the minute as a decimal number, using a 24-hour clock, in the + * range '00' through '59' + * - %S: the second as a decimal number, using a 24-hour clock, in the + * range '00' through '59' + * - %Y: the year as a decimal number, using the Gregorian calendar + * - %m: the month as a decimal number, in the range '1' through '12' + * - %d: the day of the month as a decimal number, in the range '1' + * through '31' + * - %T: alias for '%H:%M:%S' + * - %%: a literal '%' + * + * @return a pointer to the first character not processed in this function + * call. 
In case the input string contains more characters than + * required by the format string the return value points right after + * the last consumed input character. In case the whole input string + * is consumed the return value points to the null byte at the end of + * the string. On failure NULL is returned. +*) +// char *av_small_strptime(const char *p, const char *fmt, struct tm *dt); +function av_small_strptime(const p: PAnsiChar; const fmt: PAnsiChar; dt: ptm): PAnsiChar; cdecl; external avutil_dll; + +(* * + * Convert the decomposed UTC time in tm to a time_t value. +*) +// time_t av_timegm(struct tm *tm); +function av_timegm(tm: ptm): time_t; cdecl; external avutil_dll; +{$ENDREGION} +{$REGION 'motion_vector.h'} + +type + pAVMotionVector = ^AVMotionVector; + + AVMotionVector = record + (* * + * Where the current macroblock comes from; negative value when it comes + * from the past, positive value when it comes from the future. + * XXX: set exact relative ref frame reference instead of a +/- 1 "direction". + *) + source: int32_t; + (* * + * Width and height of the block. + *) + w, h: uint8_t; + (* * + * Absolute source position. Can be outside the frame area. + *) + src_x, src_y: int16_t; + (* * + * Absolute destination position. Can be outside the frame area. + *) + dst_x, dst_y: int16_t; + (* * + * Extra flag information. + * Currently unused. + *) + flags: uint64_t; + (* * + * Motion vector + * src_x = dst_x + motion_x / motion_scale + * src_y = dst_y + motion_y / motion_scale + *) + motion_x, motion_y: int32_t; + motion_scale: uint16_t; + end; +{$ENDREGION} +{$REGION 'md5.h'} + +type + pAVMD5 = ^AVMD5; + + AVMD5 = record + + end; + + (* * + * Allocate an AVMD5 context. + *) + // struct AVMD5 *av_md5_alloc(void); +function av_md5_alloc(): pAVMD5; cdecl; external avutil_dll; + +(* * + * Initialize MD5 hashing. + * + * @param ctx pointer to the function context (of size av_md5_size) +*) +// void av_md5_init(struct AVMD5 *ctx); +procedure av_md5_init(ctx: pAVMD5); cdecl; external avutil_dll; + +(* * + * Update hash value. + * + * @param ctx hash function context + * @param src input data to update hash with + * @param len input data length +*) + +// #if FF_API_CRYPTO_SIZE_T +// void av_md5_update(struct AVMD5 *ctx, const uint8_t *src, int len); +// #else +// void av_md5_update(struct AVMD5 *ctx, const uint8_t *src, size_t len); +// #endif +procedure av_md5_update(ctx: pAVMD5; const src: puint8_t; len: +{$IFDEF FF_API_CRYPTO_SIZE_T} + int +{$ELSE} + size_t +{$ENDIF} + ); cdecl; external avutil_dll; + +(* * + * Finish hashing and output digest value. + * + * @param ctx hash function context + * @param dst buffer where output digest value is stored +*) +// void av_md5_final(struct AVMD5 *ctx, uint8_t *dst); +procedure av_md5_final(ctx: pAVMD5; dst: puint8_t); cdecl; external avutil_dll; + +(* * + * Hash an array of data. + * + * @param dst The output buffer to write the digest into + * @param src The data to hash + * @param len The length of the data, in bytes +*) + +// #if FF_API_CRYPTO_SIZE_T +// void av_md5_sum(uint8_t *dst, const uint8_t *src, const int len); +// #else +// void av_md5_sum(uint8_t *dst, const uint8_t *src, size_t len); +// #endif +procedure av_md5_sum(dst: puint8_t; const src: puint8_t; const len: +{$IFDEF FF_API_CRYPTO_SIZE_T} + int +{$ELSE} + size_t +{$ENDIF} + ); cdecl; external avutil_dll; + +{$ENDREGION} +{$REGION 'avassert.h'} +(* * + * Assert that floating point opperations can be executed. 
+ * + * This will av_assert0() that the cpu is not in MMX state on X86 +*) +// void av_assert0_fpu(void); +procedure av_assert0_fpu(); cdecl; external avutil_dll; +{$ENDREGION} +{$REGION 'intfloat.h'} + +type + av_intfloat32 = record + case Integer of + 0: + (i: uint32_t); + 1: + (f: float); + end; + + av_intfloat64 = record + case Integer of + 0: + (i: uint64_t); + 1: + (f: double); + end; + + (* * + * Reinterpret a 32-bit integer as a float. + *) + // static av_always_inline float av_int2float(uint32_t i) +function av_int2float(i: uint32_t): float; inline; + +(* * + * Reinterpret a float as a 32-bit integer. +*) +// static av_always_inline uint32_t av_float2int(float f) +function av_float2int(f: float): uint32_t; inline; + +(* * + * Reinterpret a 64-bit integer as a double. +*) +// static av_always_inline double av_int2double(uint64_t i) +function av_int2double(i: uint64_t): double; inline; + +(* * + * Reinterpret a double as a 64-bit integer. +*) +// static av_always_inline uint64_t av_double2int(double f) +function av_double2int(f: double): uint64_t; inline; + +{$ENDREGION} +{$REGION 'mastering_display_metadata.h'} + +type + (* * + * Mastering display metadata capable of representing the color volume of + * the display used to master the content (SMPTE 2086:2014). + * + * To be used as payload of a AVFrameSideData or AVPacketSideData with the + * appropriate type. + * + * @note The struct should be allocated with av_mastering_display_metadata_alloc() + * and its size is not a part of the public ABI. + *) + pAVMasteringDisplayMetadata = ^AVMasteringDisplayMetadata; + + AVMasteringDisplayMetadata = record + (* * + * CIE 1931 xy chromaticity coords of color primaries (r, g, b order). + *) + display_primaries: array [0 .. 2, 0 .. 1] of AVRational; + + (* * + * CIE 1931 xy chromaticity coords of white point. + *) + white_point: array [0 .. 1] of AVRational; + + (* * + * Min luminance of mastering display (cd/m^2). + *) + min_luminance: AVRational; + + (* * + * Max luminance of mastering display (cd/m^2). + *) + max_luminance: AVRational; + + (* * + * Flag indicating whether the display primaries (and white point) are set. + *) + has_primaries: int; + + (* * + * Flag indicating whether the luminance (min_ and max_) have been set. + *) + has_luminance: int; + end; + + (* * + * Allocate an AVMasteringDisplayMetadata structure and set its fields to + * default values. The resulting struct can be freed using av_freep(). + * + * @return An AVMasteringDisplayMetadata filled with default values or NULL + * on failure. + *) + // AVMasteringDisplayMetadata *av_mastering_display_metadata_alloc(void); +function av_mastering_display_metadata_alloc(): pAVMasteringDisplayMetadata; cdecl; external avutil_dll; +(* * + * Allocate a complete AVMasteringDisplayMetadata and add it to the frame. + * + * @param frame The frame which side data is added to. + * + * @return The AVMasteringDisplayMetadata structure to be filled by caller. +*) +// AVMasteringDisplayMetadata *av_mastering_display_metadata_create_side_data(AVFrame *frame); +function av_mastering_display_metadata_create_side_data(frame: pAVFrame): pAVMasteringDisplayMetadata; cdecl; external avutil_dll; + +type + (* * + * Content light level needed by to transmit HDR over HDMI (CTA-861.3). + * + * To be used as payload of a AVFrameSideData or AVPacketSideData with the + * appropriate type. + * + * @note The struct should be allocated with av_content_light_metadata_alloc() + * and its size is not a part of the public ABI. 
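+ *
+ * Pascal sketch of attaching it to a decoded frame (the nit values are
+ * illustrative):
+ * @code
+ *   clm := av_content_light_metadata_create_side_data(frame);
+ *   if clm <> nil then
+ *   begin
+ *     clm^.MaxCLL  := 1000;
+ *     clm^.MaxFALL := 400;
+ *   end;
+ * @endcode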
+ *) + pAVContentLightMetadata = ^AVContentLightMetadata; + + AVContentLightMetadata = record + (* * + * Max content light level (cd/m^2). + *) + MaxCLL: unsigned; + (* * + * Max average light level per frame (cd/m^2). + *) + MaxFALL: unsigned; + end; + + (* * + * Allocate an AVContentLightMetadata structure and set its fields to + * default values. The resulting struct can be freed using av_freep(). + * + * @return An AVContentLightMetadata filled with default values or NULL + * on failure. + *) + // AVContentLightMetadata *av_content_light_metadata_alloc(size_t *size); +function av_content_light_metadata_alloc(var size: size_t): pAVContentLightMetadata; cdecl; external avutil_dll; +(* * + * Allocate a complete AVContentLightMetadata and add it to the frame. + * + * @param frame The frame which side data is added to. + * + * @return The AVContentLightMetadata structure to be filled by caller. +*) +// AVContentLightMetadata *av_content_light_metadata_create_side_data(AVFrame *frame); +function av_content_light_metadata_create_side_data(frame: pAVFrame): pAVContentLightMetadata; cdecl; external avutil_dll; +{$ENDREGION} +{$REGION 'pixelutils.h'} + +(* * + * Sum of abs(src1[x] - src2[x]) +*) +type + // int (*av_pixelutils_sad_fn)(const uint8_t *src1, ptrdiff_t stride1, + // const uint8_t *src2, ptrdiff_t stride2); + av_pixelutils_sad_fn = function(const src1: puint8_t; stride1: ptrdiff_t; const src2: puint8_t; stride2: ptrdiff_t): int; cdecl; + + (* * + * Get a potentially optimized pointer to a Sum-of-absolute-differences + * function (see the av_pixelutils_sad_fn prototype). + * + * @param w_bits 1<state[c->index & 63] = c->state[(c->index-24) & 63] + c->state[(c->index-55) & 63]; +// c->index += 1U; +// return a; +// } +function av_lfg_get(c: pAVLFG): uint; inline; + +(* * + * Get the next random unsigned 32-bit number using a MLFG. + * + * Please also consider av_lfg_get() above, it is faster. +*) +// static inline unsigned int av_mlfg_get(AVLFG *c){ +// unsigned int a= c->state[(c->index-55) & 63]; +// unsigned int b= c->state[(c->index-24) & 63]; +// a = c->state[c->index & 63] = 2*a*b+a+b; +// c->index += 1U; +// return a; +// } + +function av_mlfg_get(c: pAVLFG): uint; inline; + +(* * + * Get the next two numbers generated by a Box-Muller Gaussian + * generator using the random numbers issued by lfg. + * + * @param out array where the two generated numbers are placed +*) +// void av_bmg_get(AVLFG *lfg, double out[2]); +Type + Tav_bmg_get_arrayofdouble = array [0 .. 
1] of double;
+
+procedure av_bmg_get(lfg: pAVLFG; &out: Tav_bmg_get_arrayofdouble); cdecl; external avutil_dll;
+{$ENDREGION}
+
+implementation
+
+{$REGION 'common.h'}
+
+function RSHIFT(a, b: int): int; inline;
+begin
+  if a > 0 then
+    Result := (a + ((1 shl b) shr 1)) shr b
+  else
+    Result := (a + ((1 shl b) shr 1) - 1) shr b;
+end;
+
+// C: (((a)>=0 ? (a) + ((b)>>1) : (a) - ((b)>>1))/(b)) -- both branches divide by b.
+function ROUNDED_DIV(a, b: int): int; inline;
+begin
+  if a >= 0 then
+    Result := (a + (b shr 1)) div b
+  else
+    Result := (a - (b shr 1)) div b;
+end;
+
+function FFUDIV(a, b: int): int; inline;
+begin
+  if a > 0 then
+    Result := a
+  else
+    Result := a - b + 1;
+  Result := Result div b;
+end;
+
+function FFUMOD(a, b: int): int; inline;
+begin
+  Result := a - b * FFUDIV(a, b);
+end;
+
+function FFABS(a: int): int; inline;
+begin
+  if a >= 0 then
+    Result := a
+  else
+    Result := -a;
+end;
+
+function FFSIGN(a: int): int; inline;
+begin
+  if a > 0 then
+    Result := 1
+  else
+    Result := -1;
+end;
+
+function FFNABS(a: int): int; inline;
+begin
+  if a <= 0 then
+    Result := a
+  else
+    Result := -a;
+end;
+
+function FFDIFFSIGN(x, y: int): Boolean; inline;
+begin
+  Result := FFSIGN(x) <> FFSIGN(y);
+end;
+
+function FFMAX(a, b: int): int; inline;
+begin
+  if a > b then
+    Result := a
+  else
+    Result := b;
+end;
+
+// The clipping helpers below use explicit comparisons instead of the C bit
+// tricks ((~a)>>31 and friends), because Pascal's shr is a logical shift and
+// does not sign-extend negative values.
+function av_clip_c(a: int; amin: int; amax: int): int; inline;
+begin
+  // #if defined(HAVE_AV_CONFIG_H) && defined(ASSERT_LEVEL) && ASSERT_LEVEL >= 2
+  // if (amin > amax) abort();
+  // #endif
+  if (a < amin) then
+    Result := amin
+  else if (a > amax) then
+    Result := amax
+  else
+    Result := a;
+end;
+
+function av_clip64_c(a: int64_t; amin: int64_t; amax: int64_t): int64_t; inline;
+begin
+  if (a < amin) then
+    Result := amin
+  else if (a > amax) then
+    Result := amax
+  else
+    Result := a;
+end;
+
+function av_clip_uint8_c(a: int): uint8_t; inline;
+begin
+  Result := av_clip_c(a, 0, $FF);
+end;
+
+function av_clip_int8_c(a: int): int8_t; inline;
+begin
+  Result := av_clip_c(a, -$80, $7F);
+end;
+
+function av_clip_uint16_c(a: int): uint16_t; inline;
+begin
+  Result := av_clip_c(a, 0, $FFFF);
+end;
+
+function av_clip_int16_c(a: int): int16_t; inline;
+begin
+  Result := av_clip_c(a, -$8000, $7FFF);
+end;
+
+function av_clipl_int32_c(a: int64_t): int32_t; inline;
+begin
+  Result := int32_t(av_clip64_c(a, Low(int32_t), High(int32_t)));
+end;
+
+function av_clip_intp2_c(a: int; p: int): int; inline;
+begin
+  Result := av_clip_c(a, -(1 shl p), (1 shl p) - 1);
+end;
+
+function av_clip_uintp2_c(a: int; p: int): uint; inline;
+begin
+  Result := uint(av_clip_c(a, 0, (1 shl p) - 1));
+end;
+
+function av_mod_uintp2_c(a: uint; p: uint): uint; inline;
+begin
+  Result := a and ((uint(1) shl p) - 1);
+end;
+
+function av_sat_add32_c(a: int; b: int): int; inline;
+begin
+  // Widen to 64 bit before clipping so the intermediate sum cannot wrap.
+  Result := av_clipl_int32_c(int64_t(a) + b);
+end;
+
+function av_sat_dadd32_c(a: int; b: int): int; inline;
+begin
+  Result := av_sat_add32_c(a, av_sat_add32_c(b, b));
+end;
+
+function av_sat_sub32_c(a: int; b: int): int; inline;
+begin
+  Result := av_clipl_int32_c(int64_t(a) - b);
+end;
+
+function av_sat_dsub32_c(a: int; b: int): int; inline;
+begin
+  Result := av_sat_sub32_c(a, av_sat_add32_c(b, b));
+end;
+
+function av_clipf_c(a: float; amin: float; amax: float): float; inline;
+begin
+  if a < amin then
+    Result := amin
+  else if a > amax then
+    Result := amax
+  else
+    Result := a;
+end;
+
+function av_clipd_c(a: double; amin: double; amax: double): double; inline;
+begin
+  if a < amin then
+    Result := amin
+  else if a > amax then
+    Result := amax
+  else
+    Result := a;
+end;
+
+function av_ceil_log2_c(x: int): int; inline;
+begin
+  Result := av_log2((x - 1) shl 1);
+end;
+
+function av_popcount_c(x: uint32_t): int; inline;
+begin
+  x := x - ((x shr 1) and $55555555);
+  x := (x and $33333333) + ((x shr 2) and $33333333);
+  x := (x + (x shr 4)) and $0F0F0F0F;
+  x := x + (x shr 8);
+  Result := (x + (x shr 16)) and $3F;
+end;
+
+function av_popcount64_c(x: uint64_t): int; inline;
+begin
+  Result := av_popcount_c(uint32_t(x)) + av_popcount_c(uint32_t(x shr 32));
+end;
+
+function av_parity_c(v: uint32_t): int; inline;
+begin
+  Result := av_popcount_c(v) and 1;
+end;
+
+{$ENDREGION}
+{$REGION 'rational.h'}
+
+function av_make_q(_num: int; _den: int): AVRational; inline;
+begin
+  Result.num := _num;
+  Result.den := _den;
+end;
+
+function av_cmp_q(a, b: AVRational): int; inline;
+Var
+  tmp: int64_t;
+begin
+  // Cross-multiply in 64 bit so num*den products cannot overflow.
+  tmp := int64_t(a.num) * b.den - int64_t(b.num) * a.den;
+  if (tmp <> 0) then
+  begin
+    // Sign of tmp, corrected by the signs of both denominators.
+    if ((tmp xor a.den xor b.den) < 0) then
+      Result := -1
+    else
+      Result := 1;
+  end
+  else if (b.den <> 0) and (a.den <> 0) then
+    Result := 0
+  else if (a.num <> 0) and (b.num <> 0) then
+    Result := ord(b.num < 0) - ord(a.num < 0)
+  else
+    Result := Low(int); // both are 0/0: order is undefined
+end;
+
+function av_q2d(a: AVRational): double; inline;
+begin
+  Result := a.num / a.den;
+end;
+
+function av_inv_q(q: AVRational): AVRational; inline;
+begin
+  // 1/q: numerator and denominator swap places.
+  Result.num := q.den;
+  Result.den := q.num;
+end;
+
+function av_x_if_null(const p: Pointer; const x: Pointer): Pointer; inline;
+begin
+  // return (void *)(intptr_t)(p ? p : x);
+  if Assigned(p) then
+    Result := p
+  else
+    Result := x;
+end;
+{$ENDREGION}
+{$REGION 'opt.h'}
+
+function av_opt_set_int_list(obj: Pointer; name: PAnsiChar; list: Pointer; item_size: int; term: int64_t; flags: int): Integer; inline;
+begin
+  if av_int_list_length(list, item_size, term) > MaxInt div item_size then
+    Result := AVERROR_EINVAL
+  else
+    Result := av_opt_set_bin(obj, name, puint8_t(list), av_int_list_length(list, item_size, term) * item_size, flags);
+end;
+
+function av_int_list_length(list: Pointer; item_size: int; term: int64_t): int; inline;
+begin
+  Result := av_int_list_length_for_size(item_size, list, term);
+end;
+{$ENDREGION}
+{$REGION 'error.h'}
+
+function av_make_error_string(errbuf: PAnsiChar; errbuf_size: size_t; errnum: int): PAnsiChar;
+begin
+  // errbuf already points at the caller's buffer, so it is passed as-is.
+  av_strerror(errnum, errbuf, errbuf_size);
+  Result := errbuf;
+end;
+
+var
+  error_str: array [0 .. 
AV_ERROR_MAX_STRING_SIZE - 1] of AnsiChar; + +function av_err2str(errnum: int): PAnsiChar; +begin + FillChar(error_str, SizeOf(error_str), 0); + av_make_error_string(@error_str, AV_ERROR_MAX_STRING_SIZE, errnum); + Result := @error_str; +end; + +{$ENDREGION} +{$REGION 'avstring.h'} + +function av_strnlen(const s: PAnsiChar; len: size_t): size_t; inline; +begin + Result := 0; + While s[Result] <> #0 do + inc(Result); +end; + +function av_isdigit(c: int): Boolean; inline; +begin + Result := (AnsiChar(c) >= '0') and (AnsiChar(c) <= '9'); +end; + +function av_isgraph(c: int): Boolean; inline; +begin + Result := (c > 32) and (c < 127); +end; + +function av_isspace(c1: int): Boolean; inline; +var + c: AnsiChar; +begin + c := AnsiChar(c1); + Result := // + (c = ' ') or // + (c = #$0C) or // + (c = #$0A) or // + (c = #$0D) or // + (c = #$09) or // + (c = #$0B); +end; + +function av_toupper(c1: int): int; inline; +var + c: AnsiChar; +begin + c := AnsiChar(c1); + Result := Ord(c); + if (c >= 'a') and (c <= 'z') then + Result := Result xor $20; +end; + +function av_tolower(c1: int): int; inline; +var + c: AnsiChar; +begin + c := AnsiChar(c1); + Result := Ord(c); + if (c >= 'A') and (c <= 'Z') then + Result := Result xor $20; +end; + +function av_isxdigit(c1: int): Boolean; inline; +var + c: AnsiChar; +begin + c1 := av_tolower(c1); + c := AnsiChar(AnsiChar(c1)); + Result := av_isdigit(c1) or ((c >= 'a') and (c <= 'f')); +end; +{$ENDREGION} +{$REGION 'bprint.h'} + +function av_bprint_is_complete(const buf: pAVBPrint): Boolean; inline; +begin + Result := buf^.len < buf^.size; +end; +{$ENDREGION} +{$REGION 'fifo.h'} + +function av_fifo_peek2(const f: pAVFifoBuffer; offs: int): puint8_t; inline; +var + ptr: puint8_t; +begin + ptr := f^.rptr + offs; + if (ptr >= f^._end) then + ptr := f^.buffer + (ptr - f^._end) + else if (ptr < f^.buffer) then + ptr := f^._end - (f^.buffer - ptr); + Result := ptr; +end; +{$ENDREGION} +{$REGION 'timestamp.h'} + +function av_ts_make_string(buf: PAnsiChar; ts: int64_t): PAnsiChar; +Var + p: AnsiString; + m: size_t; +begin + { + if (ts == AV_NOPTS_VALUE) + snprintf(buf, AV_TS_MAX_STRING_SIZE, "NOPTS"); + else + snprintf(buf, AV_TS_MAX_STRING_SIZE, "%" PRId64, ts); + return buf; + } + if (ts = AV_NOPTS_VALUE) then + p := 'NOPTS' + else + str(ts, p); + m := length(p); + if m > AV_TS_MAX_STRING_SIZE then + m := AV_TS_MAX_STRING_SIZE; + move(p[1], buf^, m); + Result := buf; +end; + +var + av_ts_buf: array [0 .. 
AV_TS_MAX_STRING_SIZE] of AnsiChar; + +function av_ts2str(ts: int64_t): PAnsiChar; +begin + FillChar(av_ts_buf, SizeOf(av_ts_buf), 0); + Result := av_ts_make_string(@av_ts_buf[0], ts); +end; + +function av_ts_make_time_string(buf: PAnsiChar; ts: int64_t; tb: pAVRational): PAnsiChar; +Var + p: AnsiString; + m: size_t; +begin + { + if (ts == AV_NOPTS_VALUE) snprintf(buf, AV_TS_MAX_STRING_SIZE, "NOPTS"); + else snprintf(buf, AV_TS_MAX_STRING_SIZE, "%.6g", av_q2d(*tb) * ts); + return buf; + } + if (ts = AV_NOPTS_VALUE) then + p := 'NOPTS' + else + str((av_q2d(tb^) * ts): 1: 6, p); + m := length(p); + if m > AV_TS_MAX_STRING_SIZE then + m := AV_TS_MAX_STRING_SIZE; + move(p[1], buf^, m); + Result := buf; +end; + +function av_ts2timestr(ts: int64_t; tb: pAVRational): PAnsiChar; +begin + FillChar(av_ts_buf, SizeOf(av_ts_buf), 0); + Result := av_ts_make_time_string(@av_ts_buf[0], ts, tb); +end; + +{$ENDREGION} +{$REGION 'mem.h'} + +function av_size_mult(a: size_t; b: size_t; var r: size_t): int; inline; +var + t: size_t; +begin + t := a * b; + (* Hack inspired from glibc: don't try the division if nelem and elsize + * are both less than sqrt(SIZE_MAX). *) + if ((a or b) >= (size_t(1) shl (SizeOf(size_t) * 4))) and (a <> 0) and ((t div a) <> b) then + Exit(AVERROR_EINVAL); + r := t; + Result := 0; +end; +{$ENDREGION} +{$REGION 'parseutils.h'} + +function av_parse_ratio_quiet(q: pAVRational; const str: PAnsiChar; max: int): int; inline; +begin + Result := av_parse_ratio(q, str, max, AV_LOG_MAX_OFFSET, nil); +end; +{$ENDREGION} +{$REGION 'intfloat.h'} + +function av_int2float(i: uint32_t): float; inline; +begin + Result := av_intfloat32(i).f; +end; + +function av_float2int(f: float): uint32_t; inline; +begin + Result := av_intfloat32(f).i; +end; + +function av_int2double(i: uint64_t): double; inline; +begin + Result := av_intfloat64(i).f; +end; + +function av_double2int(f: double): uint64_t; inline; +begin + Result := av_intfloat64(f).i; +end; + +{$ENDREGION} +{$REGION 'lfg.h'} + +function av_lfg_get(c: pAVLFG): uint; inline; +begin + // unsigned a = c->state[c->index & 63] = c->state[(c->index-24) & 63] + c->state[(c->index-55) & 63]; + Result := c^.state[(c^.index - 24) and 63] + c^.state[(c^.index - 55) and 63]; + c^.state[c^.index and 63] := Result; + // c->index += 1U; + c^.index := c^.index + 1; + // return a; +end; + +function av_mlfg_get(c: pAVLFG): uint; inline; +var + a, b: uint; +begin + // unsigned int a= c->state[(c->index-55) & 63]; + a := c^.state[(c^.index - 55) and 63]; + // unsigned int b= c->state[(c->index-24) & 63]; + b := c^.state[(c^.index - 24) and 63]; + // a = c->state[c->index & 63] = 2*a*b+a+b; + Result := 2 * a * b + a + b; + c^.state[c^.index and 63] := Result; + // c->index += 1U; + // return a; +end; + +{$ENDREGION} + +end. + \ No newline at end of file diff --git a/ffmpeg/libpostproc.pas b/ffmpeg/libpostproc.pas new file mode 100644 index 0000000..41f8cdb --- /dev/null +++ b/ffmpeg/libpostproc.pas @@ -0,0 +1,108 @@ +unit libpostproc; + +{$IFDEF FPC} +{$MODE Delphi} +{$ENDIF} + +interface + +Uses + ffmpeg_types; + +{$I ffmpeg.inc} + +(* * + * Return the LIBPOSTPROC_VERSION_INT constant. +*) +// unsigned postproc_version(void); +function postproc_version(): unsigned; cdecl; external postproc_dll; + +(* * + * Return the libpostproc build-time configuration. +*) +// const char *postproc_configuration(void); +function postproc_configuration(): pAnsiChar; cdecl; external postproc_dll; +(* * + * Return the libpostproc license. 
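+ *
+ * These query functions are mainly useful for a one-time startup log; a
+ * Pascal sketch:
+ * @code
+ *   Writeln('libpostproc ', postproc_version(), ': ', postproc_configuration());
+ * @endcode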
+*) +// const char *postproc_license(void); +function postproc_license(): pAnsiChar; cdecl; external postproc_dll; + +const + PP_QUALITY_MAX = 6; + + // #include + +type + ppp_context = ^pp_context; + + pp_context = record + end; + + ppp_mode = ^pp_mode; + + pp_mode = record + end; + + Tpp_src_puint8_t = array [0 .. 2] of puint8_t; + Tpp_dst_puint8_t = Tpp_src_puint8_t; + Tpp_srcStride_int = array [0 .. 2] of int; + Tpp_dstStride_int = Tpp_srcStride_int; + + (* + #if LIBPOSTPROC_VERSION_INT < (52<<16) + typedef pp_context pp_context_t; + typedef pp_mode pp_mode_t; + extern const char *const pp_help; ///< a simple help text + #else + extern const char pp_help[]; ///< a simple help text + #endif + *) + + // void pp_postprocess(const uint8_t * src[3], const int srcStride[3], + // uint8_t * dst[3], const int dstStride[3], + // int horizontalSize, int verticalSize, + // const int8_t *QP_store, int QP_stride, + // pp_mode *mode, pp_context *ppContext, int pict_type); + +procedure pp_postprocess(const src: Tpp_src_puint8_t; const srcStride: Tpp_srcStride_int; dst: Tpp_dst_puint8_t; + const dstStride: Tpp_dstStride_int; horizontalSize: int; verticalSize: int; const QP_store: pint8_t; QP_stride: int; mode: ppp_mode; + ppContext: ppp_context; pict_type: int); cdecl; external postproc_dll; +(* * + * Return a pp_mode or NULL if an error occurred. + * + * @param name the string after "-pp" on the command line + * @param quality a number from 0 to PP_QUALITY_MAX +*) +// pp_mode *pp_get_mode_by_name_and_quality(const char *name, int quality); +function pp_get_mode_by_name_and_quality(const name: pAnsiChar; quality: int): ppp_mode; cdecl; external postproc_dll; + +// void pp_free_mode(pp_mode *mode); +procedure pp_free_mode(mode: ppp_mode); cdecl; external postproc_dll; + +// pp_context *pp_get_context(int width, int height, int flags); +function pp_get_context(width: int; height: int; flags: int): ppp_context; cdecl; external postproc_dll; + +// void pp_free_context(pp_context *ppContext); +procedure pp_free_context(ppContext: ppp_context); cdecl; external postproc_dll; + +const + PP_CPU_CAPS_MMX = $80000000; + PP_CPU_CAPS_MMX2 = $20000000; + PP_CPU_CAPS_3DNOW = $40000000; + PP_CPU_CAPS_ALTIVEC = $10000000; + PP_CPU_CAPS_AUTO = $00080000; + + PP_FORMAT = $00000008; + PP_FORMAT_420 = ($00000011 or PP_FORMAT); + PP_FORMAT_422 = ($00000001 or PP_FORMAT); + PP_FORMAT_411 = ($00000002 or PP_FORMAT); + PP_FORMAT_444 = ($00000000 or PP_FORMAT); + PP_FORMAT_440 = ($00000010 or PP_FORMAT); + + PP_PICT_TYPE_QP2 = $00000010; + /// < MPEG2 style QScale + +implementation + +end. diff --git a/ffmpeg/libswresample.pas b/ffmpeg/libswresample.pas new file mode 100644 index 0000000..f497092 --- /dev/null +++ b/ffmpeg/libswresample.pas @@ -0,0 +1,564 @@ +unit libswresample; + +{$IFDEF FPC} +{$MODE Delphi} +{$ENDIF} + +interface + +Uses + ffmpeg_types, libavutil; + +{$I ffmpeg.inc} + +(* * + * @defgroup lswr libswresample + * @{ + * + * Audio resampling, sample format conversion and mixing library. + * + * Interaction with lswr is done through SwrContext, which is + * allocated with swr_alloc() or swr_alloc_set_opts(). It is opaque, so all parameters + * must be set with the @ref avoptions API. + * + * The first thing you will need to do in order to use lswr is to allocate + * SwrContext. This can be done with swr_alloc() or swr_alloc_set_opts(). If you + * are using the former, you must set options through the @ref avoptions API. 
+ * The latter function provides the same feature, but it allows you to set some + * common options in the same statement. + * + * For example the following code will setup conversion from planar float sample + * format to interleaved signed 16-bit integer, downsampling from 48kHz to + * 44.1kHz and downmixing from 5.1 channels to stereo (using the default mixing + * matrix). This is using the swr_alloc() function. + * @code + * SwrContext *swr = swr_alloc(); + * av_opt_set_channel_layout(swr, "in_channel_layout", AV_CH_LAYOUT_5POINT1, 0); + * av_opt_set_channel_layout(swr, "out_channel_layout", AV_CH_LAYOUT_STEREO, 0); + * av_opt_set_int(swr, "in_sample_rate", 48000, 0); + * av_opt_set_int(swr, "out_sample_rate", 44100, 0); + * av_opt_set_sample_fmt(swr, "in_sample_fmt", AV_SAMPLE_FMT_FLTP, 0); + * av_opt_set_sample_fmt(swr, "out_sample_fmt", AV_SAMPLE_FMT_S16, 0); + * @endcode + * + * The same job can be done using swr_alloc_set_opts() as well: + * @code + * SwrContext *swr = swr_alloc_set_opts(NULL, // we're allocating a new context + * AV_CH_LAYOUT_STEREO, // out_ch_layout + * AV_SAMPLE_FMT_S16, // out_sample_fmt + * 44100, // out_sample_rate + * AV_CH_LAYOUT_5POINT1, // in_ch_layout + * AV_SAMPLE_FMT_FLTP, // in_sample_fmt + * 48000, // in_sample_rate + * 0, // log_offset + * NULL); // log_ctx + * @endcode + * + * Once all values have been set, it must be initialized with swr_init(). If + * you need to change the conversion parameters, you can change the parameters + * using @ref AVOptions, as described above in the first example; or by using + * swr_alloc_set_opts(), but with the first argument the allocated context. + * You must then call swr_init() again. + * + * The conversion itself is done by repeatedly calling swr_convert(). + * Note that the samples may get buffered in swr if you provide insufficient + * output space or if sample rate conversion is done, which requires "future" + * samples. Samples that do not require future input can be retrieved at any + * time by using swr_convert() (in_count can be set to 0). + * At the end of conversion the resampling buffer can be flushed by calling + * swr_convert() with NULL in and 0 in_count. + * + * The samples used in the conversion process can be managed with the libavutil + * @ref lavu_sampmanip "samples manipulation" API, including av_samples_alloc() + * function used in the following example. + * + * The delay between input and output, can at any time be found by using + * swr_get_delay(). + * + * The following code demonstrates the conversion loop assuming the parameters + * from above and caller-defined functions get_input() and handle_output(): + * @code + * uint8_t **input; + * int in_samples; + * + * while (get_input(&input, &in_samples)) { + * uint8_t *output; + * int out_samples = av_rescale_rnd(swr_get_delay(swr, 48000) + + * in_samples, 44100, 48000, AV_ROUND_UP); + * av_samples_alloc(&output, NULL, 2, out_samples, + * AV_SAMPLE_FMT_S16, 0); + * out_samples = swr_convert(swr, &output, out_samples, + * input, in_samples); + * handle_output(output, out_samples); + * av_freep(&output); + * } + * @endcode + * + * When the conversion is finished, the conversion + * context and everything associated with it must be freed with swr_free(). + * A swr_close() function is also available, but it exists mainly for + * compatibility with libavresample, and is not required to be called. + * + * There will be no memory leak if the data is not completely flushed before + * swr_free(). 
+*) + +// #include +// #include "libavutil/channel_layout.h" +// #include "libavutil/frame.h" +// #include "libavutil/samplefmt.h" + +// #include "libswresample/version.h" + +const + (* * + * @name Option constants + * These constants are used for the @ref avoptions interface for lswr. + * @{ + * + *) + + SWR_FLAG_RESAMPLE = 1; + /// < Force resampling even if equal sample rate + // TODO use int resample ? + // long term TODO can we enable this dynamically? + +type + (* * Dithering algorithms *) + SwrDitherType = ( // + SWR_DITHER_NONE = 0, SWR_DITHER_RECTANGULAR, SWR_DITHER_TRIANGULAR, SWR_DITHER_TRIANGULAR_HIGHPASS, + + SWR_DITHER_NS = 64, + /// < not part of API/ABI + SWR_DITHER_NS_LIPSHITZ, SWR_DITHER_NS_F_WEIGHTED, SWR_DITHER_NS_MODIFIED_E_WEIGHTED, SWR_DITHER_NS_IMPROVED_E_WEIGHTED, + SWR_DITHER_NS_SHIBATA, SWR_DITHER_NS_LOW_SHIBATA, SWR_DITHER_NS_HIGH_SHIBATA, SWR_DITHER_NB + /// < not part of API/ABI + ); + + (* * Resampling Engines *) + SwrEngine = ( // + SWR_ENGINE_SWR, (* *< SW Resampler *) + SWR_ENGINE_SOXR, (* *< SoX Resampler *) + SWR_ENGINE_NB + /// < not part of API/ABI + ); + + (* * Resampling Filter Types *) + SwrFilterType = ( // + SWR_FILTER_TYPE_CUBIC, (* *< Cubic *) + SWR_FILTER_TYPE_BLACKMAN_NUTTALL, (* *< Blackman Nuttall windowed sinc *) + SWR_FILTER_TYPE_KAISER (* *< Kaiser windowed sinc *) + ); + +type + (* * + * The libswresample context. Unlike libavcodec and libavformat, this structure + * is opaque. This means that if you would like to set options, you must use + * the @ref avoptions API and cannot directly set values to members of the + * structure. + *) + pSwrContext = ^SwrContext; + + SwrContext = record + end; + + (* * + * Get the AVClass for SwrContext. It can be used in combination with + * AV_OPT_SEARCH_FAKE_OBJ for examining options. + * + * @see av_opt_find(). + * @return the AVClass of SwrContext + *) + // const AVClass *swr_get_class(void); +function swr_get_class(): pAVClass; cdecl; external swresample_dll; + +(* * + * @name SwrContext constructor functions + * @{ +*) + +(* * + * Allocate SwrContext. + * + * If you use this function you will need to set the parameters (manually or + * with swr_alloc_set_opts()) before calling swr_init(). + * + * @see swr_alloc_set_opts(), swr_init(), swr_free() + * @return NULL on error, allocated context otherwise +*) +// struct SwrContext *swr_alloc(void); +function swr_alloc(): pSwrContext; cdecl; external swresample_dll; + +(* * + * Initialize context after user parameters have been set. + * @note The context must be configured using the AVOption API. + * + * @see av_opt_set_int() + * @see av_opt_set_dict() + * + * @param[in,out] s Swr context to initialize + * @return AVERROR error code in case of failure. +*) +// int swr_init(struct SwrContext *s); +function swr_init(s: pSwrContext): int; cdecl; external swresample_dll; + +(* * + * Check whether an swr context has been initialized or not. + * + * @param[in] s Swr context to check + * @see swr_init() + * @return positive if it has been initialized, 0 if not initialized +*) +// int swr_is_initialized(struct SwrContext *s); +function swr_is_initialized(s: pSwrContext): int; cdecl; external swresample_dll; +(* * + * Allocate SwrContext if needed and set/reset common parameters. + * + * This function does not require s to be allocated with swr_alloc(). On the + * other hand, swr_alloc() can use swr_alloc_set_opts() to set the parameters + * on the allocated context. 
+ * + * @param s existing Swr context if available, or NULL if not + * @param out_ch_layout output channel layout (AV_CH_LAYOUT_* ) + * @param out_sample_fmt output sample format (AV_SAMPLE_FMT_* ). + * @param out_sample_rate output sample rate (frequency in Hz) + * @param in_ch_layout input channel layout (AV_CH_LAYOUT_* ) + * @param in_sample_fmt input sample format (AV_SAMPLE_FMT_* ). + * @param in_sample_rate input sample rate (frequency in Hz) + * @param log_offset logging level offset + * @param log_ctx parent logging context, can be NULL + * + * @see swr_init(), swr_free() + * @return NULL on error, allocated context otherwise +*) +// struct SwrContext *swr_alloc_set_opts(struct SwrContext *s, +// int64_t out_ch_layout, enum AVSampleFormat out_sample_fmt, int out_sample_rate, +// int64_t in_ch_layout, enum AVSampleFormat in_sample_fmt, int in_sample_rate, +// int log_offset, void *log_ctx); +function swr_alloc_set_opts(s: pSwrContext; out_ch_layout: int64_t; out_sample_fmt: AVSampleFormat; out_sample_rate: int; + in_ch_layout: int64_t; in_sample_fmt: AVSampleFormat; in_sample_rate: int; log_offset: int; log_ctx: Pointer): pSwrContext; cdecl; + external swresample_dll; +(* * + * @} + * + * @name SwrContext destructor functions + * @{ +*) + +(* * + * Free the given SwrContext and set the pointer to NULL. + * + * @param[in] s a pointer to a pointer to Swr context +*) +// void swr_free(struct SwrContext **s); +procedure swr_free(var s: pSwrContext); cdecl; external swresample_dll; +(* * + * Closes the context so that swr_is_initialized() returns 0. + * + * The context can be brought back to life by running swr_init(), + * swr_init() can also be used without swr_close(). + * This function is mainly provided for simplifying the usecase + * where one tries to support libavresample and libswresample. + * + * @param[in,out] s Swr context to be closed +*) +// void swr_close(struct SwrContext *s); +procedure swr_close(s: pSwrContext); cdecl; external swresample_dll; +(* * + * @} + * + * @name Core conversion functions + * @{ +*) + +(* * Convert audio. + * + * in and in_count can be set to 0 to flush the last few samples out at the + * end. + * + * If more input is provided than output space, then the input will be buffered. + * You can avoid this buffering by using swr_get_out_samples() to retrieve an + * upper bound on the required number of output samples for the given number of + * input samples. Conversion will run directly without copying whenever possible. + * + * @param s allocated Swr context, with parameters set + * @param out output buffers, only the first one need be set in case of packed audio + * @param out_count amount of space available for output in samples per channel + * @param in input buffers, only the first one need to be set in case of packed audio + * @param in_count number of input samples available in one channel + * + * @return number of samples output per channel, negative value on error +*) +// int swr_convert(struct SwrContext *s, uint8_t **out, int out_count, +// const uint8_t **in , int in_count); +function swr_convert(s: pSwrContext; _out: ppuint8_t; out_count: int; const _in: ppuint8_t; in_count: int): int; cdecl; external swresample_dll; +(* * + * Convert the next timestamp from input to output + * timestamps are in 1/(in_sample_rate * out_sample_rate) units. + * + * @note There are 2 slightly differently behaving modes. 
+ * @li When automatic timestamp compensation is not used, (min_compensation >= FLT_MAX) + * in this case timestamps will be passed through with delays compensated + * @li When automatic timestamp compensation is used, (min_compensation < FLT_MAX) + * in this case the output timestamps will match output sample numbers. + * See ffmpeg-resampler(1) for the two modes of compensation. + * + * @param s[in] initialized Swr context + * @param pts[in] timestamp for the next input sample, INT64_MIN if unknown + * @see swr_set_compensation(), swr_drop_output(), and swr_inject_silence() are + * function used internally for timestamp compensation. + * @return the output timestamp for the next output sample +*) +// int64_t swr_next_pts(struct SwrContext *s, int64_t pts); +function swr_next_pts(s: pSwrContext; pts: int64_t): int64_t; cdecl; external swresample_dll; +(* * + * @} + * + * @name Low-level option setting functions + * These functons provide a means to set low-level options that is not possible + * with the AVOption API. + * @{ +*) + +(* * + * Activate resampling compensation ("soft" compensation). This function is + * internally called when needed in swr_next_pts(). + * + * @param[in,out] s allocated Swr context. If it is not initialized, + * or SWR_FLAG_RESAMPLE is not set, swr_init() is + * called with the flag set. + * @param[in] sample_delta delta in PTS per sample + * @param[in] compensation_distance number of samples to compensate for + * @return >= 0 on success, AVERROR error codes if: + * @li @c s is NULL, + * @li @c compensation_distance is less than 0, + * @li @c compensation_distance is 0 but sample_delta is not, + * @li compensation unsupported by resampler, or + * @li swr_init() fails when called. +*) +// int swr_set_compensation(struct SwrContext *s, int sample_delta, int compensation_distance); +function swr_set_compensation(s: pSwrContext; sample_delta: int; compensation_distance: int): int; cdecl; external swresample_dll; +(* * + * Set a customized input channel mapping. + * + * @param[in,out] s allocated Swr context, not yet initialized + * @param[in] channel_map customized input channel mapping (array of channel + * indexes, -1 for a muted channel) + * @return >= 0 on success, or AVERROR error code in case of failure. +*) +// int swr_set_channel_mapping(struct SwrContext *s, const int *channel_map); +function swr_set_channel_mapping(s: pSwrContext; const channel_map: pint): int; cdecl; external swresample_dll; +(* * + * Generate a channel mixing matrix. + * + * This function is the one used internally by libswresample for building the + * default mixing matrix. It is made public just as a utility function for + * building custom matrices. + * + * @param in_layout input channel layout + * @param out_layout output channel layout + * @param center_mix_level mix level for the center channel + * @param surround_mix_level mix level for the surround channel(s) + * @param lfe_mix_level mix level for the low-frequency effects channel + * @param rematrix_maxval if 1.0, coefficients will be normalized to prevent + * overflow. if INT_MAX, coefficients will not be + * normalized. + * @param[out] matrix mixing coefficients; matrix[i + stride * o] is + * the weight of input channel i in output channel o. + * @param stride distance between adjacent input channels in the + * matrix array + * @param matrix_encoding matrixed stereo downmix mode (e.g. 
dplii) + * @param log_ctx parent logging context, can be NULL + * @return 0 on success, negative AVERROR code on failure +*) +// int swr_build_matrix(uint64_t in_layout, uint64_t out_layout, +// double center_mix_level, double surround_mix_level, +// double lfe_mix_level, double rematrix_maxval, +// double rematrix_volume, double *matrix, +// int stride, enum AVMatrixEncoding matrix_encoding, +// void *log_ctx); +function swr_build_matrix(in_layout: uint64_t; out_layout: uint64_t; center_mix_level: double; surround_mix_level: double; + lfe_mix_level: double; rematrix_maxval: double; rematrix_volume: double; var matrix: double; stride: int; + matrix_encoding: AVMatrixEncoding; log_ctx: Pointer): int; cdecl; external swresample_dll; +(* * + * Set a customized remix matrix. + * + * @param s allocated Swr context, not yet initialized + * @param matrix remix coefficients; matrix[i + stride * o] is + * the weight of input channel i in output channel o + * @param stride offset between lines of the matrix + * @return >= 0 on success, or AVERROR error code in case of failure. +*) +// int swr_set_matrix(struct SwrContext *s, const double *matrix, int stride); +function swr_set_matrix(s: pSwrContext; const matrix: pdouble; stride: int): int; cdecl; external swresample_dll; +(* * + * @} + * + * @name Sample handling functions + * @{ +*) + +(* * + * Drops the specified number of output samples. + * + * This function, along with swr_inject_silence(), is called by swr_next_pts() + * if needed for "hard" compensation. + * + * @param s allocated Swr context + * @param count number of samples to be dropped + * + * @return >= 0 on success, or a negative AVERROR code on failure +*) +// int swr_drop_output(struct SwrContext *s, int count); +function swr_drop_output(s: pSwrContext; count: int): int; cdecl; external swresample_dll; +(* * + * Injects the specified number of silence samples. + * + * This function, along with swr_drop_output(), is called by swr_next_pts() + * if needed for "hard" compensation. + * + * @param s allocated Swr context + * @param count number of samples to be dropped + * + * @return >= 0 on success, or a negative AVERROR code on failure +*) +// int swr_inject_silence(struct SwrContext *s, int count); +function swr_inject_silence(s: pSwrContext; count: int): int; cdecl; external swresample_dll; +(* * + * Gets the delay the next input sample will experience relative to the next output sample. + * + * Swresample can buffer data if more input has been provided than available + * output space, also converting between sample rates needs a delay. + * This function returns the sum of all such delays. + * The exact delay is not necessarily an integer value in either input or + * output sample rate. Especially when downsampling by a large value, the + * output sample rate may be a poor choice to represent the delay, similarly + * for upsampling and the input sample rate. + * + * @param s swr context + * @param base timebase in which the returned delay will be: + * @li if it's set to 1 the returned delay is in seconds + * @li if it's set to 1000 the returned delay is in milliseconds + * @li if it's set to the input sample rate then the returned + * delay is in input samples + * @li if it's set to the output sample rate then the returned + * delay is in output samples + * @li if it's the least common multiple of in_sample_rate and + * out_sample_rate then an exact rounding-free delay will be + * returned + * @returns the delay in 1 / @c base units. 
+*) +// int64_t swr_get_delay(struct SwrContext *s, int64_t base); +function swr_get_delay(s: pSwrContext; base: int64_t): int64_t; cdecl; external swresample_dll; +(* * + * Find an upper bound on the number of samples that the next swr_convert + * call will output, if called with in_samples of input samples. This + * depends on the internal state, and anything changing the internal state + * (like further swr_convert() calls) will may change the number of samples + * swr_get_out_samples() returns for the same number of input samples. + * + * @param in_samples number of input samples. + * @note any call to swr_inject_silence(), swr_convert(), swr_next_pts() + * or swr_set_compensation() invalidates this limit + * @note it is recommended to pass the correct available buffer size + * to all functions like swr_convert() even if swr_get_out_samples() + * indicates that less would be used. + * @returns an upper bound on the number of samples that the next swr_convert + * will output or a negative value to indicate an error +*) +// int swr_get_out_samples(struct SwrContext *s, int in_samples); +function swr_get_out_samples(s: pSwrContext; in_samples: int): int; cdecl; external swresample_dll; +(* * + * @} + * + * @name Configuration accessors + * @{ +*) + +(* * + * Return the @ref LIBSWRESAMPLE_VERSION_INT constant. + * + * This is useful to check if the build-time libswresample has the same version + * as the run-time one. + * + * @returns the unsigned int-typed version +*) +// unsigned swresample_version(void); +function swresample_version(): unsigned; cdecl; external swresample_dll; +(* * + * Return the swr build-time configuration. + * + * @returns the build-time @c ./configure flags +*) +// const char *swresample_configuration(void); +function swresample_configuration(): pAnsiChar; cdecl; external swresample_dll; +(* * + * Return the swr license. + * + * @returns the license of libswresample, determined at build-time +*) +// const char *swresample_license(void); +function swresample_license(): pAnsiChar; cdecl; external swresample_dll; +(* * + * @} + * + * @name AVFrame based API + * @{ +*) + +(* * + * Convert the samples in the input AVFrame and write them to the output AVFrame. + * + * Input and output AVFrames must have channel_layout, sample_rate and format set. + * + * If the output AVFrame does not have the data pointers allocated the nb_samples + * field will be set using av_frame_get_buffer() + * is called to allocate the frame. + * + * The output AVFrame can be NULL or have fewer allocated samples than required. + * In this case, any remaining samples not written to the output will be added + * to an internal FIFO buffer, to be returned at the next call to this function + * or to swr_convert(). + * + * If converting sample rate, there may be data remaining in the internal + * resampling delay buffer. swr_get_delay() tells the number of + * remaining samples. To get this data as output, call this function or + * swr_convert() with NULL input. + * + * If the SwrContext configuration does not match the output and + * input AVFrame settings the conversion does not take place and depending on + * which AVFrame is not matching AVERROR_OUTPUT_CHANGED, AVERROR_INPUT_CHANGED + * or the result of a bitwise-OR of them is returned. + * + * @see swr_delay() + * @see swr_convert() + * @see swr_get_delay() + * + * @param swr audio resample context + * @param output output AVFrame + * @param input input AVFrame + * @return 0 on success, AVERROR on failure or nonmatching + * configuration. 
+*) +// int swr_convert_frame(SwrContext *swr, AVFrame *output, const AVFrame *input); +function swr_convert_frame(swr: pSwrContext; output: pAVFrame; const input: pAVFrame): int; cdecl; external swresample_dll; +(* * + * Configure or reconfigure the SwrContext using the information + * provided by the AVFrames. + * + * The original resampling context is reset even on failure. + * The function calls swr_close() internally if the context is open. + * + * @see swr_close(); + * + * @param swr audio resample context + * @param output output AVFrame + * @param input input AVFrame + * @return 0 on success, AVERROR on failure. +*) +// int swr_config_frame(SwrContext *swr, const AVFrame *out, const AVFrame *in); +function swr_config_frame(swr: pSwrContext; const _out: pAVFrame; const _in: pAVFrame): int; cdecl; external swresample_dll; + +implementation + +end. diff --git a/ffmpeg/libswscale.pas b/ffmpeg/libswscale.pas new file mode 100644 index 0000000..fd4f89d --- /dev/null +++ b/ffmpeg/libswscale.pas @@ -0,0 +1,372 @@ +unit libswscale; + +{$IFDEF FPC} +{$MODE Delphi} +{$ENDIF} + +interface + +Uses + ffmpeg_types, libavutil; + +{$I ffmpeg.inc} +(* + * @defgroup libsws libswscale + * Color conversion and scaling library. + * + * @{ + * + * Return the LIBSWSCALE_VERSION_INT constant. +*) +// unsigned swscale_version(void); +function swscale_version(): unsigned; cdecl; external swscale_dll; + +(* + * Return the libswscale build-time configuration. +*) +// const char *swscale_configuration(void); +function swscale_configuration(): pAnsiChar; cdecl; external swscale_dll; + +(* + * Return the libswscale license. +*) +// const char *swscale_license(void); +function swscale_license(): pAnsiChar; cdecl; external swscale_dll; + +const + (* values for the flags, the stuff on the command line is different *) + SWS_FAST_BILINEAR = 1; + SWS_BILINEAR = 2; + SWS_BICUBIC = 4; + SWS_X = 8; + SWS_POINT = $10; + SWS_AREA = $20; + SWS_BICUBLIN = $40; + SWS_GAUSS = $80; + SWS_SINC = $100; + SWS_LANCZOS = $200; + SWS_SPLINE = $400; + + SWS_SRC_V_CHR_DROP_MASK = $30000; + SWS_SRC_V_CHR_DROP_SHIFT = 16; + + SWS_PARAM_DEFAULT = 123456; + + SWS_PRINT_INFO = $1000; + + // the following 3 flags are not completely implemented + // internal chrominance subsampling info + SWS_FULL_CHR_H_INT = $2000; + // input subsampling info + SWS_FULL_CHR_H_INP = $4000; + SWS_DIRECT_BGR = $8000; + SWS_ACCURATE_RND = $40000; + SWS_BITEXACT = $80000; + SWS_ERROR_DIFFUSION = $800000; + + SWS_MAX_REDUCE_CUTOFF = 0.002; + + SWS_CS_ITU709 = 1; + SWS_CS_FCC = 4; + SWS_CS_ITU601 = 5; + SWS_CS_ITU624 = 5; + SWS_CS_SMPTE170M = 5; + SWS_CS_SMPTE240M = 7; + SWS_CS_DEFAULT = 5; + SWS_CS_BT2020 = 9; + + (* + * Return a pointer to yuv<->rgb coefficients for the given colorspace + * suitable for sws_setColorspaceDetails(). + * + * @param colorspace One of the SWS_CS_* macros. If invalid, + * SWS_CS_DEFAULT is used. 
+ *) + // const int *sws_getCoefficients(int colorspace); +function sws_getCoefficients(colorspace: int): pInt; cdecl; external swscale_dll; + +// when used for filters they must have an odd number of elements +// coeffs cannot be shared between vectors + +type + SwsVector = record + coeff: pdouble; + /// < pointer to the list of coefficients + length: int; + /// < number of coefficients in the vector + end; + + pSwsVector = ^SwsVector; + + // vectors can be shared + SwsFilter = record + lumH: pSwsVector; + lumV: pSwsVector; + chrH: pSwsVector; + chrV: pSwsVector; + End; + + pSwsFilter = ^SwsFilter; + + SwsContext = record + end; + + pSwsContext = ^SwsContext; + + Tsws_array_uint8_t = array_uint8_t; + psws_array_uint8_t = ^Tsws_array_uint8_t; + + Tsws_array_int = array_int; + psws_array_int = ^Tsws_array_int; + + Tsws_array4_int = array4_int; + psws_array4_int = ^Tsws_array4_int; + + (* + * Return a positive value if pix_fmt is a supported input format, 0 + * otherwise. + *) + // int sws_isSupportedInput(enum AVPixelFormat pix_fmt); +function sws_isSupportedInput(pix_fmt: AVPixelFormat): int; cdecl; external swscale_dll; + +(* + * Return a positive value if pix_fmt is a supported output format, 0 + * otherwise. +*) +// int sws_isSupportedOutput(enum AVPixelFormat pix_fmt); +function sws_isSupportedOutput(pix_fmt: AVPixelFormat): int; cdecl; external swscale_dll; + +(* + * @param[in] pix_fmt the pixel format + * @return a positive value if an endianness conversion for pix_fmt is + * supported, 0 otherwise. +*) +// int sws_isSupportedEndiannessConversion(enum AVPixelFormat pix_fmt); +function sws_isSupportedEndiannessConversion(pix_fmt: AVPixelFormat): int; cdecl; external swscale_dll; + +(* + * Allocate an empty SwsContext. This must be filled and passed to + * sws_init_context(). For filling see AVOptions, options.c and + * sws_setColorspaceDetails(). +*) +// struct SwsContext *sws_alloc_context(void); +function sws_alloc_context(): pSwsContext; cdecl; external swscale_dll; + +(* + * Initialize the swscaler context sws_context. + * + * @return zero or positive value on success, a negative value on + * error +*) +// av_warn_unused_result +// int sws_init_context(struct SwsContext *sws_context, SwsFilter *srcFilter, SwsFilter *dstFilter); +function sws_init_context(sws_context: pSwsContext; srcFilter: pSwsFilter; dstFilter: pSwsFilter): int; cdecl; external swscale_dll; + +(* + * Free the swscaler context swsContext. + * If swsContext is NULL, then does nothing. +*) +// void sws_freeContext(struct SwsContext *swsContext); +procedure sws_freeContext(SwsContext: pSwsContext); cdecl; external swscale_dll; + +(* + * Allocate and return an SwsContext. You need it to perform + * scaling/conversion operations using sws_scale(). 
+ * + * @param srcW the width of the source image + * @param srcH the height of the source image + * @param srcFormat the source image format + * @param dstW the width of the destination image + * @param dstH the height of the destination image + * @param dstFormat the destination image format + * @param flags specify which algorithm and options to use for rescaling + * @param param extra parameters to tune the used scaler + * For SWS_BICUBIC param[0] and [1] tune the shape of the basis + * function, param[0] tunes f(1) and param[1] f´(1) + * For SWS_GAUSS param[0] tunes the exponent and thus cutoff + * frequency + * For SWS_LANCZOS param[0] tunes the width of the window function + * @return a pointer to an allocated context, or NULL in case of error + * @note this function is to be removed after a saner alternative is + * written +*) +// struct SwsContext *sws_getContext(int srcW, int srcH, enum AVPixelFormat srcFormat, +// int dstW, int dstH, enum AVPixelFormat dstFormat, +// int flags, SwsFilter *srcFilter, +// SwsFilter *dstFilter, const double *param); + +function sws_getContext(srcW: int; srcH: int; srcFormat: AVPixelFormat; dstW: int; dstH: int; dstFormat: AVPixelFormat; flags: int; srcFilter: pSwsFilter; + dstFilter: pSwsFilter; const param: pdouble): pSwsContext; cdecl; external swscale_dll; + +(* + * Scale the image slice in srcSlice and put the resulting scaled + * slice in the image in dst. A slice is a sequence of consecutive + * rows in an image. + * + * Slices have to be provided in sequential order, either in + * top-bottom or bottom-top order. If slices are provided in + * non-sequential order the behavior of the function is undefined. + * + * @param c the scaling context previously created with + * sws_getContext() + * @param srcSlice the array containing the pointers to the planes of + * the source slice + * @param srcStride the array containing the strides for each plane of + * the source image + * @param srcSliceY the position in the source image of the slice to + * process, that is the number (counted starting from + * zero) in the image of the first row of the slice + * @param srcSliceH the height of the source slice, that is the number + * of rows in the slice + * @param dst the array containing the pointers to the planes of + * the destination image + * @param dstStride the array containing the strides for each plane of + * the destination image + * @return the height of the output slice +*) + +// int sws_scale(struct SwsContext *c, const uint8_t *const srcSlice[], +// const int srcStride[], int srcSliceY, int srcSliceH, +// uint8_t *const dst[], const int dstStride[]); +function sws_scale(c: pSwsContext; const srcSlice: psws_array_uint8_t; const srcStride: psws_array_int; srcSliceY: int; srcSliceH: int; dst: psws_array_uint8_t; + const dstStride: psws_array_int): int; cdecl; overload; external swscale_dll; + +(* + * @param dstRange flag indicating the while-black range of the output (1=jpeg / 0=mpeg) + * @param srcRange flag indicating the while-black range of the input (1=jpeg / 0=mpeg) + * @param table the yuv2rgb coefficients describing the output yuv space, normally ff_yuv2rgb_coeffs[x] + * @param inv_table the yuv2rgb coefficients describing the input yuv space, normally ff_yuv2rgb_coeffs[x] + * @param brightness 16.16 fixed point brightness correction + * @param contrast 16.16 fixed point contrast correction + * @param saturation 16.16 fixed point saturation correction + * @return -1 if not supported +*) +// int sws_setColorspaceDetails(struct SwsContext *c, 
const int inv_table[4], +// int srcRange, const int table[4], int dstRange, +// int brightness, int contrast, int saturation); +function sws_setColorspaceDetails(c: pSwsContext; const inv_table: psws_array4_int; srcRange: int; const table: psws_array4_int; dstRange: int; brightness: int; + contrast: int; saturation: int): int; cdecl; external swscale_dll; + +(* + * @return -1 if not supported +*) +// int sws_getColorspaceDetails(struct SwsContext *c, int **inv_table, +// int *srcRange, int **table, int *dstRange, +// int *brightness, int *contrast, int *saturation); +function sws_getColorspaceDetails(c: pSwsContext; var inv_table: pInt; var srcRange: int; var table: pInt; var dstRange: int; var brightness: int; + var contrast: int; var saturation: int): int; cdecl; external swscale_dll; + +(* + * Allocate and return an uninitialized vector with length coefficients. +*) +// SwsVector *sws_allocVec(int length); +function sws_allocVec(length: int): pSwsVector; cdecl; external swscale_dll; + +(* + * Return a normalized Gaussian curve used to filter stuff + * quality = 3 is high quality, lower is lower quality. +*) +// SwsVector *sws_getGaussianVec(double variance, double quality); +function sws_getGaussianVec(variance: double; quality: double): pSwsVector; cdecl; external swscale_dll; + +(* + * Scale all the coefficients of a by the scalar value. +*) +// void sws_scaleVec(SwsVector *a, double scalar); +procedure sws_scaleVec(a: pSwsVector; scalar: double); cdecl; external swscale_dll; + +(* + * Scale all the coefficients of a so that their sum equals height. +*) +// void sws_normalizeVec(SwsVector *a, double height); +procedure sws_normalizeVec(a: pSwsVector; height: double); cdecl; external swscale_dll; + +{$IFDEF FF_API_SWS_VECTOR} +// attribute_deprecated SwsVector *sws_getConstVec(double c, int length); +function sws_getConstVec(c: double; length: int): pSwsVector; cdecl; external swscale_dll; +// attribute_deprecated SwsVector *sws_getIdentityVec(void); +function sws_getIdentityVec(): pSwsVector; cdecl; external swscale_dll; +// attribute_deprecated void sws_convVec(SwsVector *a, SwsVector *b); +procedure sws_convVec(a: pSwsVector; b: pSwsVector); cdecl; external swscale_dll; +// attribute_deprecated void sws_addVec(SwsVector *a, SwsVector *b); +procedure sws_addVec(a: pSwsVector; b: pSwsVector); cdecl; external swscale_dll; +// attribute_deprecated void sws_subVec(SwsVector *a, SwsVector *b); +procedure sws_subVec(a: pSwsVector; b: pSwsVector); cdecl; external swscale_dll; +// attribute_deprecated void sws_shiftVec(SwsVector *a, int shift); +procedure sws_shiftVec(a: pSwsVector; shift: int); cdecl; external swscale_dll; +// attribute_deprecated SwsVector *sws_cloneVec(SwsVector *a); +function sws_cloneVec(a: pSwsVector): pSwsVector; cdecl; external swscale_dll; +// attribute_deprecated void sws_printVec2(SwsVector *a, AVClass *log_ctx, int log_level); +procedure sws_printVec2(a: pSwsVector; log_ctx: pAVClass; log_level: int); cdecl; external swscale_dll; +{$ENDIF} +// void sws_freeVec(SwsVector *a); +procedure sws_freeVec(a: pSwsVector); cdecl; external swscale_dll; + +// SwsFilter *sws_getDefaultFilter(float lumaGBlur, float chromaGBlur, +// float lumaSharpen, float chromaSharpen, +// float chromaHShift, float chromaVShift, +// int verbose); +function sws_getDefaultFilter(lumaGBlur: float; chromaGBlur: float; lumaSharpen: float; chromaSharpen: float; chromaHShift: float; chromaVShift: float; + verbose: int): pSwsFilter; cdecl; external swscale_dll; + +// void sws_freeFilter(SwsFilter 
*filter); +procedure sws_freeFilter(filter: pSwsFilter); cdecl; external swscale_dll; + +(* + * Check if context can be reused, otherwise reallocate a new one. + * + * If context is NULL, just calls sws_getContext() to get a new + * context. Otherwise, checks if the parameters are the ones already + * saved in context. If that is the case, returns the current + * context. Otherwise, frees context and gets a new context with + * the new parameters. + * + * Be warned that srcFilter and dstFilter are not checked, they + * are assumed to remain the same. +*) +// struct SwsContext *sws_getCachedContext(struct SwsContext *context, +// int srcW, int srcH, enum AVPixelFormat srcFormat, +// int dstW, int dstH, enum AVPixelFormat dstFormat, +// int flags, SwsFilter *srcFilter, +// SwsFilter *dstFilter, const double *param); +function sws_getCachedContext(context: pSwsContext; srcW: int; srcH: int; srcFormat: AVPixelFormat; dstW: int; dstH: int; dstFormat: AVPixelFormat; flags: int; + srcFilter: pSwsFilter; dstFilter: pSwsFilter; const param: pdouble): pSwsContext; cdecl; external swscale_dll; + +(* + * Convert an 8-bit paletted frame into a frame with a color depth of 32 bits. + * + * The output frame will have the same packed format as the palette. + * + * @param src source frame buffer + * @param dst destination frame buffer + * @param num_pixels number of pixels to convert + * @param palette array with [256] entries, which must match color arrangement (RGB or BGR) of src +*) +// void sws_convertPalette8ToPacked32(const uint8_t *src, uint8_t *dst, int num_pixels, const uint8_t *palette); +procedure sws_convertPalette8ToPacked32(const src: puint8_t; var dst: uint8_t; num_pixels: int; const palette: puint8_t); cdecl; external swscale_dll; + +(* + * Convert an 8-bit paletted frame into a frame with a color depth of 24 bits. + * + * With the palette format "ABCD", the destination frame ends up with the format "ABC". + * + * @param src source frame buffer + * @param dst destination frame buffer + * @param num_pixels number of pixels to convert + * @param palette array with [256] entries, which must match color arrangement (RGB or BGR) of src +*) +// void sws_convertPalette8ToPacked24(const uint8_t *src, uint8_t *dst, int num_pixels, const uint8_t *palette); +procedure sws_convertPalette8ToPacked24(const src: puint8_t; var dst: uint8_t; num_pixels: int; const palette: puint8_t); cdecl; external swscale_dll; + +(* + * Get the AVClass for swsContext. It can be used in combination with + * AV_OPT_SEARCH_FAKE_OBJ for examining options. + * + * @see av_opt_find(). +*) +// const AVClass *sws_get_class(void); +function sws_get_class(): pAVClass; cdecl; external swscale_dll; + +implementation + +end. 
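For orientation, here is a minimal sketch (not part of the patch) of how the libswresample binding above could be driven to turn a decoded planar-float frame into interleaved signed 16-bit samples, which is the same conversion the AvPlayer unit below performs by hand. It assumes the AV_CH_LAYOUT_*/AV_SAMPLE_FMT_* constants and the usual PAVFrame fields (channel_layout, sample_rate, nb_samples, data) are exposed by the accompanying libavutil unit; the helper name, output rate and layout are illustrative only.

// Illustrative sketch only: planar float -> interleaved S16 via libswresample.
// Assumes libavutil exposes AV_CH_LAYOUT_STEREO, AV_SAMPLE_FMT_S16/AV_SAMPLE_FMT_FLTP
// and the standard PAVFrame fields; not part of this patch.
procedure ConvertFrameToS16(frame:PAVFrame;outBuf:puint8_t;maxOutSamples:Integer);
var
 swr:pSwrContext;
 got:Integer;
begin
 swr:=swr_alloc_set_opts(nil,
        AV_CH_LAYOUT_STEREO,AV_SAMPLE_FMT_S16,44100,                  // output: stereo, S16, 44.1 kHz
        frame^.channel_layout,AV_SAMPLE_FMT_FLTP,frame^.sample_rate,  // input taken from the decoded frame
        0,nil);
 if swr_init(swr)<0 then
 begin
  swr_free(swr);
  Exit;
 end;
 // Packed (interleaved) output, so only the first output plane pointer is needed
 got:=swr_convert(swr,@outBuf,maxOutSamples,ppuint8_t(@frame^.data[0]),frame^.nb_samples);
 if got<0 then
  Writeln('swr_convert failed: ',got);
 swr_free(swr);
end;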
diff --git a/fpPS4.lpi b/fpPS4.lpi index bc5dcb2..2e947ca 100644 --- a/fpPS4.lpi +++ b/fpPS4.lpi @@ -31,7 +31,7 @@ - + @@ -553,6 +553,11 @@ + + + + + @@ -564,7 +569,7 @@ - + diff --git a/fpPS4.lpr b/fpPS4.lpr index 23a454a..3c454f3 100644 --- a/fpPS4.lpr +++ b/fpPS4.lpr @@ -43,6 +43,7 @@ uses ps4_libSceAudioOut, ps4_libSceVoice, ps4_libSceVideoOut, + ps4_libSceAvPlayer, ps4_libScePad, ps4_libSceNpWebApi, ps4_libSceRudp, diff --git a/src/ps4_libsceavplayer.pas b/src/ps4_libsceavplayer.pas new file mode 100644 index 0000000..8d1c139 --- /dev/null +++ b/src/ps4_libsceavplayer.pas @@ -0,0 +1,866 @@ +unit ps4_libSceAvPlayer; + +{$mode ObjFPC}{$H+} + +interface + +uses + libavcodec, + libavdevice, + libavformat, + libavutil, + libswscale, + libswresample, + windows, + ps4_program, + spinlock, + sys_signal, + sys_path, + sys_time, + sys_pthread, + Classes, + SysUtils, + Generics.Collections, + Math; + +implementation + +uses + sys_kernel; + +const + LANGUAGE_CODE_ENG:array[0..3] of Char=('E', 'N', 'G', #0); + DIRECTORY_AVPLAYER_DUMP='avplayer_dump'; + BUFFER_COUNT=2; + +type + TAVPacketQueue=specialize TQueue; + + SceAvPlayerAllocate=function(argP:Pointer;argAlignment:DWord;argSize:DWord):Pointer; SysV_ABI_CDecl; + SceAvPlayerDeallocate=procedure(argP:Pointer;argMemory:Pointer); SysV_ABI_CDecl; + SceAvPlayerAllocateTexture=function(argP:Pointer;argAlignment:DWord;argSize:DWord):Pointer; SysV_ABI_CDecl; + SceAvPlayerDeallocateTexture=procedure(argP:Pointer;argMemory:Pointer); SysV_ABI_CDecl; + + SceAvPlayerOpenFile=function(argP:Pointer;argFilename:PChar):Integer; SysV_ABI_CDecl; + SceAvPlayerCloseFile=function(argP:Pointer):Integer; SysV_ABI_CDecl; + SceAvPlayerReadOffsetFile=function(argP:Pointer;argBuffer:PByte;argPosition:QWord;argLength:DWord):Integer; SysV_ABI_CDecl; + SceAvPlayerSizeFile=function(argP:Pointer):QWord; SysV_ABI_CDecl; + + SceAvPlayerEventCallback=procedure(p:Pointer;argEventId:Integer;argSourceId:Integer;argEventData:Pointer); SysV_ABI_CDecl; + + SceAvPlayerMemAllocator=packed record + objectPointer :Pointer; + allocate :SceAvPlayerAllocate; + deallocate :SceAvPlayerDeallocate; + allocateTexture :SceAvPlayerAllocateTexture; + deallocateTexture:SceAvPlayerDeallocateTexture; + end; + + SceAvPlayerFileReplacement=packed record + objectPointer:Pointer; + open :SceAvPlayerOpenFile; + close :SceAvPlayerCloseFile; + readOffset :SceAvPlayerReadOffsetFile; + size :SceAvPlayerSizeFile; + end; + + SceAvPlayerEventReplacement=packed record + objectPointer:Pointer; + eventCallback:SceAvPlayerEventCallback; + end; + + SceAvPlayerInitData=packed record + memoryReplacement :SceAvPlayerMemAllocator; + fileReplacement :SceAvPlayerFileReplacement; + eventReplacement :SceAvPlayerEventReplacement; + debugLevel :DWord; + basePriority :DWord; + numOutputVideoFrameBuffers:Integer; + autoStart :Boolean; + reserved :array[0..2] of Byte; + defaultLanguage :PChar; + end; + PSceAvPlayerInitData=^SceAvPlayerInitData; + + SceAvPlayerInitDataEx=packed record + thisSize :QWORD; + memoryReplacement :SceAvPlayerMemAllocator; + fileReplacement :SceAvPlayerFileReplacement; + eventReplacement :SceAvPlayerEventReplacement; + defaultLanguage :PChar; + debugLevel :DWORD; //SceAvPlayerDebuglevels + audioDecoderPriority :DWORD; + audioDecoderAffinity :DWORD; + videoDecoderPriority :DWORD; + videoDecoderAffinity :DWORD; + demuxerPriority :DWORD; + demuxerAffinity :DWORD; + controllerPriority :DWORD; + controllerAffinity :DWORD; + httpStreamingPriority :DWORD; + httpStreamingAffinity :DWORD; + fileStreamingPriority 
:DWORD; + fileStreamingAffinity :DWORD; + numOutputVideoFrameBuffers:Integer; + autoStart :Boolean; + reserved :array[0..2] of Byte; + _align :DWORD; + end; + PSceAvPlayerInitDataEx=^SceAvPlayerInitDataEx; + + SceAvPlayerAudio=packed record + channelCount:Word; + reserved :array[0..1] of Byte; + sampleRate :DWord; + size :DWord; + languageCode:array[0..3] of Char; + end; + + SceAvPlayerVideo=packed record + width :DWord; + height :DWord; + aspectRatio :Single; + languageCode:array[0..3] of Char; + end; + + SceAvPlayerTextPosition=packed record + top :Word; + left :Word; + bottom:Word; + right :Word; + end; + + SceAvPlayerTimedText=packed record + languageCode:array[0..3] of Char; + textSize :Word; + fontSize :Word; + position :SceAvPlayerTextPosition; + end; + + SceAvPlayerStreamDetails=packed record + case byte of //union + 0:(reserved:array[0..15] of Byte); + 1:(audio :SceAvPlayerAudio ); + 2:(video :SceAvPlayerVideo ); + 3:(subs :SceAvPlayerTimedText); + end; + + SceAvPlayerFrameInfo=packed record + pData :PByte; + reserved :DWORD; + _align :DWORD; + timeStamp:QWord; //The timestamp in ms + details :SceAvPlayerStreamDetails; + end; + PSceAvPlayerFrameInfo=^SceAvPlayerFrameInfo; + + SceAvPlayerAudioEx=packed record + channelCount:Word; + reserved :array[0..1] of Byte; + sampleRate :DWord; + size :DWord; + languageCode:array[0..3] of Char; + reserved1 :array[0..63] of Byte; + end; + + SceAvPlayerVideoEx=packed record + width :DWord; + height :DWord; + aspectRatio :Single; + languageCode :array[0..3] of Char; + framerate :DWord; + cropLeftOffset :DWord; + cropRightOffset :DWord; + cropTopOffset :DWord; + cropBottomOffset :DWord; + pitch :DWord; + lumaBitDepth :Byte; + chromaBitDepth :Byte; + videoFullRangeFlag:Boolean; + reserved :array[0..36] of Byte; + end; + + SceAvPlayerTimedTextEx=packed record + languageCode:array[0..3] of Char; + reserved :array[0..75] of Byte; + end; + + SceAvPlayerStreamDetailsEx=packed record + Case Byte of //union + 0:(audio :SceAvPlayerAudioEx ); + 1:(video :SceAvPlayerVideoEx ); + 2:(subs :SceAvPlayerTimedTextEx); + 3:(reserved:array[0..79] of Byte ); + end; + + SceAvPlayerFrameInfoEx=packed record + pData :PByte; + reserved :DWORD; + _align :DWORD; + timeStamp:QWord; //The timestamp in ms + details :SceAvPlayerStreamDetailsEx; + end; + PSceAvPlayerFrameInfoEx=^SceAvPlayerFrameInfoEx; + + PSceAvPlayerPostInitData = Pointer; + + TMemChunk=packed record + pData:Pointer; + fSize:Ptruint; + end; + + TAvPlayerState=class + formatContext :PAVFormatContext; + audioCodecContext :PAVCodecContext; + videoCodecContext :PAVCodecContext; + audioPackets :TAVPacketQueue; + videoPackets :TAVPacketQueue; + lastTimeStamp :QWord; + audioBuffer :array[0..BUFFER_COUNT-1] of PSmallInt; + videoBuffer :array[0..BUFFER_COUNT-1] of PByte; + videoStreamId :Integer; + audioStreamId :Integer; + channelCount, + sampleCount, + sampleRate :Integer; + source :RawByteString; // TODO: "sceAvPlayerAddSource" indicates there may be more than 1 source per instance + info :Pointer; // Pointer to TAvPlayerInfo + constructor Create; + destructor Destroy; override; + procedure CreateMedia(const aSource: RawByteString); + procedure FreeMedia; + function NextPacket(const id:Integer):Boolean; + function ReceiveAudio:TMemChunk; + function ReceiveVideo:TMemChunk; + function GetFramerate:QWord; + function IsPlaying:Boolean; + function Buffer(const aType:DWord;const chunk:TMemChunk):Pointer; + end; + + TAvPlayerInfo=record + playerState :TAvPlayerState; + // + isLooped :Boolean; + isPaused :Boolean; + 
lastFrameTime    :QWord;
+  memoryReplacement:SceAvPlayerMemAllocator;
+  fileReplacement  :SceAvPlayerFileReplacement;
+  eventReplacement :SceAvPlayerEventReplacement;
+ end;
+ PAvPlayerInfo=^TAvPlayerInfo;
+ // For now AvPlayer handle is a pointer that points directly to the player struct
+ SceAvPlayerHandle=PAvPlayerInfo;
+
+var
+ lock:Pointer;
+
+function GetTimeInUs:QWord; inline;
+begin
+ Result:=SwGetTimeUsec;
+end;
+
+constructor TAvPlayerState.Create;
+begin
+ inherited Create;
+ videoStreamId:=-1;
+ audioStreamId:=-1;
+end;
+
+destructor TAvPlayerState.Destroy;
+begin
+ FreeMedia;
+ inherited;
+end;
+
+procedure TAvPlayerState.CreateMedia(const aSource: RawByteString);
+var
+ videoCodec :PAVCodec;
+ audioCodec :PAVCodec;
+ videoStream:PAVStream;
+ audioStream:PAVStream;
+ p          :Pointer;
+begin
+ FreeMedia;
+ source:=aSource;
+ formatContext:=avformat_alloc_context;
+
+ avformat_open_input(formatContext,PChar(source),nil,ppAVDictionary(nil));
+ // Print some useful information about the media
+ Writeln(SysLogPrefix,source);
+ Writeln(SysLogPrefix,Format('Format: %s, duration: %dms',[formatContext^.iformat^.long_name,formatContext^.duration div 1000]));
+
+ videoStreamId:=av_find_best_stream(formatContext,AVMEDIA_TYPE_VIDEO,-1,-1,p,0);
+ audioStreamId:=av_find_best_stream(formatContext,AVMEDIA_TYPE_AUDIO,-1,-1,p,0);
+ if videoStreamId>=0 then
+ begin
+  videoStream:=formatContext^.streams[videoStreamId];
+  videoCodec:=avcodec_find_decoder(videoStream^.codecpar^.codec_id);
+  videoCodecContext:=avcodec_alloc_context3(videoCodec);
+  avcodec_parameters_to_context(videoCodecContext,videoStream^.codecpar);
+  avcodec_open2(videoCodecContext,videoCodec,nil);
+  Writeln(SysLogPrefix,Format('%d) Video codec: %s, resolution: %d x %d',[videoStreamId,videoCodec^.long_name,videoStream^.codecpar^.width,videoStream^.codecpar^.height]));
+ end;
+ if audioStreamId>=0 then
+ begin
+  audioStream:=formatContext^.streams[audioStreamId];
+  audioCodec:=avcodec_find_decoder(audioStream^.codecpar^.codec_id);
+  audioCodecContext:=avcodec_alloc_context3(audioCodec);
+  avcodec_parameters_to_context(audioCodecContext,audioStream^.codecpar);
+  avcodec_open2(audioCodecContext,audioCodec,nil);
+  Writeln(SysLogPrefix,Format('%d) Audio codec: %s, channels: %d, sample rate: %d',[audioStreamId,audioCodec^.long_name,audioStream^.codecpar^.channels,audioStream^.codecpar^.sample_rate]));
+ end;
+
+ audioPackets:=TAVPacketQueue.Create;
+ videoPackets:=TAVPacketQueue.Create;
+end;
+
+procedure TAvPlayerState.FreeMedia;
+var
+ packet    :PAVPacket;
+ I         :Integer;
+ playerInfo:PAvPlayerInfo;
+begin
+ if formatContext=nil then
+  Exit;
+
+ playerInfo:=info;
+ while audioPackets.Count>0 do
+ begin
+  packet:=audioPackets.Dequeue;
+  av_packet_free(packet);
+ end;
+ while videoPackets.Count>0 do
+ begin
+  packet:=videoPackets.Dequeue;
+  av_packet_free(packet);
+ end;
+ audioPackets.Free;
+ videoPackets.Free;
+
+ // Close/free each codec context behind its own nil check
+ if videoCodecContext<>nil then
+ begin
+  avcodec_close(videoCodecContext);
+  avcodec_free_context(videoCodecContext);
+ end;
+ if audioCodecContext<>nil then
+ begin
+  avcodec_close(audioCodecContext);
+  avcodec_free_context(audioCodecContext);
+ end;
+
+ avformat_close_input(formatContext);
+ for I:=0 to BUFFER_COUNT-1 do
+ begin
+  if audioBuffer[I]<>nil then
+  begin
+   FreeMem(audioBuffer[I]);
+  end;
+  if videoBuffer[I]<>nil then
+   playerInfo^.memoryReplacement.deallocateTexture(playerInfo^.memoryReplacement.objectPointer,videoBuffer[I]);
+ end;
+ source:='';
+ formatContext:=nil;
+end;
+
+function TAvPlayerState.NextPacket(const id:Integer):Boolean;
+var
+ thisQueue,
+ thatQueue:TAvPacketQueue;
+ packet   :PAVPacket;
+ err      :Integer;
+begin
+ if id=videoStreamId then
+ begin
+  thisQueue:=videoPackets;
+  thatQueue:=audioPackets;
+ end else
+ begin
+  thisQueue:=audioPackets;
+  thatQueue:=videoPackets;
+ end;
+ while True do
+ begin
+  if thisQueue.Count>0 then
+  begin
+   packet:=thisQueue.Dequeue;
+   if id=videoStreamId then
+   begin
+    err:=avcodec_send_packet(videoCodecContext,packet);
+    assert(err=0);
+   end else
+   begin
+    err:=avcodec_send_packet(audioCodecContext,packet);
+    assert(err=0);
+   end;
+   av_packet_free(packet);
+   Exit(True);
+  end;
+  packet:=av_packet_alloc;
+  if av_read_frame(formatContext,packet)<>0 then
+  begin
+   Exit(False);
+  end;
+  if id=packet^.stream_index then
+   thisQueue.Enqueue(packet)
+  else
+   thatQueue.Enqueue(packet)
+ end;
+end;
+
+function TAvPlayerState.ReceiveAudio:TMemChunk;
+var
+ err      :Integer;
+ frame    :PAVFrame;
+ i, j     :Integer;
+ fdata    :PSingle;
+ pcmSample:SmallInt;
+begin
+ Result:=Default(TMemChunk);
+ if (audioStreamId<0) or (not IsPlaying) then Exit;
+ frame:=av_frame_alloc;
+ Result.pData:=nil;
+ while True do
+ begin
+  err:=avcodec_receive_frame(audioCodecContext,frame);
+  if (err=AVERROR_EAGAIN) and (NextPacket(audioStreamId)) then
+   continue;
+  if err<>0 then
+  begin
+   source:='';
+   break;
+  end;
+  //
+  if frame^.format<>Integer(AV_SAMPLE_FMT_FLTP) then
+   Writeln('Unknown audio format: ',frame^.format);
+  channelCount:=frame^.channels;
+  sampleCount:=frame^.nb_samples;
+  sampleRate:=frame^.sample_rate;
+  Result.fSize:=sampleCount*channelCount*SizeOf(SmallInt);
+  GetMem(Result.pData,Result.fSize);
+  for i:=0 to sampleCount-1 do
+   for j:=0 to channelCount-1 do
+   begin
+    fdata:=PSingle(frame^.data[j]);
+    pcmSample:=Floor(fdata[i]*High(SmallInt));
+    PSmallInt(Result.pData)[i*channelCount+j]:=pcmSample;
+   end;
+  break;
+ end;
+ av_frame_free(frame);
+end;
+
+function TAvPlayerState.ReceiveVideo:TMemChunk;
+var
+ err  :Integer;
+ frame:PAVFrame;
+ i    :Integer;
+ p    :PByte;
+begin
+ Result:=Default(TMemChunk);
+ if (videoStreamId<0) or (not IsPlaying) then Exit;
+ frame:=av_frame_alloc;
+ Result.pData:=nil;
+ while True do
+ begin
+  err:=avcodec_receive_frame(videoCodecContext,frame);
+  if (err=AVERROR_EAGAIN) and (NextPacket(videoStreamId)) then
+   continue;
+  if err<>0 then
+  begin
+   source:='';
+   break; // nothing decoded, leave pData=nil just like ReceiveAudio
+  end;
+  //
+  lastTimeStamp:=frame^.best_effort_timestamp;
+  Result.fSize:=videoCodecContext^.width*videoCodecContext^.height*SizeOf(DWord);
+  GetMem(Result.pData,Result.fSize);
+
+  // Copy the Y plane and the two half-resolution chroma planes line by line
+  p:=Result.pData;
+  for i:=0 to frame^.height-1 do
+  begin
+   Move(frame^.data[0][frame^.linesize[0]*i],p[0],frame^.width);
+   p:=p+frame^.width;
+  end;
+  for i:=0 to frame^.height div 2-1 do
+  begin
+   Move(frame^.data[1][frame^.linesize[1]*i],p[0],frame^.width div 2);
+   p:=p+frame^.width div 2;
+  end;
+  for i:=0 to frame^.height div 2-1 do
+  begin
+   Move(frame^.data[2][frame^.linesize[2]*i],p[0],frame^.width div 2);
+   p:=p+frame^.width div 2;
+  end;
+  break;
+ end;
+ av_frame_free(frame);
+end;
+
+function TAvPlayerState.GetFramerate:QWord;
+var
+ rational:AVRational;
+begin
+ rational:=formatContext^.streams[videoStreamId]^.avg_frame_rate;
+ // Duration of a single frame in microseconds
+ Result:=Round(rational.den/rational.num * 1000000);
+end;
+
+function TAvPlayerState.IsPlaying:Boolean;
+begin
+ Result:=source<>'';
+end;
+
+function TAvPlayerState.Buffer(const aType:DWord;const chunk:TMemChunk):Pointer;
+var
+ playerInfo:PAvPlayerInfo;
+begin
+ playerInfo:=info;
+ if aType=0 then
+ begin
+  if
(chunk.pData<>nil) then + begin + if (audioBuffer[0]<>nil) then + begin + FreeMem(audioBuffer[0]); + end; + audioBuffer[0]:=chunk.pData; + end; + Exit(audioBuffer[0]); + end else + begin + if (chunk.pData<>nil) then + begin + if videoBuffer[0]=nil then + begin + videoBuffer[0]:=playerInfo^.memoryReplacement.allocateTexture(playerInfo^.memoryReplacement.objectPointer,0,chunk.fSize); + end; + Move(chunk.pData^,videoBuffer[0]^,chunk.fSize); + FreeMem(chunk.pData); + end; + Exit(videoBuffer[0]); + end; +end; + +function _test_mem_alloc(var m:SceAvPlayerMemAllocator):Boolean; inline; +begin + Result:=False; + if (m.allocate =nil) then Exit; + if (m.deallocate =nil) then Exit; + if (m.allocateTexture =nil) then Exit; + if (m.deallocateTexture=nil) then Exit; + Result:=True; +end; + +function _sceAvPlayerInit(pInit:PSceAvPlayerInitData):SceAvPlayerHandle; +begin + Result:=nil; + if (pInit=nil) then Exit; + + if not _test_mem_alloc(pInit^.memoryReplacement) then + begin + Writeln(SysLogPrefix,'All allocators are required for AVPlayer Initialisation.'); + Exit; + end; + + Writeln(SysLogPrefix,'sceAvPlayerInit'); + + New(Result); + Result^.playerState:=TAvPlayerState.Create; + Result^.playerState.info :=Result; + + Result^.memoryReplacement:=pInit^.memoryReplacement; + Result^.eventReplacement :=pInit^.eventReplacement; + Result^.fileReplacement :=pInit^.fileReplacement; + + Result^.lastFrameTime :=GetTimeInUs; +end; + +function ps4_sceAvPlayerInit(pInit:PSceAvPlayerInitData):SceAvPlayerHandle; SysV_ABI_CDecl; +begin + _sig_lock; + Result:=_sceAvPlayerInit(pInit); + _sig_unlock; +end; + +function _sceAvPlayerInitEx(pInit:PSceAvPlayerInitDataEx):SceAvPlayerHandle; +begin + Result:=nil; + if (pInit=nil) then Exit; + + if not _test_mem_alloc(pInit^.memoryReplacement) then + begin + Writeln(SysLogPrefix,'All allocators are required for AVPlayer Initialisation.'); + Exit; + end; + + Writeln(SysLogPrefix,'sceAvPlayerInitEx'); + + New(Result); + Result^.playerState:=TAvPlayerState.Create; + Result^.playerState.info :=Result; + + Result^.memoryReplacement:=pInit^.memoryReplacement; + Result^.eventReplacement :=pInit^.eventReplacement; + Result^.fileReplacement :=pInit^.fileReplacement; + + Result^.lastFrameTime :=GetTimeInUs; +end; + +function ps4_sceAvPlayerInitEx(pInit:PSceAvPlayerInitDataEx):SceAvPlayerHandle; SysV_ABI_CDecl; +begin + _sig_lock; + Result:=_sceAvPlayerInitEx(pInit); + _sig_unlock; +end; + +function ps4_sceAvPlayerPostInit(handle:SceAvPlayerHandle;pPostInit:PSceAvPlayerPostInitData):Integer; SysV_ABI_CDecl; +begin + Result:=-1; + if (handle=nil) or (pPostInit=nil) then Exit; + Writeln(SysLogPrefix,'sceAvPlayerPostInit'); + Result:=0; +end; + +function _sceAvPlayerAddSource(handle:SceAvPlayerHandle;argFilename:PChar):Integer; +const + BUF_SIZE=512*1024; +var + fileSize, + bytesRemaining, + offset :QWord; + bytesRead :Integer; + actualBufSize :QWord; + buf :array[0..BUF_SIZE-1] of Byte; + p :Pointer; + f :THandle; + source :RawByteString; +begin + Writeln(SysLogPrefix,'sceAvPlayerAddSource:',argFilename); + spin_lock(lock); + // With file functions provided by client + if (handle<>nil) and (handle^.fileReplacement.open<>nil) and (handle^.fileReplacement.close<>nil) + and (handle^.fileReplacement.readOffset<>nil) and (handle^.fileReplacement.size<>nil) then + begin + p:=handle^.fileReplacement.objectPointer; + if handle^.fileReplacement.open(p,argFilename)<0 then + begin + spin_unlock(lock); + Exit(-1); + end; + fileSize:=handle^.fileReplacement.size(p); + if (fileSize=0) then //result is 
uint64 + begin + spin_unlock(lock); + Exit(-1); + end; + // Read data and write to dump directory + CreateDir(DIRECTORY_AVPLAYER_DUMP); + // + source:=DIRECTORY_AVPLAYER_DUMP+'/'+ExtractFileName(argFilename); + f:=FileCreate(source,fmOpenWrite); + // + bytesRemaining:=fileSize; + offset:=0; + while bytesRemaining>0 do + begin + actualBufSize:=Min(QWORD(BUF_SIZE),bytesRemaining); + bytesRead:=handle^.fileReplacement.readOffset(p,@buf[0],offset,actualBufSize); + if bytesRead<0 then + begin + handle^.fileReplacement.close(p); + spin_unlock(lock); + Exit(-1); + end; + FileWrite(f,buf,actualBufSize); + Dec(bytesRemaining,actualBufSize); + Inc(offset,actualBufSize); + end; + FileClose(f); + handle^.fileReplacement.close(p); + // Init player + handle^.playerState.CreateMedia(source); + Result:=0; + end else + // Without client-side file functions + begin + source:=''; + Result:=parse_filename(argFilename,source); + if (Result=PT_FILE) then //only real files + begin + handle^.playerState.CreateMedia(source); + Result:=0; + end else + begin + Result:=-1; + end; + end; + spin_unlock(lock); +end; + +function ps4_sceAvPlayerAddSource(handle:SceAvPlayerHandle;argFilename:PChar):Integer; SysV_ABI_CDecl; +begin + _sig_lock; + Result:=_sceAvPlayerAddSource(handle,argFilename); + _sig_unlock; +end; + +function ps4_sceAvPlayerIsActive(handle:SceAvPlayerHandle): Boolean; SysV_ABI_CDecl; +begin + //Writeln(SysLogPrefix,'sceAvPlayerIsActive'); + if (handle=nil) or (not handle^.playerState.IsPlaying) then + Exit(False); + Exit(True); +end; + +function ps4_sceAvPlayerSetLooping(handle:SceAvPlayerHandle;loopFlag:Boolean):DWord; SysV_ABI_CDecl; +begin + Writeln(SysLogPrefix,'sceAvPlayerSetLooping'); + Result:=0; + handle^.isLooped:=loopFlag; +end; + +function _sceAvPlayerGetAudioData(handle:SceAvPlayerHandle;frameInfo:PSceAvPlayerFrameInfo):Boolean; +var + audioData:TMemChunk; +begin + //Writeln(SysLogPrefix,'sceAvPlayerGetAudioData'); + Result:=False; + if (frameInfo<>nil) and (handle<>nil) and (handle^.playerState.IsPlaying) and (not handle^.isPaused) then + begin + audioData:=Default(TMemChunk); + spin_lock(lock); + audioData:=handle^.playerState.ReceiveAudio; + if (audioData.pData=nil) then + begin + spin_unlock(lock); + Exit(False); + end; + frameInfo^.timeStamp:=_usec2msec(handle^.playerState.lastTimeStamp); + frameInfo^.details.audio.channelCount:=handle^.playerState.channelCount; + frameInfo^.details.audio.sampleRate:=handle^.playerState.sampleRate; + frameInfo^.details.audio.size:=handle^.playerState.channelCount*handle^.playerState.sampleCount*SizeOf(SmallInt); + frameInfo^.pData:=handle^.playerState.Buffer(0,audioData); + spin_unlock(lock); + Result:=True; + end; +end; + +function ps4_sceAvPlayerGetAudioData(handle:SceAvPlayerHandle;frameInfo:PSceAvPlayerFrameInfo):Boolean; SysV_ABI_CDecl; +begin + _sig_lock; + Result:=_sceAvPlayerGetAudioData(handle,frameInfo); + _sig_unlock; +end; + +function _sceAvPlayerGetVideoDataEx(handle:SceAvPlayerHandle;frameInfo:PSceAvPlayerFrameInfoEx):Boolean; +var + videoData:TMemChunk; +begin + //Writeln(SysLogPrefix,'sceAvPlayerGetVideoDataEx'); + Result:=False; + if (frameInfo<>nil) and (handle<>nil) and (handle^.playerState.IsPlaying) then + begin + videoData:=Default(TMemChunk); + spin_lock(lock); + if handle^.lastFrameTime+handle^.playerState.GetFrameratenil) then + begin + handle^.playerState.Free; + end; + Dispose(handle); + + Result:=0; +end; + +function ps4_sceAvPlayerClose(handle:SceAvPlayerHandle):Integer; SysV_ABI_CDecl; +begin + _sig_lock; + 
Result:=_sceAvPlayerClose(handle); + _sig_unlock; +end; + +function Load_libSceAvPlayer(Const name:RawByteString):TElf_node; +var + lib:PLIBRARY; +begin + Result:=TElf_node.Create; + Result.pFileName:=name; + + lib:=Result._add_lib('libSceAvPlayer'); + + lib^.set_proc($692EBA448D201A0A,@ps4_sceAvPlayerInit); + lib^.set_proc($A3D79646448BF8CE,@ps4_sceAvPlayerInitEx); + lib^.set_proc($1C3D58295536EBF3,@ps4_sceAvPlayerPostInit); + lib^.set_proc($28C7046BEAC7B08A,@ps4_sceAvPlayerAddSource); + lib^.set_proc($51B42861AC0EB1F6,@ps4_sceAvPlayerIsActive); + lib^.set_proc($395B61B34C467E1A,@ps4_sceAvPlayerSetLooping); + lib^.set_proc($5A7A7539572B6609,@ps4_sceAvPlayerGetAudioData); + lib^.set_proc($25D92C42EF2935D4,@ps4_sceAvPlayerGetVideoDataEx); + lib^.set_proc($642D7BC37BC1E4BA,@ps4_sceAvPlayerStop); + lib^.set_proc($3642700F32A6225C,@ps4_sceAvPlayerClose); +end; + +initialization + ps4_app.RegistredPreLoad('libSceAvPlayer.prx',@Load_libSceAvPlayer); + +end. + diff --git a/sys/sys_time.pas b/sys/sys_time.pas index 958b70d..bf39fd2 100644 --- a/sys/sys_time.pas +++ b/sys/sys_time.pas @@ -34,6 +34,7 @@ function SwGetThreadTime(var ut:QWORD):Boolean; procedure SwGetSystemTimeAsFileTime(var lpSystemTimeAsFileTime:TFILETIME); procedure Swgettimezone(z:Ptimezone); function Swgetntptimeofday(tp:Ptimespec;z:Ptimezone):Integer; +function SwGetTimeUsec:QWORD; Const FILETIME_1970 =116444736000000000; @@ -279,6 +280,25 @@ begin Result:=0; end; +function SwGetTimeUsec:QWORD; +var + pc,pf:QWORD; + + DW0,DW1:QWORD; +begin + pc:=0; + pf:=1; + _sig_lock; + NtQueryPerformanceCounter(@pc,@pf); + _sig_unlock; + + //DW0*1000000/pf + SHL_32* DW1*1000000/pf + + DW0:=(DWORD(pc shr 00)*1000000) div pf; + DW1:=(DWORD(pc shr 32)*1000000) div pf; + + Result:=DW0+(DW1 shl 32); +end; end. diff --git a/vulkan/vHostBufferManager.pas b/vulkan/vHostBufferManager.pas index cb48045..ade515f 100644 --- a/vulkan/vHostBufferManager.pas +++ b/vulkan/vHostBufferManager.pas @@ -263,7 +263,7 @@ begin Assert(t<>nil,'create sparse buffer fail'); end; else - Assert(false,'_is_sparce'); + Assert(false,'Is not GPU Addr:'+HexStr(Addr)); end; t.FAddr:=addr; //save key
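A note on the SwGetTimeUsec hunk above: multiplying the raw performance counter by 1,000,000 can overflow 64 bits, so the counter is split into its 32-bit halves, each half is scaled to microseconds separately, and the high half is shifted back up. The standalone sketch below illustrates the same arithmetic; the helper name and the pf=0 guard are hypothetical additions, not part of the patch.

// Hypothetical standalone illustration of the SwGetTimeUsec arithmetic:
// pc = (hi shl 32) + lo, so pc*1000000/pf is approximated as
// lo*1000000/pf + ((hi*1000000/pf) shl 32), keeping every product below 2^64.
function CounterToUsec(pc,pf:QWORD):QWORD;
var
 lo,hi:QWORD;
begin
 if pf=0 then Exit(0);                           // guard against an unset frequency
 lo:=(QWORD(DWORD(pc        ))*1000000) div pf;  // low 32 bits scaled to microseconds
 hi:=(QWORD(DWORD(pc shr 32))*1000000) div pf;   // high 32 bits scaled to microseconds
 Result:=lo+(hi shl 32);
end;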