/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"
#include "config_components.h"

#include "libavutil/avassert.h"
#include "libavutil/imgutils.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_amf.h"
#include "libavutil/hwcontext_amf_internal.h"
#if CONFIG_D3D11VA
#include "libavutil/hwcontext_d3d11va.h"
#endif
#if CONFIG_DXVA2
#define COBJMACROS
#include "libavutil/hwcontext_dxva2.h"
#endif
#include "libavutil/mem.h"
#include "libavutil/pixdesc.h"
#include "libavutil/time.h"

#include "amfenc.h"
#include "encode.h"
#include "internal.h"
#include "libavutil/mastering_display_metadata.h"

#define AMF_AV_FRAME_REF L"av_frame_ref"
#define PTS_PROP         L"PtsProp"
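
// Fill an AMFHDRMetadata struct from the mastering-display and content-light-level
// side data attached to the frame. Returns 0 if mastering-display metadata was found,
// 1 otherwise.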
static int amf_save_hdr_metadata(AVCodecContext *avctx, const AVFrame *frame, AMFHDRMetadata *hdrmeta)
{
    AVFrameSideData            *sd_display;
    AVFrameSideData            *sd_light;
    AVMasteringDisplayMetadata *display_meta;
    AVContentLightMetadata     *light_meta;

    sd_display = av_frame_get_side_data(frame, AV_FRAME_DATA_MASTERING_DISPLAY_METADATA);
    if (sd_display) {
        display_meta = (AVMasteringDisplayMetadata *)sd_display->data;

        if (display_meta->has_luminance) {
            const unsigned int luma_den = 10000;
            hdrmeta->maxMasteringLuminance =
                (amf_uint32)(luma_den * av_q2d(display_meta->max_luminance));
            hdrmeta->minMasteringLuminance =
                FFMIN((amf_uint32)(luma_den * av_q2d(display_meta->min_luminance)), hdrmeta->maxMasteringLuminance);
        }

        if (display_meta->has_primaries) {
            const unsigned int chroma_den = 50000;
            hdrmeta->redPrimary[0] =
                FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[0][0])), chroma_den);
            hdrmeta->redPrimary[1] =
                FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[0][1])), chroma_den);
            hdrmeta->greenPrimary[0] =
                FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[1][0])), chroma_den);
            hdrmeta->greenPrimary[1] =
                FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[1][1])), chroma_den);
            hdrmeta->bluePrimary[0] =
                FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[2][0])), chroma_den);
            hdrmeta->bluePrimary[1] =
                FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[2][1])), chroma_den);
            hdrmeta->whitePoint[0] =
                FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->white_point[0])), chroma_den);
            hdrmeta->whitePoint[1] =
                FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->white_point[1])), chroma_den);
        }

        sd_light = av_frame_get_side_data(frame, AV_FRAME_DATA_CONTENT_LIGHT_LEVEL);
        if (sd_light) {
            light_meta = (AVContentLightMetadata *)sd_light->data;
            if (light_meta) {
                hdrmeta->maxContentLightLevel = (amf_uint16)light_meta->MaxCLL;
                hdrmeta->maxFrameAverageLightLevel = (amf_uint16)light_meta->MaxFALL;
            }
        }
        return 0;
    }
    return 1;
}

#if CONFIG_D3D11VA
#include <d3d11.h>
#endif

#ifdef _WIN32
#include "compat/w32dlfcn.h"
#else
#include <dlfcn.h>
#endif

#define FFMPEG_AMF_WRITER_ID L"ffmpeg_amf"

const enum AVPixelFormat ff_amf_pix_fmts[] = {
    AV_PIX_FMT_NV12,
    AV_PIX_FMT_YUV420P,
#if CONFIG_D3D11VA
    AV_PIX_FMT_D3D11,
#endif
#if CONFIG_DXVA2
    AV_PIX_FMT_DXVA2_VLD,
#endif
    AV_PIX_FMT_P010,
    AV_PIX_FMT_AMF_SURFACE,
    AV_PIX_FMT_BGR0,
    AV_PIX_FMT_RGB0,
    AV_PIX_FMT_BGRA,
    AV_PIX_FMT_ARGB,
    AV_PIX_FMT_RGBA,
    AV_PIX_FMT_X2BGR10,
    AV_PIX_FMT_RGBAF16,
    AV_PIX_FMT_NONE
};

static int64_t next_encoder_index = 0;
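
// Create the AMF encoder component for the codec selected on avctx and build the
// per-encoder property names used to tag submitted surfaces (pts and AVFrame reference).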
static int amf_init_encoder(AVCodecContext *avctx)
{
    AMFEncoderContext  *ctx = avctx->priv_data;
    const wchar_t      *codec_id = NULL;
    AMF_RESULT          res;
    enum AVPixelFormat  pix_fmt;
    AVHWDeviceContext  *hw_device_ctx = (AVHWDeviceContext*)ctx->device_ctx_ref->data;
    AVAMFDeviceContext *amf_device_ctx = (AVAMFDeviceContext *)hw_device_ctx->hwctx;
    int                 alloc_size;
    wchar_t             name[512];

    alloc_size = swprintf(name, amf_countof(name), L"%s%lld", PTS_PROP, next_encoder_index) + 1;
    ctx->pts_property_name = av_memdup(name, alloc_size * sizeof(wchar_t));
    if (!ctx->pts_property_name)
        return AVERROR(ENOMEM);

    alloc_size = swprintf(name, amf_countof(name), L"%s%lld", AMF_AV_FRAME_REF, next_encoder_index) + 1;
    ctx->av_frame_property_name = av_memdup(name, alloc_size * sizeof(wchar_t));
    if (!ctx->av_frame_property_name)
        return AVERROR(ENOMEM);

    next_encoder_index++;

    switch (avctx->codec->id) {
    case AV_CODEC_ID_H264:
        codec_id = AMFVideoEncoderVCE_AVC;
        break;
    case AV_CODEC_ID_HEVC:
        codec_id = AMFVideoEncoder_HEVC;
        break;
    case AV_CODEC_ID_AV1:
        codec_id = AMFVideoEncoder_AV1;
        break;
    default:
        break;
    }
    AMF_RETURN_IF_FALSE(ctx, codec_id != NULL, AVERROR(EINVAL), "Codec %d is not supported\n", avctx->codec->id);

    if (avctx->hw_frames_ctx)
        pix_fmt = ((AVHWFramesContext*)avctx->hw_frames_ctx->data)->sw_format;
    else
        pix_fmt = avctx->pix_fmt;

    if (pix_fmt == AV_PIX_FMT_P010) {
        AMF_RETURN_IF_FALSE(ctx, amf_device_ctx->version >= AMF_MAKE_FULL_VERSION(1, 4, 32, 0), AVERROR_UNKNOWN, "10-bit encoding is not supported by AMD GPU driver versions lower than 23.30.\n");
    }

    ctx->format = av_av_to_amf_format(pix_fmt);
    AMF_RETURN_IF_FALSE(ctx, ctx->format != AMF_SURFACE_UNKNOWN, AVERROR(EINVAL),
                        "Format %s is not supported\n", av_get_pix_fmt_name(pix_fmt));

    res = amf_device_ctx->factory->pVtbl->CreateComponent(amf_device_ctx->factory, amf_device_ctx->context, codec_id, &ctx->encoder);
    AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_ENCODER_NOT_FOUND, "CreateComponent(%ls) failed with error %d\n", codec_id, res);

    ctx->submitted_frame = 0;
    ctx->encoded_frame = 0;
    ctx->eof = 0;

    return 0;
}
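
// Free the encoder component, any queued output buffers, the timestamp FIFO and the
// device context reference.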
int av_cold ff_amf_encode_close(AVCodecContext *avctx)
{
    AMFEncoderContext *ctx = avctx->priv_data;

    if (ctx->encoder) {
        ctx->encoder->pVtbl->Terminate(ctx->encoder);
        ctx->encoder->pVtbl->Release(ctx->encoder);
        ctx->encoder = NULL;
    }

    av_buffer_unref(&ctx->device_ctx_ref);
    av_fifo_freep2(&ctx->timestamp_list);

    if (ctx->output_list) {
        // release remaining AMF output buffers
        while (av_fifo_can_read(ctx->output_list)) {
            AMFBuffer *buffer = NULL;
            av_fifo_read(ctx->output_list, &buffer, 1);
            if (buffer != NULL)
                buffer->pVtbl->Release(buffer);
        }
        av_fifo_freep2(&ctx->output_list);
    }

    av_freep(&ctx->pts_property_name);
    av_freep(&ctx->av_frame_property_name);

    return 0;
}
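
// Copy the planes of a system-memory AVFrame into a host AMFSurface.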
static int amf_copy_surface(AVCodecContext *avctx, const AVFrame *frame,
                            AMFSurface *surface)
{
    AMFPlane *plane;
    uint8_t  *dst_data[4] = {0};
    int       dst_linesize[4] = {0};
    int       planes;
    int       i;

    planes = (int)surface->pVtbl->GetPlanesCount(surface);
    av_assert0(planes < FF_ARRAY_ELEMS(dst_data));

    for (i = 0; i < planes; i++) {
        plane = surface->pVtbl->GetPlaneAt(surface, i);
        dst_data[i] = plane->pVtbl->GetNative(plane);
        dst_linesize[i] = plane->pVtbl->GetHPitch(plane);
    }
    av_image_copy2(dst_data, dst_linesize,
                   frame->data, frame->linesize, frame->format,
                   avctx->width, avctx->height);

    return 0;
}
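
// Copy an encoded AMFBuffer into an AVPacket, flag keyframes based on the
// codec-specific output type property and restore pts/dts (computing the dts
// shift once, when B-frames or adaptive mini-GOP are in use).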
static int amf_copy_buffer(AVCodecContext *avctx, AVPacket *pkt, AMFBuffer *buffer)
{
    AMFEncoderContext *ctx = avctx->priv_data;
    int                ret;
    AMFVariantStruct   var = {0};
    int64_t            timestamp = AV_NOPTS_VALUE;
    int64_t            size = buffer->pVtbl->GetSize(buffer);

    if ((ret = ff_get_encode_buffer(avctx, pkt, size, 0)) < 0) {
        return ret;
    }
    memcpy(pkt->data, buffer->pVtbl->GetNative(buffer), size);

    switch (avctx->codec->id) {
    case AV_CODEC_ID_H264:
        buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE, &var);
        if (var.int64Value == AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE_IDR) {
            pkt->flags = AV_PKT_FLAG_KEY;
        }
        break;
    case AV_CODEC_ID_HEVC:
        buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE, &var);
        if (var.int64Value == AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE_IDR) {
            pkt->flags = AV_PKT_FLAG_KEY;
        }
        break;
    case AV_CODEC_ID_AV1:
        buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_AV1_OUTPUT_FRAME_TYPE, &var);
        if (var.int64Value == AMF_VIDEO_ENCODER_AV1_OUTPUT_FRAME_TYPE_KEY) {
            pkt->flags = AV_PKT_FLAG_KEY;
        }
    default:
        break;
    }

    buffer->pVtbl->GetProperty(buffer, ctx->pts_property_name, &var);
    pkt->pts = var.int64Value; // original pts

    AMF_RETURN_IF_FALSE(ctx, av_fifo_read(ctx->timestamp_list, &timestamp, 1) >= 0,
                        AVERROR_UNKNOWN, "timestamp_list is empty\n");

    // calc dts shift if max_b_frames > 0
    if ((ctx->max_b_frames > 0 || ctx->pa_adaptive_mini_gop == 1) && ctx->dts_delay == 0) {
        int64_t timestamp_last = AV_NOPTS_VALUE;
        size_t  can_read = av_fifo_can_read(ctx->timestamp_list);

        AMF_RETURN_IF_FALSE(ctx, can_read > 0, AVERROR_UNKNOWN,
                            "timestamp_list is empty while max_b_frames = %d\n", avctx->max_b_frames);
        av_fifo_peek(ctx->timestamp_list, &timestamp_last, 1, can_read - 1);
        if (timestamp < 0 || timestamp_last < AV_NOPTS_VALUE) {
            return AVERROR(ERANGE);
        }
        ctx->dts_delay = timestamp_last - timestamp;
    }
    pkt->dts = timestamp - ctx->dts_delay;

    return 0;
}

// amfenc API implementation
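
// Common encoder init: allocate the timestamp and output FIFOs, obtain an AMF device
// context (reusing or deriving from the supplied hw device/frames context, or creating
// a new one) and create the encoder component.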
int ff_amf_encode_init(AVCodecContext *avctx)
{
    int ret;
    AMFEncoderContext *ctx = avctx->priv_data;
    AVHWDeviceContext *hwdev_ctx = NULL;

    // hardcoded to current HW queue size - will auto-realloc if too small
    ctx->timestamp_list = av_fifo_alloc2(avctx->max_b_frames + 16, sizeof(int64_t),
                                         AV_FIFO_FLAG_AUTO_GROW);
    if (!ctx->timestamp_list) {
        return AVERROR(ENOMEM);
    }
    ctx->output_list = av_fifo_alloc2(2, sizeof(AMFBuffer*), AV_FIFO_FLAG_AUTO_GROW);
    if (!ctx->output_list)
        return AVERROR(ENOMEM);

    ctx->dts_delay = 0;
    ctx->hwsurfaces_in_queue = 0;

    if (avctx->hw_device_ctx) {
        hwdev_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;
        if (hwdev_ctx->type == AV_HWDEVICE_TYPE_AMF) {
            ctx->device_ctx_ref = av_buffer_ref(avctx->hw_device_ctx);
        } else {
            ret = av_hwdevice_ctx_create_derived(&ctx->device_ctx_ref, AV_HWDEVICE_TYPE_AMF, avctx->hw_device_ctx, 0);
            AMF_RETURN_IF_FALSE(ctx, ret == 0, ret, "Failed to create derived AMF device context: %s\n", av_err2str(ret));
        }
    } else if (avctx->hw_frames_ctx) {
        AVHWFramesContext *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
        if (frames_ctx->device_ref) {
            if (frames_ctx->format == AV_PIX_FMT_AMF_SURFACE) {
                ctx->device_ctx_ref = av_buffer_ref(frames_ctx->device_ref);
            } else {
                ret = av_hwdevice_ctx_create_derived(&ctx->device_ctx_ref, AV_HWDEVICE_TYPE_AMF, frames_ctx->device_ref, 0);
                AMF_RETURN_IF_FALSE(ctx, ret == 0, ret, "Failed to create derived AMF device context: %s\n", av_err2str(ret));
            }
        }
    } else {
        ret = av_hwdevice_ctx_create(&ctx->device_ctx_ref, AV_HWDEVICE_TYPE_AMF, NULL, NULL, 0);
        AMF_RETURN_IF_FALSE(ctx, ret == 0, ret, "Failed to create hardware device context (AMF) : %s\n", av_err2str(ret));
    }

    if ((ret = amf_init_encoder(avctx)) == 0) {
        return 0;
    }

    ff_amf_encode_close(avctx);
    return ret;
}
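
// Attach an AMFBuffer to an AMF object as an interface-typed property
// (used below to pass HDR metadata along with a surface).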
static AMF_RESULT amf_set_property_buffer(AMFSurface *object, const wchar_t *name, AMFBuffer *val)
{
    AMF_RESULT res;
    AMFVariantStruct var;

    res = AMFVariantInit(&var);
    if (res == AMF_OK) {
        AMFGuid guid_AMFInterface = IID_AMFInterface();
        AMFInterface *amf_interface;
        res = val->pVtbl->QueryInterface(val, &guid_AMFInterface, (void**)&amf_interface);
        if (res == AMF_OK) {
            res = AMFVariantAssignInterface(&var, amf_interface);
            amf_interface->pVtbl->Release(amf_interface);
        }
        if (res == AMF_OK) {
            res = object->pVtbl->SetProperty(object, name, var);
        }
        AMFVariantClear(&var);
    }
    return res;
}
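
// Keep the submitted AVFrame alive while AMF still holds the underlying HW surface:
// a clone of the frame is stored as a pointer-sized int64 property on the surface.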
static AMF_RESULT amf_store_attached_frame_ref(AMFEncoderContext *ctx, const AVFrame *frame, AMFSurface *surface)
{
    AMF_RESULT res = AMF_FAIL;
    int64_t data;
    AVFrame *frame_ref = av_frame_clone(frame);
    if (frame_ref) {
        memcpy(&data, &frame_ref, sizeof(frame_ref)); // store the pointer in 8 bytes
        AMF_ASSIGN_PROPERTY_INT64(res, surface, ctx->av_frame_property_name, data);
    }
    return res;
}
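
// Release the AVFrame reference previously stored on the input surface once the
// corresponding output buffer has been produced.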
static AMF_RESULT amf_release_attached_frame_ref(AMFEncoderContext *ctx, AMFBuffer *buffer)
{
    AMFVariantStruct var = {0};
    AMF_RESULT res = buffer->pVtbl->GetProperty(buffer, ctx->av_frame_property_name, &var);
    if (res == AMF_OK && var.int64Value) {
        AVFrame *frame_ref;
        memcpy(&frame_ref, &var.int64Value, sizeof(frame_ref));
        av_frame_free(&frame_ref);
    }
    return res;
}
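
// Wrap (or copy) an AVFrame into an AMFSurface, attach per-frame properties
// (pts, AUD/picture-type overrides, HDR metadata) and submit it to the encoder.
// If the input queue is full, the surface is handed back via surface_resubmit
// so the caller can retry after draining some output.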
static int amf_submit_frame(AVCodecContext *avctx, AVFrame *frame, AMFSurface **surface_resubmit)
{
    AMFEncoderContext  *ctx = avctx->priv_data;
    AVHWDeviceContext  *hw_device_ctx = (AVHWDeviceContext*)ctx->device_ctx_ref->data;
    AVAMFDeviceContext *amf_device_ctx = (AVAMFDeviceContext *)hw_device_ctx->hwctx;
    AMFSurface         *surface;
    AMF_RESULT          res;
    int                 ret;
    int                 hw_surface = 0;
    int                 output_delay = FFMAX(ctx->max_b_frames, 0) + ((avctx->flags & AV_CODEC_FLAG_LOW_DELAY) ? 0 : 1);

    // prepare surface from frame
    switch (frame->format) {
#if CONFIG_D3D11VA
    case AV_PIX_FMT_D3D11:
        {
            static const GUID AMFTextureArrayIndexGUID = { 0x28115527, 0xe7c3, 0x4b66, { 0x99, 0xd3, 0x4f, 0x2a, 0xe6, 0xb4, 0x7f, 0xaf } };
            ID3D11Texture2D *texture = (ID3D11Texture2D*)frame->data[0]; // actual texture
            int index = (intptr_t)frame->data[1]; // index of the slice in the texture array - tells AMF which slice to use

            av_assert0(frame->hw_frames_ctx && avctx->hw_frames_ctx &&
                       frame->hw_frames_ctx->data == avctx->hw_frames_ctx->data);

            texture->lpVtbl->SetPrivateData(texture, &AMFTextureArrayIndexGUID, sizeof(index), &index);

            res = amf_device_ctx->context->pVtbl->CreateSurfaceFromDX11Native(amf_device_ctx->context, texture, &surface, NULL); // wrap to AMF surface
            AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX11Native() failed with error %d\n", res);

            hw_surface = 1;
        }
        break;
#endif
#if CONFIG_DXVA2
    case AV_PIX_FMT_DXVA2_VLD:
        {
            IDirect3DSurface9 *texture = (IDirect3DSurface9 *)frame->data[3]; // actual texture

            res = amf_device_ctx->context->pVtbl->CreateSurfaceFromDX9Native(amf_device_ctx->context, texture, &surface, NULL); // wrap to AMF surface
            AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX9Native() failed with error %d\n", res);

            hw_surface = 1;
        }
        break;
#endif
    case AV_PIX_FMT_AMF_SURFACE:
        {
            surface = (AMFSurface*)frame->data[0];
            surface->pVtbl->Acquire(surface);
            hw_surface = 1;
        }
        break;
    default:
        {
            res = amf_device_ctx->context->pVtbl->AllocSurface(amf_device_ctx->context, AMF_MEMORY_HOST, ctx->format, avctx->width, avctx->height, &surface);
            AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "AllocSurface() failed with error %d\n", res);
            amf_copy_surface(avctx, frame, surface);
        }
        break;
    }

    if (hw_surface) {
        amf_store_attached_frame_ref(ctx, frame, surface);
        ctx->hwsurfaces_in_queue++;
        // input HW surfaces can be vertically aligned by 16; tell AMF the real size
        surface->pVtbl->SetCrop(surface, 0, 0, frame->width, frame->height);
    }

    // HDR10 metadata
    if (frame->color_trc == AVCOL_TRC_SMPTE2084) {
        AMFBuffer *hdrmeta_buffer = NULL;
        res = amf_device_ctx->context->pVtbl->AllocBuffer(amf_device_ctx->context, AMF_MEMORY_HOST, sizeof(AMFHDRMetadata), &hdrmeta_buffer);
        if (res == AMF_OK) {
            AMFHDRMetadata *hdrmeta = (AMFHDRMetadata*)hdrmeta_buffer->pVtbl->GetNative(hdrmeta_buffer);
            if (amf_save_hdr_metadata(avctx, frame, hdrmeta) == 0) {
                switch (avctx->codec->id) {
                case AV_CODEC_ID_H264:
                    AMF_ASSIGN_PROPERTY_INTERFACE(res, ctx->encoder, AMF_VIDEO_ENCODER_INPUT_HDR_METADATA, hdrmeta_buffer); break;
                case AV_CODEC_ID_HEVC:
                    AMF_ASSIGN_PROPERTY_INTERFACE(res, ctx->encoder, AMF_VIDEO_ENCODER_HEVC_INPUT_HDR_METADATA, hdrmeta_buffer); break;
                case AV_CODEC_ID_AV1:
                    AMF_ASSIGN_PROPERTY_INTERFACE(res, ctx->encoder, AMF_VIDEO_ENCODER_AV1_INPUT_HDR_METADATA, hdrmeta_buffer); break;
                }
                res = amf_set_property_buffer(surface, L"av_frame_hdrmeta", hdrmeta_buffer);
                AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SetProperty failed for \"av_frame_hdrmeta\" with error %d\n", res);
            }
            hdrmeta_buffer->pVtbl->Release(hdrmeta_buffer);
        }
    }

    surface->pVtbl->SetPts(surface, frame->pts);
    AMF_ASSIGN_PROPERTY_INT64(res, surface, ctx->pts_property_name, frame->pts);

    switch (avctx->codec->id) {
    case AV_CODEC_ID_H264:
        AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_AUD, !!ctx->aud);
        switch (frame->pict_type) {
        case AV_PICTURE_TYPE_I:
            if (ctx->forced_idr) {
                AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_SPS, 1);
                AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_PPS, 1);
                AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_IDR);
            } else {
                AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_I);
            }
            break;
        case AV_PICTURE_TYPE_P:
            AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_P);
            break;
        case AV_PICTURE_TYPE_B:
            AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_B);
            break;
        }
        break;
    case AV_CODEC_ID_HEVC:
        AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_INSERT_AUD, !!ctx->aud);
        switch (frame->pict_type) {
        case AV_PICTURE_TYPE_I:
            if (ctx->forced_idr) {
                AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_INSERT_HEADER, 1);
                AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_HEVC_PICTURE_TYPE_IDR);
            } else {
                AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_HEVC_PICTURE_TYPE_I);
            }
            break;
        case AV_PICTURE_TYPE_P:
            AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_HEVC_PICTURE_TYPE_P);
            break;
        }
        break;
    case AV_CODEC_ID_AV1:
        if (frame->pict_type == AV_PICTURE_TYPE_I) {
            if (ctx->forced_idr) {
                AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_AV1_FORCE_INSERT_SEQUENCE_HEADER, 1);
                AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE_KEY);
            } else {
                AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE_INTRA_ONLY);
            }
        }
        break;
    default:
        break;
    }

    // submit surface
    res = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)surface);

    if (res == AMF_INPUT_FULL) { // handle full queue
        // store surface for later submission
        *surface_resubmit = surface;
    } else {
        surface->pVtbl->Release(surface);
        AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SubmitInput() failed with error %d\n", res);

        ctx->submitted_frame++;
        ret = av_fifo_write(ctx->timestamp_list, &frame->pts, 1);
        if (ret < 0)
            return ret;
        if (ctx->submitted_frame <= ctx->encoded_frame + output_delay)
            return AVERROR(EAGAIN); // too soon to poll or wait
    }
    return 0;
}
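
// Same as amf_submit_frame(), but serialized against other users of the shared
// AMF device context.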
static int amf_submit_frame_locked(AVCodecContext *avctx, AVFrame *frame, AMFSurface **surface_resubmit)
{
    int ret;
    AMFEncoderContext  *ctx = avctx->priv_data;
    AVHWDeviceContext  *hw_device_ctx = (AVHWDeviceContext*)ctx->device_ctx_ref->data;
    AVAMFDeviceContext *amf_device_ctx = (AVAMFDeviceContext *)hw_device_ctx->hwctx;

    ff_mutex_lock(&amf_device_ctx->mutex);
    ret = amf_submit_frame(avctx, frame, surface_resubmit);
    ff_mutex_unlock(&amf_device_ctx->mutex);
    return ret;
}
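
// Poll the encoder for one output buffer. If one is returned, release the AVFrame
// reference carried on it (propagated from the input surface) and bump the
// encoded-frame counter.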
static AMF_RESULT amf_query_output(AVCodecContext *avctx, AMFBuffer **buffer)
{
    AMFEncoderContext *ctx = avctx->priv_data;
    AMFData *data = NULL;
    AMF_RESULT ret = ctx->encoder->pVtbl->QueryOutput(ctx->encoder, &data);

    *buffer = NULL;
    if (data) {
        AMFGuid guid = IID_AMFBuffer();
        data->pVtbl->QueryInterface(data, &guid, (void**)buffer); // query for buffer interface
        data->pVtbl->Release(data);
        if (amf_release_attached_frame_ref(ctx, *buffer) == AMF_OK)
            ctx->hwsurfaces_in_queue--;
        ctx->encoded_frame++;
    }
    return ret;
}
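
// receive_packet() callback: return a previously queued output if one is pending,
// otherwise pull the next input frame, submit it (or a drain request) to AMF and
// poll for encoded output, blocking only when the pipeline has to make progress.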
int ff_amf_receive_packet(AVCodecContext *avctx, AVPacket *avpkt)
{
    AMFEncoderContext *ctx = avctx->priv_data;
    AMFSurface *surface = NULL;
    AMF_RESULT  res;
    int         ret;
    AMF_RESULT  res_query;
    AMFBuffer  *buffer = NULL;
    AVFrame    *frame = av_frame_alloc();
    int         block_and_wait;
    int64_t     pts = 0;
    int         output_delay = FFMAX(ctx->max_b_frames, 0) + ((avctx->flags & AV_CODEC_FLAG_LOW_DELAY) ? 0 : 1);

    if (!frame)
        return AVERROR(ENOMEM);
    if (!ctx->encoder) {
        av_frame_free(&frame);
        return AVERROR(EINVAL);
    }

    // check if some outputs are available
    av_fifo_read(ctx->output_list, &buffer, 1);
    if (buffer != NULL) { // return an already retrieved output
        av_frame_free(&frame);
        ret = amf_copy_buffer(avctx, avpkt, buffer);
        buffer->pVtbl->Release(buffer);
        return ret;
    }

    ret = ff_encode_get_frame(avctx, frame);
    if (ret < 0) {
        if (ret != AVERROR_EOF) {
            av_frame_free(&frame);
            if (ret != AVERROR(EAGAIN))
                return ret;
            if (ctx->submitted_frame <= ctx->encoded_frame + output_delay) // too soon to poll
                return ret;
        }
    }
    if (ret != AVERROR(EAGAIN)) {
        if (!frame->buf[0]) { // submit drain
            if (!ctx->eof) { // submit drain one time only
                if (!ctx->delayed_drain) {
                    res = ctx->encoder->pVtbl->Drain(ctx->encoder);
                    if (res == AMF_INPUT_FULL) {
                        ctx->delayed_drain = 1; // input queue is full: resubmit Drain() in receive loop
                    } else {
                        if (res == AMF_OK) {
                            ctx->eof = 1; // drain started
                        }
                        AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Drain() failed with error %d\n", res);
                    }
                }
            }
        } else { // submit frame
            ret = amf_submit_frame_locked(avctx, frame, &surface);
            if (ret < 0) {
                av_frame_free(&frame);
                return ret;
            }
            pts = frame->pts;
        }
    }
    av_frame_free(&frame);

    do {
        block_and_wait = 0;
        // poll data
        res_query = amf_query_output(avctx, &buffer);
        if (buffer) {
            ret = amf_copy_buffer(avctx, avpkt, buffer);
            buffer->pVtbl->Release(buffer);
            AMF_RETURN_IF_FALSE(ctx, ret >= 0, ret, "amf_copy_buffer() failed with error %d\n", ret);

            if (ctx->delayed_drain) { // try to resubmit drain
                res = ctx->encoder->pVtbl->Drain(ctx->encoder);
                if (res != AMF_INPUT_FULL) {
                    ctx->delayed_drain = 0;
                    ctx->eof = 1; // drain started
                    AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Repeated Drain() failed with error %d\n", res);
                } else {
                    av_log(avctx, AV_LOG_WARNING, "Data acquired but delayed drain submission got AMF_INPUT_FULL - should not happen\n");
                }
            }
        } else if (ctx->delayed_drain || (ctx->eof && res_query != AMF_EOF) || (ctx->hwsurfaces_in_queue >= ctx->hwsurfaces_in_queue_max) || surface) {
            block_and_wait = 1;
            // Only sleep if the driver doesn't support waiting in QueryOutput()
            // or if we already have output data so we will skip calling it.
            if (!ctx->query_timeout_supported || avpkt->data || avpkt->buf) {
                av_usleep(1000);
            }
        }
    } while (block_and_wait);

    if (res_query == AMF_EOF) {
        ret = AVERROR_EOF;
    } else if (buffer == NULL) {
        ret = AVERROR(EAGAIN);
    } else {
        if (surface) {
            // resubmit surface
            do {
                res = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)surface);
                if (res != AMF_INPUT_FULL)
                    break;
                if (!ctx->query_timeout_supported)
                    av_usleep(1000);
                // Need to free up space in the encoder queue.
                // The number of retrieved outputs is currently limited to 21.
                amf_query_output(avctx, &buffer);
                if (buffer != NULL) {
                    ret = av_fifo_write(ctx->output_list, &buffer, 1);
                    if (ret < 0)
                        return ret;
                }
            } while (res == AMF_INPUT_FULL);

            surface->pVtbl->Release(surface);
            if (res == AMF_INPUT_FULL) {
                av_log(avctx, AV_LOG_WARNING, "Data acquired but delayed SubmitInput returned AMF_INPUT_FULL - should not happen\n");
            } else {
                AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SubmitInput() failed with error %d\n", res);

                ret = av_fifo_write(ctx->timestamp_list, &pts, 1);
                ctx->submitted_frame++;
                if (ret < 0)
                    return ret;
            }
        }
        ret = 0;
    }
    return ret;
}
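
// Map the avctx color range and colorspace to the corresponding
// AMF_VIDEO_CONVERTER_COLOR_PROFILE value.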
int ff_amf_get_color_profile(AVCodecContext *avctx)
{
    amf_int64 color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN;
    if (avctx->color_range == AVCOL_RANGE_JPEG) {
        /// Color Space for Full (JPEG) Range
        switch (avctx->colorspace) {
        case AVCOL_SPC_SMPTE170M:
            color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_601;
            break;
        case AVCOL_SPC_BT709:
            color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_709;
            break;
        case AVCOL_SPC_BT2020_NCL:
        case AVCOL_SPC_BT2020_CL:
            color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_2020;
            break;
        }
    } else {
        /// Color Space for Limited (MPEG) range
        switch (avctx->colorspace) {
        case AVCOL_SPC_SMPTE170M:
            color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_601;
            break;
        case AVCOL_SPC_BT709:
            color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_709;
            break;
        case AVCOL_SPC_BT2020_NCL:
        case AVCOL_SPC_BT2020_CL:
            color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_2020;
            break;
        }
    }
    return color_profile;
}

const AVCodecHWConfigInternal *const ff_amfenc_hw_configs[] = {
#if CONFIG_D3D11VA
    HW_CONFIG_ENCODER_FRAMES(D3D11, D3D11VA),
    HW_CONFIG_ENCODER_DEVICE(NONE,  D3D11VA),
#endif
#if CONFIG_DXVA2
    HW_CONFIG_ENCODER_FRAMES(DXVA2_VLD, DXVA2),
    HW_CONFIG_ENCODER_DEVICE(NONE,      DXVA2),
#endif
    HW_CONFIG_ENCODER_FRAMES(AMF_SURFACE, AMF),
    HW_CONFIG_ENCODER_DEVICE(NONE,        AMF),
    NULL,
};