/**************************************************************************/
/*  video_stream_webm.cpp                                                 */
/**************************************************************************/
/*                         This file is part of:                          */
/*                             GODOT ENGINE                               */
/*                        https://godotengine.org                         */
/**************************************************************************/
/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur.                  */
/*                                                                        */
/* Permission is hereby granted, free of charge, to any person obtaining  */
/* a copy of this software and associated documentation files (the        */
/* "Software"), to deal in the Software without restriction, including    */
/* without limitation the rights to use, copy, modify, merge, publish,    */
/* distribute, sublicense, and/or sell copies of the Software, and to     */
/* permit persons to whom the Software is furnished to do so, subject to  */
/* the following conditions:                                              */
/*                                                                        */
/* The above copyright notice and this permission notice shall be         */
/* included in all copies or substantial portions of the Software.        */
/*                                                                        */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,        */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF     */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY   */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,   */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE      */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.                 */
/**************************************************************************/

#include "video_stream_webm.h"

#include "core/os/file_access.h"
#include "core/os/os.h"
#include "core/project_settings.h"
#include "servers/audio_server.h"

#include "thirdparty/misc/yuv2rgb.h"

// libsimplewebm
#include <OpusVorbisDecoder.hpp>
#include <VPXDecoder.hpp>

// libvpx
#include <vpx/vpx_image.h>

// libwebm
#include <mkvparser/mkvparser.h>
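
// Thin adapter that exposes a Godot FileAccess handle through libwebm's
// mkvparser::IMkvReader interface, so the demuxer can read from any path
// Godot itself can open.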
class MkvReader : public mkvparser::IMkvReader {
public:
	MkvReader(const String &p_file) {
		file = FileAccess::open(p_file, FileAccess::READ);

		ERR_FAIL_COND_MSG(!file, "Failed loading resource: '" + p_file + "'.");
	}
	~MkvReader() {
		if (file) {
			memdelete(file);
		}
	}

	virtual int Read(long long pos, long len, unsigned char *buf) {
		if (file) {
			if (file->get_position() != (uint64_t)pos) {
				file->seek(pos);
			}
			if (file->get_buffer(buf, len) == (uint64_t)len) {
				return 0;
			}
		}
		return -1;
	}

	virtual int Length(long long *total, long long *available) {
		if (file) {
			const uint64_t len = file->get_len();
			if (total) {
				*total = len;
			}
			if (available) {
				*available = len;
			}
			return 0;
		}
		return -1;
	}

private:
	FileAccess *file;
};

/**/

VideoStreamPlaybackWebm::VideoStreamPlaybackWebm() :
		audio_track(0),
		webm(nullptr),
		video(nullptr),
		audio(nullptr),
		video_frames(nullptr),
		audio_frame(nullptr),
		video_frames_pos(0),
		video_frames_capacity(0),
		num_decoded_samples(0),
		samples_offset(-1),
		mix_callback(nullptr),
		mix_udata(nullptr),
		playing(false),
		paused(false),
		delay_compensation(0.0),
		time(0.0),
		video_frame_delay(0.0),
		video_pos(0.0),
		texture(memnew(ImageTexture)),
		pcm(nullptr) {}

VideoStreamPlaybackWebm::~VideoStreamPlaybackWebm() {
	delete_pointers();
}
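
// Opens the WebM container and builds the decoding chain: demuxer first,
// then the VP8/VP9 video decoder, then (optionally) the Opus/Vorbis audio
// decoder. A missing or unsupported audio track is non-fatal; a video
// decoder failure aborts the whole open.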
bool VideoStreamPlaybackWebm::open_file(const String &p_file) {
	file_name = p_file;
	webm = memnew(WebMDemuxer(new MkvReader(file_name), 0, audio_track));
	if (webm->isOpen()) {
		video = memnew(VPXDecoder(*webm, OS::get_singleton()->get_processor_count()));
		if (video->isOpen()) {
			audio = memnew(OpusVorbisDecoder(*webm));
			if (audio->isOpen()) {
				audio_frame = memnew(WebMFrame);
				pcm = (float *)memalloc(sizeof(float) * audio->getBufferSamples() * webm->getChannels());
			} else {
				memdelete(audio);
				audio = nullptr;
			}

			frame_data.resize((webm->getWidth() * webm->getHeight()) << 2);
			texture->create(webm->getWidth(), webm->getHeight(), Image::FORMAT_RGBA8, Texture::FLAG_FILTER | Texture::FLAG_VIDEO_SURFACE);

			return true;
		}
		memdelete(video);
		video = nullptr;
	}
	memdelete(webm);
	webm = nullptr;
	return false;
}
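
// Seeking is not implemented for WebM, so stop() "rewinds" by tearing down
// the demuxer/decoders and reopening the same file from the start.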
void VideoStreamPlaybackWebm::stop() {
	if (playing) {
		delete_pointers();

		pcm = nullptr;

		audio_frame = nullptr;
		video_frames = nullptr;

		video = nullptr;
		audio = nullptr;

		open_file(file_name); //Should not fail here...

		video_frames_capacity = video_frames_pos = 0;
		num_decoded_samples = 0;
		samples_offset = -1;
		video_frame_delay = video_pos = 0.0;
	}
	time = 0.0;
	playing = false;
}

void VideoStreamPlaybackWebm::play() {
	stop();

	delay_compensation = ProjectSettings::get_singleton()->get("audio/video_delay_compensation_ms");
	delay_compensation /= 1000.0;

	playing = true;
}

bool VideoStreamPlaybackWebm::is_playing() const {
	return playing;
}

void VideoStreamPlaybackWebm::set_paused(bool p_paused) {
	paused = p_paused;
}

bool VideoStreamPlaybackWebm::is_paused() const {
	return paused;
}

void VideoStreamPlaybackWebm::set_loop(bool p_enable) {
	//Empty
}

bool VideoStreamPlaybackWebm::has_loop() const {
	return false;
}

float VideoStreamPlaybackWebm::get_length() const {
	if (webm) {
		return webm->getLength();
	}
	return 0.0f;
}

float VideoStreamPlaybackWebm::get_playback_position() const {
	return video_pos;
}

void VideoStreamPlaybackWebm::seek(float p_time) {
	WARN_PRINT_ONCE("Seeking in Theora and WebM videos is not implemented yet (it's only supported for GDNative-provided video streams).");
}

void VideoStreamPlaybackWebm::set_audio_track(int p_idx) {
	audio_track = p_idx;
}

Ref<Texture> VideoStreamPlaybackWebm::get_texture() const {
	return texture;
}
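
// Called every frame by the VideoPlayer node. Advances the playback clock,
// mixes any PCM left over from the previous call, demuxes packets until
// enough video frames are buffered (or the audio buffer is full), then
// decodes and presents the oldest frame whose timestamp has come due.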
void VideoStreamPlaybackWebm::update(float p_delta) {
	if ((!playing || paused) || !video) {
		return;
	}

	time += p_delta;

	if (time < video_pos) {
		return;
	}

	bool audio_buffer_full = false;

	if (samples_offset > -1) {
		//Mix remaining samples
		const int to_read = num_decoded_samples - samples_offset;
		const int mixed = mix_callback(mix_udata, pcm + samples_offset * webm->getChannels(), to_read);
		if (mixed != to_read) {
			samples_offset += mixed;
			audio_buffer_full = true;
		} else {
			samples_offset = -1;
		}
	}

	const bool hasAudio = (audio && mix_callback);
	while ((hasAudio && !audio_buffer_full && !has_enough_video_frames()) ||
			(!hasAudio && video_frames_pos == 0)) {
		if (hasAudio && !audio_buffer_full && audio_frame->isValid() &&
				audio->getPCMF(*audio_frame, pcm, num_decoded_samples) && num_decoded_samples > 0) {
			const int mixed = mix_callback(mix_udata, pcm, num_decoded_samples);
			if (mixed != num_decoded_samples) {
				samples_offset = mixed;
				audio_buffer_full = true;
			}
		}

		WebMFrame *video_frame;
		if (video_frames_pos >= video_frames_capacity) {
			WebMFrame **video_frames_new = (WebMFrame **)memrealloc(video_frames, ++video_frames_capacity * sizeof(void *));
			ERR_FAIL_COND(!video_frames_new); //Out of memory
			(video_frames = video_frames_new)[video_frames_capacity - 1] = memnew(WebMFrame);
		}
		video_frame = video_frames[video_frames_pos];

		if (!webm->readFrame(video_frame, audio_frame)) { //This will invalidate frames
			break; //Can't demux, EOS?
		}

		if (video_frame->isValid()) {
			++video_frames_pos;
		}
	}
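
	// Decode queued frames oldest-first. Late frames are still pushed through
	// the decoder so its state keeps advancing, but only a frame approved by
	// should_process() gets converted to RGBA and uploaded to the texture.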
	bool video_frame_done = false;
	while (video_frames_pos > 0 && !video_frame_done) {
		WebMFrame *video_frame = video_frames[0];

		// It seems VPXDecoder::decode has to be executed even though we might skip this frame
		if (video->decode(*video_frame)) {
			VPXDecoder::IMAGE_ERROR err;
			VPXDecoder::Image image;

			if (should_process(*video_frame)) {
				if ((err = video->getImage(image)) != VPXDecoder::NO_FRAME) {
					if (err == VPXDecoder::NO_ERROR && image.w == webm->getWidth() && image.h == webm->getHeight()) {
						PoolVector<uint8_t>::Write w = frame_data.write();
						bool converted = false;

						if (image.chromaShiftW == 0 && image.chromaShiftH == 0 && image.cs == VPX_CS_SRGB) {
							uint8_t *wp = w.ptr();
							unsigned char *rRow = image.planes[2];
							unsigned char *gRow = image.planes[0];
							unsigned char *bRow = image.planes[1];
							for (int i = 0; i < image.h; i++) {
								for (int j = 0; j < image.w; j++) {
									*wp++ = rRow[j];
									*wp++ = gRow[j];
									*wp++ = bRow[j];
									*wp++ = 255;
								}
								rRow += image.linesize[2];
								gRow += image.linesize[0];
								bRow += image.linesize[1];
							}
							converted = true;
						} else if (image.chromaShiftW == 1 && image.chromaShiftH == 1) {
							yuv420_2_rgb8888(w.ptr(), image.planes[0], image.planes[1], image.planes[2], image.w, image.h, image.linesize[0], image.linesize[1], image.w << 2);
							//libyuv::I420ToARGB(image.planes[0], image.linesize[0], image.planes[2], image.linesize[2], image.planes[1], image.linesize[1], w.ptr(), image.w << 2, image.w, image.h);
							converted = true;
						} else if (image.chromaShiftW == 1 && image.chromaShiftH == 0) {
							yuv422_2_rgb8888(w.ptr(), image.planes[0], image.planes[1], image.planes[2], image.w, image.h, image.linesize[0], image.linesize[1], image.w << 2);
							//libyuv::I422ToARGB(image.planes[0], image.linesize[0], image.planes[2], image.linesize[2], image.planes[1], image.linesize[1], w.ptr(), image.w << 2, image.w, image.h);
							converted = true;
						} else if (image.chromaShiftW == 0 && image.chromaShiftH == 0) {
							yuv444_2_rgb8888(w.ptr(), image.planes[0], image.planes[1], image.planes[2], image.w, image.h, image.linesize[0], image.linesize[1], image.w << 2);
							//libyuv::I444ToARGB(image.planes[0], image.linesize[0], image.planes[2], image.linesize[2], image.planes[1], image.linesize[1], w.ptr(), image.w << 2, image.w, image.h);
							converted = true;
						} else if (image.chromaShiftW == 2 && image.chromaShiftH == 0) {
							//libyuv::I411ToARGB(image.planes[0], image.linesize[0], image.planes[2], image.linesize[2], image.planes[1], image.linesize[1], w.ptr(), image.w << 2, image.w, image.h);
							//converted = true;
						}

						if (converted) {
							Ref<Image> img = memnew(Image(image.w, image.h, 0, Image::FORMAT_RGBA8, frame_data));
							texture->set_data(img); //Zero copy send to visual server
							video_frame_done = true;
						}
					}
				}
			}
		}

		video_pos = video_frame->time;
		memmove(video_frames, video_frames + 1, (--video_frames_pos) * sizeof(void *));
		video_frames[video_frames_pos] = video_frame;
	}

	if (video_frames_pos == 0 && webm->isEOS()) {
		stop();
	}
}

void VideoStreamPlaybackWebm::set_mix_callback(VideoStreamPlayback::AudioMixCallback p_callback, void *p_userdata) {
	mix_callback = p_callback;
	mix_udata = p_userdata;
}

int VideoStreamPlaybackWebm::get_channels() const {
	if (audio) {
		return webm->getChannels();
	}
	return 0;
}

int VideoStreamPlaybackWebm::get_mix_rate() const {
	if (audio) {
		return webm->getSampleRate();
	}
	return 0;
}
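
// Buffering heuristic: demuxing can pause once the newest queued frame's
// timestamp is at least delay_compensation ahead of the playback clock.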
inline bool VideoStreamPlaybackWebm::has_enough_video_frames() const {
	if (video_frames_pos > 0) {
		// FIXME: AudioServer output latency was fixed in af9bb0e, previously it used to
		// systematically return 0. Now that it gives a proper latency, it broke this
		// code where the delay compensation likely never really worked.
		//const double audio_delay = AudioServer::get_singleton()->get_output_latency();
		const double video_time = video_frames[video_frames_pos - 1]->time;
		return video_time >= time + /* audio_delay + */ delay_compensation;
	}
	return false;
}

bool VideoStreamPlaybackWebm::should_process(WebMFrame &video_frame) {
	// FIXME: AudioServer output latency was fixed in af9bb0e, previously it used to
	// systematically return 0. Now that it gives a proper latency, it broke this
	// code where the delay compensation likely never really worked.
	//const double audio_delay = AudioServer::get_singleton()->get_output_latency();
	return video_frame.time >= time + /* audio_delay + */ delay_compensation;
}
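
// Frees every heap allocation owned by the playback object (PCM buffer,
// frame queue, decoders, demuxer). Callers that keep using the object
// afterwards, such as stop(), must reset the pointers themselves.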
void VideoStreamPlaybackWebm::delete_pointers() {
	if (pcm) {
		memfree(pcm);
	}

	if (audio_frame) {
		memdelete(audio_frame);
	}
	if (video_frames) {
		for (int i = 0; i < video_frames_capacity; ++i) {
			memdelete(video_frames[i]);
		}
		memfree(video_frames);
	}

	if (video) {
		memdelete(video);
	}
	if (audio) {
		memdelete(audio);
	}

	if (webm) {
		memdelete(webm);
	}
}

/**/
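
// VideoStreamWebm is the resource side: it only stores the file path and the
// selected audio track, and creates a fresh playback object on demand.
//
// Typical use from C++ (a sketch; in practice the stream is usually assigned
// in the editor or from script, and "res://movie.webm" is just an
// illustrative path):
//
//	Ref<VideoStreamWebm> stream;
//	stream.instance();
//	stream->set_file("res://movie.webm");
//	video_player->set_stream(stream); // The VideoPlayer node drives update() and audio mixing.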
VideoStreamWebm::VideoStreamWebm() :
		audio_track(0) {}

Ref<VideoStreamPlayback> VideoStreamWebm::instance_playback() {
	Ref<VideoStreamPlaybackWebm> pb = memnew(VideoStreamPlaybackWebm);
	pb->set_audio_track(audio_track);
	if (pb->open_file(file)) {
		return pb;
	}
	return nullptr;
}

void VideoStreamWebm::set_file(const String &p_file) {
	file = p_file;
}

String VideoStreamWebm::get_file() {
	return file;
}

void VideoStreamWebm::_bind_methods() {
	ClassDB::bind_method(D_METHOD("set_file", "file"), &VideoStreamWebm::set_file);
	ClassDB::bind_method(D_METHOD("get_file"), &VideoStreamWebm::get_file);

	ADD_PROPERTY(PropertyInfo(Variant::STRING, "file", PROPERTY_HINT_NONE, "", PROPERTY_USAGE_NOEDITOR | PROPERTY_USAGE_INTERNAL), "set_file", "get_file");
}

void VideoStreamWebm::set_audio_track(int p_track) {
	audio_track = p_track;
}

////////////
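
// The loader is intentionally lazy: it only verifies that the file can be
// opened and returns a VideoStreamWebm pointing at the path. Demuxing and
// decoding happen later, when a playback instance is created.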
RES ResourceFormatLoaderWebm::load(const String &p_path, const String &p_original_path, Error *r_error, bool p_no_subresource_cache) {
	FileAccess *f = FileAccess::open(p_path, FileAccess::READ);
	if (!f) {
		if (r_error) {
			*r_error = ERR_CANT_OPEN;
		}
		return RES();
	}

	VideoStreamWebm *stream = memnew(VideoStreamWebm);
	stream->set_file(p_path);

	Ref<VideoStreamWebm> webm_stream = Ref<VideoStreamWebm>(stream);

	if (r_error) {
		*r_error = OK;
	}

	f->close();
	memdelete(f);
	return webm_stream;
}

void ResourceFormatLoaderWebm::get_recognized_extensions(List<String> *p_extensions) const {
	p_extensions->push_back("webm");
}

bool ResourceFormatLoaderWebm::handles_type(const String &p_type) const {
	return ClassDB::is_parent_class(p_type, "VideoStream");
}

String ResourceFormatLoaderWebm::get_resource_type(const String &p_path) const {
	String el = p_path.get_extension().to_lower();
	if (el == "webm") {
		return "VideoStreamWebm";
	}
	return "";
}
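
// For the class and loader above to be usable, the module has to register
// them at startup. A minimal sketch of what the module's register_types.cpp
// typically contains (names and exact calls assumed, not taken from this
// file):
//
//	static Ref<ResourceFormatLoaderWebm> resource_loader_webm;
//
//	void register_webm_types() {
//		resource_loader_webm.instance();
//		ResourceLoader::add_resource_format_loader(resource_loader_webm, true);
//		ClassDB::register_class<VideoStreamWebm>();
//	}
//
//	void unregister_webm_types() {
//		ResourceLoader::remove_resource_format_loader(resource_loader_webm);
//		resource_loader_webm.unref();
//	}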