diff --git a/.codechecker.json b/.codechecker.json new file mode 100644 index 00000000..790b678d --- /dev/null +++ b/.codechecker.json @@ -0,0 +1,22 @@ +{ + "analyze": [ + "-d", + "clang-diagnostic-reserved-macro-identifier", + "-d", + "clang-diagnostic-reserved-identifier", + "-d", + "cert-err33-c", + "-d", + "clang-diagnostic-sign-compare", + "-d", + "clang-diagnostic-implicit-int-float-conversion", + "-d", + "clang-diagnostic-switch-enum", + "--analyzers", + "clangsa", + "clang-tidy", + "gcc", + "-i", + ".codechecker.skipfile" + ] +} \ No newline at end of file diff --git a/.codechecker.skipfile b/.codechecker.skipfile new file mode 100644 index 00000000..10051fa6 --- /dev/null +++ b/.codechecker.skipfile @@ -0,0 +1,2 @@ ++*/flutter-pi/src +-* diff --git a/.github/workflows/codeql-buildscript.sh b/.github/workflows/codeql-buildscript.sh old mode 100644 new mode 100755 index 2eff8eef..8c2c480d --- a/.github/workflows/codeql-buildscript.sh +++ b/.github/workflows/codeql-buildscript.sh @@ -1,6 +1,24 @@ #!/usr/bin/env bash -sudo apt install -y cmake libgl1-mesa-dev libgles2-mesa-dev libegl1-mesa-dev libdrm-dev libgbm-dev ttf-mscorefonts-installer fontconfig libsystemd-dev libinput-dev libudev-dev libxkbcommon-dev -mkdir build && cd build -cmake .. -make -j`nproc` +# gstreamer and libc++ want different versions of libunwind-dev. +# We explicitly install the version that gstreamer wants so +# we don't get install errors. + +sudo apt-get update +sudo apt-get install -y --no-install-recommends \ + git cmake pkg-config ninja-build clang clang-tools \ + libgl-dev libgles-dev libegl-dev libvulkan-dev libdrm-dev libgbm-dev libsystemd-dev libinput-dev libudev-dev libxkbcommon-dev \ + libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev \ + libunwind-dev + +$WRAPPER cmake \ + -S . -B build \ + -GNinja \ + -DCMAKE_BUILD_TYPE=Debug \ + -DBUILD_GSTREAMER_VIDEO_PLAYER_PLUGIN=ON \ + -DBUILD_GSTREAMER_AUDIO_PLAYER_PLUGIN=ON \ + -DENABLE_VULKAN=ON \ + -DENABLE_SESSION_SWITCHING=ON \ + -DCMAKE_EXPORT_COMPILE_COMMANDS=ON + +$WRAPPER cmake --build build diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml deleted file mode 100644 index f40e3ab8..00000000 --- a/.github/workflows/codeql.yml +++ /dev/null @@ -1,122 +0,0 @@ -# For most projects, this workflow file will not need changing; you simply need -# to commit it to your repository. -# -# You may wish to alter this file to override the set of languages analyzed, -# or to provide custom queries or build logic. -# -# ******** NOTE ******** -# We have attempted to detect the languages in your repository. Please check -# the `language` matrix defined below to confirm you have the correct set of -# supported CodeQL languages. -# -name: "CodeQL" - -on: - push: - branches: [ "main", "master" ] - schedule: - - cron: '0 0 * * *' - pull_request: - branches: '*' - -jobs: - analyze: - name: Analyze - # Runner size impacts CodeQL analysis time. To learn more, please see: - # - https://gh.io/recommended-hardware-resources-for-running-codeql - # - https://gh.io/supported-runners-and-hardware-resources - # - https://gh.io/using-larger-runners - # Consider using larger runners for possible analysis time improvements. - runs-on: 'ubuntu-24.04' - timeout-minutes: 360 - permissions: - actions: read - contents: read - security-events: write - - strategy: - fail-fast: false - matrix: - language: [ 'cpp' ] - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - submodules: recursive - - # Initializes the CodeQL tools for scanning. 
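The checker names disabled in .codechecker.json above are suppressions for patterns this codebase uses deliberately. As a rough illustration (hypothetical code, not taken from flutter-pi): cert-err33-c flags standard library calls whose return value is silently discarded, and clang-diagnostic-reserved-macro-identifier / clang-diagnostic-reserved-identifier flag identifiers that start with an underscore followed by a capital letter, which is exactly the shape of the project's include guards such as _FLUTTERPI_SRC_EGL_H further down in this diff.

```c
#include <stdio.h>

// Hypothetical header guard: an underscore followed by a capital letter is a
// reserved identifier, so the reserved-(macro-)identifier checks would flag
// it; flutter-pi's guards use this style, hence the suppression.
#ifndef _EXAMPLE_MODULE_H
#define _EXAMPLE_MODULE_H
#endif

int main(void) {
    // cert-err33-c tends to flag calls like these because their return
    // values (bytes written / error status) are ignored.
    printf("hello\n");
    fflush(stdout);
    return 0;
}
```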
- - name: Initialize CodeQL - uses: github/codeql-action/init@v3 - with: - languages: ${{ matrix.language }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. - - # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs - # queries: security-extended,security-and-quality - queries: security-and-quality - - - # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift). - # If this step fails, then you should remove it and run the build manually (see below) - #- name: Autobuild - # uses: github/codeql-action/autobuild@v2 - - # â„šī¸ Command-line programs to run using the OS shell. - # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun - - # If the Autobuild fails above, remove it and uncomment the following three lines. - # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. - - - run: | - ./.github/workflows/codeql-buildscript.sh - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 - with: - category: "/language:${{matrix.language}}" - upload: false - id: step1 - - # Filter out rules with low severity or high false positve rate - # Also filter out warnings in third-party code - - name: Filter out unwanted errors and warnings - uses: advanced-security/filter-sarif@v1 - with: - patterns: | - -**:cpp/path-injection - -**:cpp/world-writable-file-creation - -**:cpp/poorly-documented-function - -**:cpp/potentially-dangerous-function - -**:cpp/use-of-goto - -**:cpp/integer-multiplication-cast-to-long - -**:cpp/comparison-with-wider-type - -**:cpp/leap-year/* - -**:cpp/ambiguously-signed-bit-field - -**:cpp/suspicious-pointer-scaling - -**:cpp/suspicious-pointer-scaling-void - -**:cpp/unsigned-comparison-zero - -**/cmake*/Modules/** - input: ${{ steps.step1.outputs.sarif-output }}/cpp.sarif - output: ${{ steps.step1.outputs.sarif-output }}/cpp.sarif - - - name: Upload CodeQL results to code scanning - uses: github/codeql-action/upload-sarif@v3 - with: - sarif_file: ${{ steps.step1.outputs.sarif-output }} - category: "/language:${{matrix.language}}" - - - name: Upload CodeQL results as an artifact - if: success() || failure() - uses: actions/upload-artifact@v4 - with: - name: codeql-results - path: ${{ steps.step1.outputs.sarif-output }} - retention-days: 5 - - - name: Fail if an error is found - run: | - ./.github/workflows/fail_on_error.py \ - ${{ steps.step1.outputs.sarif-output }}/cpp.sarif diff --git a/.github/workflows/fail_on_error.py b/.github/workflows/fail_on_warning.py similarity index 68% rename from .github/workflows/fail_on_error.py rename to .github/workflows/fail_on_warning.py index 29791742..b6ce953e 100755 --- a/.github/workflows/fail_on_error.py +++ b/.github/workflows/fail_on_warning.py @@ -20,13 +20,18 @@ def codeql_sarif_contain_error(filename): rule_index = res['rule']['index'] else: continue + try: rule_level = rules_metadata[rule_index]['defaultConfiguration']['level'] - except IndexError as e: - print(e, rule_index, len(rules_metadata)) - else: - if rule_level == 'error': - return True + except LookupError: + # According to the SARIF schema 
(https://www.schemastore.org/schemas/json/sarif-2.1.0-rtm.6.json), + # the default level is "warning" if not specified. + rule_level = 'warning' + + if rule_level == 'error': + return True + elif rule_level == 'warning': + return True return False if __name__ == "__main__": diff --git a/.github/workflows/static-analysis.yml b/.github/workflows/static-analysis.yml new file mode 100644 index 00000000..70c90d9d --- /dev/null +++ b/.github/workflows/static-analysis.yml @@ -0,0 +1,58 @@ +name: "Static Analysis" + +on: + push: + branches: [ "main", "master" ] + schedule: + - cron: '0 0 * * *' + pull_request: + branches: '*' + +jobs: + codechecker: + name: CodeChecker + + # Use latest Ubuntu 24.04 for latest GCC. + # CodeChecker requires gcc >= 13.0.0. + # ubuntu-latest is ubuntu 22.04 (atm) + runs-on: ubuntu-24.04 + + permissions: + actions: read + contents: read + security-events: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Install Deps, Configure and Build + run: | + ./.github/workflows/codeql-buildscript.sh + + - name: Run CodeChecker + uses: ardera/CodeChecker-Action@master + id: codechecker + with: + ctu: true + logfile: ${{ github.workspace }}/build/compile_commands.json + config: ${{ github.workspace }}/.codechecker.json + + - uses: actions/upload-artifact@v4 + id: upload + with: + name: "CodeChecker Bug Reports" + path: ${{ steps.codechecker.outputs.result-html-dir }} + + - name: Fail on Warnings + if: ${{ steps.codechecker.outputs.warnings == 'true' }} + run: | + cat <<EOF >>$GITHUB_STEP_SUMMARY + ## ⚠️ CodeChecker found warnings + Please see the 'CodeChecker Bug Reports' artifact for more details: + - ${{ steps.upload.outputs.artifact-url }} + EOF + + exit 1 diff --git a/.gitignore b/.gitignore index c4a9d2c9..cac3106c 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ /.vscode /build /out +/.codechecker # CMake docs says it should not be checked in. CMakeUserPresets.json @@ -99,3 +100,6 @@ Icon Network Trash Folder Temporary Items .apdisk + +# Used by zed to store clangd cache +.cache diff --git a/CMakeLists.txt b/CMakeLists.txt index 734aba51..b4eba442 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -162,13 +162,14 @@ target_link_libraries(flutterpi_module PUBLIC ) target_include_directories(flutterpi_module PUBLIC - ${CMAKE_SOURCE_DIR}/third_party/flutter_embedder_header/include ${CMAKE_SOURCE_DIR}/src ${CMAKE_BINARY_DIR} + ${CMAKE_SOURCE_DIR}/third_party/mesa3d/include + ${CMAKE_SOURCE_DIR}/third_party/flutter_embedder_header/include ) target_compile_options(flutterpi_module PUBLIC - $<$<CONFIG:Debug>:-O0 -Wall -Wextra -Wno-sign-compare -Werror -ggdb -U_FORTIFY_SOURCE -DDEBUG> + $<$<CONFIG:Debug>:-O0 -Wall -Wextra -Wno-sign-compare -Wswitch-enum -Wformat -Wdouble-promotion -Wno-overlength-strings -Wno-gnu-zero-variadic-macro-arguments -pedantic -Werror -ggdb -U_FORTIFY_SOURCE -DDEBUG> $<$<CONFIG:RelWithDebInfo>:-O3 -Wall -Wextra -Wno-sign-compare -ggdb -DNDEBUG> $<$<CONFIG:Release>:-O3 -Wall -Wextra -Wno-sign-compare -DNDEBUG> ) @@ -237,6 +238,7 @@ if (ENABLE_VULKAN) target_sources(flutterpi_module PRIVATE src/vk_gbm_render_surface.c src/vk_renderer.c + src/vulkan.c ) target_link_libraries(flutterpi_module PUBLIC PkgConfig::VULKAN @@ -305,73 +307,65 @@ endif() if (BUILD_TEST_PLUGIN) target_sources(flutterpi_module PRIVATE src/plugins/testplugin.c) endif() -if (BUILD_GSTREAMER_VIDEO_PLAYER_PLUGIN) - if (NOT HAVE_EGL_GLES2) - message(NOTICE "EGL and OpenGL ES2 are required for gstreamer video player. 
Gstreamer video player plugin won't be build.") - else() + +set(HAVE_GSTREAMER_VIDEO_PLAYER OFF) +if (BUILD_GSTREAMER_VIDEO_PLAYER_PLUGIN OR BUILD_GSTREAMER_AUDIO_PLAYER_PLUGIN)# + pkg_check_modules(LIBGSTREAMER IMPORTED_TARGET + gstreamer-1.0 + gstreamer-plugins-base-1.0 + gstreamer-app-1.0 + gstreamer-allocators-1.0 + gstreamer-video-1.0 + gstreamer-audio-1.0 + ) + + if (LIBGSTREAMER_FOUND) + string(REPLACE "." ";" LIBGSTREAMER_VERSION_AS_LIST ${LIBGSTREAMER_gstreamer-1.0_VERSION}) + list(GET LIBGSTREAMER_VERSION_AS_LIST 0 LIBGSTREAMER_VERSION_MAJOR) + list(GET LIBGSTREAMER_VERSION_AS_LIST 1 LIBGSTREAMER_VERSION_MINOR) + list(GET LIBGSTREAMER_VERSION_AS_LIST 2 LIBGSTREAMER_VERSION_PATCH) + + target_sources(flutterpi_module PRIVATE src/plugins/gstplayer.c) + target_link_libraries(flutterpi_module PUBLIC PkgConfig::LIBGSTREAMER) + endif() + + if (BUILD_GSTREAMER_VIDEO_PLAYER_PLUGIN AND NOT LIBGSTREAMER_FOUND) if (TRY_BUILD_GSTREAMER_VIDEO_PLAYER_PLUGIN) - pkg_check_modules(LIBGSTREAMER IMPORTED_TARGET gstreamer-1.0) - pkg_check_modules(LIBGSTREAMER_PLUGINS_BASE IMPORTED_TARGET gstreamer-plugins-base-1.0) - pkg_check_modules(LIBGSTREAMER_APP IMPORTED_TARGET gstreamer-app-1.0) - pkg_check_modules(LIBGSTREAMER_ALLOCATORS IMPORTED_TARGET gstreamer-allocators-1.0) - pkg_check_modules(LIBGSTREAMER_VIDEO IMPORTED_TARGET gstreamer-video-1.0) + message(NOTICE "Some required gstreamer dependencies were not found. Gstreamer video player plugin won't be built.") else() - pkg_check_modules(LIBGSTREAMER REQUIRED IMPORTED_TARGET gstreamer-1.0) - pkg_check_modules(LIBGSTREAMER_PLUGINS_BASE REQUIRED IMPORTED_TARGET gstreamer-plugins-base-1.0) - pkg_check_modules(LIBGSTREAMER_APP REQUIRED IMPORTED_TARGET gstreamer-app-1.0) - pkg_check_modules(LIBGSTREAMER_ALLOCATORS REQUIRED IMPORTED_TARGET gstreamer-allocators-1.0) - pkg_check_modules(LIBGSTREAMER_VIDEO REQUIRED IMPORTED_TARGET gstreamer-video-1.0) + message(ERROR "Some required gstreamer dependencies were not found. Can't build gstreamer video player plugin.") endif() + endif() - if (LIBGSTREAMER_FOUND AND LIBGSTREAMER_PLUGINS_BASE_FOUND AND LIBGSTREAMER_APP_FOUND AND LIBGSTREAMER_ALLOCATORS_FOUND AND LIBGSTREAMER_VIDEO_FOUND) - # There's no other way to query the libinput version (in code) somehow. - # So we need to roll our own libinput version macro - string(REPLACE "." ";" LIBGSTREAMER_VERSION_AS_LIST ${LIBGSTREAMER_VERSION}) - list(GET LIBGSTREAMER_VERSION_AS_LIST 0 LIBGSTREAMER_VERSION_MAJOR) - list(GET LIBGSTREAMER_VERSION_AS_LIST 1 LIBGSTREAMER_VERSION_MINOR) - list(GET LIBGSTREAMER_VERSION_AS_LIST 2 LIBGSTREAMER_VERSION_PATCH) - - target_sources(flutterpi_module PRIVATE - src/plugins/gstreamer_video_player/plugin.c - src/plugins/gstreamer_video_player/player.c - src/plugins/gstreamer_video_player/frame.c - ) - target_link_libraries(flutterpi_module PUBLIC - PkgConfig::LIBGSTREAMER - PkgConfig::LIBGSTREAMER_PLUGINS_BASE - PkgConfig::LIBGSTREAMER_APP - PkgConfig::LIBGSTREAMER_ALLOCATORS - PkgConfig::LIBGSTREAMER_VIDEO - ) + if (BUILD_GSTREAMER_VIDEO_PLAYER_PLUGIN AND NOT HAVE_EGL_GLES2) + if (TRY_BUILD_GSTREAMER_VIDEO_PLAYER_PLUGIN) + message(NOTICE "EGL and OpenGL ES2 are required for gstreamer video player. Gstreamer video player plugin won't be built.") else() - message(NOTICE "Couldn't find gstreamer libraries. Gstreamer video player plugin won't be build.") + message(ERROR "EGL and OpenGL ES2 are required for gstreamer video player. 
Can't build gstreamer video player plugin.") endif() endif() -endif() -if (BUILD_GSTREAMER_AUDIO_PLAYER_PLUGIN) - if (TRY_BUILD_GSTREAMER_AUDIO_PLAYER_PLUGIN) - pkg_check_modules(LIBGSTREAMER IMPORTED_TARGET gstreamer-1.0) - pkg_check_modules(LIBGSTREAMER_APP IMPORTED_TARGET gstreamer-app-1.0) - pkg_check_modules(LIBGSTREAMER_AUDIO IMPORTED_TARGET gstreamer-audio-1.0) - else() - pkg_check_modules(LIBGSTREAMER REQUIRED IMPORTED_TARGET gstreamer-1.0) - pkg_check_modules(LIBGSTREAMER_APP REQUIRED IMPORTED_TARGET gstreamer-app-1.0) - pkg_check_modules(LIBGSTREAMER_AUDIO REQUIRED IMPORTED_TARGET gstreamer-audio-1.0) + if (BUILD_GSTREAMER_VIDEO_PLAYER_PLUGIN AND LIBGSTREAMER_FOUND AND HAVE_EGL_GLES2) + set(HAVE_GSTREAMER_VIDEO_PLAYER ON) + target_sources(flutterpi_module PRIVATE + src/plugins/gstreamer_video_player/frame.c + src/plugins/gstreamer_video_player/flutter_texture_sink.c + src/plugins/gstreamer_video_player/plugin.c + ) + endif() + + if (BUILD_GSTREAMER_VIDEO_PLAYER_PLUGIN AND NOT LIBGSTREAMER_FOUND) + if (TRY_BUILD_GSTREAMER_VIDEO_PLAYER_PLUGIN) + message(NOTICE "Some required gstreamer dependencies were not found. Gstreamer audio player plugin won't be built.") + else() + message(ERROR "Some required gstreamer dependencies were not found. Can't build gstreamer audio player plugin.") + endif() endif() - if (LIBGSTREAMER_FOUND AND LIBGSTREAMER_APP_FOUND AND LIBGSTREAMER_AUDIO_FOUND) + if (BUILD_GSTREAMER_AUDIO_PLAYER_PLUGIN AND LIBGSTREAMER_FOUND) target_sources(flutterpi_module PRIVATE src/plugins/audioplayers/plugin.c - src/plugins/audioplayers/player.c ) - target_link_libraries(flutterpi_module PUBLIC - PkgConfig::LIBGSTREAMER - PkgConfig::LIBGSTREAMER_APP - PkgConfig::LIBGSTREAMER_AUDIO - ) - else() - message(NOTICE "Couldn't find gstreamer libraries. 
Gstreamer audio player plugin won't be build.") endif() endif() @@ -390,10 +384,10 @@ if (BUILD_SENTRY_PLUGIN) if (SENTRY_BACKEND STREQUAL "crashpad" AND SENTRY_PLUGIN_BUNDLE_CRASHPAD_HANDLER) set(HAVE_BUNDLED_CRASHPAD_HANDLER ON) - + target_sources(flutter-pi PRIVATE src/crashpad_handler_trampoline.cc) # link against the same libraries the crashpad_handler uses - + get_target_property(handler_deps crashpad_handler INTERFACE_LINK_LIBRARIES) target_link_libraries(flutter-pi PUBLIC ${handler_deps}) endif() diff --git a/CMakePresets.json b/CMakePresets.json index 866cc51f..31106e29 100644 --- a/CMakePresets.json +++ b/CMakePresets.json @@ -7,15 +7,58 @@ "description": "Sets Ninja generator, build and install directory", "generator": "Ninja", "binaryDir": "${sourceDir}/out/build/${presetName}", + "hidden": true, "cacheVariables": { "CMAKE_BUILD_TYPE": "Debug", "CMAKE_INSTALL_PREFIX": "${sourceDir}/out/install/${presetName}", + "TRY_ENABLE_OPENGL": false, "ENABLE_OPENGL": true, + "TRY_ENABLE_VULKAN": false, + "ENABLE_VULKAN": true, + "BUILD_GSTREAMER_VIDEO_PLAYER_PLUGIN": true, + "TRY_BUILD_GSTREAMER_VIDEO_PLAYER_PLUGIN": false, "BUILD_GSTREAMER_AUDIO_PLAYER_PLUGIN": true, + "TRY_BUILD_GSTREAMER_AUDIO_PLAYER_PLUGIN": false, "BUILD_SENTRY_PLUGIN": true, "ENABLE_TESTS": true } }, + { + "name": "default-clang", + "displayName": "Default OpenGL host build (clang)", + "description": "Sets Ninja generator, build and install directory", + "generator": "Ninja", + "binaryDir": "${sourceDir}/out/build/${presetName}", + "inherits": "default", + "cacheVariables": { + "CMAKE_C_COMPILER": "clang", + "CMAKE_CXX_COMPILER": "clang++" + } + }, + { + "name": "default-clang-20", + "displayName": "Default OpenGL host build (clang-20)", + "description": "Sets Ninja generator, build and install directory", + "generator": "Ninja", + "binaryDir": "${sourceDir}/out/build/${presetName}", + "inherits": "default", + "cacheVariables": { + "CMAKE_C_COMPILER": "clang-20", + "CMAKE_CXX_COMPILER": "clang++-20" + } + }, + { + "name": "default-gcc", + "displayName": "Default OpenGL host build (gcc)", + "description": "Sets Ninja generator, build and install directory", + "generator": "Ninja", + "binaryDir": "${sourceDir}/out/build/${presetName}", + "inherits": "default", + "cacheVariables": { + "CMAKE_C_COMPILER": "gcc", + "CMAKE_CXX_COMPILER": "g++" + } + }, { "name": "cross-aarch64-default", "displayName": "OpenGL AArch64 cross-build", @@ -41,4 +84,4 @@ } } ] -} +} \ No newline at end of file diff --git a/config.h.in b/config.h.in index 8a8d8a88..fe28bd89 100644 --- a/config.h.in +++ b/config.h.in @@ -26,5 +26,6 @@ #cmakedefine ENABLE_MTRACE #cmakedefine ENABLE_ASAN #cmakedefine HAVE_BUNDLED_CRASHPAD_HANDLER +#cmakedefine HAVE_GSTREAMER_VIDEO_PLAYER #endif diff --git a/src/compositor_ng.c b/src/compositor_ng.c index d6f6718b..dcbf6b9c 100644 --- a/src/compositor_ng.c +++ b/src/compositor_ng.c @@ -20,6 +20,7 @@ #include #include +#include #include #include "cursor.h" @@ -33,7 +34,6 @@ #include "surface.h" #include "tracer.h" #include "util/collection.h" -#include "util/dynarray.h" #include "util/logging.h" #include "util/refcounting.h" #include "window.h" @@ -231,7 +231,7 @@ static void fill_platform_view_layer_props( size_t n_mutations, const struct mat3f *display_to_view_transform, const struct mat3f *view_to_display_transform, - double device_pixel_ratio + float device_pixel_ratio ) { (void) view_to_display_transform; @@ -262,8 +262,8 @@ static void fill_platform_view_layer_props( * ``` */ - rect.size.x /= device_pixel_ratio; 
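The (double) casts introduced just below follow from -Wdouble-promotion being added to target_compile_options earlier in this diff: with device_pixel_ratio now a float, dividing a double by it promotes the float implicitly, which that warning (together with -Werror) turns into a build failure. A minimal sketch with hypothetical names:

```c
#include <stdio.h>

// Hypothetical reduction of the fill_platform_view_layer_props() change:
// dividing a double by a float implicitly promotes the float to double,
// which -Wdouble-promotion reports; the explicit cast states the intent
// and silences the warning without changing the result.
static void scale_by_pixel_ratio(double *w, double *h, float device_pixel_ratio) {
    *w /= (double) device_pixel_ratio;  // without the cast: -Wdouble-promotion
    *h /= (double) device_pixel_ratio;
}

int main(void) {
    double w = 1920.0, h = 1080.0;
    scale_by_pixel_ratio(&w, &h, 2.0f);
    printf("%.1f x %.1f\n", w, h);  // 960.0 x 540.0
    return 0;
}
```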
- rect.size.y /= device_pixel_ratio; + rect.size.x /= (double) device_pixel_ratio; + rect.size.y /= (double) device_pixel_ratio; // okay, now we have the params.finalBoundingRect().x() in aa_back_transformed.x and // params.finalBoundingRect().y() in aa_back_transformed.y. @@ -348,8 +348,9 @@ static int compositor_push_fl_layers(struct compositor *compositor, size_t n_fl_ /// TODO: Implement layer->surface = compositor_get_view_by_id_locked(compositor, fl_layer->platform_view->identifier); if (layer->surface == NULL) { - layer->surface = - CAST_SURFACE(dummy_render_surface_new(compositor->tracer, VEC2I(fl_layer->size.width, fl_layer->size.height))); + layer->surface = CAST_SURFACE( + dummy_render_surface_new(compositor->tracer, VEC2I((int) fl_layer->size.width, (int) fl_layer->size.height)) + ); } #else // in release mode, we just assume the id is valid. @@ -384,10 +385,6 @@ static int compositor_push_fl_layers(struct compositor *compositor, size_t n_fl_ fl_layer_composition_unref(composition); - return 0; - - //fail_free_composition: - //fl_layer_composition_unref(composition); return ok; } @@ -556,14 +553,14 @@ void compositor_set_cursor( struct view_geometry viewgeo = window_get_view_geometry(compositor->main_window); - if (compositor->cursor_pos.x < 0.0f) { - compositor->cursor_pos.x = 0.0f; + if (compositor->cursor_pos.x < 0.0) { + compositor->cursor_pos.x = 0.0; } else if (compositor->cursor_pos.x > viewgeo.view_size.x) { compositor->cursor_pos.x = viewgeo.view_size.x; } - if (compositor->cursor_pos.y < 0.0f) { - compositor->cursor_pos.y = 0.0f; + if (compositor->cursor_pos.y < 0.0) { + compositor->cursor_pos.y = 0.0; } else if (compositor->cursor_pos.y > viewgeo.view_size.y) { compositor->cursor_pos.y = viewgeo.view_size.y; } diff --git a/src/cursor.c b/src/cursor.c index 823d5346..94cc56f5 100644 --- a/src/cursor.c +++ b/src/cursor.c @@ -7,12 +7,12 @@ #include "util/collection.h" #include "util/geometry.h" -#define PIXEL_RATIO_LDPI 1.25 -#define PIXEL_RATIO_MDPI 1.6666 -#define PIXEL_RATIO_HDPI 2.5 -#define PIXEL_RATIO_XHDPI 3.3333 -#define PIXEL_RATIO_XXHDPI 5 -#define PIXEL_RATIO_XXXHDPI 6.6666 +#define PIXEL_RATIO_LDPI 1.25f +#define PIXEL_RATIO_MDPI 1.6666f +#define PIXEL_RATIO_HDPI 2.5f +#define PIXEL_RATIO_XHDPI 3.3333f +#define PIXEL_RATIO_XXHDPI 5.0f +#define PIXEL_RATIO_XXXHDPI 6.6666f struct pointer_icon { enum pointer_kind kind; @@ -1450,7 +1450,7 @@ static void run_length_decode(void *image_buf, const void *rle_data, size_t size } } -const struct pointer_icon *pointer_icon_for_details(enum pointer_kind kind, double pixel_ratio) { +const struct pointer_icon *pointer_icon_for_details(enum pointer_kind kind, float pixel_ratio) { const struct pointer_icon *best; best = NULL; @@ -1461,7 +1461,7 @@ const struct pointer_icon *pointer_icon_for_details(enum pointer_kind kind, doub if (best == NULL) { best = icon; continue; - } else if (fabs(pixel_ratio - icon->pixel_ratio) < fabs(pixel_ratio - best->pixel_ratio)) { + } else if (fabsf(pixel_ratio - icon->pixel_ratio) < fabsf(pixel_ratio - best->pixel_ratio)) { best = icon; continue; } diff --git a/src/cursor.h b/src/cursor.h index 270404e7..049b9fb4 100644 --- a/src/cursor.h +++ b/src/cursor.h @@ -56,7 +56,7 @@ enum pointer_kind { struct pointer_icon; -const struct pointer_icon *pointer_icon_for_details(enum pointer_kind kind, double pixel_ratio); +const struct pointer_icon *pointer_icon_for_details(enum pointer_kind kind, float pixel_ratio); enum pointer_kind pointer_icon_get_kind(const struct pointer_icon *icon); diff --git 
a/src/dmabuf_surface.c b/src/dmabuf_surface.c index e8e3f310..fa4990a8 100644 --- a/src/dmabuf_surface.c +++ b/src/dmabuf_surface.c @@ -68,7 +68,7 @@ void refcounted_dmabuf_destroy(struct refcounted_dmabuf *dmabuf) { free(dmabuf); } -DEFINE_STATIC_REF_OPS(refcounted_dmabuf, n_refs); +DEFINE_STATIC_REF_OPS(refcounted_dmabuf, n_refs) struct dmabuf_surface { struct surface surface; @@ -298,10 +298,10 @@ static int dmabuf_surface_present_kms(struct surface *_s, const struct fl_layer_ .src_w = DOUBLE_TO_FP1616_ROUNDED(s->next_buf->buf.width), .src_h = DOUBLE_TO_FP1616_ROUNDED(s->next_buf->buf.height), - .dst_x = props->aa_rect.offset.x, - .dst_y = props->aa_rect.offset.y, - .dst_w = props->aa_rect.size.x, - .dst_h = props->aa_rect.size.y, + .dst_x = (int) round(props->aa_rect.offset.x), + .dst_y = (int) round(props->aa_rect.offset.y), + .dst_w = (int) round(props->aa_rect.size.x), + .dst_h = (int) round(props->aa_rect.size.y), .has_rotation = false, .rotation = PLANE_TRANSFORM_ROTATE_0, diff --git a/src/egl.h b/src/egl.h index 9a9a936c..ca7b9efe 100644 --- a/src/egl.h +++ b/src/egl.h @@ -475,7 +475,8 @@ static inline const char *egl_strerror(EGLenum result) { } } - #define LOG_EGL_ERROR(result, fmt, ...) LOG_ERROR(fmt ": %s\n", __VA_ARGS__ egl_strerror(result)) + #define LOG_EGL_ERROR_FMT(result, fmt, ...) LOG_ERROR(fmt ": %s\n", __VA_ARGS__ egl_strerror(result)) + #define LOG_EGL_ERROR(result, fmt) LOG_ERROR(fmt ": %s\n", egl_strerror(result)) #endif #endif // _FLUTTERPI_SRC_EGL_H diff --git a/src/egl_gbm_render_surface.c b/src/egl_gbm_render_surface.c index 30484b45..74b0ae01 100644 --- a/src/egl_gbm_render_surface.c +++ b/src/egl_gbm_render_surface.c @@ -146,6 +146,7 @@ static int egl_gbm_render_surface_init( } #endif + int with_modifiers_errno = 0; gbm_surface = NULL; if (allowed_modifiers != NULL) { gbm_surface = gbm_surface_create_with_modifiers( @@ -157,11 +158,10 @@ static int egl_gbm_render_surface_init( n_allowed_modifiers ); if (gbm_surface == NULL) { - ok = errno; - LOG_ERROR("Couldn't create GBM surface for rendering. gbm_surface_create_with_modifiers: %s\n", strerror(ok)); - LOG_ERROR("Will retry without modifiers\n"); + with_modifiers_errno = errno; } } + if (gbm_surface == NULL) { gbm_surface = gbm_surface_create( gbm_device, @@ -172,8 +172,20 @@ static int egl_gbm_render_surface_init( ); if (gbm_surface == NULL) { ok = errno; - LOG_ERROR("Couldn't create GBM surface for rendering. gbm_surface_create: %s\n", strerror(ok)); - return ok; + + if (allowed_modifiers != NULL) { + LOG_ERROR( + "Couldn't create GBM surface for rendering. gbm_surface_create_with_modifiers: %s, gbm_surface_create: %s\n", + strerror(with_modifiers_errno), + strerror(ok) + ); + } else { + LOG_ERROR("Couldn't create GBM surface for rendering. gbm_surface_create: %s\n", strerror(ok)); + } + + // Return an error != 0 in any case, so the caller doesn't think + // that the surface was created successfully. + return ok ? 
ok : EIO; } } @@ -383,10 +395,8 @@ static void on_release_layer(void *userdata) { static int egl_gbm_render_surface_present_kms(struct surface *s, const struct fl_layer_props *props, struct kms_req_builder *builder) { struct egl_gbm_render_surface *egl_surface; struct gbm_bo_meta *meta; - struct drmdev *drmdev; struct gbm_bo *bo; enum pixfmt pixel_format; - uint32_t fb_id, opaque_fb_id; int ok; egl_surface = CAST_THIS(s); @@ -410,16 +420,18 @@ static int egl_gbm_render_surface_present_kms(struct surface *s, const struct fl goto fail_unlock; } - drmdev = kms_req_builder_get_drmdev(builder); - ASSERT_NOT_NULL(drmdev); + meta->drmdev = kms_req_builder_get_drmdev(builder); + ASSERT_NOT_NULL(meta->drmdev); + + drmdev_ref(meta->drmdev); struct drm_crtc *crtc = kms_req_builder_get_crtc(builder); ASSERT_NOT_NULL(crtc); - if (drm_crtc_any_plane_supports_format(drmdev, crtc, egl_surface->pixel_format)) { + if (drm_crtc_any_plane_supports_format(meta->drmdev, crtc, egl_surface->pixel_format)) { TRACER_BEGIN(egl_surface->surface.tracer, "drmdev_add_fb (non-opaque)"); - fb_id = drmdev_add_fb_from_gbm_bo( - drmdev, + uint32_t fb_id = drmdev_add_fb_from_gbm_bo( + meta->drmdev, bo, /* cast_opaque */ false ); @@ -428,7 +440,7 @@ static int egl_gbm_render_surface_present_kms(struct surface *s, const struct fl if (fb_id == 0) { ok = EIO; LOG_ERROR("Couldn't add GBM buffer as DRM framebuffer.\n"); - goto fail_free_meta; + goto fail_unref_drmdev; } meta->has_nonopaque_fb_id = true; @@ -441,16 +453,16 @@ static int egl_gbm_render_surface_present_kms(struct surface *s, const struct fl // if this EGL surface is non-opaque and has an opaque equivalent if (!get_pixfmt_info(egl_surface->pixel_format)->is_opaque && pixfmt_opaque(egl_surface->pixel_format) != egl_surface->pixel_format && - drm_crtc_any_plane_supports_format(drmdev, crtc, pixfmt_opaque(egl_surface->pixel_format))) { - opaque_fb_id = drmdev_add_fb_from_gbm_bo( - drmdev, + drm_crtc_any_plane_supports_format(meta->drmdev, crtc, pixfmt_opaque(egl_surface->pixel_format))) { + uint32_t opaque_fb_id = drmdev_add_fb_from_gbm_bo( + meta->drmdev, bo, /* cast_opaque */ true ); if (opaque_fb_id == 0) { ok = EIO; LOG_ERROR("Couldn't add GBM buffer as opaque DRM framebuffer.\n"); - goto fail_remove_fb; + goto fail_rm_nonopaque_fb; } meta->has_opaque_fb_id = true; @@ -463,11 +475,9 @@ static int egl_gbm_render_surface_present_kms(struct surface *s, const struct fl if (!meta->has_nonopaque_fb_id && !meta->has_opaque_fb_id) { ok = EIO; LOG_ERROR("Couldn't add GBM buffer as DRM framebuffer.\n"); - goto fail_free_meta; + goto fail_remove_opaque_fb; } - meta->drmdev = drmdev_ref(drmdev); - meta->nonopaque_fb_id = fb_id; gbm_bo_set_user_data(bo, meta, on_destroy_gbm_bo_meta); } else { // We can only add this GBM BO to a single KMS device as an fb right now. @@ -493,6 +503,8 @@ static int egl_gbm_render_surface_present_kms(struct surface *s, const struct fl ); */ + uint32_t fb_id; + // So we just cast our fb to an XRGB8888 framebuffer and scanout that instead. 
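The renamed error labels in this function (fail_rm_nonopaque_fb, fail_unref_drmdev, and so on) follow the usual C cleanup-ladder pattern: each label undoes only what was successfully acquired before the jump, in reverse order. A self-contained sketch of that pattern, with hypothetical resources standing in for the framebuffers and the drmdev reference:

```c
#include <errno.h>
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>

struct resource { int id; };

static struct resource *acquire(int id, bool fail) {
    if (fail) return NULL;
    struct resource *r = malloc(sizeof *r);
    if (r != NULL) r->id = id;
    return r;
}

static void release(struct resource *r) {
    printf("releasing %d\n", r->id);
    free(r);
}

// Cleanup ladder: on failure, fall through the labels in reverse
// acquisition order so nothing acquired earlier is leaked.
static int do_work(bool fail_third) {
    struct resource *a, *b, *c;
    int ok;

    a = acquire(1, false);
    if (a == NULL) return ENOMEM;

    b = acquire(2, false);
    if (b == NULL) { ok = EIO; goto fail_release_a; }

    c = acquire(3, fail_third);
    if (c == NULL) { ok = EIO; goto fail_release_b; }

    release(c);
    release(b);
    release(a);
    return 0;

fail_release_b:
    release(b);
fail_release_a:
    release(a);
    return ok;
}

int main(void) {
    return do_work(true);  // fails on the third resource, unwinds b then a
}
```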
if (meta->has_nonopaque_fb_id && !meta->has_opaque_fb_id) { fb_id = meta->nonopaque_fb_id; @@ -555,10 +567,18 @@ static int egl_gbm_render_surface_present_kms(struct surface *s, const struct fl locked_fb_unref(egl_surface->locked_front_fb); goto fail_unlock; -fail_remove_fb: - drmdev_rm_fb(drmdev, fb_id); +fail_remove_opaque_fb: + if (meta->has_opaque_fb_id) { + drmdev_rm_fb(meta->drmdev, meta->opaque_fb_id); + } + +fail_rm_nonopaque_fb: + if (meta->has_nonopaque_fb_id) { + drmdev_rm_fb(meta->drmdev, meta->nonopaque_fb_id); + } -fail_free_meta: +fail_unref_drmdev: + drmdev_unref(meta->drmdev); free(meta); fail_unlock: @@ -647,10 +667,10 @@ static int egl_gbm_render_surface_queue_present(struct render_surface *s, const LOG_DEBUG( "using fourcc %c%c%c%c (%s) with modifier 0x%" PRIx64 "\n", - fourcc & 0xFF, - (fourcc >> 8) & 0xFF, - (fourcc >> 16) & 0xFF, - (fourcc >> 24) & 0xFF, + (char) (fourcc & 0xFF), + (char) ((fourcc >> 8) & 0xFF), + (char) ((fourcc >> 16) & 0xFF), + (char) ((fourcc >> 24) & 0xFF), has_format ? get_pixfmt_info(format)->name : "?", modifier ); diff --git a/src/filesystem_layout.c b/src/filesystem_layout.c index 39924c30..4f73cb89 100644 --- a/src/filesystem_layout.c +++ b/src/filesystem_layout.c @@ -135,16 +135,14 @@ static struct flutter_paths *resolve( // We still haven't found it. Fail because we need it to run flutter. if (path_exists(icudtl_path) == false) { LOG_DEBUG("icudtl file not found at %s.\n", icudtl_path); - free(icudtl_path); - LOG_ERROR("icudtl file not found!\n"); - goto fail_free_asset_bundle_path; + goto fail_free_icudtl_path; } // Find the kernel_blob.bin file. Only necessary for JIT (debug) mode. ok = asprintf(&kernel_blob_path, "%s/%s", app_bundle_path_real, kernel_blob_subpath); if (ok == -1) { - goto fail_free_asset_bundle_path; + goto fail_free_icudtl_path; } if (FLUTTER_RUNTIME_MODE_IS_JIT(runtime_mode) && !path_exists(kernel_blob_path)) { @@ -222,6 +220,9 @@ static struct flutter_paths *resolve( fail_free_kernel_blob_path: free(kernel_blob_path); +fail_free_icudtl_path: + free(icudtl_path); + fail_free_asset_bundle_path: free(asset_bundle_path); diff --git a/src/flutter-pi.c b/src/flutter-pi.c index 10219914..afbafb6e 100644 --- a/src/flutter-pi.c +++ b/src/flutter-pi.c @@ -394,7 +394,8 @@ static void *proc_resolver(void *userdata, const char *name) { flutterpi = userdata; ASSERT_NOT_NULL(flutterpi->gl_renderer); - return gl_renderer_get_proc_address(flutterpi->gl_renderer, name); + fn_ptr_t fn = gl_renderer_get_proc_address(flutterpi->gl_renderer, name); + return *((void **) &fn); } #endif @@ -408,7 +409,8 @@ UNUSED static void *on_get_vulkan_proc_address(void *userdata, FlutterVulkanInst name = "vkGetInstanceProcAddr"; } - return (void *) vkGetInstanceProcAddr((VkInstance) instance, name); + PFN_vkVoidFunction fn = vkGetInstanceProcAddr((VkInstance) instance, name); + return *(void **) (&fn); #else (void) userdata; (void) instance; @@ -546,7 +548,7 @@ UNUSED static void on_frame_request(void *userdata, intptr_t baton) { req->flutterpi = flutterpi; req->baton = baton; req->vblank_ns = get_monotonic_time(); - req->next_vblank_ns = req->vblank_ns + (1000000000.0 / compositor_get_refresh_rate(flutterpi->compositor)); + req->next_vblank_ns = req->vblank_ns + (uint64_t) (1000000000.0 / compositor_get_refresh_rate(flutterpi->compositor)); if (flutterpi_runs_platform_tasks_on_current_thread(req->flutterpi)) { TRACER_INSTANT(req->flutterpi->tracer, "FlutterEngineOnVsync"); @@ -768,7 +770,7 @@ static int on_execute_flutter_task(void *userdata) { result 
= flutterpi->flutter.procs.RunTask(flutterpi->flutter.engine, task); if (result != kSuccess) { - LOG_ERROR("Error running platform task. FlutterEngineRunTask: %d\n", result); + LOG_ERROR("Error running platform task. FlutterEngineRunTask: %u\n", result); free(task); return EINVAL; } @@ -1039,8 +1041,13 @@ struct gl_renderer *flutterpi_get_gl_renderer(struct flutterpi *flutterpi) { return flutterpi->gl_renderer; } +struct tracer *flutterpi_get_tracer(struct flutterpi *flutterpi) { + ASSERT_NOT_NULL(flutterpi); + return flutterpi->tracer; +} + void flutterpi_set_pointer_kind(struct flutterpi *flutterpi, enum pointer_kind kind) { - return compositor_set_cursor(flutterpi->compositor, false, false, true, kind, false, VEC2F(0, 0)); + compositor_set_cursor(flutterpi->compositor, false, false, true, kind, false, VEC2F(0, 0)); } void flutterpi_trace_event_instant(struct flutterpi *flutterpi, const char *name) { @@ -1166,19 +1173,20 @@ static void unload_flutter_engine_lib(void *handle) { dlclose(handle); } -static int get_flutter_engine_procs(void *engine_handle, FlutterEngineProcTable *procs_out) { - // clang-format off - FlutterEngineResult (*get_proc_addresses)(FlutterEngineProcTable *table); - // clang-format on +typedef FlutterEngineResult (*flutter_engine_get_proc_addresses_t)(FlutterEngineProcTable *table); +static int get_flutter_engine_procs(void *engine_handle, FlutterEngineProcTable *procs_out) { FlutterEngineResult engine_result; - get_proc_addresses = dlsym(engine_handle, "FlutterEngineGetProcAddresses"); - if (get_proc_addresses == NULL) { + void *fn = dlsym(engine_handle, "FlutterEngineGetProcAddresses"); + if (fn == NULL) { LOG_ERROR("Could not resolve flutter engine function FlutterEngineGetProcAddresses.\n"); return EINVAL; } + flutter_engine_get_proc_addresses_t get_proc_addresses; + *((void **) &get_proc_addresses) = fn; + procs_out->struct_size = sizeof(FlutterEngineProcTable); engine_result = get_proc_addresses(procs_out); if (engine_result != kSuccess) { @@ -1451,9 +1459,9 @@ static int flutterpi_run(struct flutterpi *flutterpi) { memset(&window_metrics_event, 0, sizeof(window_metrics_event)); window_metrics_event.struct_size = sizeof(FlutterWindowMetricsEvent); - window_metrics_event.width = geometry.view_size.x; - window_metrics_event.height = geometry.view_size.y; - window_metrics_event.pixel_ratio = geometry.device_pixel_ratio; + window_metrics_event.width = (size_t) geometry.view_size.x; + window_metrics_event.height = (size_t) geometry.view_size.y; + window_metrics_event.pixel_ratio = (double) geometry.device_pixel_ratio; window_metrics_event.left = 0; window_metrics_event.top = 0; window_metrics_event.physical_view_inset_top = 0; @@ -1920,7 +1928,7 @@ bool flutterpi_parse_cmdline_args(int argc, char **argv, struct flutterpi_cmdlin "%s", usage ); - return false; + goto fail; } break; @@ -1934,7 +1942,7 @@ bool flutterpi_parse_cmdline_args(int argc, char **argv, struct flutterpi_cmdlin "%s", usage ); - return false; + goto fail; } result_out->rotation = rotation; @@ -1945,13 +1953,13 @@ bool flutterpi_parse_cmdline_args(int argc, char **argv, struct flutterpi_cmdlin ok = parse_vec2i(optarg, &result_out->physical_dimensions); if (!ok) { LOG_ERROR("ERROR: Invalid argument for --dimensions passed.\n"); - return false; + goto fail; } if (result_out->physical_dimensions.x < 0 || result_out->physical_dimensions.y < 0) { LOG_ERROR("ERROR: Invalid argument for --dimensions passed.\n"); result_out->physical_dimensions = VEC2I(0, 0); - return false; + goto fail; } 
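The --dimensions handling here validates the parsed vector before committing it to result_out, and now routes every error through the shared fail: label added at the end of the function. A hypothetical stand-in for parse_vec2i() (the exact accepted format is an assumption) showing the parse-validate-commit flow:

```c
#include <stdbool.h>
#include <stdio.h>

struct vec2i { int x, y; };

// Hypothetical parser: accept "WIDTH,HEIGHT" and reject trailing garbage.
static bool parse_vec2i_example(const char *arg, struct vec2i *out) {
    int x, y;
    char trailing;

    if (sscanf(arg, "%d,%d%c", &x, &y, &trailing) != 2) {
        return false;
    }

    out->x = x;
    out->y = y;
    return true;
}

int main(void) {
    struct vec2i dims;

    // Parse, then validate, then commit -- mirroring how the option handler
    // only sets has_physical_dimensions after both steps succeed.
    if (!parse_vec2i_example("155,86", &dims) || dims.x < 0 || dims.y < 0) {
        fprintf(stderr, "ERROR: Invalid argument for --dimensions passed.\n");
        return 1;
    }

    printf("parsed %d x %d\n", dims.x, dims.y);
    return 0;
}
```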
result_out->has_physical_dimensions = true; @@ -1974,7 +1982,7 @@ bool flutterpi_parse_cmdline_args(int argc, char **argv, struct flutterpi_cmdlin "%s", usage ); - return false; + goto fail; valid_format: break; @@ -1982,7 +1990,11 @@ bool flutterpi_parse_cmdline_args(int argc, char **argv, struct flutterpi_cmdlin case 'v':; char *vmode_dup = strdup(optarg); if (vmode_dup == NULL) { - return false; + goto fail; + } + + if (result_out->desired_videomode != NULL) { + free(result_out->desired_videomode); } result_out->desired_videomode = vmode_dup; @@ -1992,15 +2004,15 @@ bool flutterpi_parse_cmdline_args(int argc, char **argv, struct flutterpi_cmdlin ok = parse_vec2i(optarg, &result_out->dummy_display_size); if (!ok) { LOG_ERROR("ERROR: Invalid argument for --dummy-display-size passed.\n"); - return false; + goto fail; } break; - case 'h': printf("%s", usage); return false; + case 'h': printf("%s", usage); goto fail; case '?': - case ':': LOG_ERROR("Invalid option specified.\n%s", usage); return false; + case ':': LOG_ERROR("Invalid option specified.\n%s", usage); goto fail; case -1: finished_parsing_options = true; break; @@ -2011,7 +2023,7 @@ bool flutterpi_parse_cmdline_args(int argc, char **argv, struct flutterpi_cmdlin if (optind >= argc) { LOG_ERROR("ERROR: Expected asset bundle path after options.\n"); printf("%s", usage); - return false; + goto fail; } result_out->bundle_path = strdup(argv[optind]); @@ -2027,6 +2039,17 @@ bool flutterpi_parse_cmdline_args(int argc, char **argv, struct flutterpi_cmdlin result_out->dummy_display = !!dummy_display_int; return true; + +fail: + if (result_out->bundle_path != NULL) { + free(result_out->bundle_path); + } + + if (result_out->desired_videomode != NULL) { + free(result_out->desired_videomode); + } + + return false; } static int on_drmdev_open(const char *path, int flags, void **fd_metadata_out, void *userdata) { @@ -2109,7 +2132,7 @@ static struct drmdev *find_drmdev(struct libseat *libseat) { ASSERT_EQUALS(libseat, NULL); #endif - ok = drmGetDevices2(0, devices, sizeof(devices) / sizeof(*devices)); + ok = drmGetDevices2(0, devices, ARRAY_SIZE(devices)); if (ok < 0) { LOG_ERROR("Could not query DRM device list: %s\n", strerror(-ok)); return NULL; @@ -2166,12 +2189,12 @@ static struct drmdev *find_drmdev(struct libseat *libseat) { return NULL; } -static struct gbm_device *open_rendernode_as_gbm_device() { +static struct gbm_device *open_rendernode_as_gbm_device(void) { struct gbm_device *gbm; drmDevicePtr devices[64]; int ok, n_devices; - ok = drmGetDevices2(0, devices, sizeof(devices) / sizeof(*devices)); + ok = drmGetDevices2(0, devices, ARRAY_SIZE(devices)); if (ok < 0) { LOG_ERROR("Could not query DRM device list: %s\n", strerror(-ok)); return NULL; @@ -2319,7 +2342,7 @@ struct flutterpi *flutterpi_new_from_args(int argc, char **argv) { struct tracer *tracer; struct window *window; void *engine_handle; - char *bundle_path, **engine_argv, *desired_videomode; + char **engine_argv, *desired_videomode; int ok, engine_argc, wakeup_fd; fpi = malloc(sizeof *fpi); @@ -2339,15 +2362,14 @@ struct flutterpi *flutterpi_new_from_args(int argc, char **argv) { if (cmd_args.use_vulkan == true) { LOG_ERROR("ERROR: --vulkan was specified, but flutter-pi was built without vulkan support.\n"); printf("%s", usage); - return NULL; + goto fail_free_cmd_args; } #endif runtime_mode = cmd_args.has_runtime_mode ? 
cmd_args.runtime_mode : FLUTTER_RUNTIME_MODE_DEBUG; - bundle_path = cmd_args.bundle_path; + engine_argc = cmd_args.engine_argc; engine_argv = cmd_args.engine_argv; - #if defined(HAVE_EGL_GLES2) && defined(HAVE_VULKAN) renderer_type = cmd_args.use_vulkan ? kVulkan_RendererType : kOpenGL_RendererType; #elif defined(HAVE_EGL_GLES2) && !defined(HAVE_VULKAN) @@ -2361,16 +2383,13 @@ struct flutterpi *flutterpi_new_from_args(int argc, char **argv) { desired_videomode = cmd_args.desired_videomode; - if (bundle_path == NULL) { - LOG_ERROR("ERROR: Bundle path does not exist.\n"); - goto fail_free_cmd_args; - } - - paths = setup_paths(runtime_mode, bundle_path); + paths = setup_paths(runtime_mode, cmd_args.bundle_path); if (paths == NULL) { goto fail_free_cmd_args; } + fpi->flutter.bundle_path = realpath(cmd_args.bundle_path, NULL); + wakeup_fd = eventfd(0, EFD_CLOEXEC | EFD_NONBLOCK); if (wakeup_fd < 0) { LOG_ERROR("Could not create fd for waking up the main loop. eventfd: %s\n", strerror(errno)); @@ -2478,7 +2497,6 @@ struct flutterpi *flutterpi_new_from_args(int argc, char **argv) { vk_renderer = vk_renderer_new(); if (vk_renderer == NULL) { LOG_ERROR("Couldn't create vulkan renderer.\n"); - ok = EIO; goto fail_unref_scheduler; } #else @@ -2490,7 +2508,6 @@ struct flutterpi *flutterpi_new_from_args(int argc, char **argv) { gl_renderer = gl_renderer_new_from_gbm_device(tracer, gbm_device, cmd_args.has_pixel_format, cmd_args.pixel_format); if (gl_renderer == NULL) { LOG_ERROR("Couldn't create EGL/OpenGL renderer.\n"); - ok = EIO; goto fail_unref_scheduler; } @@ -2593,8 +2610,8 @@ struct flutterpi *flutterpi_new_from_args(int argc, char **argv) { fpi, &geometry.display_to_view_transform, &geometry.view_to_display_transform, - geometry.display_size.x, - geometry.display_size.y + (unsigned int) geometry.display_size.x, + (unsigned int) geometry.display_size.y ); if (input == NULL) { LOG_ERROR("Couldn't initialize user input. 
flutter-pi will run without user input.\n"); @@ -2696,6 +2713,8 @@ struct flutterpi *flutterpi_new_from_args(int argc, char **argv) { frame_scheduler_unref(scheduler); window_unref(window); + free(cmd_args.bundle_path); + pthread_mutex_init(&fpi->event_loop_mutex, get_default_mutex_attrs()); fpi->event_loop_thread = pthread_self(); fpi->wakeup_event_loop_fd = wakeup_fd; @@ -2707,7 +2726,6 @@ struct flutterpi *flutterpi_new_from_args(int argc, char **argv) { fpi->vk_renderer = vk_renderer; fpi->user_input = input; fpi->flutter.runtime_mode = runtime_mode; - fpi->flutter.bundle_path = realpath(bundle_path, NULL); fpi->flutter.engine_argc = engine_argc; fpi->flutter.engine_argv = engine_argv; fpi->flutter.paths = paths; @@ -2785,6 +2803,7 @@ struct flutterpi *flutterpi_new_from_args(int argc, char **argv) { fail_free_cmd_args: free(cmd_args.bundle_path); + free(cmd_args.desired_videomode); fail_free_fpi: free(fpi); diff --git a/src/flutter-pi.h b/src/flutter-pi.h index d2e831d9..00f9eb65 100644 --- a/src/flutter-pi.h +++ b/src/flutter-pi.h @@ -92,6 +92,7 @@ struct drmdev; struct locales; struct vk_renderer; struct flutterpi; +struct tracer; /// TODO: Remove this extern struct flutterpi *flutterpi; @@ -188,6 +189,8 @@ bool flutterpi_has_gl_renderer(struct flutterpi *flutterpi); struct gl_renderer *flutterpi_get_gl_renderer(struct flutterpi *flutterpi); +struct tracer *flutterpi_get_tracer(struct flutterpi *flutterpi); + void flutterpi_set_pointer_kind(struct flutterpi *flutterpi, enum pointer_kind kind); void flutterpi_trace_event_instant(struct flutterpi *flutterpi, const char *name); diff --git a/src/flutter_embedder.h b/src/flutter_embedder.h new file mode 100644 index 00000000..b5fbe6a7 --- /dev/null +++ b/src/flutter_embedder.h @@ -0,0 +1,11 @@ +#ifndef _FLUTTERPI_SRC_FLUTTER_EMBEDDER_H +#define _FLUTTERPI_SRC_FLUTTER_EMBEDDER_H + +#include "util/macros.h" + +PRAGMA_DIAGNOSTIC_PUSH +PRAGMA_DIAGNOSTIC_IGNORED("-Wstrict-prototypes") +#include "flutter_embedder_header/flutter_embedder.h" +PRAGMA_DIAGNOSTIC_POP + +#endif // _FLUTTERPI_SRC_FLUTTER_EMBEDDER_H diff --git a/src/gl_renderer.c b/src/gl_renderer.c index 58b25ea2..a151310e 100644 --- a/src/gl_renderer.c +++ b/src/gl_renderer.c @@ -62,24 +62,23 @@ struct gl_renderer { #endif }; -static void *try_get_proc_address(const char *name) { - void *address; - - address = eglGetProcAddress(name); - if (address) { - return address; +static fn_ptr_t try_get_proc_address(const char *name) { + fn_ptr_t fn = eglGetProcAddress(name); + if (fn) { + return fn; } - address = dlsym(RTLD_DEFAULT, name); - if (address) { - return address; + void *void_fn = dlsym(RTLD_DEFAULT, name); + if (void_fn) { + *((void **) &fn) = void_fn; + return fn; } - return NULL; + return (fn_ptr_t) NULL; } -static void *get_proc_address(const char *name) { - void *address; +static fn_ptr_t get_proc_address(const char *name) { + fn_ptr_t address; address = try_get_proc_address(name); if (address == NULL) { @@ -177,13 +176,13 @@ struct gl_renderer *gl_renderer_new_from_gbm_device( // PFNEGLGETPLATFORMDISPLAYEXTPROC, PFNEGLCREATEPLATFORMWINDOWSURFACEEXTPROC // are defined by EGL_EXT_platform_base. 
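The fn_ptr_t plumbing introduced in gl_renderer and used by proc_resolver() / get_flutter_engine_procs() exists because ISO C defines no conversion between object pointers and function pointers, so a plain cast of a dlsym() result trips the newly enabled -pedantic. Copying the pointer representation through a void ** sidesteps the diagnostic and works on platforms where dlsym() is expected to return function addresses. A small sketch using a local function in place of a real dlsym() lookup:

```c
#include <stdio.h>

typedef void (*fn_ptr_t)(void);

static void hello(void) {
    puts("hello");
}

int main(void) {
    // Function pointer -> object pointer, the way proc_resolver() returns
    // a resolved GL function through a void * interface.
    fn_ptr_t fn = hello;
    void *addr = *((void **) &fn);

    // Object pointer -> function pointer, the way get_flutter_engine_procs()
    // treats the void * that dlsym() hands back.
    fn_ptr_t resolved = NULL;
    *((void **) &resolved) = addr;

    resolved();  // prints "hello"
    return 0;
}
```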
#ifdef EGL_EXT_platform_base - PFNEGLGETPLATFORMDISPLAYEXTPROC egl_get_platform_display_ext; - PFNEGLCREATEPLATFORMWINDOWSURFACEEXTPROC egl_create_platform_window_surface_ext; + PFNEGLGETPLATFORMDISPLAYEXTPROC egl_get_platform_display_ext = NULL; + PFNEGLCREATEPLATFORMWINDOWSURFACEEXTPROC egl_create_platform_window_surface_ext = NULL; #endif if (supports_egl_ext_platform_base) { #ifdef EGL_EXT_platform_base - egl_get_platform_display_ext = try_get_proc_address("eglGetPlatformDisplayEXT"); + egl_get_platform_display_ext = (PFNEGLGETPLATFORMDISPLAYEXTPROC) try_get_proc_address("eglGetPlatformDisplayEXT"); if (egl_get_platform_display_ext == NULL) { LOG_ERROR("Couldn't resolve \"eglGetPlatformDisplayEXT\" even though \"EGL_EXT_platform_base\" was listed as supported.\n"); supports_egl_ext_platform_base = false; @@ -195,7 +194,8 @@ struct gl_renderer *gl_renderer_new_from_gbm_device( if (supports_egl_ext_platform_base) { #ifdef EGL_EXT_platform_base - egl_create_platform_window_surface_ext = try_get_proc_address("eglCreatePlatformWindowSurfaceEXT"); + egl_create_platform_window_surface_ext = (PFNEGLCREATEPLATFORMWINDOWSURFACEEXTPROC + ) try_get_proc_address("eglCreatePlatformWindowSurfaceEXT"); if (egl_create_platform_window_surface_ext == NULL) { LOG_ERROR( "Couldn't resolve \"eglCreatePlatformWindowSurfaceEXT\" even though \"EGL_EXT_platform_base\" was listed as supported.\n" @@ -217,8 +217,9 @@ struct gl_renderer *gl_renderer_new_from_gbm_device( bool failed_before = false; #ifdef EGL_VERSION_1_5 - PFNEGLGETPLATFORMDISPLAYPROC egl_get_platform_display = try_get_proc_address("eglGetPlatformDisplay"); - PFNEGLCREATEPLATFORMWINDOWSURFACEPROC egl_create_platform_window_surface = try_get_proc_address("eglCreatePlatformWindowSurface"); + PFNEGLGETPLATFORMDISPLAYPROC egl_get_platform_display = (PFNEGLGETPLATFORMDISPLAYPROC) try_get_proc_address("eglGetPlatformDisplay"); + PFNEGLCREATEPLATFORMWINDOWSURFACEPROC egl_create_platform_window_surface = (PFNEGLCREATEPLATFORMWINDOWSURFACEPROC + ) try_get_proc_address("eglCreatePlatformWindowSurface"); if (egl_display == EGL_NO_DISPLAY && egl_get_platform_display != NULL) { egl_display = egl_get_platform_display(EGL_PLATFORM_GBM_KHR, gbm_device, NULL); @@ -550,7 +551,7 @@ int gl_renderer_clear_current(struct gl_renderer *renderer) { return 0; } -void *gl_renderer_get_proc_address(ASSERTED struct gl_renderer *renderer, const char *name) { +fn_ptr_t gl_renderer_get_proc_address(ASSERTED struct gl_renderer *renderer, const char *name) { ASSERT_NOT_NULL(renderer); ASSERT_NOT_NULL(name); return get_proc_address(name); @@ -628,7 +629,7 @@ int gl_renderer_make_this_a_render_thread(struct gl_renderer *renderer) { return 0; } -void gl_renderer_cleanup_this_render_thread() { +void gl_renderer_cleanup_this_render_thread(void) { EGLDisplay display; EGLContext context; EGLBoolean egl_ok; diff --git a/src/gl_renderer.h b/src/gl_renderer.h index d6c8160a..6afcd577 100644 --- a/src/gl_renderer.h +++ b/src/gl_renderer.h @@ -26,6 +26,8 @@ #include "egl.h" +typedef void (*fn_ptr_t)(void); + struct tracer; struct gl_renderer *gl_renderer_new_from_gbm_device( @@ -59,7 +61,7 @@ int gl_renderer_clear_current(struct gl_renderer *renderer); EGLContext gl_renderer_create_context(struct gl_renderer *renderer); -void *gl_renderer_get_proc_address(struct gl_renderer *renderer, const char *name); +fn_ptr_t gl_renderer_get_proc_address(struct gl_renderer *renderer, const char *name); EGLDisplay gl_renderer_get_egl_display(struct gl_renderer *renderer); @@ -71,7 +73,7 @@ bool 
gl_renderer_is_llvmpipe(struct gl_renderer *renderer); int gl_renderer_make_this_a_render_thread(struct gl_renderer *renderer); -void gl_renderer_cleanup_this_render_thread(); +void gl_renderer_cleanup_this_render_thread(void); ATTR_PURE EGLConfig gl_renderer_choose_config(struct gl_renderer *renderer, bool has_desired_pixel_format, enum pixfmt desired_pixel_format); diff --git a/src/locales.c b/src/locales.c index ff66a098..7f13b1a6 100644 --- a/src/locales.c +++ b/src/locales.c @@ -220,7 +220,7 @@ static int add_locale_variants(struct list_head *locales, const char *locale_des } // then append all possible combinations - for (int i = 0b111; i >= 0; i--) { + for (int i = 7; i >= 0; i--) { char *territory_2 = NULL, *codeset_2 = NULL, *modifier_2 = NULL; if ((i & 1) != 0) { @@ -311,7 +311,7 @@ struct locales *locales_new(void) { // Use those to create our flutter locales. n_locales = list_length(&locales->locales); - fl_locales = calloc(n_locales, sizeof *fl_locales); + fl_locales = calloc(n_locales == 0 ? 1 : n_locales, sizeof(const FlutterLocale *)); if (fl_locales == NULL) { goto fail_free_allocated_locales; } @@ -322,6 +322,18 @@ struct locales *locales_new(void) { i++; } + // If we have no locales, add a default "C" locale. + if (i == 0) { + fl_locales[0] = &(const FlutterLocale){ + .struct_size = sizeof(FlutterLocale), + .language_code = "C", + .country_code = NULL, + .script_code = NULL, + .variant_code = NULL, + }; + i++; + } + if (streq(fl_locales[0]->language_code, "C")) { LOG_LOCALES_ERROR("Warning: The system has no configured locale. The default \"C\" locale may or may not be supported by the app.\n" ); diff --git a/src/modesetting.c b/src/modesetting.c index 722a71ea..80508a1b 100644 --- a/src/modesetting.c +++ b/src/modesetting.c @@ -417,8 +417,10 @@ static int fetch_crtc(int drm_fd, int crtc_index, uint32_t crtc_id, struct drm_c prop_info = NULL; } + ASSUME(0 <= crtc_index && crtc_index < 32); + crtc_out->id = crtc->crtc_id; - crtc_out->index = crtc_index; + crtc_out->index = (uint8_t) crtc_index; crtc_out->bitmask = 1u << crtc_index; crtc_out->ids = ids; crtc_out->committed_state.has_mode = crtc->mode_valid; @@ -610,15 +612,16 @@ extern void drmModeFreeFB2(struct _drmModeFB2 *ptr) __attribute__((weak)); static int fetch_plane(int drm_fd, uint32_t plane_id, struct drm_plane *plane_out) { struct drm_plane_prop_ids ids; drmModeObjectProperties *props; - drm_plane_transform_t hardcoded_rotation, supported_rotations, committed_rotation; - enum drm_blend_mode committed_blend_mode; - enum drm_plane_type type; + drm_plane_transform_t hardcoded_rotation = PLANE_TRANSFORM_NONE, supported_rotations = PLANE_TRANSFORM_NONE, + committed_rotation = PLANE_TRANSFORM_NONE; + enum drm_blend_mode committed_blend_mode = kNone_DrmBlendMode; + enum drm_plane_type type = kPrimary_DrmPlaneType; drmModePropertyRes *info; drmModePlane *plane; uint32_t comitted_crtc_x, comitted_crtc_y, comitted_crtc_w, comitted_crtc_h; uint32_t comitted_src_x, comitted_src_y, comitted_src_w, comitted_src_h; - uint16_t committed_alpha; - int64_t min_zpos, max_zpos, hardcoded_zpos, committed_zpos; + uint16_t committed_alpha = 0; + int64_t min_zpos = 0, max_zpos = 0, hardcoded_zpos = 0, committed_zpos = 0; bool supported_blend_modes[kCount_DrmBlendMode] = { 0 }; bool supported_formats[PIXFMT_COUNT] = { 0 }; bool has_type, has_rotation, has_zpos, has_hardcoded_zpos, has_hardcoded_rotation, has_alpha, has_blend_mode; @@ -848,12 +851,12 @@ static int fetch_plane(int drm_fd, uint32_t plane_id, struct drm_plane *plane_ou 
plane_out->id = plane->plane_id; plane_out->possible_crtcs = plane->possible_crtcs; plane_out->ids = ids; - plane_out->type = type; + plane_out->type = has_type ? type : kPrimary_DrmPlaneType; plane_out->has_zpos = has_zpos; - plane_out->min_zpos = min_zpos; - plane_out->max_zpos = max_zpos; + plane_out->min_zpos = has_zpos ? min_zpos : 0; + plane_out->max_zpos = has_zpos ? max_zpos : 0; plane_out->has_hardcoded_zpos = has_hardcoded_zpos; - plane_out->hardcoded_zpos = hardcoded_zpos; + plane_out->hardcoded_zpos = has_hardcoded_zpos ? hardcoded_zpos : 0; plane_out->has_rotation = has_rotation; plane_out->supported_rotations = supported_rotations; plane_out->has_hardcoded_rotation = has_hardcoded_rotation; @@ -914,7 +917,7 @@ static int fetch_planes(struct drmdev *drmdev, struct drm_plane **planes_out, si ok = fetch_plane(drmdev->fd, drmdev->plane_res->planes[i], planes + i); if (ok != 0) { for (int j = 0; j < i; j++) { - free_plane(planes + i); + free_plane(planes + j); } free(planes); return ENOMEM; @@ -936,7 +939,7 @@ static void free_planes(struct drm_plane *planes, size_t n_planes) { free(planes); } -static void assert_rotations_work() { +static void assert_rotations_work(void) { assert(PLANE_TRANSFORM_ROTATE_0.rotate_0 == true); assert(PLANE_TRANSFORM_ROTATE_0.rotate_90 == false); assert(PLANE_TRANSFORM_ROTATE_0.rotate_180 == false); @@ -2270,7 +2273,7 @@ struct kms_req_builder *drmdev_create_request_builder(struct drmdev *drmdev, uin } if (crtc == NULL) { - LOG_ERROR("Invalid CRTC id: %" PRId32 "\n", crtc_id); + LOG_ERROR("Invalid CRTC id: %" PRIu32 "\n", crtc_id); goto fail_unlock; } @@ -2448,9 +2451,6 @@ int kms_req_builder_push_fb_layer( /* id_range */ false, 0 // clang-format on ); - if (plane == NULL) { - LOG_DEBUG("Couldn't find a fitting cursor plane.\n"); - } } /// TODO: Not sure we can use crtc_x, crtc_y, etc with primary planes @@ -2633,15 +2633,15 @@ UNUSED struct kms_req *kms_req_ref(struct kms_req *req) { } UNUSED void kms_req_unref(struct kms_req *req) { - return kms_req_builder_unref((struct kms_req_builder *) req); + kms_req_builder_unref((struct kms_req_builder *) req); } UNUSED void kms_req_unrefp(struct kms_req **req) { - return kms_req_builder_unrefp((struct kms_req_builder **) req); + kms_req_builder_unrefp((struct kms_req_builder **) req); } UNUSED void kms_req_swap_ptrs(struct kms_req **oldp, struct kms_req *new) { - return kms_req_builder_swap_ptrs((struct kms_req_builder **) oldp, (struct kms_req_builder *) new); + kms_req_builder_swap_ptrs((struct kms_req_builder **) oldp, (struct kms_req_builder *) new); } static bool drm_plane_is_active(struct drm_plane *plane) { @@ -2731,13 +2731,13 @@ kms_req_commit_common(struct kms_req *req, bool blocking, kms_scanout_cb_t scano struct drm_plane *plane = layer->plane; ASSERT_NOT_NULL(plane); +#ifndef DEBUG if (plane->committed_state.has_format && plane->committed_state.format == layer->layer.format) { needs_set_crtc = false; } else { needs_set_crtc = true; } - -#ifdef DEBUG +#else drmModeFBPtr committed_fb = drmModeGetFB(builder->drmdev->master_fd, plane->committed_state.fb_id); if (committed_fb == NULL) { needs_set_crtc = true; @@ -2918,6 +2918,8 @@ kms_req_commit_common(struct kms_req *req, bool blocking, kms_scanout_cb_t scano goto fail_unref_builder; } + struct drmdev *drmdev = builder->drmdev; + drmdev_on_page_flip_locked( builder->drmdev->fd, (unsigned int) sequence, @@ -2926,16 +2928,22 @@ kms_req_commit_common(struct kms_req *req, bool blocking, kms_scanout_cb_t scano builder->crtc->id, kms_req_ref(req) ); + + 
drmdev_unlock(drmdev); } else if (blocking) { + struct drmdev *drmdev = builder->drmdev; + // handle the page-flip event here, rather than via the eventfd ok = drmdev_on_modesetting_fd_ready_locked(builder->drmdev); if (ok != 0) { LOG_ERROR("Couldn't synchronously handle pageflip event.\n"); goto fail_unset_scanout_callback; } - } - drmdev_unlock(builder->drmdev); + drmdev_unlock(drmdev); + } else { + drmdev_unlock(builder->drmdev); + } return 0; @@ -2945,8 +2953,15 @@ kms_req_commit_common(struct kms_req *req, bool blocking, kms_scanout_cb_t scano builder->drmdev->per_crtc_state[builder->crtc->index].userdata = NULL; goto fail_unlock; -fail_unref_builder: +fail_unref_builder: { + struct drmdev *drmdev = builder->drmdev; kms_req_builder_unref(builder); + if (mode_blob != NULL) { + drm_mode_blob_destroy(mode_blob); + } + drmdev_unlock(drmdev); + return ok; +} fail_maybe_destroy_mode_blob: if (mode_blob != NULL) diff --git a/src/modesetting.h b/src/modesetting.h index 401e1150..bbbea305 100644 --- a/src/modesetting.h +++ b/src/modesetting.h @@ -751,7 +751,7 @@ struct kms_req_builder; struct kms_req_builder *drmdev_create_request_builder(struct drmdev *drmdev, uint32_t crtc_id); -DECLARE_REF_OPS(kms_req_builder); +DECLARE_REF_OPS(kms_req_builder) /** * @brief Gets the @ref drmdev associated with this KMS request builder. @@ -900,7 +900,7 @@ int kms_req_builder_push_zpos_placeholder_layer(struct kms_req_builder *builder, */ struct kms_req; -DECLARE_REF_OPS(kms_req); +DECLARE_REF_OPS(kms_req) /** * @brief Build the KMS request builder into an actual, immutable KMS request diff --git a/src/notifier_listener.c b/src/notifier_listener.c index d8156538..3cb8adea 100644 --- a/src/notifier_listener.c +++ b/src/notifier_listener.c @@ -48,7 +48,7 @@ int value_notifier_init(struct notifier *notifier, void *initial_value, void_cal return 0; } -struct notifier *change_notifier_new() { +struct notifier *change_notifier_new(void) { struct notifier *n; int ok; @@ -112,10 +112,12 @@ struct listener *notifier_listen(struct notifier *notifier, listener_cb_t notify return NULL; } - r = listener_notify(l, notifier->state); - if (r == kUnlisten) { - listener_destroy(l); - return NULL; + if (notifier->is_value_notifier) { + r = listener_notify(l, notifier->state); + if (r == kUnlisten) { + listener_destroy(l); + return NULL; + } } notifier_lock(notifier); diff --git a/src/notifier_listener.h b/src/notifier_listener.h index 3861182e..cbc042b9 100644 --- a/src/notifier_listener.h +++ b/src/notifier_listener.h @@ -59,7 +59,7 @@ int value_notifier_init(struct notifier *notifier, void *initial_value, void_cal * For the behaviour of change notifiers, see @ref change_notifier_init. * */ -struct notifier *change_notifier_new(); +struct notifier *change_notifier_new(void); /** * @brief Create a new heap allocated value notifier. 
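The notifier_listen() change above only delivers the current state to a freshly attached listener when the notifier is a value notifier; change notifiers stay silent until the next notification. A simplified sketch of that behaviour (the struct layout and the kNoAction value are assumptions; only kUnlisten appears in the hunk):

```c
#include <stdbool.h>
#include <stdio.h>

enum listener_return { kNoAction, kUnlisten };

typedef enum listener_return (*listener_cb_t)(void *value, void *userdata);

struct notifier {
    bool is_value_notifier;
    void *state;  // current value; only meaningful for value notifiers
};

static enum listener_return print_listener(void *value, void *userdata) {
    (void) userdata;
    printf("notified with \"%s\"\n", (const char *) value);
    return kNoAction;
}

// Simplified notifier_listen(): value notifiers notify the new listener
// immediately with the current value, change notifiers don't; registering
// the listener for future notifications is omitted here.
static void listen(struct notifier *notifier, listener_cb_t cb, void *userdata) {
    if (notifier->is_value_notifier) {
        if (cb(notifier->state, userdata) == kUnlisten) {
            return;  // listener asked to be dropped right away
        }
    }
    // ... add cb to the notifier's listener list ...
}

int main(void) {
    struct notifier value_notifier = { .is_value_notifier = true, .state = "initial" };
    struct notifier change_notifier = { .is_value_notifier = false, .state = NULL };

    listen(&value_notifier, print_listener, NULL);   // prints immediately
    listen(&change_notifier, print_listener, NULL);  // prints nothing yet
    return 0;
}
```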
diff --git a/src/pixel_format.c b/src/pixel_format.c index 6b936619..a1d09b2f 100644 --- a/src/pixel_format.c +++ b/src/pixel_format.c @@ -77,7 +77,7 @@ const size_t n_pixfmt_infos = n_pixfmt_infos_constexpr; COMPILE_ASSERT(n_pixfmt_infos_constexpr == PIXFMT_MAX + 1); #ifdef DEBUG -void assert_pixfmt_list_valid() { +void assert_pixfmt_list_valid(void) { for (enum pixfmt format = 0; format < PIXFMT_COUNT; format++) { assert(pixfmt_infos[format].format == format); } diff --git a/src/pixel_format.h b/src/pixel_format.h index 3ad991ee..14119039 100644 --- a/src/pixel_format.h +++ b/src/pixel_format.h @@ -359,7 +359,7 @@ extern const struct pixfmt_info pixfmt_infos[]; extern const size_t n_pixfmt_infos; #ifdef DEBUG -void assert_pixfmt_list_valid(); +void assert_pixfmt_list_valid(void); #endif /** diff --git a/src/platformchannel.c b/src/platformchannel.c index 4bc13be1..05a5c03c 100644 --- a/src/platformchannel.c +++ b/src/platformchannel.c @@ -3,6 +3,7 @@ #include #include #include +#include #include #include #include @@ -165,45 +166,48 @@ static int _readSize(const uint8_t **pbuffer, uint32_t *psize, size_t *remaining return 0; } -int platch_free_value_std(struct std_value *value) { - int ok; - +void platch_free_value_std(struct std_value *value) { switch (value->type) { + case kStdNull: + case kStdTrue: + case kStdFalse: + case kStdInt32: + case kStdInt64: + case kStdLargeInt: + case kStdFloat64: break; case kStdString: free(value->string_value); break; + case kStdUInt8Array: + case kStdInt32Array: + case kStdInt64Array: + case kStdFloat64Array: break; case kStdList: for (int i = 0; i < value->size; i++) { - ok = platch_free_value_std(&(value->list[i])); - if (ok != 0) - return ok; + platch_free_value_std(value->list + i); } free(value->list); break; case kStdMap: for (int i = 0; i < value->size; i++) { - ok = platch_free_value_std(&(value->keys[i])); - if (ok != 0) - return ok; - ok = platch_free_value_std(&(value->values[i])); - if (ok != 0) - return ok; + platch_free_value_std(value->keys + i); + platch_free_value_std(value->values + i); } free(value->keys); break; + case kStdFloat32Array: break; default: break; } - - return 0; } -int platch_free_json_value(struct json_value *value, bool shallow) { - int ok; - +void platch_free_json_value(struct json_value *value, bool shallow) { switch (value->type) { + case kJsonNull: + case kJsonTrue: + case kJsonFalse: + case kJsonNumber: + case kJsonString: break; case kJsonArray: if (!shallow) { for (int i = 0; i < value->size; i++) { - ok = platch_free_json_value(&(value->array[i]), false); - if (ok != 0) - return ok; + platch_free_json_value(&(value->array[i]), false); } } @@ -212,9 +216,7 @@ int platch_free_json_value(struct json_value *value, bool shallow) { case kJsonObject: if (!shallow) { for (int i = 0; i < value->size; i++) { - ok = platch_free_json_value(&(value->values[i]), false); - if (ok != 0) - return ok; + platch_free_json_value(&(value->values[i]), false); } } @@ -223,11 +225,11 @@ int platch_free_json_value(struct json_value *value, bool shallow) { break; default: break; } - - return 0; } -int platch_free_obj(struct platch_obj *object) { + +void platch_free_obj(struct platch_obj *object) { switch (object->codec) { + case kNotImplemented: break; case kStringCodec: free(object->string_value); break; case kBinaryCodec: break; case kJSONMessageCodec: platch_free_json_value(&(object->json_value), false); break; @@ -235,12 +237,34 @@ int platch_free_obj(struct platch_obj *object) { case kStandardMethodCall: free(object->method); 
platch_free_value_std(&(object->std_arg)); + break; + case kStandardMethodCallResponse: + if (object->success) { + platch_free_value_std(&(object->std_result)); + } else { + free(object->error_code); + if (object->error_msg) { + free(object->error_msg); + } + platch_free_value_std(&(object->std_error_details)); + } + break; case kJSONMethodCall: platch_free_json_value(&(object->json_arg), false); break; - default: break; - } + case kJSONMethodCallResponse: + if (object->success) { + platch_free_json_value(&(object->json_result), false); + } else { + free(object->error_code); + if (object->error_msg) { + free(object->error_msg); + } + platch_free_json_value(&(object->json_error_details), false); + } - return 0; + break; + default: UNREACHABLE(); + } } int platch_calc_value_size_std(struct std_value *value, size_t *size_out) { @@ -330,6 +354,15 @@ int platch_calc_value_size_std(struct std_value *value, size_t *size_out) { } break; + case kStdFloat32Array: + element_size = value->size; + + _advance_size_bytes(&size, element_size, NULL); + _align(&size, 4, NULL); + _advance(&size, element_size * 4, NULL); + + break; + default: return EINVAL; } @@ -342,7 +375,7 @@ int platch_write_value_to_buffer_std(struct std_value *value, uint8_t **pbuffer) size_t size; int ok; - _write_u8(pbuffer, value->type, NULL); + _write_u8(pbuffer, (uint8_t) value->type, NULL); switch (value->type) { case kStdNull: @@ -425,6 +458,16 @@ int platch_write_value_to_buffer_std(struct std_value *value, uint8_t **pbuffer) return ok; } break; + case kStdFloat32Array: + size = value->size; + + _writeSize(pbuffer, size, NULL); + _align((uintptr_t *) pbuffer, 4, NULL); + + for (int i = 0; i < size; i++) { + _write_float(pbuffer, value->float32array[i], NULL); + } + break; default: return EINVAL; } @@ -678,7 +721,7 @@ int platch_decode_value_std(const uint8_t **pbuffer, size_t *premaining, struct return ok; break; - case kStdList: + case kStdList: { ok = _readSize(pbuffer, &size, premaining); if (ok != 0) return ok; @@ -687,36 +730,80 @@ int platch_decode_value_std(const uint8_t **pbuffer, size_t *premaining, struct value_out->list = calloc(size, sizeof(struct std_value)); for (int i = 0; i < size; i++) { - ok = platch_decode_value_std(pbuffer, premaining, &value_out->list[i]); - if (ok != 0) + ok = platch_decode_value_std(pbuffer, premaining, value_out->list + i); + if (ok != 0) { + for (int j = 0; j < i; j++) { + platch_free_value_std(value_out->list + j); + } + free(value_out->list); return ok; + } } break; - case kStdMap: + } + case kStdMap: { ok = _readSize(pbuffer, &size, premaining); - if (ok != 0) + if (ok != 0) { return ok; + } value_out->size = size; value_out->keys = calloc(size * 2, sizeof(struct std_value)); - if (!value_out->keys) + if (!value_out->keys) { return ENOMEM; + } value_out->values = &value_out->keys[size]; for (int i = 0; i < size; i++) { ok = platch_decode_value_std(pbuffer, premaining, &(value_out->keys[i])); - if (ok != 0) + if (ok != 0) { + for (int j = 0; j < i; j++) { + platch_free_value_std(&(value_out->values[j])); + platch_free_value_std(&(value_out->keys[j])); + } + free(value_out->keys); return ok; + } ok = platch_decode_value_std(pbuffer, premaining, &(value_out->values[i])); - if (ok != 0) + if (ok != 0) { + platch_free_value_std(&(value_out->keys[i])); + for (int j = 0; j < i; j++) { + platch_free_value_std(&(value_out->values[j])); + platch_free_value_std(&(value_out->keys[j])); + } + free(value_out->keys); return ok; + } } break; + } + + case kStdFloat32Array: { + ok = _readSize(pbuffer, 
&size, premaining); + if (ok != 0) + return ok; + + ok = _align((uintptr_t *) pbuffer, 4, premaining); + if (ok != 0) + return ok; + + if (*premaining < size * 4) + return EBADMSG; + + value_out->size = size; + value_out->float32array = (float *) *pbuffer; + + ok = _advance((uintptr_t *) pbuffer, size * 4, premaining); + if (ok != 0) + return ok; + + break; + } default: return EBADMSG; } @@ -792,8 +879,10 @@ int platch_decode_value_json(char *message, size_t size, jsmntok_t **pptoken, si for (int i = 0; i < ptoken->size; i++) { ok = platch_decode_value_json(message, size, pptoken, ptokensremaining, &array[i]); - if (ok != 0) + if (ok != 0) { + free(array); return ok; + } } value_out->type = kJsonArray; @@ -801,25 +890,52 @@ int platch_decode_value_json(char *message, size_t size, jsmntok_t **pptoken, si value_out->array = array; break; - case JSMN_OBJECT:; - struct json_value key; + case JSMN_OBJECT: { char **keys = calloc(ptoken->size, sizeof(char *)); + if (!keys) { + return ENOMEM; + } + struct json_value *values = calloc(ptoken->size, sizeof(struct json_value)); - if ((!keys) || (!values)) + if (!values) { + free(keys); return ENOMEM; + } for (int i = 0; i < ptoken->size; i++) { + struct json_value key; + ok = platch_decode_value_json(message, size, pptoken, ptokensremaining, &key); - if (ok != 0) + if (ok != 0) { + for (int j = 0; j < i; j++) { + free(keys[j]); + } + free(keys); + free(values); return ok; + } - if (key.type != kJsonString) + if (key.type != kJsonString) { + platch_free_json_value(&key, true); + for (int j = 0; j < i; j++) { + free(keys[j]); + } + free(keys); + free(values); return EBADMSG; + } + keys[i] = key.string_value; ok = platch_decode_value_json(message, size, pptoken, ptokensremaining, &values[i]); - if (ok != 0) + if (ok != 0) { + for (int j = 0; j < i; j++) { + free(keys[j]); + } + free(keys); + free(values); return ok; + } } value_out->type = kJsonObject; @@ -828,6 +944,7 @@ int platch_decode_value_json(char *message, size_t size, jsmntok_t **pptoken, si value_out->values = values; break; + } default: return EBADMSG; } } @@ -840,135 +957,191 @@ int platch_decode_json(char *string, struct json_value *out) { } int platch_decode(const uint8_t *buffer, size_t size, enum platch_codec codec, struct platch_obj *object_out) { - struct json_value root_jsvalue; - const uint8_t *buffer_cursor = buffer; - size_t remaining = size; int ok; - if ((size == 0) && (buffer == NULL)) { - object_out->codec = kNotImplemented; - return 0; + if (codec != kNotImplemented && ((size == 0) || (buffer == NULL))) { + return EINVAL; } + const uint8_t *buffer_cursor = buffer; + size_t remaining = size; + object_out->codec = codec; switch (codec) { - case kStringCodec:; - /// buffer is a non-null-terminated, UTF8-encoded string. - /// it's really sad we have to allocate a new memory block for this, but we have to since string codec buffers are not null-terminated. 
+ case kNotImplemented: { + if (size != 0) { + return EINVAL; + } + if (buffer != NULL) { + return EINVAL; + } + + break; + } - char *string; - if (!(string = malloc(size + 1))) + case kStringCodec: { + char *string = malloc(size + 1); + if (string == NULL) { return ENOMEM; - memcpy(string, buffer, size); + } + + strncpy(string, (char *) buffer, size); string[size] = '\0'; object_out->string_value = string; - break; - case kBinaryCodec: + } + case kBinaryCodec: { + if (size == 0) { + return EINVAL; + } + if (buffer == NULL) { + return EINVAL; + } + object_out->binarydata = buffer; object_out->binarydata_size = size; - break; - case kJSONMessageCodec: - ok = platch_decode_value_json((char *) buffer, size, NULL, NULL, &(object_out->json_value)); - if (ok != 0) + } + case kJSONMessageCodec: { + ok = platch_decode_value_json((char *) buffer, size, NULL, NULL, &object_out->json_value); + if (ok != 0) { return ok; + } break; - case kJSONMethodCall:; - ok = platch_decode_value_json((char *) buffer, size, NULL, NULL, &root_jsvalue); - if (ok != 0) - return ok; + } + case kJSONMethodCall: { + struct json_value root; - if (root_jsvalue.type != kJsonObject) - return EBADMSG; + ok = platch_decode_value_json((char *) buffer, size, NULL, NULL, &root); + if (ok != 0) { + return ok; + } - for (int i = 0; i < root_jsvalue.size; i++) { - if ((streq(root_jsvalue.keys[i], "method")) && (root_jsvalue.values[i].type == kJsonString)) { - object_out->method = root_jsvalue.values[i].string_value; - } else if (streq(root_jsvalue.keys[i], "args")) { - object_out->json_arg = root_jsvalue.values[i]; - } else - return EBADMSG; + if (root.type != kJsonObject) { + platch_free_json_value(&root, true); + return EINVAL; } - platch_free_json_value(&root_jsvalue, true); + for (int i = 0; i < root.size; i++) { + if ((streq(root.keys[i], "method")) && (root.values[i].type == kJsonString)) { + object_out->method = root.values[i].string_value; + } else if (streq(root.keys[i], "args")) { + object_out->json_arg = root.values[i]; + } else { + return EINVAL; + } + } + platch_free_json_value(&root, true); break; - case kJSONMethodCallResponse:; - ok = platch_decode_value_json((char *) buffer, size, NULL, NULL, &root_jsvalue); - if (ok != 0) + } + case kJSONMethodCallResponse: { + struct json_value root; + + ok = platch_decode_value_json((char *) buffer, size, NULL, NULL, &root); + if (ok != 0) { return ok; - if (root_jsvalue.type != kJsonArray) - return EBADMSG; + } + + if (root.type != kJsonArray) { + platch_free_json_value(&root, true); + return EINVAL; + } - if (root_jsvalue.size == 1) { + if (root.size == 1) { object_out->success = true; - object_out->json_result = root_jsvalue.array[0]; - return platch_free_json_value(&root_jsvalue, true); - } else if ((root_jsvalue.size == 3) && - (root_jsvalue.array[0].type == kJsonString) && - ((root_jsvalue.array[1].type == kJsonString) || (root_jsvalue.array[1].type == kJsonNull))) { + object_out->json_result = root.array[0]; + } else if ((root.size == 3) && (root.array[0].type == kJsonString) && + ((root.array[1].type == kJsonString) || (root.array[1].type == kJsonNull))) { object_out->success = false; - object_out->error_code = root_jsvalue.array[0].string_value; - object_out->error_msg = root_jsvalue.array[1].string_value; - object_out->json_error_details = root_jsvalue.array[2]; - return platch_free_json_value(&root_jsvalue, true); - } else - return EBADMSG; + object_out->error_code = root.array[0].string_value; + object_out->error_msg = root.array[1].string_value; + 
object_out->json_error_details = root.array[2]; + } else { + platch_free_json_value(&root, true); + return EINVAL; + } + platch_free_json_value(&root, true); break; - case kStandardMessageCodec: + } + case kStandardMessageCodec: { ok = platch_decode_value_std(&buffer_cursor, &remaining, &object_out->std_value); - if (ok != 0) + if (ok != 0) { return ok; + } + break; - case kStandardMethodCall:; + } + case kStandardMethodCall: { struct std_value methodname; ok = platch_decode_value_std(&buffer_cursor, &remaining, &methodname); - if (ok != 0) + if (ok != 0) { return ok; + } + if (methodname.type != kStdString) { platch_free_value_std(&methodname); - return EBADMSG; + return EINVAL; } + object_out->method = methodname.string_value; ok = platch_decode_value_std(&buffer_cursor, &remaining, &object_out->std_arg); - if (ok != 0) + if (ok != 0) { return ok; + } break; - case kStandardMethodCallResponse:; + } + case kStandardMethodCallResponse: { ok = _read_u8(&buffer_cursor, (uint8_t *) &object_out->success, &remaining); + if (ok != 0) { + return ok; + } if (object_out->success) { ok = platch_decode_value_std(&buffer_cursor, &remaining, &(object_out->std_result)); - if (ok != 0) + if (ok != 0) { return ok; + } } else { struct std_value error_code, error_msg; ok = platch_decode_value_std(&buffer_cursor, &remaining, &error_code); - if (ok != 0) + if (ok != 0) { return ok; + } + ok = platch_decode_value_std(&buffer_cursor, &remaining, &error_msg); - if (ok != 0) + if (ok != 0) { + platch_free_value_std(&error_code); return ok; + } + ok = platch_decode_value_std(&buffer_cursor, &remaining, &(object_out->std_error_details)); - if (ok != 0) + if (ok != 0) { + platch_free_value_std(&error_msg); + platch_free_value_std(&error_code); return ok; + } if ((error_code.type == kStdString) && ((error_msg.type == kStdString) || (error_msg.type == kStdNull))) { object_out->error_code = error_code.string_value; object_out->error_msg = (error_msg.type == kStdString) ? error_msg.string_value : NULL; } else { - return EBADMSG; + platch_free_value_std(&object_out->std_error_details); + platch_free_value_std(&error_code); + platch_free_value_std(&error_msg); + return EINVAL; } } + break; + } default: return EINVAL; } @@ -976,153 +1149,224 @@ int platch_decode(const uint8_t *buffer, size_t size, enum platch_codec codec, s } int platch_encode(struct platch_obj *object, uint8_t **buffer_out, size_t *size_out) { - struct std_value stdmethod, stderrcode, stderrmessage; - uint8_t *buffer, *buffer_cursor; - size_t size = 0; int ok = 0; - *size_out = 0; - *buffer_out = NULL; - switch (object->codec) { - case kNotImplemented: + case kNotImplemented: { *size_out = 0; *buffer_out = NULL; return 0; - case kStringCodec: size = strlen(object->string_value); break; - case kBinaryCodec: + } + case kStringCodec: { + *buffer_out = (uint8_t *) strdup(object->string_value); + if (buffer_out == NULL) { + return ENOMEM; + } + + *size_out = strlen(object->string_value); + return 0; + } + case kBinaryCodec: { /// FIXME: Copy buffer instead *buffer_out = (uint8_t *) object->binarydata; *size_out = object->binarydata_size; return 0; - case kJSONMessageCodec: - size = platch_calc_value_size_json(&(object->json_value)); + } + case kJSONMessageCodec: { + size_t size = platch_calc_value_size_json(&(object->json_value)); size += 1; // JSONMsgCodec uses sprintf, which null-terminates strings, // so lets allocate one more byte for the last null-terminator. 
// this is decremented again in the second switch-case, so flutter // doesn't complain about a malformed message. + + uint8_t *buffer = malloc(size); + if (buffer == NULL) { + return ENOMEM; + } + + uint8_t *buffer_cursor = buffer; + + ok = platch_write_value_to_buffer_json(&(object->json_value), &buffer_cursor); + if (ok != 0) { + free(buffer); + return ok; + } + + *buffer_out = buffer; + *size_out = size; break; - case kStandardMessageCodec: - ok = platch_calc_value_size_std(&(object->std_value), &size); - if (ok != 0) + } + case kStandardMessageCodec: { + size_t size; + + ok = platch_calc_value_size_std(&object->std_value, &size); + if (ok != 0) { + return ok; + } + + uint8_t *buffer = malloc(size); + if (buffer == NULL) { + return ENOMEM; + } + + uint8_t *buffer_cursor = buffer; + + ok = platch_write_value_to_buffer_std(&object->std_value, &buffer_cursor); + if (ok != 0) { + free(buffer); return ok; + } + + *buffer_out = buffer; + *size_out = size; break; - case kStandardMethodCall: + } + case kStandardMethodCall: { + struct std_value stdmethod; + size_t size; + stdmethod.type = kStdString; stdmethod.string_value = object->method; ok = platch_calc_value_size_std(&stdmethod, &size); - if (ok != 0) + if (ok != 0) { return ok; + } ok = platch_calc_value_size_std(&(object->std_arg), &size); - if (ok != 0) + if (ok != 0) { return ok; + } + uint8_t *buffer = malloc(size); + if (buffer == NULL) { + return ENOMEM; + } + + uint8_t *buffer_cursor = buffer; + ok = platch_write_value_to_buffer_std(&stdmethod, &buffer_cursor); + if (ok != 0) { + free(buffer); + return ok; + } + + ok = platch_write_value_to_buffer_std(&(object->std_arg), &buffer_cursor); + if (ok != 0) { + free(buffer); + return ok; + } + + *buffer_out = buffer; + *size_out = size; break; - case kStandardMethodCallResponse: - size += 1; + } + case kStandardMethodCallResponse: { + size_t size = 1; if (object->success) { ok = platch_calc_value_size_std(&(object->std_result), &size); - if (ok != 0) + if (ok != 0) { return ok; + } } else { - stderrcode = (struct std_value){ .type = kStdString, .string_value = object->error_code }; - stderrmessage = (struct std_value){ .type = kStdString, .string_value = object->error_msg }; - - ok = platch_calc_value_size_std(&stderrcode, &size); - if (ok != 0) + ok = platch_calc_value_size_std(&STDSTRING(object->error_code), &size); + if (ok != 0) { return ok; - ok = platch_calc_value_size_std(&stderrmessage, &size); - if (ok != 0) + } + + ok = platch_calc_value_size_std(&STDSTRING(object->error_msg), &size); + if (ok != 0) { return ok; + } + ok = platch_calc_value_size_std(&(object->std_error_details), &size); - if (ok != 0) + if (ok != 0) { return ok; + } } - break; - case kJSONMethodCall: - size = platch_calc_value_size_json(&JSONOBJECT2("method", JSONSTRING(object->method), "args", object->json_arg)); - size += 1; - break; - case kJSONMethodCallResponse: - if (object->success) { - size = 1 + platch_calc_value_size_json(&JSONARRAY1(object->json_result)); - } else { - size = 1 + platch_calc_value_size_json(&JSONARRAY3( - JSONSTRING(object->error_code), - (object->error_msg != NULL) ? 
JSONSTRING(object->error_msg) : JSONNULL, - object->json_error_details - )); - } - break; - default: return EINVAL; - } - - buffer = malloc(size); - if (buffer == NULL) { - return ENOMEM; - } - buffer_cursor = buffer; - - switch (object->codec) { - case kStringCodec: memcpy(buffer, object->string_value, size); break; - case kStandardMessageCodec: - ok = platch_write_value_to_buffer_std(&(object->std_value), &buffer_cursor); - if (ok != 0) - goto free_buffer_and_return_ok; - break; - case kStandardMethodCall: - ok = platch_write_value_to_buffer_std(&stdmethod, &buffer_cursor); - if (ok != 0) - goto free_buffer_and_return_ok; - - ok = platch_write_value_to_buffer_std(&(object->std_arg), &buffer_cursor); - if (ok != 0) - goto free_buffer_and_return_ok; + uint8_t *buffer = malloc(size); + if (buffer == NULL) { + return ENOMEM; + } - break; - case kStandardMethodCallResponse: + uint8_t *buffer_cursor = buffer; if (object->success) { _write_u8(&buffer_cursor, 0x00, NULL); - ok = platch_write_value_to_buffer_std(&(object->std_result), &buffer_cursor); - if (ok != 0) - goto free_buffer_and_return_ok; + ok = platch_write_value_to_buffer_std(&object->std_result, &buffer_cursor); + if (ok != 0) { + free(buffer); + return ok; + } } else { _write_u8(&buffer_cursor, 0x01, NULL); - ok = platch_write_value_to_buffer_std(&stderrcode, &buffer_cursor); - if (ok != 0) - goto free_buffer_and_return_ok; - ok = platch_write_value_to_buffer_std(&stderrmessage, &buffer_cursor); - if (ok != 0) - goto free_buffer_and_return_ok; + ok = platch_write_value_to_buffer_std(&STDSTRING(object->error_code), &buffer_cursor); + if (ok != 0) { + free(buffer); + return ok; + } + + ok = platch_write_value_to_buffer_std(&STDSTRING(object->error_msg), &buffer_cursor); + if (ok != 0) { + free(buffer); + return ok; + } + ok = platch_write_value_to_buffer_std(&(object->std_error_details), &buffer_cursor); - if (ok != 0) - goto free_buffer_and_return_ok; + if (ok != 0) { + free(buffer); + return ok; + } } + *buffer_out = buffer; + *size_out = size; break; - case kJSONMessageCodec: - size -= 1; - ok = platch_write_value_to_buffer_json(&(object->json_value), &buffer_cursor); - if (ok != 0) - goto free_buffer_and_return_ok; - break; - case kJSONMethodCall: - size -= 1; + } + case kJSONMethodCall: { + size_t size = platch_calc_value_size_json(&JSONOBJECT2("method", JSONSTRING(object->method), "args", object->json_arg)); + + uint8_t *buffer = malloc(size + 1); + if (buffer == NULL) { + return ENOMEM; + } + + uint8_t *buffer_cursor = buffer; + ok = platch_write_value_to_buffer_json( &JSONOBJECT2("method", JSONSTRING(object->method), "args", object->json_arg), &buffer_cursor ); if (ok != 0) { - goto free_buffer_and_return_ok; + free(buffer); + return ok; } + + *buffer_out = buffer; + *size_out = size; break; - case kJSONMethodCallResponse: + } + case kJSONMethodCallResponse: { + size_t size; + + if (object->success) { + size = platch_calc_value_size_json(&JSONARRAY1(object->json_result)); + } else { + size = platch_calc_value_size_json(&JSONARRAY3( + JSONSTRING(object->error_code), + (object->error_msg != NULL) ? 
JSONSTRING(object->error_msg) : JSONNULL, + object->json_error_details + )); + } + + uint8_t *buffer = malloc(size + 1); + if (buffer == NULL) { + return ENOMEM; + } + + uint8_t *buffer_cursor = buffer; if (object->success) { ok = platch_write_value_to_buffer_json(&JSONARRAY1(object->json_result), &buffer_cursor); } else { @@ -1135,21 +1379,21 @@ int platch_encode(struct platch_obj *object, uint8_t **buffer_out, size_t *size_ &buffer_cursor ); } - size -= 1; + if (ok != 0) { - goto free_buffer_and_return_ok; + free(buffer); + return ok; } + + *buffer_out = buffer; + *size_out = size; + break; + } default: return EINVAL; } - *buffer_out = buffer; - *size_out = size; return 0; - -free_buffer_and_return_ok: - free(buffer); - return ok; } void platch_on_response_internal(const uint8_t *buffer, size_t size, void *userdata) { @@ -1168,9 +1412,7 @@ void platch_on_response_internal(const uint8_t *buffer, size_t size, void *userd free(handlerdata); - ok = platch_free_obj(&object); - if (ok != 0) - return; + platch_free_obj(&object); } int platch_send( @@ -1193,7 +1435,8 @@ int platch_send( if (on_response) { handlerdata = malloc(sizeof(struct platch_msg_resp_handler_data)); if (!handlerdata) { - return ENOMEM; + ok = ENOMEM; + goto fail_free_object; } handlerdata->codec = response_codec; @@ -1232,6 +1475,11 @@ int platch_send( free(handlerdata); } +fail_free_object: + if (object->codec != kBinaryCodec) { + free(buffer); + } + return ok; } @@ -1266,7 +1514,7 @@ int platch_respond(const FlutterPlatformMessageResponseHandle *handle, struct pl free(buffer); } - return 0; + return ok; } int platch_respond_not_implemented(const FlutterPlatformMessageResponseHandle *handle) { @@ -1320,6 +1568,10 @@ int platch_respond_native_error_std(const FlutterPlatformMessageResponseHandle * return platch_respond_error_std(handle, "nativeerror", strerror(_errno), &STDINT32(_errno)); } +int platch_respond_malformed_message_std(const FlutterPlatformMessage *message) { + return platch_respond_error_std(message->response_handle, "malformed-message", "The platform message received was malformed.", &STDNULL); +} + /************************ * JSON METHOD CHANNELS * ************************/ @@ -1574,7 +1826,11 @@ bool stdvalue_equals(struct std_value *a, struct std_value *b) { ASSERT_NOT_NULL(a->string_value); ASSERT_NOT_NULL(b->string_value); return streq(a->string_value, b->string_value); - case kStdFloat64: return a->float64_value == b->float64_value; + case kStdFloat64: + PRAGMA_DIAGNOSTIC_PUSH + PRAGMA_DIAGNOSTIC_IGNORED("-Wfloat-equal") + return a->float64_value == b->float64_value; + PRAGMA_DIAGNOSTIC_POP case kStdUInt8Array: if (a->size != b->size) return false; @@ -1612,16 +1868,24 @@ bool stdvalue_equals(struct std_value *a, struct std_value *b) { return false; return true; case kStdFloat64Array: - if (a->size != b->size) - return false; if (a->float64array == b->float64array) return true; + if (a->size != b->size) + return false; + ASSERT_NOT_NULL(a->float64array); ASSERT_NOT_NULL(b->float64array); - for (int i = 0; i < a->size; i++) - if (a->float64array[i] != b->float64array[i]) + + PRAGMA_DIAGNOSTIC_PUSH + PRAGMA_DIAGNOSTIC_IGNORED("-Wfloat-equal") + for (int i = 0; i < a->size; i++) { + if (a->float64array[i] != b->float64array[i]) { return false; + } + } + PRAGMA_DIAGNOSTIC_POP + return true; case kStdList: // the order of list elements is important @@ -1680,6 +1944,25 @@ bool stdvalue_equals(struct std_value *a, struct std_value *b) { return true; } + case kStdFloat32Array: + if (a->float32array == 
b->float32array) + return true; + + if (a->size != b->size) + return false; + + ASSERT_NOT_NULL(a->float32array); + ASSERT_NOT_NULL(b->float32array); + + PRAGMA_DIAGNOSTIC_PUSH + PRAGMA_DIAGNOSTIC_IGNORED("-Wfloat-equal") + for (int i = 0; i < a->size; i++) { + if (a->float32array[i] != b->float32array[i]) { + return false; + } + } + PRAGMA_DIAGNOSTIC_POP + return true; default: return false; } @@ -1944,7 +2227,12 @@ ATTR_PURE bool raw_std_value_equals(const struct raw_std_value *a, const struct case kStdFalse: return true; case kStdInt32: return raw_std_value_as_int32(a) == raw_std_value_as_int32(b); case kStdInt64: return raw_std_value_as_int64(a) == raw_std_value_as_int64(b); - case kStdFloat64: return raw_std_value_as_float64(a) == raw_std_value_as_float64(b); + case kStdFloat64: + PRAGMA_DIAGNOSTIC_PUSH + PRAGMA_DIAGNOSTIC_IGNORED("-Wfloat-equal") + return raw_std_value_as_float64(a) == raw_std_value_as_float64(b); + PRAGMA_DIAGNOSTIC_POP + case kStdLargeInt: case kStdString: alignment = 0; element_size = 1; @@ -1978,11 +2266,15 @@ ATTR_PURE bool raw_std_value_equals(const struct raw_std_value *a, const struct length = raw_std_value_get_size(a); const double *a_doubles = raw_std_value_as_float64array(a); const double *b_doubles = raw_std_value_as_float64array(b); + + PRAGMA_DIAGNOSTIC_PUSH + PRAGMA_DIAGNOSTIC_IGNORED("-Wfloat-equal") for (int i = 0; i < length; i++) { if (a_doubles[i] != b_doubles[i]) { return false; } } + PRAGMA_DIAGNOSTIC_POP return true; case kStdList: @@ -2062,11 +2354,15 @@ ATTR_PURE bool raw_std_value_equals(const struct raw_std_value *a, const struct length = raw_std_value_get_size(a); const float *a_floats = raw_std_value_as_float32array(a); const float *b_floats = raw_std_value_as_float32array(b); + + PRAGMA_DIAGNOSTIC_PUSH + PRAGMA_DIAGNOSTIC_IGNORED("-Wfloat-equal") for (int i = 0; i < length; i++) { if (a_floats[i] != b_floats[i]) { return false; } } + PRAGMA_DIAGNOSTIC_POP return true; default: assert(false); return false; @@ -2277,6 +2573,7 @@ ATTR_PURE bool raw_std_value_check(const struct raw_std_value *value, size_t buf case kStdInt32: return buffer_size >= 5; case kStdInt64: return buffer_size >= 9; case kStdFloat64: return buffer_size >= 9; + case kStdLargeInt: case kStdString: alignment = 0; element_size = 1; @@ -2334,9 +2631,6 @@ ATTR_PURE bool raw_std_value_check(const struct raw_std_value *value, size_t buf return false; } - // get the value size. 
- size = raw_std_value_get_size(value); - for_each_element_in_raw_std_list(element, value) { int diff = (intptr_t) element - (intptr_t) value; if (buffer_size < diff) { @@ -2483,3 +2777,13 @@ MALLOCLIKE MUST_CHECK char *raw_std_method_call_get_method_dup(const struct raw_ ATTR_PURE const struct raw_std_value *raw_std_method_call_get_arg(const struct raw_std_value *value) { return raw_std_value_after(value); } + +ATTR_PURE const struct raw_std_value *raw_std_method_call_from_buffer(const void *buffer, size_t buffer_size) { + const struct raw_std_value *envelope = (const struct raw_std_value *) buffer; + + if (!raw_std_method_call_check(envelope, buffer_size)) { + return NULL; + } else { + return envelope; + } +} diff --git a/src/platformchannel.h b/src/platformchannel.h index 76c03315..8520d2e7 100644 --- a/src/platformchannel.h +++ b/src/platformchannel.h @@ -3,7 +3,7 @@ * Platform Channels * * Encoding/Decoding of flutter platform messages, with different - * + * * Supported codecs: * - standard message & method codec, * - json message & method codec @@ -89,6 +89,7 @@ struct std_value { const uint8_t *uint8array; int32_t *int32array; int64_t *int64array; + float *float32array; double *float64array; struct std_value *list; struct { @@ -1491,6 +1492,8 @@ int platch_respond_illegal_arg_ext_std(const FlutterPlatformMessageResponseHandl int platch_respond_native_error_std(const FlutterPlatformMessageResponseHandle *handle, int _errno); +int platch_respond_malformed_message_std(const FlutterPlatformMessage *message); + int platch_respond_success_json(const FlutterPlatformMessageResponseHandle *handle, struct json_value *return_value); int platch_respond_error_json( @@ -1538,9 +1541,7 @@ int platch_send_error_event_json(char *channel, char *error_code, char *error_ms /// frees a ChannelObject that was decoded using PlatformChannel_decode. /// not freeing ChannelObjects may result in a memory leak. -int platch_free_obj(struct platch_obj *object); - -int platch_free_json_value(struct json_value *value, bool shallow); +void platch_free_obj(struct platch_obj *object); /// returns true if values a and b are equal. 
/// for JS arrays, the order of the values is relevant @@ -1614,6 +1615,7 @@ ATTR_PURE bool raw_std_method_call_check(const struct raw_std_value *value, size ATTR_PURE bool raw_std_method_call_response_check(const struct raw_std_value *value, size_t buffer_size); ATTR_PURE bool raw_std_event_check(const struct raw_std_value *value, size_t buffer_size); +ATTR_PURE const struct raw_std_value *raw_std_method_call_from_buffer(const void *buffer, size_t buffer_size); ATTR_PURE const struct raw_std_value *raw_std_method_call_get_method(const struct raw_std_value *value); ATTR_PURE bool raw_std_method_call_is_method(const struct raw_std_value *value, const char *method_name); MALLOCLIKE MUST_CHECK char *raw_std_method_call_get_method_dup(const struct raw_std_value *value); diff --git a/src/pluginregistry.c b/src/pluginregistry.c index 1c6ae730..f78b917b 100644 --- a/src/pluginregistry.c +++ b/src/pluginregistry.c @@ -89,6 +89,7 @@ static struct plugin_instance *get_plugin_by_name(struct plugin_registry *regist return instance; } +// clang-format off static struct platch_obj_cb_data *get_cb_data_by_channel_locked(struct plugin_registry *registry, const char *channel) { list_for_each_entry(struct platch_obj_cb_data, data, ®istry->callbacks, entry) { if (streq(data->channel, channel)) { @@ -98,6 +99,7 @@ static struct platch_obj_cb_data *get_cb_data_by_channel_locked(struct plugin_re return NULL; } +// clang-format on struct plugin_registry *plugin_registry_new(struct flutterpi *flutterpi) { struct plugin_registry *reg; @@ -211,7 +213,7 @@ void plugin_registry_add_plugin(struct plugin_registry *registry, const struct f plugin_registry_unlock(registry); } -static void static_plugin_registry_ensure_initialized(); +static void static_plugin_registry_ensure_initialized(void); int plugin_registry_add_plugins_from_static_registry(struct plugin_registry *registry) { ASSERTED int ok; @@ -301,7 +303,7 @@ static int set_receiver_locked( char *channel_dup; ASSERT_MSG((!!callback) != (!!callback_v2), "Exactly one of callback or callback_v2 must be non-NULL."); - ASSERT_MUTEX_LOCKED(registry->lock); + assert_mutex_locked(®istry->lock); data_ptr = get_cb_data_by_channel_locked(registry, channel); if (data_ptr == NULL) { @@ -398,6 +400,16 @@ int plugin_registry_remove_receiver_v2_locked(struct plugin_registry *registry, } list_del(&data->entry); + + // Analyzer thinks get_cb_data_by_channel might still return our data + // after list_del and emits a "use-after-free" warning. + // assert()s can change the assumptions of the analyzer, so we use them here. +#ifdef DEBUG + list_for_each_entry(struct platch_obj_cb_data, data_iter, ®istry->callbacks, entry) { + ASSUME(data_iter != data); + } +#endif + free(data->channel); free(data); @@ -456,7 +468,7 @@ void *plugin_registry_get_plugin_userdata_locked(struct plugin_registry *registr return instance != NULL ? 
instance->userdata : NULL; } -static void static_plugin_registry_initialize() { +static void static_plugin_registry_initialize(void) { ASSERTED int ok; list_inithead(&static_plugins); @@ -465,7 +477,7 @@ static void static_plugin_registry_initialize() { ASSERT_ZERO(ok); } -static void static_plugin_registry_ensure_initialized() { +static void static_plugin_registry_ensure_initialized(void) { pthread_once(&static_plugins_init_flag, static_plugin_registry_initialize); } @@ -480,7 +492,7 @@ void static_plugin_registry_add_plugin(const struct flutterpi_plugin_v2 *plugin) entry = malloc(sizeof *entry); ASSERT_NOT_NULL(entry); - + entry->plugin = plugin; list_addtail(&entry->entry, &static_plugins); diff --git a/src/pluginregistry.h b/src/pluginregistry.h index 4efe6bf8..b629e720 100644 --- a/src/pluginregistry.h +++ b/src/pluginregistry.h @@ -20,16 +20,6 @@ struct flutterpi; struct plugin_registry; -typedef enum plugin_init_result (*plugin_init_t)(struct flutterpi *flutterpi, void **userdata_out); - -typedef void (*plugin_deinit_t)(struct flutterpi *flutterpi, void *userdata); - -struct flutterpi_plugin_v2 { - const char *name; - plugin_init_t init; - plugin_deinit_t deinit; -}; - /// The return value of a plugin initializer function. enum plugin_init_result { PLUGIN_INIT_RESULT_INITIALIZED, ///< The plugin was successfully initialized. @@ -40,6 +30,16 @@ enum plugin_init_result { /// Flutter-pi may decide to abort the startup phase of the whole flutter-pi instance at that point. }; +typedef enum plugin_init_result (*plugin_init_t)(struct flutterpi *flutterpi, void **userdata_out); + +typedef void (*plugin_deinit_t)(struct flutterpi *flutterpi, void *userdata); + +struct flutterpi_plugin_v2 { + const char *name; + plugin_init_t init; + plugin_deinit_t deinit; +}; + struct _FlutterPlatformMessageResponseHandle; typedef struct _FlutterPlatformMessageResponseHandle FlutterPlatformMessageResponseHandle; @@ -162,16 +162,18 @@ void static_plugin_registry_add_plugin(const struct flutterpi_plugin_v2 *plugin) void static_plugin_registry_remove_plugin(const char *plugin_name); -#define FLUTTERPI_PLUGIN(_name, _identifier_name, _init, _deinit) \ - __attribute__((constructor)) static void __reg_plugin_##_identifier_name() { \ - static struct flutterpi_plugin_v2 plugin = { \ - .name = (_name), \ - .init = (_init), \ - .deinit = (_deinit), \ - }; \ - static_plugin_registry_add_plugin(&plugin); \ - } \ - \ - __attribute__((destructor)) static void __unreg_plugin_##_identifier_name() { static_plugin_registry_remove_plugin(_name); } +#define FLUTTERPI_PLUGIN(_name, _identifier_name, _init, _deinit) \ + __attribute__((constructor)) static void __reg_plugin_##_identifier_name(void) { \ + static struct flutterpi_plugin_v2 plugin = { \ + .name = (_name), \ + .init = (_init), \ + .deinit = (_deinit), \ + }; \ + static_plugin_registry_add_plugin(&plugin); \ + } \ + \ + __attribute__((destructor)) static void __unreg_plugin_##_identifier_name(void) { \ + static_plugin_registry_remove_plugin(_name); \ + } #endif // _FLUTTERPI_SRC_PLUGINREGISTRY_H diff --git a/src/plugins/audioplayers/player.c b/src/plugins/audioplayers/player.c deleted file mode 100644 index 1e948e62..00000000 --- a/src/plugins/audioplayers/player.c +++ /dev/null @@ -1,605 +0,0 @@ -#define _GNU_SOURCE - -#include -#include - -#include -#include -#include -#include - -#include "flutter-pi.h" -#include "platformchannel.h" -#include "plugins/audioplayers.h" -#include "util/asserts.h" -#include "util/logging.h" - -struct audio_player { - GstElement 
*source; - GstElement *playbin; - GstBus *bus; - - GstElement *panorama; - GstElement *audiobin; - GstElement *audiosink; - GstPad *panoramaSinkPad; - - bool is_initialized; - bool is_playing; - bool is_looping; - bool is_seek_completed; - double playback_rate; - - char *url; - char *player_id; - char *event_channel_name; - - _Atomic bool event_subscribed; -}; - -// Private Class functions -static gboolean audio_player_on_bus_message(GstBus *bus, GstMessage *message, struct audio_player *data); -static gboolean audio_player_on_refresh(struct audio_player *data); -static void audio_player_set_playback(struct audio_player *self, int64_t seekTo, double rate); -static void audio_player_on_media_error(struct audio_player *self, GError *error, gchar *debug); -static void audio_player_on_media_state_change(struct audio_player *self, GstObject *src, GstState *old_state, GstState *new_state); -static void audio_player_on_prepared(struct audio_player *self, bool value); -static void audio_player_on_position_update(struct audio_player *self); -static void audio_player_on_duration_update(struct audio_player *self); -static void audio_player_on_seek_completed(struct audio_player *self); -static void audio_player_on_playback_ended(struct audio_player *self); - -static int on_bus_fd_ready(sd_event_source *src, int fd, uint32_t revents, void *userdata) { - struct audio_player *player = userdata; - GstMessage *msg; - - (void) src; - (void) fd; - (void) revents; - - /* DEBUG_TRACE_BEGIN(player, "on_bus_fd_ready"); */ - - msg = gst_bus_pop(player->bus); - if (msg != NULL) { - audio_player_on_bus_message(player->bus, msg, player); - gst_message_unref(msg); - } - - /* DEBUG_TRACE_END(player, "on_bus_fd_ready"); */ - - return 0; -} - -static void audio_player_source_setup(GstElement *playbin, GstElement *source, GstElement **p_src) { - (void)(playbin); - (void)(p_src); - - if (g_object_class_find_property(G_OBJECT_GET_CLASS(source), "ssl-strict") != 0) { - g_object_set(G_OBJECT(source), "ssl-strict", FALSE, NULL); - } -} - -struct audio_player *audio_player_new(char *player_id, char *channel) { - GPollFD fd; - sd_event_source *busfd_event_source; - int ok; - - struct audio_player *self = malloc(sizeof(struct audio_player)); - if (self == NULL) { - return NULL; - } - - self->url = NULL; - self->source = NULL; - self->is_initialized = false; - self->is_playing = false; - self->is_looping = false; - self->is_seek_completed = false; - self->playback_rate = 1.0; - self->event_subscribed = false; - - gst_init(NULL, NULL); - self->playbin = gst_element_factory_make("playbin", NULL); - if (!self->playbin) { - LOG_ERROR("Could not create gstreamer playbin.\n"); - goto deinit_self; - } - - // Setup stereo balance controller - self->panorama = gst_element_factory_make("audiopanorama", NULL); - if (self->panorama) { - self->audiobin = gst_bin_new(NULL); - self->audiosink = gst_element_factory_make("autoaudiosink", NULL); - - gst_bin_add_many(GST_BIN(self->audiobin), self->panorama, self->audiosink, NULL); - gst_element_link(self->panorama, self->audiosink); - - GstPad *sinkpad = gst_element_get_static_pad(self->panorama, "sink"); - self->panoramaSinkPad = gst_ghost_pad_new("sink", sinkpad); - gst_element_add_pad(self->audiobin, self->panoramaSinkPad); - gst_object_unref(GST_OBJECT(sinkpad)); - - g_object_set(G_OBJECT(self->playbin), "audio-sink", self->audiobin, NULL); - g_object_set(G_OBJECT(self->panorama), "method", 1, NULL); - } else { - self->audiobin = NULL; - self->audiosink = NULL; - self->panoramaSinkPad = NULL; - 
} - - g_signal_connect(self->playbin, "source-setup", G_CALLBACK(audio_player_source_setup), &self->source); - - self->bus = gst_element_get_bus(self->playbin); - - gst_bus_get_pollfd(self->bus, &fd); - - flutterpi_sd_event_add_io(&busfd_event_source, fd.fd, EPOLLIN, on_bus_fd_ready, self); - - // Refresh continuously to emit recurring events - g_timeout_add(1000, (GSourceFunc) audio_player_on_refresh, self); - - self->player_id = strdup(player_id); - if (self->player_id == NULL) { - goto deinit_player; - } - - // audioplayers player event channel clang: - // /events/ - ok = asprintf(&self->event_channel_name, "%s/events/%s", channel, player_id); - ASSERT_MSG(ok, "event channel name OEM"); - - if (ok < 0) { - goto deinit_player_id; - } - - return self; - - //Deinit doesn't require to NULL, as we just delete player. -deinit_player_id: - free(self->player_id); - -deinit_player: - gst_object_unref(self->bus); - - if (self->panorama != NULL) { - gst_element_set_state(self->audiobin, GST_STATE_NULL); - - gst_element_remove_pad(self->audiobin, self->panoramaSinkPad); - gst_bin_remove(GST_BIN(self->audiobin), self->audiosink); - gst_bin_remove(GST_BIN(self->audiobin), self->panorama); - - self->panorama = NULL; - self->audiosink = NULL; - self->panoramaSinkPad = NULL; - self->audiobin = NULL; - } - - gst_element_set_state(self->playbin, GST_STATE_NULL); - gst_object_unref(self->playbin); - -deinit_self: - free(self); - return NULL; -} - -gboolean audio_player_on_bus_message(GstBus *bus, GstMessage *message, struct audio_player *data) { - (void) bus; - switch (GST_MESSAGE_TYPE(message)) { - case GST_MESSAGE_ERROR: { - GError *err; - gchar *debug; - - gst_message_parse_error(message, &err, &debug); - audio_player_on_media_error(data, err, debug); - g_error_free(err); - g_free(debug); - break; - } - case GST_MESSAGE_STATE_CHANGED: { - GstState old_state, new_state; - - gst_message_parse_state_changed(message, &old_state, &new_state, NULL); - audio_player_on_media_state_change(data, message->src, &old_state, &new_state); - break; - } - case GST_MESSAGE_EOS: - audio_player_on_playback_ended(data); - break; - case GST_MESSAGE_DURATION_CHANGED: - audio_player_on_duration_update(data); - break; - case GST_MESSAGE_ASYNC_DONE: - if (!data->is_seek_completed) { - audio_player_on_seek_completed(data); - data->is_seek_completed = true; - } - break; - default: - // For more GstMessage types see: - // https://gstreamer.freedesktop.org/documentation/gstreamer/gstmessage.html?gi-language=c#enumerations - break; - } - - // Continue watching for messages - return TRUE; -} - -gboolean audio_player_on_refresh(struct audio_player *self) { - if (self == NULL) { - return FALSE; - } - - GstState playbinState; - gst_element_get_state(self->playbin, &playbinState, NULL, GST_CLOCK_TIME_NONE); - if (playbinState == GST_STATE_PLAYING) { - audio_player_on_position_update(self); - } - return TRUE; -} - -void audio_player_set_playback(struct audio_player *self, int64_t seekTo, double rate) { - const GstSeekFlags seek_flags = GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE; - - if (!self->is_initialized) { - return; - } - // See: - // https://gstreamer.freedesktop.org/documentation/tutorials/basic/playback-speed.html?gi-language=c - if (!self->is_seek_completed) { - return; - } - if (rate == 0) { - // Do not set rate if it's 0, rather pause. 
- audio_player_pause(self); - return; - } - self->playback_rate = rate; - self->is_seek_completed = false; - - GstEvent *seek_event; - if (rate > 0) { - seek_event = gst_event_new_seek(rate, GST_FORMAT_TIME, seek_flags, GST_SEEK_TYPE_SET, seekTo * GST_MSECOND, GST_SEEK_TYPE_NONE, -1); - } else { - seek_event = gst_event_new_seek(rate, GST_FORMAT_TIME, seek_flags, GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_SET, seekTo * GST_MSECOND); - } - - if (!gst_element_send_event(self->playbin, seek_event)) { - // Not clear how to treat this error? - const int64_t seekMs = seekTo * GST_MSECOND; - LOG_ERROR("Could not set playback to position " GST_STIME_FORMAT " and rate %f.\n", GST_TIME_ARGS(seekMs), rate); - self->is_seek_completed = true; - } -} - -void audio_player_on_media_error(struct audio_player *self, GError *error, gchar *debug) { - if (!self->event_subscribed) { - return; - } - - char error_code[16] = {0}; - snprintf(error_code, sizeof(error_code), "%d", error->code); - // clang-format off - platch_send_error_event_std( - self->event_channel_name, - error_code, - error->message, - debug ? &STDSTRING(debug) : NULL - ); - // clang-format on -} - -void audio_player_on_media_state_change(struct audio_player *self, GstObject *src, GstState *old_state, GstState *new_state) { - (void) old_state; - if (src == GST_OBJECT(self->playbin)) { - LOG_DEBUG("%s: on_media_state_change(old_state=%d, new_state=%d)\n", self->player_id, *old_state, *new_state); - if (*new_state == GST_STATE_READY) { - // Need to set to pause state, in order to make player functional - GstStateChangeReturn ret = gst_element_set_state(self->playbin, GST_STATE_PAUSED); - if (ret == GST_STATE_CHANGE_FAILURE) { - LOG_ERROR("Unable to set the pipeline to the paused state.\n"); - } - - self->is_initialized = false; - } else if (*old_state == GST_STATE_PAUSED && *new_state == GST_STATE_PLAYING) { - audio_player_on_position_update(self); - audio_player_on_duration_update(self); - } else if (*new_state >= GST_STATE_PAUSED) { - if (!self->is_initialized) { - self->is_initialized = true; - audio_player_on_prepared(self, true); - if (self->is_playing) { - audio_player_resume(self); - } - } - } else if (self->is_initialized) { - self->is_initialized = false; - } - } -} - -void audio_player_on_prepared(struct audio_player *self, bool value) { - if (!self->event_subscribed) { - return; - } - - // clang-format off - platch_send_success_event_std( - self->event_channel_name, - &STDMAP2( - STDSTRING("event"), STDSTRING("audio.onPrepared"), - STDSTRING("value"), STDBOOL(value) - ) - ); - // clang-format on -} - -void audio_player_on_position_update(struct audio_player *self) { - if (!self->event_subscribed) { - return; - } - - // clang-format off - platch_send_success_event_std( - self->event_channel_name, - &STDMAP2( - STDSTRING("event"), STDSTRING("audio.onCurrentPosition"), - STDSTRING("value"), STDINT64(audio_player_get_position(self)) - ) - ); - // clang-format on -} - -void audio_player_on_duration_update(struct audio_player *self) { - if (!self->event_subscribed) { - return; - } - // clang-format off - platch_send_success_event_std( - self->event_channel_name, - &STDMAP2( - STDSTRING("event"), STDSTRING("audio.onDuration"), - STDSTRING("value"), STDINT64(audio_player_get_duration(self)) - ) - ); - // clang-format on -} -void audio_player_on_seek_completed(struct audio_player *self) { - audio_player_on_position_update(self); - - if (self->event_subscribed) { - // clang-format off - platch_send_success_event_std( - self->event_channel_name, - 
&STDMAP2( - STDSTRING("event"), STDSTRING("audio.onSeekComplete"), - STDSTRING("value"), STDBOOL(true) - ) - ); - // clang-format on - } - self->is_seek_completed = true; -} -void audio_player_on_playback_ended(struct audio_player *self) { - if (self->event_subscribed) { - // clang-format off - platch_send_success_event_std( - self->event_channel_name, - &STDMAP2( - STDSTRING("event"), STDSTRING("audio.onComplete"), - STDSTRING("value"), STDBOOL(true) - ) - ); - // clang-format on - } - - if (audio_player_get_looping(self)) { - audio_player_play(self); - } else { - audio_player_pause(self); - audio_player_set_position(self, 0); - } -} - -void audio_player_set_looping(struct audio_player *self, bool is_looping) { - self->is_looping = is_looping; -} - -bool audio_player_get_looping(struct audio_player *self) { - return self->is_looping; -} - -void audio_player_play(struct audio_player *self) { - audio_player_set_position(self, 0); - audio_player_resume(self); -} - -void audio_player_pause(struct audio_player *self) { - self->is_playing = false; - - if (!self->is_initialized) { - return; - } - - GstStateChangeReturn ret = gst_element_set_state(self->playbin, GST_STATE_PAUSED); - if (ret == GST_STATE_CHANGE_FAILURE) { - LOG_ERROR("Unable to set the pipeline to the paused state.\n"); - return; - } - audio_player_on_position_update(self); // Update to exact position when pausing -} - -void audio_player_resume(struct audio_player *self) { - self->is_playing = true; - if (!self->is_initialized) { - return; - } - - GstStateChangeReturn ret = gst_element_set_state(self->playbin, GST_STATE_PLAYING); - if (ret == GST_STATE_CHANGE_FAILURE) { - LOG_ERROR("Unable to set the pipeline to the playing state.\n"); - return; - } - audio_player_on_position_update(self); - audio_player_on_duration_update(self); -} - -void audio_player_destroy(struct audio_player *self) { - if (self->is_initialized) { - audio_player_pause(self); - } - - if (self->source) { - gst_object_unref(GST_OBJECT(self->source)); - self->source = NULL; - } - - gst_object_unref(self->bus); - self->bus = NULL; - - if (self->panorama != NULL) { - gst_element_set_state(self->audiobin, GST_STATE_NULL); - - gst_element_remove_pad(self->audiobin, self->panoramaSinkPad); - gst_bin_remove(GST_BIN(self->audiobin), self->audiosink); - gst_bin_remove(GST_BIN(self->audiobin), self->panorama); - - self->panorama = NULL; - self->audiosink = NULL; - self->panoramaSinkPad = NULL; - self->audiobin = NULL; - } - - gst_element_set_state(self->playbin, GST_STATE_NULL); - gst_object_unref(self->playbin); - self->playbin = NULL; - - self->is_initialized = false; - - if (self->url != NULL) { - free(self->url); - self->url = NULL; - } - - if (self->player_id != NULL) { - free(self->player_id); - self->player_id = NULL; - } - - if (self->event_channel_name != NULL) { - free(self->event_channel_name); - self->event_channel_name = NULL;; - } - - free(self); -} - -int64_t audio_player_get_position(struct audio_player *self) { - gint64 current = 0; - if (!gst_element_query_position(self->playbin, GST_FORMAT_TIME, ¤t)) { - LOG_ERROR("Could not query current position.\n"); - return 0; - } - return current / 1000000; -} - -int64_t audio_player_get_duration(struct audio_player *self) { - gint64 duration = 0; - if (!gst_element_query_duration(self->playbin, GST_FORMAT_TIME, &duration)) { - LOG_ERROR("Could not query current duration.\n"); - return 0; - } - return duration / 1000000; -} - -void audio_player_set_volume(struct audio_player *self, double volume) { - if (volume > 
1) { - volume = 1; - } else if (volume < 0) { - volume = 0; - } - g_object_set(G_OBJECT(self->playbin), "volume", volume, NULL); -} - -void audio_player_set_balance(struct audio_player *self, double balance) { - if (!self->panorama) { - return; - } - - if (balance > 1.0l) { - balance = 1.0l; - } else if (balance < -1.0l) { - balance = -1.0l; - } - g_object_set(G_OBJECT(self->panorama), "panorama", balance, NULL); -} - -void audio_player_set_playback_rate(struct audio_player *self, double rate) { - audio_player_set_playback(self, audio_player_get_position(self), rate); -} - -void audio_player_set_position(struct audio_player *self, int64_t position) { - if (!self->is_initialized) { - return; - } - audio_player_set_playback(self, position, self->playback_rate); -} - -void audio_player_set_source_url(struct audio_player *self, char *url) { - ASSERT_NOT_NULL(url); - if (self->url == NULL || !streq(self->url, url)) { - LOG_DEBUG("%s: set source=%s\n", self->player_id, url); - if (self->url != NULL) { - free(self->url); - self->url = NULL; - } - self->url = strdup(url); - gst_element_set_state(self->playbin, GST_STATE_NULL); - self->is_initialized = false; - self->is_playing = false; - - if (strlen(self->url) != 0) { - g_object_set(self->playbin, "uri", self->url, NULL); - if (self->playbin->current_state != GST_STATE_READY) { - if (gst_element_set_state(self->playbin, GST_STATE_READY) == GST_STATE_CHANGE_FAILURE) { - //This should not happen generally - LOG_ERROR("Could not set player into ready state.\n"); - } - } - } - } else { - audio_player_on_prepared(self, true); - } -} - -bool audio_player_is_id(struct audio_player *self, char *player_id) { - return streq(self->player_id, player_id); -} - -const char* audio_player_subscribe_channel_name(const struct audio_player *self) { - return self->event_channel_name; -} - -bool audio_player_set_subscription_status(struct audio_player *self, const char *channel, bool value) { - if (strcmp(self->event_channel_name, channel) == 0) { - self->event_subscribed = value; - return true; - } else { - return false; - } -} - -void audio_player_release(struct audio_player *self) { - self->is_initialized = false; - self->is_playing = false; - if (self->url != NULL) { - free(self->url); - self->url = NULL; - } - - GstState playbinState; - gst_element_get_state(self->playbin, &playbinState, NULL, GST_CLOCK_TIME_NONE); - - if (playbinState > GST_STATE_NULL) { - gst_element_set_state(self->playbin, GST_STATE_NULL); - } -} diff --git a/src/plugins/audioplayers/plugin.c b/src/plugins/audioplayers/plugin.c index 2f136f1a..a27f882b 100644 --- a/src/plugins/audioplayers/plugin.c +++ b/src/plugins/audioplayers/plugin.c @@ -1,333 +1,1065 @@ #define _GNU_SOURCE +#include +#include +#include + +#include +#include + +#include "flutter_embedder.h" +#include "util/asserts.h" +#include "util/macros.h" + #include "flutter-pi.h" #include "platformchannel.h" #include "pluginregistry.h" -#include "plugins/audioplayers.h" +#include "notifier_listener.h" + #include "util/collection.h" #include "util/list.h" #include "util/logging.h" +#include "util/khash.h" +#include "plugins/gstplayer.h" #define AUDIOPLAYERS_LOCAL_CHANNEL "xyz.luan/audioplayers" #define AUDIOPLAYERS_GLOBAL_CHANNEL "xyz.luan/audioplayers.global" -static struct audio_player *audioplayers_linux_plugin_get_player(char *player_id, char *mode); -static void audioplayers_linux_plugin_dispose_player(struct audio_player *player); +#define STR_LINK_TROUBLESHOOTING \ + 
"https://github.com/bluefireteam/audioplayers/blob/main/troubleshooting.md" + +KHASH_MAP_INIT_STR(audioplayers, struct gstplayer *) + +struct audioplayer_meta { + char *id; + char *event_channel; + bool subscribed; + bool release_on_stop; -struct audio_player_entry { - struct list_head entry; - struct audio_player *player; + struct listener *duration_listener; + struct listener *eos_listener; + struct listener *error_listener; }; -static struct plugin { +struct plugin { struct flutterpi *flutterpi; bool initialized; - struct list_head players; -} plugin; + khash_t(audioplayers) players; +}; -static int on_local_method_call(char *channel, struct platch_obj *object, FlutterPlatformMessageResponseHandle *responsehandle) { - struct audio_player *player; - struct std_value *args, *tmp; - const char *method; - char *player_id, *mode; - struct std_value result = STDNULL; - int ok; +static const char *player_get_id(struct gstplayer *player) { + struct audioplayer_meta *meta = gstplayer_get_userdata(player); + + return meta->id; +} + +#define LOG_AUDIOPLAYER_DEBUG(player, fmtstring, ...) LOG_DEBUG("audio player \"%s\": " fmtstring, player_get_id(player), ##__VA_ARGS__) +#define LOG_AUDIOPLAYER_ERROR(player, fmtstring, ...) LOG_ERROR("audio player \"%s\": " fmtstring, player_get_id(player), ##__VA_ARGS__) + +static void on_receive_event_ch(void *userdata, const FlutterPlatformMessage *message); + +static void respond_plugin_error_ext(const FlutterPlatformMessageResponseHandle *response_handle, const char *message, struct std_value *details) { + platch_respond_error_std(response_handle, "LinuxAudioError", (char*) message, details); +} + +static void respond_plugin_error(const FlutterPlatformMessageResponseHandle *response_handle, const char *message) { + respond_plugin_error_ext(response_handle, message, NULL); +} + +static bool ensure_gstreamer_initialized(struct plugin *plugin, const FlutterPlatformMessageResponseHandle *responsehandle) { + if (plugin->initialized) { + return true; + } + + GError *error; + gboolean success = gst_init_check(NULL, NULL, &error); + if (success) { + plugin->initialized = true; + return true; + } + + char *details = NULL; + int status = asprintf(&details, "%s (Domain: %s, Code: %d)", error->message, g_quark_to_string(error->domain), error->code); + if (status == -1) { + // ENOMEM; + return false; + } + + // clang-format off + respond_plugin_error_ext( + responsehandle, + "Failed to initialize gstreamer.", + &STDSTRING(details) + ); + // clang-format on + + free(details); + + return false; +} + +static struct gstplayer *get_player_by_id(struct plugin *plugin, const char *id) { + khint_t index = kh_get_audioplayers(&plugin->players, id); + if (index == kh_end(&plugin->players)) { + return NULL; + } - (void) responsehandle; - (void) channel; - method = object->method; - args = &object->std_arg; + return kh_value(&plugin->players, index); +} - LOG_DEBUG("call(method=%s)\n", method); +static const struct raw_std_value *get_player_id_from_arg(const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + if (!raw_std_value_is_map(arg)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg` to be a map."); + return NULL; + } - if (args == NULL || !STDVALUE_IS_MAP(*args)) { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg` to be a map."); + const struct raw_std_value *player_id = raw_std_map_find_str(arg, "playerId"); + if (player_id == NULL || !raw_std_value_is_string(player_id)) { + 
platch_respond_illegal_arg_std(responsehandle, "Expected `arg['playerId']` to be a string."); + return NULL; } - tmp = stdmap_get_str(&object->std_arg, "playerId"); - if (tmp == NULL || !STDVALUE_IS_STRING(*tmp)) { - LOG_ERROR("Call missing mandatory parameter player_id.\n"); - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['playerId'] to be a string."); + return player_id; +} + +static struct gstplayer *get_player_from_arg(struct plugin *plugin, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + const struct raw_std_value *id = get_player_id_from_arg(arg, responsehandle); + if (id == NULL) { + return NULL; + } + + char *id_duped = raw_std_string_dup(id); + if (id_duped == NULL) { + return NULL; + } + + struct gstplayer *player = get_player_by_id(plugin, id_duped); + + free(id_duped); + + if (player == NULL) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['playerId']` to be a valid player id."); + return NULL; } - player_id = STDVALUE_AS_STRING(*tmp); - tmp = stdmap_get_str(args, "mode"); - if (tmp == NULL) { - mode = ""; - } else if (STDVALUE_IS_STRING(*tmp)) { - mode = STDVALUE_AS_STRING(*tmp); + + return player; +} + +static void send_error_event(struct audioplayer_meta *meta, GError *error) { + if (!meta->subscribed) { + return; + } + + gchar* message; + if (error->domain == GST_STREAM_ERROR || + error->domain == GST_RESOURCE_ERROR) { + message = + "Failed to set source. For troubleshooting, " + "see: " STR_LINK_TROUBLESHOOTING; } else { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['mode']` to be a string or null."); + message = "Unknown GstGError. See details."; + } + + char *details = NULL; + int status = asprintf(&details, "%s (Domain: %s, Code: %d)", error->message, g_quark_to_string(error->domain), error->code); + if (status == -1) { + // ENOMEM; + return; + } + + // clang-format off + platch_send_error_event_std( + meta->event_channel, + "LinuxAudioError", + message, + &STDSTRING(details) + ); + // clang-format on + + free(details); +} + +static void send_prepared_event(struct audioplayer_meta *meta, bool prepared) { + if (!meta->subscribed) { + return; + } + + // clang-format off + platch_send_success_event_std( + meta->event_channel, + &STDMAP2( + STDSTRING("event"), STDSTRING("audio.onPrepared"), + STDSTRING("value"), STDBOOL(prepared) + ) + ); + // clang-format on +} + +static void send_duration_update(struct audioplayer_meta *meta, bool has_duration, int64_t duration_ms) { + if (!meta->subscribed) { + return; + } + + if (!has_duration) { + // TODO: Check the behaviour in upstream audioplayers + return; + } + + // clang-format off + platch_send_success_event_std( + meta->event_channel, + &STDMAP2( + STDSTRING("event"), STDSTRING("audio.onDuration"), + STDSTRING("value"), STDINT64(duration_ms) + ) + ); + // clang-format on +} + +static void send_seek_completed(struct audioplayer_meta *meta) { + if (!meta->subscribed) { + return; + } + + // clang-format off + platch_send_success_event_std( + meta->event_channel, + &STDMAP1( + STDSTRING("event"), STDSTRING("audio.onSeekComplete") + ) + ); + // clang-format on +} + +static void send_playback_complete(struct audioplayer_meta *meta) { + if (!meta->subscribed) { + return; + } + + // clang-format off + platch_send_success_event_std( + meta->event_channel, + &STDMAP1( + STDSTRING("event"), STDSTRING("audio.onComplete") + ) + ); + // clang-format on +} + +UNUSED static void send_player_log(struct audioplayer_meta *meta, const 
char *message) { + if (!meta->subscribed) { + return; + } + + // clang-format off + platch_send_success_event_std( + meta->event_channel, + &STDMAP2( + STDSTRING("event"), STDSTRING("audio.onLog"), + STDSTRING("value"), STDSTRING((char*) message) + ) + ); + // clang-format on +} + +static void on_create(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + const struct raw_std_value *player_id = get_player_id_from_arg(arg, responsehandle); + if (!player_id) { + return; + } + + if (!ensure_gstreamer_initialized(p, responsehandle)) { + return; + } + + struct audioplayer_meta *meta = calloc(1, sizeof(struct audioplayer_meta)); + if (meta == NULL) { + platch_respond_native_error_std(responsehandle, ENOMEM); + return; } - player = audioplayers_linux_plugin_get_player(player_id, mode); + meta->id = raw_std_string_dup(player_id); + if (meta->id == NULL) { + free(meta); + platch_respond_native_error_std(responsehandle, ENOMEM); + return; + } + + LOG_DEBUG("create(id: \"%s\")\n", meta->id); + + int status = 0; + khint_t index = kh_put(audioplayers, &p->players, meta->id, &status); + if (status == -1) { + free(meta->id); + free(meta); + platch_respond_native_error_std(responsehandle, ENOMEM); + return; + } else if (status == 0) { + free(meta->id); + free(meta); + + platch_respond_illegal_arg_std(responsehandle, "Player with given id already exists."); + return; + } + + status = asprintf(&meta->event_channel, "xyz.luan/audioplayers/events/%s", meta->id); + if (status == -1) { + kh_del(audioplayers, &p->players, index); + free(meta->id); + free(meta); + + platch_respond_native_error_std(responsehandle, ENOMEM); + return; + } + + struct gstplayer *player = gstplayer_new( + p->flutterpi, + NULL, + meta, + /* play_video */ false, /* play_audio */ true, + NULL + ); if (player == NULL) { - return platch_respond_native_error_std(responsehandle, ENOMEM); - } - - if (streq(method, "create")) { - //audioplayers_linux_plugin_get_player() creates player if it doesn't exist - } else if (streq(method, "pause")) { - audio_player_pause(player); - } else if (streq(method, "resume")) { - audio_player_resume(player); - } else if (streq(method, "stop")) { - audio_player_pause(player); - audio_player_set_position(player, 0); - } else if (streq(method, "release")) { - audio_player_release(player); - } else if (streq(method, "seek")) { - tmp = stdmap_get_str(args, "position"); - if (tmp == NULL || !STDVALUE_IS_INT(*tmp)) { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['position']` to be an int."); - } + free(meta->event_channel); + kh_del(audioplayers, &p->players, index); + free(meta->id); + free(meta); - int64_t position = STDVALUE_AS_INT(*tmp); - audio_player_set_position(player, position); - } else if (streq(method, "setSourceUrl")) { - tmp = stdmap_get_str(args, "url"); - if (tmp == NULL || !STDVALUE_IS_STRING(*tmp)) { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['url']` to be a string."); - } - char *url = STDVALUE_AS_STRING(*tmp); + platch_respond_error_std(responsehandle, "not-initialized", "Could not initialize gstplayer.", NULL); + return; + } - tmp = stdmap_get_str(args, "isLocal"); - if (tmp == NULL || !STDVALUE_IS_BOOL(*tmp)) { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['isLocal']` to be a bool."); - } + gstplayer_set_userdata(player, meta); - bool is_local = STDVALUE_AS_BOOL(*tmp); - if (is_local) { - char *local_url = NULL; - ok = asprintf(&local_url, "file://%s", url); 
- if (ok < 0) { - return platch_respond_native_error_std(responsehandle, ENOMEM); - } - url = local_url; - } + plugin_registry_set_receiver_v2( + flutterpi_get_plugin_registry(flutterpi), + meta->event_channel, + on_receive_event_ch, + player + ); - audio_player_set_source_url(player, url); - } else if (streq(method, "getDuration")) { - result = STDINT64(audio_player_get_duration(player)); - } else if (streq(method, "setVolume")) { - tmp = stdmap_get_str(args, "volume"); - if (tmp != NULL && STDVALUE_IS_FLOAT(*tmp)) { - audio_player_set_volume(player, STDVALUE_AS_FLOAT(*tmp)); - } else { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['volume']` to be a float."); - } - } else if (streq(method, "getCurrentPosition")) { - result = STDINT64(audio_player_get_position(player)); - } else if (streq(method, "setPlaybackRate")) { - tmp = stdmap_get_str(args, "playbackRate"); - if (tmp != NULL && STDVALUE_IS_FLOAT(*tmp)) { - audio_player_set_playback_rate(player, STDVALUE_AS_FLOAT(*tmp)); - } else { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['playbackRate']` to be a float."); - } - } else if (streq(method, "setReleaseMode")) { - tmp = stdmap_get_str(args, "releaseMode"); - if (tmp != NULL && STDVALUE_IS_STRING(*tmp)) { - char *release_mode = STDVALUE_AS_STRING(*tmp); - bool looping = strstr(release_mode, "loop") != NULL; - audio_player_set_looping(player, looping); - } else { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['releaseMode']` to be a string."); - } - } else if (streq(method, "setPlayerMode")) { - // TODO check support for low latency mode: - // https://gstreamer.freedesktop.org/documentation/additional/design/latency.html?gi-language=c - } else if (strcmp(method, "setBalance") == 0) { - tmp = stdmap_get_str(args, "balance"); - if (tmp != NULL && STDVALUE_IS_FLOAT(*tmp)) { - audio_player_set_balance(player, STDVALUE_AS_FLOAT(*tmp)); - } else { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['balance']` to be a float."); + kh_value(&p->players, index) = player; + + platch_respond_success_std(responsehandle, NULL); +} + +static void on_pause(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } + + LOG_AUDIOPLAYER_DEBUG(player, "pause()\n"); + + gstplayer_pause(player); + + platch_respond_success_std(responsehandle, NULL); +} + +static void on_resume(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } + + LOG_AUDIOPLAYER_DEBUG(player, "resume()\n"); + + /// TODO: Should resume behave different to play? 
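+    /// For now, resume() is treated the same as play(): it simply (re)starts playback via gstplayer_play() below.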
+ gstplayer_play(player); + + platch_respond_success_std(responsehandle, NULL); +} + +static void on_stop(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } + + LOG_AUDIOPLAYER_DEBUG(player, "stop()\n"); + + /// TODO: Maybe provide gstplayer_stop + int err = gstplayer_pause(player); + if (err != 0) { + platch_respond_success_std(responsehandle, NULL); + return; + } + + err = gstplayer_seek_to(player, 0, /* nearest_keyframe */ false); + if (err != 0) { + platch_respond_success_std(responsehandle, NULL); + return; + } + + platch_respond_success_std(responsehandle, NULL); +} + +static void on_release(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } + + LOG_AUDIOPLAYER_DEBUG(player, "release()\n"); + + gstplayer_set_source(player, NULL); + + platch_respond_success_std(responsehandle, NULL); +} + +static void on_seek(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } + + const struct raw_std_value *position = raw_std_map_find_str(arg, "position"); + if (position == NULL || !raw_std_value_is_int(position)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['position'] to be an int."); + return; + } + + int64_t position_int = raw_std_value_as_int(position); + + LOG_AUDIOPLAYER_DEBUG(player, "seek(position_ms: %"PRIi64")\n", position_int); + + gstplayer_seek_with_completer( + player, + position_int, + /* nearest_keyframe */ false, + (struct async_completer) { + .on_done = (void_callback_t) send_seek_completed, + .on_error = NULL, + .userdata = gstplayer_get_userdata(player) } - } else if (strcmp(method, "emitLog") == 0) { - tmp = stdmap_get_str(args, "message"); - char *message; - - if (tmp == NULL) { - message = ""; - } else if (STDVALUE_IS_STRING(*tmp)) { - message = STDVALUE_AS_STRING(*tmp); - } else { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['message']` to be a string."); + ); + + platch_respond_success_std(responsehandle, NULL); +} + +static void on_set_source_url_complete(void *userdata) { + struct audioplayer_meta *meta = userdata; + + send_prepared_event(meta, true); +} + +static void on_set_source_url(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } + + const struct raw_std_value *src_url = raw_std_map_find_str(arg, "url"); + if (src_url == NULL || !raw_std_value_is_string(src_url)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['url']` to be a string."); + return; + } + + const struct raw_std_value *is_local = raw_std_map_find_str(arg, "isLocal"); + if (src_url != NULL && !raw_std_value_is_null(is_local) && !raw_std_value_is_bool(is_local)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['isLocal']` to be a bool or null."); + return; + } + + const struct raw_std_value *mime_type = raw_std_map_find_str(arg, "mimeType"); + if (mime_type != NULL && !raw_std_value_is_null(mime_type) && !raw_std_value_is_string(mime_type)) { 
+ platch_respond_illegal_arg_std(responsehandle, "Expected `arg['mimeType']` to be a bool or null."); + return; + } + + char *src_url_duped = raw_std_string_dup(src_url); + if (!src_url_duped) return; + + LOG_AUDIOPLAYER_DEBUG(player, "set_source_url(url: \"%s\")\n", src_url_duped); + + // audioplayers attempts to use file paths (e.g. /tmp/abcd) as source URIs. + // detect that and constrcut a proper url from it. + if (src_url_duped[0] == '/') { + free(src_url_duped); + + int result = asprintf( + &src_url_duped, + "file://%.*s", + (int) raw_std_string_get_length(src_url), + raw_std_string_get_nonzero_terminated(src_url) + ); + if (result < 0) { + return; } + } - // Avoid unused variable compile message if debugging is disabled. - (void) message; - - LOG_DEBUG("%s\n", message); - //TODO: https://github.com/bluefireteam/audioplayers/blob/main/packages/audioplayers_linux/linux/audio_player.cc#L247 - } else if (strcmp(method, "emitError") == 0) { - tmp = stdmap_get_str(args, "code"); - char *code; - - if (tmp == NULL) { - code = ""; - } else if (STDVALUE_IS_STRING(*tmp)) { - code = STDVALUE_AS_STRING(*tmp); - } else { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['code']` to be a string."); + bool ok = gstplayer_set_source_with_completer( + player, + src_url_duped, + (struct async_completer) { + .on_done = on_set_source_url_complete, + .userdata = gstplayer_get_userdata(player) } + ); - tmp = stdmap_get_str(args, "message"); - char *message; + free(src_url_duped); - if (tmp == NULL) { - message = ""; - } else if (STDVALUE_IS_STRING(*tmp)) { - message = STDVALUE_AS_STRING(*tmp); - } else { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['message']` to be a string."); - } + if (!ok) { + respond_plugin_error(responsehandle, "Could not preroll pipeline."); + return; + } - LOG_ERROR("Error: %s; message=%s\n", code, message); - //TODO: https://github.com/bluefireteam/audioplayers/blob/main/packages/audioplayers_linux/linux/audio_player.cc#L144 - } else if (strcmp(method, "dispose") == 0) { - audioplayers_linux_plugin_dispose_player(player); - player = NULL; - } else { - return platch_respond_not_implemented(responsehandle); + platch_respond_success_std(responsehandle, NULL); +} + +static void on_get_duration(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } + + LOG_AUDIOPLAYER_DEBUG(player, "get_duration()\n"); + + int64_t duration_ms = gstplayer_get_duration(player); + if (duration_ms == -1) { + platch_respond_success_std(responsehandle, NULL); + return; } - return platch_respond_success_std(responsehandle, &result); + platch_respond_success_std(responsehandle, &STDINT64(duration_ms)); } -static int on_global_method_call(char *channel, struct platch_obj *object, FlutterPlatformMessageResponseHandle *responsehandle) { - (void) responsehandle; - (void) channel; - (void) object; +static void on_set_volume(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } + + const struct raw_std_value *volume = raw_std_map_find_str(arg, "volume"); + if (volume == NULL || !raw_std_value_is_float64(volume)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['volume'] to be a double."); + return; + } + + double 
volume_float = raw_std_value_as_float64(volume); + + LOG_AUDIOPLAYER_DEBUG(player, "set_volume(volume: %f)\n", volume_float); - return platch_respond_success_std(responsehandle, &STDBOOL(true)); + gstplayer_set_volume(player, volume_float); + + platch_respond_success_std(responsehandle, NULL); } -static int on_receive_event_ch(char *channel, struct platch_obj *object, FlutterPlatformMessageResponseHandle *responsehandle) { - if (strcmp(object->method, "listen") == 0) { - LOG_DEBUG("%s: listen()\n", channel); +static void on_get_position(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } - list_for_each_entry_safe(struct audio_player_entry, entry, &plugin.players, entry) { - if (audio_player_set_subscription_status(entry->player, channel, true)) { - return platch_respond_success_std(responsehandle, NULL); - } - } + int64_t position = gstplayer_get_position(player); + if (position < 0) { + platch_respond_success_std(responsehandle, &STDNULL); + return; + } - LOG_ERROR("%s: player not found\n", channel); - return platch_respond_not_implemented(responsehandle); - } else if (strcmp(object->method, "cancel") == 0) { - LOG_DEBUG("%s: cancel()\n", channel); + platch_respond_success_std(responsehandle, &STDINT64(position)); +} - list_for_each_entry_safe(struct audio_player_entry, entry, &plugin.players, entry) { - if (audio_player_set_subscription_status(entry->player, channel, false)) { - return platch_respond_success_std(responsehandle, NULL); - } - } +static void on_set_playback_rate(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } + + const struct raw_std_value *rate = raw_std_map_find_str(arg, "playbackRate"); + if (rate == NULL || !raw_std_value_is_float64(rate)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['playbackRate'] to be a double."); + return; + } + + double rate_float = raw_std_value_as_float64(rate); - LOG_ERROR("%s: player not found\n", channel); - return platch_respond_not_implemented(responsehandle); + LOG_AUDIOPLAYER_DEBUG(player, "set_playback_rate(rate: %f)\n", rate_float); + + if (rate_float < 0.0) { + respond_plugin_error(responsehandle, "Backward playback is not supported.\n"); + return; + } else if (rate_float == 0.0) { + gstplayer_pause(player); } else { - return platch_respond_not_implemented(responsehandle); + gstplayer_set_playback_speed(player, rate_float); } - return 0; + platch_respond_success_std(responsehandle, NULL); } -enum plugin_init_result audioplayers_plugin_init(struct flutterpi *flutterpi, void **userdata_out) { - int ok; +static void on_set_release_mode(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } - (void) userdata_out; + const struct raw_std_value *mode = raw_std_map_find_str(arg, "releaseMode"); + if (mode == NULL || !raw_std_value_is_string(mode)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['releaseMode'] to be a string."); + return; + } + + LOG_AUDIOPLAYER_DEBUG(player, "set_release_mode(mode: %.*s)\n", + (int) raw_std_string_get_length(mode), + raw_std_string_get_nonzero_terminated(mode) 
+ ); - plugin.flutterpi = flutterpi; - plugin.initialized = false; - list_inithead(&plugin.players); + bool is_release = false; + bool is_loop = false; + bool is_stop = false; - ok = plugin_registry_set_receiver_locked(AUDIOPLAYERS_GLOBAL_CHANNEL, kStandardMethodCall, on_global_method_call); - if (ok != 0) { - return PLUGIN_INIT_RESULT_ERROR; + if (raw_std_string_equals(mode, "ReleaseMode.release")) { + is_release = true; + } else if (raw_std_string_equals(mode, "ReleaseMode.loop")) { + is_loop = true; + } else if (raw_std_string_equals(mode, "ReleaseMode.stop")) { + is_stop = true; + } else { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['releaseMode']` to be a string-ification of a ReleaseMode enum value."); + return; } - ok = plugin_registry_set_receiver_locked(AUDIOPLAYERS_LOCAL_CHANNEL, kStandardMethodCall, on_local_method_call); - if (ok != 0) { - goto fail_remove_global_receiver; + // TODO: Handle ReleaseMode.release & ReleaseMode.stop + (void) is_release; + (void) is_stop; + + int err = gstplayer_set_looping(player, is_loop, false); + if (err != 0) { + platch_respond_success_std(responsehandle, NULL); + return; } - return PLUGIN_INIT_RESULT_INITIALIZED; + platch_respond_success_std(responsehandle, NULL); +} -fail_remove_global_receiver: - plugin_registry_remove_receiver_locked(AUDIOPLAYERS_GLOBAL_CHANNEL); +static void on_set_player_mode(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } - return PLUGIN_INIT_RESULT_ERROR; + const struct raw_std_value *mode = raw_std_map_find_str(arg, "playerMode"); + if (mode == NULL || !raw_std_value_is_string(mode)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['playerMode'] to be a string."); + return; + } + + LOG_AUDIOPLAYER_DEBUG(player, "set_player_mode(mode: %.*s)\n", + (int) raw_std_string_get_length(mode), + raw_std_string_get_nonzero_terminated(mode) + ); + + bool is_media_player = false; + bool is_low_latency = false; + + if (raw_std_string_equals(mode, "PlayerMode.mediaPlayer")) { + is_media_player = true; + } else if (raw_std_string_equals(mode, "PlayerMode.lowLatency")) { + is_low_latency = true; + } else { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['playerMode']` to be a string-ification of a PlayerMode enum value."); + return; + } + + // TODO: Handle player mode + // TODO check support for low latency mode: + // https://gstreamer.freedesktop.org/documentation/additional/design/latency.html?gi-language=c + (void) is_media_player; + (void) is_low_latency; + + platch_respond_success_std(responsehandle, NULL); } -void audioplayers_plugin_deinit(struct flutterpi *flutterpi, void *userdata) { - (void) flutterpi; - (void) userdata; +static void on_set_balance(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } - plugin_registry_remove_receiver_locked(AUDIOPLAYERS_GLOBAL_CHANNEL); - plugin_registry_remove_receiver_locked(AUDIOPLAYERS_LOCAL_CHANNEL); + const struct raw_std_value *balance = raw_std_map_find_str(arg, "balance"); + if (balance == NULL || !raw_std_value_is_float64(balance)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['balance'] to be a double."); + return; + } + + double balance_float = 
raw_std_value_as_float64(balance); + + LOG_AUDIOPLAYER_DEBUG(player, "set_balance(balance: %f)\n", balance_float); - list_for_each_entry_safe(struct audio_player_entry, entry, &plugin.players, entry) { - audio_player_destroy(entry->player); - list_del(&entry->entry); - free(entry); + if (balance_float < -1.0) { + balance_float = -1.0; + } else if (balance_float > 1.0) { + balance_float = 1.0; } + + gstplayer_set_audio_balance(player, balance_float); + + platch_respond_success_std(responsehandle, NULL); } -static struct audio_player *audioplayers_linux_plugin_get_player(char *player_id, char *mode) { - struct audio_player_entry *entry; - struct audio_player *player; +static void on_player_emit_log(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } - (void) mode; + const struct raw_std_value *message = raw_std_map_find_str(arg, "message"); + if (message == NULL || !raw_std_value_is_string(message)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['message'] to be a string."); + return; + } - list_for_each_entry_safe(struct audio_player_entry, entry, &plugin.players, entry) { - if (audio_player_is_id(entry->player, player_id)) { - return entry->player; - } + LOG_DEBUG("%.*s", (int) raw_std_string_get_length(message), raw_std_string_get_nonzero_terminated(message)); + + platch_respond_success_std(responsehandle, NULL); +} + +static void on_player_emit_error(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; } - entry = malloc(sizeof *entry); - ASSUME(entry != NULL); + const struct raw_std_value *code = raw_std_map_find_str(arg, "code"); + if (code == NULL || !raw_std_value_is_string(code)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['code'] to be a string."); + return; + } - LOG_DEBUG("Create player(id=%s)\n", player_id); - player = audio_player_new(player_id, AUDIOPLAYERS_LOCAL_CHANNEL); + const struct raw_std_value *message = raw_std_map_find_str(arg, "message"); + if (message == NULL || !raw_std_value_is_string(message)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['message'] to be a string."); + return; + } + + LOG_ERROR( + "%.*s, %.*s", + (int) raw_std_string_get_length(code), raw_std_string_get_nonzero_terminated(code), + (int) raw_std_string_get_length(message), raw_std_string_get_nonzero_terminated(message) + ); + platch_respond_success_std(responsehandle, NULL); +} + +static void on_dispose(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + const struct raw_std_value *id = get_player_id_from_arg(arg, responsehandle); + if (id == NULL) { + return; + } + + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); if (player == NULL) { - LOG_ERROR("player(id=%s) cannot be created", player_id); - free(entry); - return NULL; + return; + } + + LOG_AUDIOPLAYER_DEBUG(player, "dispose()\n"); + + char *id_duped = raw_std_string_dup(id); + + khint_t index = kh_get(audioplayers, &p->players, id_duped); + + // Should be valid since we already know the player exists from above + assert(index <= kh_end(&p->players)); + + free(id_duped); + + // Remove the entry from the hashmap + kh_del(audioplayers, &p->players, index); + + 
struct audioplayer_meta *meta = gstplayer_get_userdata(player); + + plugin_registry_remove_receiver_v2(flutterpi_get_plugin_registry(p->flutterpi), meta->event_channel); + free(meta->event_channel); + free(meta->id); + free(meta); + + // Destroy the player + gstplayer_destroy(player); + + platch_respond_success_std(responsehandle, NULL); +} + +static void on_player_method_call(void *userdata, const FlutterPlatformMessage *message) { + struct plugin *plugin = userdata; + + const struct raw_std_value *envelope = raw_std_method_call_from_buffer(message->message, message->message_size); + if (!envelope) { + platch_respond_malformed_message_std(message); + return; } - const char* event_channel = audio_player_subscribe_channel_name(player); - // set a receiver on the videoEvents event channel - int ok = plugin_registry_set_receiver( - event_channel, - kStandardMethodCall, - on_receive_event_ch + const struct raw_std_value *arg = raw_std_method_call_get_arg(envelope); + ASSERT_NOT_NULL(arg); + + if (raw_std_method_call_is_method(envelope, "create")) { + on_create(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "pause")) { + on_pause(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "resume")) { + on_resume(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "stop")) { + on_stop(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "release")) { + on_release(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "seek")) { + on_seek(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "setSourceUrl")) { + on_set_source_url(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "getDuration")) { + on_get_duration(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "setVolume")) { + on_set_volume(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "getCurrentPosition")) { + on_get_position(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "setPlaybackRate")) { + on_set_playback_rate(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "setReleaseMode")) { + on_set_release_mode(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "setPlayerMode")) { + on_set_player_mode(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "setBalance") == 0) { + on_set_balance(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "emitLog") == 0) { + on_player_emit_log(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "emitError") == 0) { + on_player_emit_error(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "dispose") == 0) { + on_dispose(plugin, arg, message->response_handle); + } else { + platch_respond_not_implemented(message->response_handle); + } +} + +static void on_init(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + (void) p; + (void) arg; + platch_respond_success_std(responsehandle, NULL); +} + +static void on_set_audio_context(struct plugin *p, const struct raw_std_value *arg, const 
FlutterPlatformMessageResponseHandle *responsehandle) { + (void) p; + (void) arg; + platch_respond_success_std(responsehandle, NULL); +} + +static void on_emit_log( + struct plugin *p, + const struct raw_std_value *arg, + const FlutterPlatformMessageResponseHandle *responsehandle +) { + (void) p; + + const struct raw_std_value *message = raw_std_map_find_str(arg, "message"); + if (message == NULL || !raw_std_value_is_string(message)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['message'] to be a string."); + return; + } + + LOG_DEBUG("%.*s", (int) raw_std_string_get_length(message), raw_std_string_get_nonzero_terminated(message)); + + platch_respond_success_std(responsehandle, NULL); +} + +static void on_emit_error( + struct plugin *p, + const struct raw_std_value *arg, + const FlutterPlatformMessageResponseHandle *responsehandle +) { + (void) p; + + const struct raw_std_value *code = raw_std_map_find_str(arg, "code"); + if (code == NULL || !raw_std_value_is_string(code)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['code'] to be a string."); + return; + } + + const struct raw_std_value *message = raw_std_map_find_str(arg, "message"); + if (message == NULL || !raw_std_value_is_string(message)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['message'] to be a string."); + return; + } + + LOG_ERROR( + "%.*s, %.*s", + (int) raw_std_string_get_length(code), raw_std_string_get_nonzero_terminated(code), + (int) raw_std_string_get_length(message), raw_std_string_get_nonzero_terminated(message) ); - if (ok != 0) { - LOG_ERROR("Cannot set player receiver for event channel: %s\n", event_channel); - audio_player_destroy(player); - free(entry); - return NULL; + + platch_respond_success_std(responsehandle, NULL); +} + +static void on_global_method_call(void *userdata, const FlutterPlatformMessage *message) { + struct plugin *plugin = userdata; + + const struct raw_std_value *envelope = raw_std_method_call_from_buffer(message->message, message->message_size); + if (!envelope) { + platch_respond_malformed_message_std(message); + return; } - entry->entry = (struct list_head){ NULL, NULL }; - entry->player = player; + const struct raw_std_value *arg = raw_std_method_call_get_arg(envelope); + ASSERT_NOT_NULL(arg); - list_add(&entry->entry, &plugin.players); - return player; + if (raw_std_method_call_is_method(envelope, "init")) { + on_init(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "setAudioContext")) { + on_set_audio_context(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "emitLog")) { + on_emit_log(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "emitError")) { + on_emit_error(plugin, arg, message->response_handle); + } else { + platch_respond_not_implemented(message->response_handle); + } +} + +static enum listener_return on_duration_notify(void *arg, void *userdata) { + ASSERT_NOT_NULL(userdata); + struct gstplayer *player = userdata; + + struct audioplayer_meta *meta = gstplayer_get_userdata(player); + ASSERT_NOT_NULL(meta); + + if (arg != NULL) { + int64_t *duration_ms = arg; + send_duration_update(meta, true, *duration_ms); + } else { + send_duration_update(meta, false, -1); + } + + return kNoAction; +} + +static enum listener_return on_eos_notify(void *arg, void *userdata) { + (void) arg; + + ASSERT_NOT_NULL(userdata); + struct gstplayer *player = userdata; + + struct audioplayer_meta *meta = 
gstplayer_get_userdata(player); + ASSERT_NOT_NULL(meta); + + send_playback_complete(meta); + + return kNoAction; +} + +static enum listener_return on_error_notify(void *arg, void *userdata) { + ASSERT_NOT_NULL(arg); + GError *error = arg; + + ASSERT_NOT_NULL(userdata); + struct gstplayer *player = userdata; + + struct audioplayer_meta *meta = gstplayer_get_userdata(player); + ASSERT_NOT_NULL(meta); + + send_error_event(meta, error); + + return kNoAction; } -static void audioplayers_linux_plugin_dispose_player(struct audio_player *player) { - list_for_each_entry_safe(struct audio_player_entry, entry, &plugin.players, entry) { - if (entry->player == player) { - list_del(&entry->entry); - plugin_registry_remove_receiver(audio_player_subscribe_channel_name(player)); - audio_player_destroy(player); +static void on_receive_event_ch(void *userdata, const FlutterPlatformMessage *message) { + ASSERT_NOT_NULL(userdata); + + struct gstplayer *player = userdata; + + struct audioplayer_meta *meta = gstplayer_get_userdata(player); + ASSERT_NOT_NULL(meta); + + const struct raw_std_value *envelope = raw_std_method_call_from_buffer(message->message, message->message_size); + if (envelope == NULL) { + platch_respond_malformed_message_std(message); + return; + } + + /// TODO: Implement + if (raw_std_method_call_is_method(envelope, "listen")) { + platch_respond_success_std(message->response_handle, NULL); + + if (!meta->subscribed) { + meta->subscribed = true; + + meta->duration_listener = notifier_listen(gstplayer_get_duration_notifier(player), on_duration_notify, NULL, player); + meta->eos_listener = notifier_listen(gstplayer_get_eos_notifier(player), on_eos_notify, NULL, player); + meta->error_listener = notifier_listen(gstplayer_get_error_notifier(player), on_error_notify, NULL, player); + } + } else if (raw_std_method_call_is_method(envelope, "cancel")) { + platch_respond_success_std(message->response_handle, NULL); + + if (meta->subscribed) { + meta->subscribed = false; + + notifier_unlisten(gstplayer_get_eos_notifier(player), meta->error_listener); + notifier_unlisten(gstplayer_get_eos_notifier(player), meta->eos_listener); + notifier_unlisten(gstplayer_get_duration_notifier(player), meta->duration_listener); } + } else { + platch_respond_not_implemented(message->response_handle); } } +enum plugin_init_result audioplayers_plugin_init(struct flutterpi *flutterpi, void **userdata_out) { + int ok; + + (void) userdata_out; + + struct plugin *plugin = calloc(1, sizeof(struct plugin)); + if (plugin == NULL) { + return PLUGIN_INIT_RESULT_ERROR; + } + + plugin->flutterpi = flutterpi; + plugin->initialized = false; + + ok = plugin_registry_set_receiver_v2_locked( + flutterpi_get_plugin_registry(flutterpi), + AUDIOPLAYERS_GLOBAL_CHANNEL, + on_global_method_call, + plugin + ); + if (ok != 0) { + return PLUGIN_INIT_RESULT_ERROR; + } + + ok = plugin_registry_set_receiver_v2_locked( + flutterpi_get_plugin_registry(flutterpi), + AUDIOPLAYERS_LOCAL_CHANNEL, + on_player_method_call, + plugin + ); + if (ok != 0) { + goto fail_remove_global_receiver; + } + + return PLUGIN_INIT_RESULT_INITIALIZED; + +fail_remove_global_receiver: + plugin_registry_remove_receiver_v2_locked( + flutterpi_get_plugin_registry(flutterpi), + AUDIOPLAYERS_GLOBAL_CHANNEL + ); + + return PLUGIN_INIT_RESULT_ERROR; +} + +void audioplayers_plugin_deinit(struct flutterpi *flutterpi, void *userdata) { + (void) flutterpi; + + ASSERT_NOT_NULL(userdata); + struct plugin *plugin = userdata; + + 
plugin_registry_remove_receiver_v2_locked(flutterpi_get_plugin_registry(flutterpi), AUDIOPLAYERS_GLOBAL_CHANNEL); + plugin_registry_remove_receiver_v2_locked(flutterpi_get_plugin_registry(flutterpi), AUDIOPLAYERS_LOCAL_CHANNEL); + + const char *id; + struct gstplayer *player; + kh_foreach(&plugin->players, id, player, { + gstplayer_destroy(player); + free((char*) id); + }) +} + FLUTTERPI_PLUGIN("audioplayers", audioplayers, audioplayers_plugin_init, audioplayers_plugin_deinit) diff --git a/src/plugins/gstplayer.c b/src/plugins/gstplayer.c new file mode 100644 index 00000000..a0c6cd9d --- /dev/null +++ b/src/plugins/gstplayer.c @@ -0,0 +1,1805 @@ +#define _GNU_SOURCE + +#include +#include +#include +#include + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "flutter-pi.h" +#include "notifier_listener.h" +#include "platformchannel.h" +#include "pluginregistry.h" +#include "plugins/gstplayer.h" +#include "texture_registry.h" +#include "tracer.h" +#include "util/logging.h" +#include "util/macros.h" +#include "util/collection.h" +#include "util/asserts.h" + +#include "config.h" + +#ifdef HAVE_GSTREAMER_VIDEO_PLAYER + #include "gstreamer_video_player.h" +#endif + +#define LOG_PLAYER_DEBUG(player, fmtstring, ...) LOG_DEBUG("gstplayer-%"PRIi64": " fmtstring, player->debug_id, ##__VA_ARGS__) +#ifdef DEBUG + #define LOG_PLAYER_ERROR(player, fmtstring, ...) LOG_ERROR("gstplayer-%"PRIi64": " fmtstring, player->debug_id, ##__VA_ARGS__) +#else + #define LOG_PLAYER_ERROR(player, fmtstring, ...) LOG_ERROR(fmtstring, ##__VA_ARGS__) +#endif + +#define LOG_GST_SET_STATE_ERROR(player, _element) \ + LOG_PLAYER_ERROR( \ + player, \ + "setting gstreamer playback state failed. gst_element_set_state(element name: %s): GST_STATE_CHANGE_FAILURE\n", \ + GST_ELEMENT_NAME(_element) \ + ) + +#define LOG_GST_GET_STATE_ERROR(player, _element) \ + LOG_PLAYER_ERROR( \ + player, \ + "last gstreamer state change failed. gst_element_get_state(element name: %s): GST_STATE_CHANGE_FAILURE\n", \ + GST_ELEMENT_NAME(_element) \ + ) + +struct incomplete_video_info { + bool has_resolution; + bool has_fps; + bool has_duration; + bool has_seeking_info; + struct video_info info; +}; + +enum playpause_state { kPaused, kPlaying, kStepping }; + +enum playback_direction { kForward, kBackward }; + +#define PLAYPAUSE_STATE_AS_STRING(playpause_state) \ + ((playpause_state) == kPaused ? "paused" : \ + (playpause_state) == kPlaying ? "playing" : \ + (playpause_state) == kStepping ? "stepping" : \ + "?") + + +#ifdef DEBUG +static int64_t allocate_id(void) { + static atomic_int_fast64_t next_id = 1; + + return atomic_fetch_add_explicit(&next_id, 1, memory_order_relaxed); +} +#endif +struct gstplayer { +#ifdef DEBUG + int64_t debug_id; +#endif + + struct flutterpi *flutterpi; + + void *userdata; + + /** + * @brief The desired playback rate that should be used when @ref playpause_state is kPlayingForward. (should be > 0) + * + */ + double playback_rate_forward; + + /** + * @brief The desired playback rate that should be used when @ref playpause_state is kPlayingBackward. (should be < 0) + * + */ + double playback_rate_backward; + + /** + * @brief True if the video should seemlessly start from the beginning once the end is reached. + * + */ + bool looping; + + /** + * @brief True if the looping should use gapless looping using either the about-to-finish callback + * from playbin or segments. 
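+     * (Both approaches avoid a flushing seek back to the start, so the loop restarts without an audible gap.)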
+ * + * Configured in gstplayer_set_looping + */ + bool gapless_looping; + + /** + * @brief The desired playback state. Either paused, playing, or single-frame stepping. + * + */ + enum playpause_state playpause_state; + + /** + * @brief The desired playback direction. + * + */ + enum playback_direction direction; + + /** + * @brief The actual, currently used playback rate. + * + */ + double current_playback_rate; + + /** + * @brief The position reported if gstreamer position queries fail (for example, because gstreamer is currently + * seeking to a new position. In that case, fallback_position_ms will be the seeking target position, so we report the + * new position while we're seeking to it) + */ + int64_t fallback_position_ms; + + /** + * @brief True if there's a position that apply_playback_state should seek to. + * + */ + bool has_desired_position; + + /** + * @brief True if gstplayer should seek to the nearest keyframe instead, which is a bit faster. + * + */ + bool do_fast_seeking; + + /** + * @brief The position, if any, that apply_playback_state should seek to. + * + */ + int64_t desired_position_ms; + + struct notifier video_info_notifier, buffering_state_notifier, error_notifier; + struct notifier duration_notifier, seeking_info_notifier; + struct notifier eos_notifier; + + bool has_sent_info; + struct incomplete_video_info info; + + bool has_duration; + int64_t duration; + + bool has_seeking_info; + struct seeking_info seeking_info; + + /** + * The flutter texture that this video player is pushing frames to. + */ + struct texture *texture; + + sd_event_source *busfd_events; + + /** + * The gstreamer playbin. + * + * In most cases this is the same as the pipeline (since a playbin is a pipeline). + * The only exception is when the gstplayer was initialized using a pipeline description, + * in which case we don't have a playbin. In that case, playbin will be NULL and + * pipeline will be valid. + */ + GstElement *playbin; + + /** + * The gstreamer pipeline. + */ + GstElement *pipeline; + + /** + * The gstreamer audiopanorama element, used as the "audio-filter" + * if audio playback is enabled, and used to change the audio + * left/right balance. + */ + GstElement *audiopanorama; + + /** + * True if we're playing back a live source, + * e.g. a live stream + */ + bool is_live; + + /** + * Callbacks to be called on ASYNC_DONE gstreamer messages. + * + * ASYNC_DONE messages indicate completion of an async state + * change or a flushing seek. + */ + size_t n_async_completers; + struct async_completer completers[8]; + + /** + * @brief Use the playbin "uri" property and "about-to-finish" signal + * to achieve gapless looping, if looping is desired. + * + * It's a bit unclear whether this is worse or equally as good as + * using segments; so segment looping is preferred for now. + * + * However, segments are not always super reliable (e.g. playbin3 + * segment looping is broken in gstreamer < 1.22.9), so the playbin + * method is kept intact still as a backup. + */ + bool playbin_gapless; + + /** + * @brief Use segments to do gapless looping, if looping is desired. + * + * (Instead of e.g. seeking back to start on EOS, or setting the + * playbin uri property in about-to-finish) + */ + bool segment_gapless; + + /** + * The source uri this gstplayer should play back. + * + * Mostly used to as the argument to `g_object_set(p->playbin, "uri", ...)` + * in on_about_to_finish, as querying the current source uri from the playbin + * is not always reliable. 
+ */ + char *uri; + + /** + * True if we did already issue a flushing seek + * with GST_SEEK_FLAG_SEGMENT. + * + * A flushing seek with GST_SEEK_FLAG_SEGMENT has to be + * issued to start gapless looping. + */ + bool did_configure_segment; + + struct tracer *tracer; +}; + +static struct async_completer pop_completer(struct gstplayer *player) { + ASSERT(player->n_async_completers > 0); + + struct async_completer completer = player->completers[0]; + + player->n_async_completers--; + if (player->n_async_completers > 0) { + memmove(player->completers + 0, player->completers + 1, player->n_async_completers * sizeof(struct async_completer)); + } + + return completer; +} + +static void on_async_done_message(struct gstplayer *player) { + if (player->n_async_completers > 0) { + struct async_completer completer = pop_completer(player); + + if (completer.on_done) { + completer.on_done(completer.userdata); + } + } +} + +static void on_async_error(struct gstplayer *player, GError *error) { + if (player->n_async_completers > 0) { + struct async_completer completer = pop_completer(player); + + if (completer.on_error) { + completer.on_error(completer.userdata, error); + } + } +} + +static int maybe_send_video_info(struct gstplayer *player) { + struct video_info *duped; + + if (player->info.has_resolution && player->info.has_fps && player->info.has_duration && player->info.has_seeking_info) { + // we didn't send the info yet but we have complete video info now. + // send it! + duped = memdup(&(player->info.info), sizeof(player->info.info)); + if (duped == NULL) { + return ENOMEM; + } + + notifier_notify(&player->video_info_notifier, duped); + } + + return 0; +} + +static void fetch_duration(struct gstplayer *player) { + gboolean ok; + int64_t duration; + + ok = gst_element_query_duration(player->pipeline, GST_FORMAT_TIME, &duration); + if (ok == FALSE) { + if (player->is_live) { + player->info.info.duration_ms = INT64_MAX; + player->info.has_duration = true; + + player->has_duration = true; + player->duration = INT64_MAX; + return; + } else { + LOG_PLAYER_ERROR(player, "Could not fetch duration. (gst_element_query_duration)\n"); + return; + } + } + + player->info.info.duration_ms = GST_TIME_AS_MSECONDS(duration); + player->info.has_duration = true; + + player->duration = GST_TIME_AS_MSECONDS(duration); + player->has_duration = true; +} + +static void fetch_seeking(struct gstplayer *player) { + GstQuery *seeking_query; + gboolean ok, seekable; + int64_t seek_begin, seek_end; + + seeking_query = gst_query_new_seeking(GST_FORMAT_TIME); + ok = gst_element_query(player->pipeline, seeking_query); + if (ok == FALSE) { + if (player->is_live) { + player->info.info.can_seek = false; + player->info.info.seek_begin_ms = 0; + player->info.info.seek_end_ms = 0; + player->info.has_seeking_info = true; + + player->seeking_info.can_seek = false; + player->seeking_info.seek_begin_ms = 0; + player->seeking_info.seek_end_ms = 0; + player->has_seeking_info = true; + return; + } else { + LOG_PLAYER_DEBUG(player, "Could not query seeking info. 
(gst_element_query)\n"); + return; + } + } + + gst_query_parse_seeking(seeking_query, NULL, &seekable, &seek_begin, &seek_end); + + gst_query_unref(seeking_query); + + player->info.info.can_seek = seekable; + player->info.info.seek_begin_ms = GST_TIME_AS_MSECONDS(seek_begin); + player->info.info.seek_end_ms = GST_TIME_AS_MSECONDS(seek_end); + player->info.has_seeking_info = true; + + player->seeking_info.can_seek = seekable; + player->seeking_info.seek_begin_ms = GST_TIME_AS_MSECONDS(seek_begin); + player->seeking_info.seek_end_ms = GST_TIME_AS_MSECONDS(seek_end); + player->has_seeking_info = true; +} + +static void update_buffering_state(struct gstplayer *player, GstObject *element) { + struct buffering_state *state; + GstBufferingMode mode; + GstQuery *query; + gboolean ok, busy; + int64_t start, stop, buffering_left; + int n_ranges, percent, avg_in, avg_out; + + query = gst_query_new_buffering(GST_FORMAT_TIME); + ok = gst_element_query(GST_ELEMENT(element), query); + if (ok == FALSE) { + LOG_PLAYER_DEBUG(player, "Could not query precise buffering state.\n"); + goto fail_unref_query; + } + + gst_query_parse_buffering_percent(query, &busy, &percent); + gst_query_parse_buffering_stats(query, &mode, &avg_in, &avg_out, &buffering_left); + + n_ranges = (int) gst_query_get_n_buffering_ranges(query); + + state = malloc(sizeof(*state) + n_ranges * sizeof(struct buffering_range)); + if (state == NULL) { + goto fail_unref_query; + } + + for (int i = 0; i < n_ranges; i++) { + ok = gst_query_parse_nth_buffering_range(query, (unsigned int) i, &start, &stop); + if (ok == FALSE) { + LOG_ERROR("Could not parse %dth buffering range from buffering state. (gst_query_parse_nth_buffering_range)\n", i); + goto fail_free_state; + } + + state->ranges[i].start_ms = GST_TIME_AS_MSECONDS(start); + state->ranges[i].stop_ms = GST_TIME_AS_MSECONDS(stop); + } + + gst_query_unref(query); + + state->percent = percent; + state->mode = + (mode == GST_BUFFERING_STREAM ? BUFFERING_MODE_STREAM : + mode == GST_BUFFERING_DOWNLOAD ? BUFFERING_MODE_DOWNLOAD : + mode == GST_BUFFERING_TIMESHIFT ? BUFFERING_MODE_TIMESHIFT : + mode == GST_BUFFERING_LIVE ? BUFFERING_MODE_LIVE : + (assert(0), BUFFERING_MODE_STREAM)); + state->avg_in = avg_in; + state->avg_out = avg_out; + state->time_left_ms = buffering_left; + state->n_ranges = n_ranges; + + notifier_notify(&player->buffering_state_notifier, state); + return; + +fail_free_state: + free(state); + +fail_unref_query: + gst_query_unref(query); +} + +static int apply_playback_state(struct gstplayer *player) { + GstStateChangeReturn ok; + GstState desired_state, current_state, pending_state; + double desired_rate; + int64_t position; + + TRACER_BEGIN(player->tracer, "apply_playback_state()"); + + TRACER_BEGIN(player->tracer, "gst_element_get_state()"); + ok = gst_element_get_state(player->pipeline, ¤t_state, &pending_state, 0); + TRACER_END(player->tracer, "gst_element_get_state()"); + + if (ok == GST_STATE_CHANGE_FAILURE) { + LOG_PLAYER_DEBUG( + player, + "last gstreamer pipeline state change failed. gst_element_get_state(element name: %s): GST_STATE_CHANGE_FAILURE\n", + GST_ELEMENT_NAME(player->pipeline) + ); + goto fail_stop_trace; + } + + if (current_state == GST_STATE_NULL) { + // We don't have a playback source right now. + // Don't do anything. + TRACER_END(player->tracer, "apply_playback_state()"); + return 0; + } + + desired_state = player->playpause_state == kPlaying ? 
GST_STATE_PLAYING : GST_STATE_PAUSED; /* use GST_STATE_PAUSED if we're stepping */ + + /// Use 1.0 if we're stepping, otherwise use the stored playback rate for the current direction. + if (player->playpause_state == kStepping) { + desired_rate = player->direction == kForward ? 1.0 : -1.0; + } else { + desired_rate = player->direction == kForward ? player->playback_rate_forward : player->playback_rate_backward; + } + + bool is_segment_looping = player->looping && player->gapless_looping && player->segment_gapless; + if (player->current_playback_rate != desired_rate || player->has_desired_position || (player->did_configure_segment != is_segment_looping)) { + if (player->has_desired_position) { + position = player->desired_position_ms * GST_MSECOND; + } else { + TRACER_BEGIN(player->tracer, "gst_element_query_position()"); + ok = gst_element_query_position(GST_ELEMENT(player->pipeline), GST_FORMAT_TIME, &position); + TRACER_END(player->tracer, "gst_element_query_position()"); + + if (ok == FALSE) { + LOG_PLAYER_ERROR(player, "Could not get the current playback position to apply the playback speed.\n"); + goto fail_stop_trace; + } + } + + GstSeekFlags seek_flags = GST_SEEK_FLAG_FLUSH; + + // Only configure segment looping if we actually + // are segment looping, because it will + // swallow the end-of-stream events apparently. + if (is_segment_looping) { + seek_flags |= GST_SEEK_FLAG_SEGMENT; + } + + if (player->do_fast_seeking) { + seek_flags |= GST_SEEK_FLAG_KEY_UNIT | GST_SEEK_FLAG_SNAP_NEAREST; + } else { + seek_flags |= GST_SEEK_FLAG_ACCURATE; + } + + if (player->direction == kForward) { + LOG_PLAYER_DEBUG( + player, + "gst_element_seek(..., rate: %f, start: %" GST_TIME_FORMAT ", end: %" GST_TIME_FORMAT ", ...)\n", + desired_rate, + GST_TIME_ARGS(position), + GST_TIME_ARGS(GST_CLOCK_TIME_NONE) + ); + + TRACER_BEGIN(player->tracer, "gst_element_seek()"); + ok = gst_element_seek( + GST_ELEMENT(player->pipeline), + desired_rate, + GST_FORMAT_TIME, + seek_flags, + GST_SEEK_TYPE_SET, position, + GST_SEEK_TYPE_SET, GST_CLOCK_TIME_NONE + ); + TRACER_END(player->tracer, "gst_element_seek()"); + + if (ok == FALSE) { + LOG_PLAYER_ERROR( + player, + "Could not set the new playback speed / playback position (speed: %f, pos: %" GST_TIME_FORMAT ").\n", + desired_rate, + GST_TIME_ARGS(position) + ); + goto fail_stop_trace; + } + } else { + LOG_PLAYER_DEBUG( + player, + "gst_element_seek(..., rate: %f, start: %" GST_TIME_FORMAT ", end: %" GST_TIME_FORMAT ", ...)\n", + desired_rate, + GST_TIME_ARGS(0), + GST_TIME_ARGS(position) + ); + + TRACER_BEGIN(player->tracer, "gst_element_seek()"); + ok = gst_element_seek( + GST_ELEMENT(player->pipeline), + desired_rate, + GST_FORMAT_TIME, + seek_flags, + GST_SEEK_TYPE_SET, 0, + GST_SEEK_TYPE_SET, position + ); + TRACER_END(player->tracer, "gst_element_seek()"); + + if (ok == FALSE) { + LOG_PLAYER_ERROR( + player, + "Could not set the new playback speed / playback position (speed: %f, pos: %" GST_TIME_FORMAT ").\n", + desired_rate, + GST_TIME_ARGS(position) + ); + goto fail_stop_trace; + } + } + + player->current_playback_rate = desired_rate; + player->fallback_position_ms = GST_TIME_AS_MSECONDS(position); + player->has_desired_position = false; + player->did_configure_segment = is_segment_looping; + } + + if (pending_state == GST_STATE_VOID_PENDING) { + if (current_state == desired_state) { + // we're already in the desired state, and we're also not changing it + // no need to do anything. 
+ LOG_PLAYER_DEBUG( + player, + "apply_playback_state(playing: %s): already in desired state and none pending\n", + PLAYPAUSE_STATE_AS_STRING(player->playpause_state) + ); + TRACER_END(player->tracer, "apply_playback_state()"); + return 0; + } + + LOG_PLAYER_DEBUG( + player, + "apply_playback_state(playing: %s): setting state to %s\n", + PLAYPAUSE_STATE_AS_STRING(player->playpause_state), + gst_element_state_get_name(desired_state) + ); + + TRACER_BEGIN(player->tracer, "gst_element_set_state()"); + ok = gst_element_set_state(player->pipeline, desired_state); + TRACER_END(player->tracer, "gst_element_set_state()"); + + if (ok == GST_STATE_CHANGE_FAILURE) { + LOG_GST_SET_STATE_ERROR(player, player->pipeline); + goto fail_stop_trace; + } + } else if (pending_state != desired_state) { + // queue to be executed when pending async state change completes + /// TODO: Implement properly + + LOG_PLAYER_DEBUG( + player, + "apply_playback_state(playing: %s): async state change in progress, setting state to %s\n", + PLAYPAUSE_STATE_AS_STRING(player->playpause_state), + gst_element_state_get_name(desired_state) + ); + + TRACER_BEGIN(player->tracer, "gst_element_set_state()"); + ok = gst_element_set_state(player->pipeline, desired_state); + TRACER_END(player->tracer, "gst_element_set_state()"); + + if (ok == GST_STATE_CHANGE_FAILURE) { + LOG_GST_SET_STATE_ERROR(player, player->pipeline); + goto fail_stop_trace; + } + } + + TRACER_END(player->tracer, "apply_playback_state()"); + return 0; + +fail_stop_trace: + TRACER_END(player->tracer, "apply_playback_state()"); + return EIO; +} + +static void on_eos_message(struct gstplayer *player, GstMessage *msg) { + if (GST_MESSAGE_SRC(msg) == GST_OBJECT(player->pipeline)) { + if (player->looping) { + LOG_PLAYER_DEBUG(player, "pipeline end of stream, seeking back to start (flushing)\n"); + player->desired_position_ms = 0; + player->has_desired_position = true; + apply_playback_state(player); + } else { + LOG_PLAYER_DEBUG(player, "pipeline end of stream\n"); + notifier_notify(&player->eos_notifier, NULL); + } + } else { + LOG_PLAYER_DEBUG(player, "end of stream for element: %s\n", GST_MESSAGE_SRC_NAME(msg)); + } +} + +static void on_gstreamer_error_message(struct gstplayer *player, GstMessage *msg) { + (void) player; + + GError *error; + gchar *debug_info; + + gst_message_parse_error(msg, &error, &debug_info); + + LOG_PLAYER_ERROR( + player, + "gstreamer error: code: %d, domain: %s, msg: %s (debug info: %s)\n", + error->code, + g_quark_to_string(error->domain), + error->message, + debug_info + ); + + on_async_error(player, error); + + notifier_notify(&player->error_notifier, error); + + g_clear_error(&error); + g_free(debug_info); +} + +static void on_gstreamer_warning_message(struct gstplayer *player, GstMessage *msg) { + (void) player; + + GError *error; + gchar *debug_info; + + gst_message_parse_warning(msg, &error, &debug_info); + + LOG_PLAYER_ERROR( + player, + "gstreamer warning: code: %d, domain: %s, msg: %s (debug info: %s)\n", + error->code, + g_quark_to_string(error->domain), + error->message, + debug_info + ); + g_clear_error(&error); + g_free(debug_info); +} + +static void on_gstreamer_info_message(struct gstplayer *player, GstMessage *msg) { + GError *error; + gchar *debug_info; + + gst_message_parse_info(msg, &error, &debug_info); + + LOG_PLAYER_DEBUG(player, "gstreamer info: %s (debug info: %s)\n", error->message, debug_info); + g_clear_error(&error); + g_free(debug_info); +} + +static void on_buffering_message(struct gstplayer *player, GstMessage 
*msg) { + GstBufferingMode mode; + int64_t buffering_left; + int percent, avg_in, avg_out; + + gst_message_parse_buffering(msg, &percent); + gst_message_parse_buffering_stats(msg, &mode, &avg_in, &avg_out, &buffering_left); + + if (percent == 0 || percent == 100) { + LOG_PLAYER_DEBUG( + player, + "buffering, src: %s, percent: %d, mode: %s, avg in: %d B/s, avg out: %d B/s, %" GST_TIME_FORMAT "\n", + GST_MESSAGE_SRC_NAME(msg), + percent, + mode == GST_BUFFERING_STREAM ? "stream" : + mode == GST_BUFFERING_DOWNLOAD ? "download" : + mode == GST_BUFFERING_TIMESHIFT ? "timeshift" : + mode == GST_BUFFERING_LIVE ? "live" : + "?", + avg_in, + avg_out, + GST_TIME_ARGS(buffering_left * GST_MSECOND) + ); + } + + /// TODO: GST_MESSAGE_BUFFERING is only emitted when we actually need to wait on some buffering till we can resume the playback. + /// However, the info we send to the callback also contains information on the buffered video ranges. + /// That information is constantly changing, but we only notify the player about it when we actively wait for the buffer to be filled. + update_buffering_state(player, GST_MESSAGE_SRC(msg)); +} + +static void on_state_changed_message(struct gstplayer *player, GstMessage *msg) { + GstState old, current, pending; + + gst_message_parse_state_changed(msg, &old, ¤t, &pending); + + if (GST_MESSAGE_SRC(msg) == GST_OBJECT(player->pipeline)) { + LOG_PLAYER_DEBUG( + player, + "pipeline state changed: old: %s, current: %s, pending: %s\n", + gst_element_state_get_name(old), + gst_element_state_get_name(current), + gst_element_state_get_name(pending) + ); + + if (current == GST_STATE_READY || current == GST_STATE_NULL) { + if (player->has_duration) { + player->has_duration = false; + notifier_notify(&player->duration_notifier, NULL); + } + + player->info.has_duration = false; + + player->has_seeking_info = false; + player->info.has_seeking_info = false; + + player->did_configure_segment = false; + } else if ((current == GST_STATE_PAUSED || current == GST_STATE_PLAYING) && (old == GST_STATE_READY || old == GST_STATE_NULL)) { + // it's our pipeline that changed to either playing / paused, and we don't have info about our video duration yet. + // get that info now. + // technically we can already fetch the duration when the decodebin changed to PAUSED state. + + if (!player->has_duration) { + fetch_duration(player); + + if (player->has_duration) { + int64_t *duped = memdup(&player->duration, sizeof(int64_t)); + + notifier_notify(&player->duration_notifier, duped); + } + } + + if (!player->has_seeking_info) { + fetch_seeking(player); + + if (player->has_seeking_info) { + struct seeking_info *duped = memdup(&player->seeking_info, sizeof(struct seeking_info)); + + notifier_notify(&player->seeking_info_notifier, duped); + } + } + + maybe_send_video_info(player); + } + } +} + +static void on_segment_start_message(struct gstplayer *player, GstMessage *msg) { + GstFormat format; + gint64 position; + gst_message_parse_segment_start(msg, &format, &position); + + if (format == GST_FORMAT_TIME) { + LOG_PLAYER_DEBUG( + player, + "segment start. src: %s, position: %" GST_TIME_FORMAT "\n", + GST_MESSAGE_SRC_NAME(msg), + GST_TIME_ARGS(position) + ); + } else { + LOG_PLAYER_DEBUG( + player, + "segment start. 
src: %s, position: %" PRId64 " (%s)\n", + GST_MESSAGE_SRC_NAME(msg), + position, + gst_format_get_name(format) + ); + } +} + +static void on_segment_done_message(struct gstplayer *player, GstMessage *msg) { + (void) msg; + + if (player->looping && player->gapless_looping && player->segment_gapless) { + LOG_PLAYER_DEBUG(player, "Segment done. Seeking back to segment start (segment, non-flushing)\n"); + gboolean ok = gst_element_seek( + player->pipeline, + player->current_playback_rate, + GST_FORMAT_TIME, + GST_SEEK_FLAG_SEGMENT, + GST_SEEK_TYPE_SET, 0, + GST_SEEK_TYPE_SET, GST_CLOCK_TIME_NONE + ); + if (!ok) { + LOG_PLAYER_DEBUG(player, "Could not seek back to segment start.\n"); + } + } +} + +static void on_duration_changed_message(struct gstplayer *player, GstMessage *msg) { + (void) msg; + + if (!player->has_duration) { + fetch_duration(player); + + if (player->has_duration) { + int64_t *duped = memdup(&player->duration, sizeof(int64_t)); + + notifier_notify(&player->duration_notifier, duped); + } + } + + if (!player->has_seeking_info) { + fetch_seeking(player); + + if (player->has_seeking_info) { + struct seeking_info *duped = memdup(&player->seeking_info, sizeof(struct seeking_info)); + + notifier_notify(&player->seeking_info_notifier, duped); + } + } + + maybe_send_video_info(player); +} + +static void on_about_to_finish_message(struct gstplayer *player) { + ASSERT_NOT_NULL(player->playbin); + + if (player->looping && player->uri && player->playbin_gapless) { + LOG_PLAYER_DEBUG(player, "Got about-to-finish signal, configuring next playback item\n"); + g_object_set(player->playbin, "uri", player->uri, NULL); + } else { + LOG_PLAYER_DEBUG(player, "Got about-to-finish signal\n"); + } +} + +static void on_application_message(struct gstplayer *player, GstMessage *msg) { + if (gst_message_has_name(msg, "appsink-eos")) { + // unhandled + } else if (gst_message_has_name(msg, "video-info")) { + const GstStructure *structure = gst_message_get_structure(msg); + + const GValue *value = gst_structure_get_value(structure, "info"); + + assert(G_VALUE_HOLDS_POINTER(value)); // NOLINT(bugprone-assert-side-effect) + + GstVideoInfo *info = g_value_get_pointer(value); + + player->info.info.width = GST_VIDEO_INFO_WIDTH(info); + player->info.info.height = GST_VIDEO_INFO_HEIGHT(info); + player->info.info.fps = (double) GST_VIDEO_INFO_FPS_N(info) / GST_VIDEO_INFO_FPS_D(info); + player->info.has_resolution = true; + player->info.has_fps = true; + + gst_video_info_free(info); + + LOG_PLAYER_DEBUG(player, "Determined resolution: %d x %d and framerate: %f\n", player->info.info.width, player->info.info.height, player->info.info.fps); + } else if (gst_message_has_name(msg, "about-to-finish")) { + on_about_to_finish_message(player); + } +} + +static void start_async(struct gstplayer *player, struct async_completer completer) { + ASSERT(player->n_async_completers < ARRAY_SIZE(player->completers)); + + player->completers[player->n_async_completers++] = completer; +} + +static void on_bus_message(struct gstplayer *player, GstMessage *msg) { + PRAGMA_DIAGNOSTIC_PUSH + PRAGMA_DIAGNOSTIC_IGNORED("-Wswitch-enum") + switch (GST_MESSAGE_TYPE(msg)) { + case GST_MESSAGE_EOS: + on_eos_message(player, msg); + break; + + case GST_MESSAGE_ERROR: + on_gstreamer_error_message(player, msg); + break; + + case GST_MESSAGE_WARNING: + on_gstreamer_warning_message(player, msg); + break; + + case GST_MESSAGE_INFO: + on_gstreamer_info_message(player, msg); + break; + + case GST_MESSAGE_TAG: { + if (0) { + GstTagList *tags; + 
gst_message_parse_tag(msg, &tags); + + char *str = gst_tag_list_to_string(tags); + + LOG_PLAYER_DEBUG(player, "%s found tags: %s\n", GST_MESSAGE_SRC_NAME(msg), str); + + free(str); + } + break; + } + + case GST_MESSAGE_BUFFERING: + on_buffering_message(player, msg); + break; + + case GST_MESSAGE_STATE_CHANGED: + on_state_changed_message(player, msg); + break; + + case GST_MESSAGE_APPLICATION: + on_application_message(player, msg); + break; + + case GST_MESSAGE_SEGMENT_START: + on_segment_start_message(player, msg); + break; + + case GST_MESSAGE_SEGMENT_DONE: + on_segment_done_message(player, msg); + break; + + case GST_MESSAGE_DURATION_CHANGED: + on_duration_changed_message(player, msg); + break; + + case GST_MESSAGE_LATENCY: + LOG_PLAYER_DEBUG(player, "redistributing latency\n"); + gst_bin_recalculate_latency(GST_BIN(player->pipeline)); + break; + + case GST_MESSAGE_ASYNC_DONE: + on_async_done_message(player); + break; + + case GST_MESSAGE_REQUEST_STATE: { + GstState requested; + + gst_message_parse_request_state(msg, &requested); + gst_element_set_state(GST_ELEMENT(player->pipeline), requested); + break; + } + + case GST_MESSAGE_QOS: { + if (0) { + gboolean live = false; + uint64_t running_time = 0; + uint64_t stream_time = 0; + uint64_t timestamp = 0; + uint64_t duration = 0; + + GstFormat format = GST_FORMAT_DEFAULT; + uint64_t processed = 0; + uint64_t dropped = 0; + + int64_t jitter = 0; + double proportion = 1.0; + int quality = 0; + + gst_message_parse_qos(msg, &live, &running_time, &stream_time, ×tamp, &duration); + gst_message_parse_qos_stats(msg, &format, &processed, &dropped); + gst_message_parse_qos_values(msg, &jitter, &proportion, &quality); + + LOG_PLAYER_DEBUG( + player, + "Quality of Service: %s\n" + " live: %s\n" + " running time: %" GST_TIME_FORMAT "\n" + " stream time: %" GST_TIME_FORMAT "\n" + " timestamp: %" GST_TIME_FORMAT "\n" + " duration: %" GST_TIME_FORMAT "\n" + " processed: %" PRIu64 " (%s)\n" + " dropped: %" PRIu64 " (%s)\n" + " jitter: %" PRId64 "\n" + " proportion: %f\n" + " quality: %d\n", + GST_MESSAGE_SRC_NAME(msg), + live ? 
"yes" : "no", + GST_TIME_ARGS(running_time), + GST_TIME_ARGS(stream_time), + GST_TIME_ARGS(timestamp), + GST_TIME_ARGS(duration), + processed, gst_format_get_name(format), + dropped, gst_format_get_name(format), + jitter, + proportion, + quality + ); + } + break; + } + + default: + if (0) { + LOG_PLAYER_DEBUG(player, "gstreamer message: %s, src: %s\n", GST_MESSAGE_TYPE_NAME(msg), GST_MESSAGE_SRC_NAME(msg)); + } + + break; + } + PRAGMA_DIAGNOSTIC_POP + return; +} + +static int on_bus_fd_ready(sd_event_source *s, int fd, uint32_t revents, void *userdata) { + (void) s; + (void) fd; + (void) revents; + + struct gstplayer *player = userdata; + + GstMessage *msg = gst_bus_pop(gst_element_get_bus(player->pipeline)); + if (msg != NULL) { + TRACER_BEGIN(player->tracer, "on_bus_message()"); + on_bus_message(player, msg); + TRACER_END(player->tracer, "on_bus_message()"); + + gst_message_unref(msg); + } + + return 0; +} + +void on_source_setup(GstElement *playbin, GstElement *source, gpointer userdata) { + (void) playbin; + + ASSERT_NOT_NULL(userdata); + + if (g_object_class_find_property(G_OBJECT_GET_CLASS(source), "extra-headers") != NULL) { + g_object_set(source, "extra-headers", (GstStructure *) userdata, NULL); + } else { + LOG_ERROR("Failed to set custom HTTP headers because gstreamer source element has no 'extra-headers' property.\n"); + } +} + +/** + * See: https://gitlab.freedesktop.org/gstreamer/gstreamer/-/blob/main/subprojects/gst-plugins-base/gst/playback/gstplay-enum.h + */ +typedef enum { + GST_PLAY_FLAG_VIDEO = (1 << 0), + GST_PLAY_FLAG_AUDIO = (1 << 1), + GST_PLAY_FLAG_TEXT = (1 << 2) +} GstPlayFlags; + +UNUSED static void on_element_setup(GstElement *playbin, GstElement *element, gpointer userdata) { + (void) playbin; + (void) userdata; + + GstElementFactory *factory = gst_element_get_factory(element); + if (factory == NULL) { + return; + } + + const char *factory_name = gst_plugin_feature_get_name(factory); + + if (g_str_has_prefix(factory_name, "v4l2video") && g_str_has_suffix(factory_name, "dec")) { + gst_util_set_object_arg(G_OBJECT(element), "capture-io-mode", "dmabuf"); + LOG_DEBUG("Applied capture-io-mode = dmabuf\n"); + } +} + +static void on_about_to_finish(GstElement *playbin, gpointer userdata) { + (void) userdata; + + GstBus *bus = gst_element_get_bus(playbin); + if (bus == NULL) { + LOG_ERROR("Could not acquire bus to post about-to-finish message.\n"); + return; + } + + GstStructure *s = gst_structure_new_empty("about-to-finish"); + if (s == NULL) { + LOG_ERROR("Could not create about-to-finish gst structure.\n"); + gst_object_unref(bus); + return; + } + + GstMessage *msg = gst_message_new_application(GST_OBJECT(playbin), s); + if (msg == NULL) { + LOG_ERROR("Could not create about-to-finish gst message.\n"); + gst_structure_free(s); + gst_object_unref(bus); + return; + } + + gboolean ok = gst_bus_post(bus, msg); + if (ok != TRUE) { + LOG_ERROR("Could not notify player about about-to-finish signal.\n"); + } + + gst_object_unref(bus); +} + +UNUSED static GstPadProbeReturn on_video_sink_event(GstPad *pad, GstPadProbeInfo *info, gpointer userdata) { + GstBus *bus = userdata; + + (void) pad; + + GstEvent *event = gst_pad_probe_info_get_event(info); + if (event == NULL) { + return GST_PAD_PROBE_OK; + } + + if (GST_EVENT_TYPE(event) != GST_EVENT_CAPS) { + return GST_PAD_PROBE_OK; + } + + GstCaps *caps = NULL; + gst_event_parse_caps(event, &caps); + + if (!caps) { + LOG_ERROR("Could not parse caps event.\n"); + return GST_PAD_PROBE_OK; + } + + GstVideoInfo *videoinfo = 
gst_video_info_new(); + ASSUME(videoinfo != NULL); + + if (!gst_video_info_from_caps(videoinfo, caps)) { + LOG_ERROR("Could not determine video properties of caps event.\n"); + return GST_PAD_PROBE_OK; + } + + GValue v = G_VALUE_INIT; + g_value_init(&v, G_TYPE_POINTER); + g_value_set_pointer(&v, videoinfo); + + GstStructure *msg_structure = gst_structure_new_empty("video-info"); + gst_structure_set_value(msg_structure, "info", &v); + + gst_bus_post(bus, gst_message_new_application(GST_OBJECT(pad), msg_structure)); + + // We're just interested in the caps event. + // Once we have that, we can unlisten. + return GST_PAD_PROBE_REMOVE; +} + +struct gstplayer *gstplayer_new(struct flutterpi *flutterpi, const char *uri, void *userdata, bool play_video, bool play_audio, GstStructure *headers) { + ASSERT_NOT_NULL(flutterpi); + + struct gstplayer *p = calloc(1, sizeof(struct gstplayer)); + if (p == NULL) { + return NULL; + } + +#ifdef DEBUG + p->debug_id = allocate_id(); +#endif + p->userdata = userdata; + p->current_playback_rate = 1.0; + p->playback_rate_forward = 1.0; + p->playback_rate_backward = 1.0; + + // Gapless looping is configured in the gstplayer_set_looping call. + // + // Without gapless looping, we'll just seek back to start on EOS, + // which always works. + p->gapless_looping = false; + + // Gapless looping using playbin "about-to-finish" is unreliable + // in audio playback. + // + // E.g., using the audioplayers example and looping the first ("coin") + // sound, switching to the second sound will first play the second sound, + // then play part of the first sound at higher pitch, and then loop the + // second sound. + // + // Also, it seems like the playbin recreates all the elements & decoders, + // so it's not super resource-saving either. + p->playbin_gapless = false; + + // Segment gapless looping works mostly fine, but is also + // not completely reliable. + // + // E.g., looping the second ("laser") sound of the audioplayers + // example will play back 1-2 seconds of noise after + // the laser sound, then play the laser sound, then noise, etc. + // + // Segment looping does not work with playbin3 in gstreamer + // < 1.22.9 because of a bug in multiqueue. 
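+    //
+    // Rough sketch of how segment looping then plays out in this file:
+    // apply_playback_state() issues the initial flushing seek with
+    // GST_SEEK_FLAG_SEGMENT set, so the pipeline posts
+    // GST_MESSAGE_SEGMENT_DONE instead of going EOS, and
+    // on_segment_done_message() jumps back to the start with a
+    // non-flushing segment seek:
+    //
+    //   gst_element_seek(
+    //       pipeline, rate, GST_FORMAT_TIME,
+    //       GST_SEEK_FLAG_SEGMENT,   /* non-flushing, keep segment looping */
+    //       GST_SEEK_TYPE_SET, 0,
+    //       GST_SEEK_TYPE_SET, GST_CLOCK_TIME_NONE
+    //   );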
+ p->segment_gapless = true; + + p->tracer = flutterpi_get_tracer(flutterpi); + + TRACER_BEGIN(p->tracer, "gstplayer_new()"); + + value_notifier_init(&p->video_info_notifier, NULL, free); + value_notifier_init(&p->duration_notifier, NULL, free); + value_notifier_init(&p->seeking_info_notifier, NULL, free); + value_notifier_init(&p->buffering_state_notifier, NULL, free); + change_notifier_init(&p->error_notifier); + change_notifier_init(&p->eos_notifier); + + // playbin is more reliable for now than playbin3 (see above) + p->playbin = gst_element_factory_make("playbin", "playbin"); + if (p->playbin == NULL) { + LOG_PLAYER_ERROR(p, "Couldn't create playbin instance.\n"); + goto fail_free_p; + } + + p->pipeline = p->playbin; + + gint flags = 0; + + g_object_get(p->playbin, "flags", &flags, NULL); + + if (play_video) { + flags |= GST_PLAY_FLAG_VIDEO; + } else { + flags &= ~GST_PLAY_FLAG_VIDEO; + } + + if (play_audio) { + flags |= GST_PLAY_FLAG_AUDIO; + } else { + flags &= ~GST_PLAY_FLAG_AUDIO; + } + + flags &= ~GST_PLAY_FLAG_TEXT; + + g_object_set(p->playbin, "flags", flags, NULL); + + if (play_video) { +#ifdef HAVE_GSTREAMER_VIDEO_PLAYER + p->texture = flutterpi_create_texture(flutterpi); + if (p->texture == NULL) { + goto fail_unref_playbin; + } + + struct gl_renderer *gl_renderer = flutterpi_get_gl_renderer(flutterpi); + + GstElement *sink = flutter_gl_texture_sink_new(p->texture, gl_renderer, p->tracer); + if (sink == NULL) { + goto fail_destroy_texture; + } + + GstPad *video_sink_pad = gst_element_get_static_pad(sink, "sink"); + if (video_sink_pad == NULL) { + LOG_PLAYER_ERROR(p, "Could not acquire sink pad of video sink to wait for video configuration.\n"); + goto fail_destroy_texture; + } + + // This will send a `video-info` application message to the bus when it sees a caps event. + gst_pad_add_probe(video_sink_pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, on_video_sink_event, gst_pipeline_get_bus(GST_PIPELINE(p->playbin)), NULL); + + gst_object_unref(video_sink_pad); + video_sink_pad = NULL; + + // playbin (playsink) takes a (sinking) reference + // on the video sink + g_object_set(p->playbin, "video-sink", sink, NULL); + + // Apply capture-io-mode: dmabuf to any v4l2 decoders. + /// TODO: This might be unnecessary / deprecated nowadays. + g_signal_connect(p->playbin, "element-setup", G_CALLBACK(on_element_setup), NULL); +#else + (void) flutterpi; + + ASSERT_MSG(0, "Video playback with gstplayer is only supported when building with the gstreamer video player plugin."); + goto fail_unref_playbin; +#endif + } + + + if (play_audio) { + p->audiopanorama = gst_element_factory_make("audiopanorama", NULL); + if (p->audiopanorama != NULL) { + g_object_set(p->playbin, "audio-filter", p->audiopanorama, NULL); + } + } + + // Only try to configure headers if we actually have some. + if (headers != NULL && gst_structure_n_fields(headers) > 0) { + g_signal_connect(p->playbin, "source-setup", G_CALLBACK(on_source_setup), headers); + } + + g_signal_connect(p->playbin, "about-to-finish", G_CALLBACK(on_about_to_finish), NULL); + + // Listen to the bus + GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(p->playbin)); + ASSERT_NOT_NULL(bus); + + GPollFD fd; + gst_bus_get_pollfd(bus, &fd); + + flutterpi_sd_event_add_io(&p->busfd_events, fd.fd, EPOLLIN, on_bus_fd_ready, p); + + gst_object_unref(bus); + + // If we have a URI, preroll it. 
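+    //
+    // Note: live pipelines don't preroll in PAUSED (they only produce data
+    // once they are PLAYING), so gst_element_set_state() returns
+    // GST_STATE_CHANGE_NO_PREROLL for them. That's what is_live is derived
+    // from below.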
+ if (uri != NULL) { + g_object_set(p->playbin, "uri", uri, NULL); + + GstStateChangeReturn status = gst_element_set_state(p->playbin, GST_STATE_PAUSED); + if (status == GST_STATE_CHANGE_NO_PREROLL) { + LOG_PLAYER_DEBUG(p, "Is live!\n"); + p->is_live = true; + } else if (status == GST_STATE_CHANGE_FAILURE) { + LOG_PLAYER_ERROR(p, "Could not set pipeline to paused state.\n"); + goto fail_rm_event_source; + } else { + LOG_PLAYER_DEBUG(p, "Not live!\n"); + p->is_live = false; + } + + p->uri = strdup(uri); + } + + TRACER_END(p->tracer, "gstplayer_new()"); + + LOG_PLAYER_DEBUG(p, "gstplayer_new(\"%s\", %s): %s\n", uri ? uri : "", play_audio ? "with audio" : "without audio", p->is_live ? "live" : "not live"); + + return p; + +fail_rm_event_source: + sd_event_source_set_enabled(p->busfd_events, false); + sd_event_source_unref(p->busfd_events); + +fail_destroy_texture: UNUSED + gst_object_unref(p->playbin); + + // The flutter upload sink uses the texture internally, + // so the playbin (which contains the upload sink) must be destroyed first, + // before the texture can be destroyed. + if (play_video) { + texture_destroy(p->texture); + } + return NULL; + +fail_unref_playbin: + gst_object_unref(p->playbin); + +fail_free_p: + TRACER_END(p->tracer, "gstplayer_new()"); + free(p); + return NULL; +} + +struct gstplayer *gstplayer_new_from_asset(struct flutterpi *flutterpi, const char *asset_path, const char *package_name, bool play_video, bool play_audio, void *userdata) { + struct gstplayer *player; + char *uri; + int ok; + + (void) package_name; + + ok = asprintf(&uri, "file://%s/%s", flutterpi_get_asset_bundle_path(flutterpi), asset_path); + if (ok < 0) { + return NULL; + } + + player = gstplayer_new(flutterpi, uri, userdata, /* play_video */ play_video, /* play_audio */ play_audio, NULL); + + free(uri); + + return player; +} + +struct gstplayer *gstplayer_new_from_network(struct flutterpi *flutterpi, const char *uri, enum format_hint format_hint, bool play_video, bool play_audio, void *userdata, GstStructure *headers) { + (void) format_hint; + return gstplayer_new(flutterpi, uri, userdata, /* play_video */ play_video, /* play_audio */ play_audio, headers); +} + +struct gstplayer *gstplayer_new_from_file(struct flutterpi *flutterpi, const char *uri, bool play_video, bool play_audio, void *userdata) { + return gstplayer_new(flutterpi, uri, userdata, /* play_video */ play_video, /* play_audio */ play_audio, NULL); +} + +struct gstplayer *gstplayer_new_from_content_uri(struct flutterpi *flutterpi, const char *uri, bool play_video, bool play_audio, void *userdata, GstStructure *headers) { + return gstplayer_new(flutterpi, uri, userdata, /* play_video */ play_video, /* play_audio */ play_audio, headers); +} + +struct gstplayer *gstplayer_new_from_pipeline(struct flutterpi *flutterpi, const char *pipeline_descr, void *userdata) { + ASSERT_NOT_NULL(flutterpi); + + struct gstplayer *p = calloc(1, sizeof(struct gstplayer)); + if (p == NULL) { + return NULL; + } + +#ifdef DEBUG + p->debug_id = allocate_id(); +#endif + p->userdata = userdata; + p->current_playback_rate = 1.0; + p->playback_rate_forward = 1.0; + p->playback_rate_backward = 1.0; + + p->gapless_looping = false; + p->playbin_gapless = false; + p->segment_gapless = false; + + p->tracer = flutterpi_get_tracer(flutterpi); + + value_notifier_init(&p->video_info_notifier, NULL, free); + value_notifier_init(&p->duration_notifier, NULL, free); + value_notifier_init(&p->seeking_info_notifier, NULL, free); + value_notifier_init(&p->buffering_state_notifier, 
NULL, free);
+    change_notifier_init(&p->error_notifier);
+    change_notifier_init(&p->eos_notifier);
+
+    GError *error = NULL;
+    p->pipeline = gst_parse_launch(pipeline_descr, &error);
+    if (p->pipeline == NULL) {
+        LOG_ERROR("Could not create GStreamer pipeline from description: %s (pipeline: `%s`)\n", error->message, pipeline_descr);
+        return NULL;
+    }
+
+    // Remove the sink from the parsed pipeline description, and add our own sink.
+    GstElement *sink = gst_bin_get_by_name(GST_BIN(p->pipeline), "sink");
+    if (sink == NULL) {
+        LOG_ERROR("Couldn't find appsink in pipeline bin.\n");
+        goto fail_unref_pipeline;
+    }
+
+    p->texture = flutterpi_create_texture(flutterpi);
+    if (p->texture == NULL) {
+        goto fail_unref_pipeline;
+    }
+
+    struct gl_renderer *gl_renderer = flutterpi_get_gl_renderer(flutterpi);
+
+    if (!flutter_gl_texture_sink_patch(sink, p->texture, gl_renderer, p->tracer)) {
+        LOG_ERROR("Could not set up appsink.\n");
+        goto fail_unref_pipeline;
+    }
+
+    // Listen to the bus
+    GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(p->pipeline));
+    ASSERT_NOT_NULL(bus);
+
+    GPollFD fd;
+    gst_bus_get_pollfd(bus, &fd);
+
+    flutterpi_sd_event_add_io(&p->busfd_events, fd.fd, EPOLLIN, on_bus_fd_ready, p);
+
+    gst_object_unref(bus);
+
+    GstStateChangeReturn status = gst_element_set_state(p->pipeline, GST_STATE_PAUSED);
+    if (status == GST_STATE_CHANGE_NO_PREROLL) {
+        LOG_PLAYER_DEBUG(p, "Is live!\n");
+        p->is_live = true;
+    } else if (status == GST_STATE_CHANGE_FAILURE) {
+        LOG_PLAYER_ERROR(p, "Could not set pipeline to paused state.\n");
+        goto fail_rm_event_source;
+    } else {
+        LOG_PLAYER_DEBUG(p, "Not live!\n");
+        p->is_live = false;
+    }
+
+    return p;
+
+fail_rm_event_source:
+    sd_event_source_set_enabled(p->busfd_events, false);
+    sd_event_source_unref(p->busfd_events);
+
+fail_destroy_texture: UNUSED
+    gst_object_unref(p->pipeline);
+
+    // The flutter upload sink uses the texture internally,
+    // so the appsink (which contains the upload sink) must be destroyed first,
+    // before the texture can be destroyed.
+    texture_destroy(p->texture);
+    return NULL;
+
+fail_unref_pipeline:
+    gst_object_unref(p->pipeline);
+    return NULL;
+}
+
+void gstplayer_destroy(struct gstplayer *player) {
+    LOG_PLAYER_DEBUG(player, "destroy()\n");
+    notifier_deinit(&player->video_info_notifier);
+    notifier_deinit(&player->duration_notifier);
+    notifier_deinit(&player->seeking_info_notifier);
+    notifier_deinit(&player->buffering_state_notifier);
+    notifier_deinit(&player->error_notifier);
+    notifier_deinit(&player->eos_notifier);
+
+    gst_element_set_state(GST_ELEMENT(player->pipeline), GST_STATE_READY);
+    gst_element_set_state(GST_ELEMENT(player->pipeline), GST_STATE_NULL);
+
+    player->playbin = NULL;
+    gst_object_unref(player->pipeline);
+
+    if (player->texture) {
+        texture_destroy(player->texture);
+    }
+
+    free(player);
+}
+
+int64_t gstplayer_get_texture_id(struct gstplayer *player) {
+    // If the player was started with play_video == false, player->texture is NULL.
+    return player->texture ?
texture_get_id(player->texture) : -1; +} + +void gstplayer_set_userdata(struct gstplayer *player, void *userdata) { + player->userdata = userdata; +} + +void *gstplayer_get_userdata(struct gstplayer *player) { + return player->userdata; +} + +int gstplayer_play(struct gstplayer *player) { + LOG_PLAYER_DEBUG(player, "play()\n"); + player->playpause_state = kPlaying; + player->direction = kForward; + return apply_playback_state(player); +} + +int gstplayer_pause(struct gstplayer *player) { + LOG_PLAYER_DEBUG(player, "pause()\n"); + player->playpause_state = kPaused; + player->direction = kForward; + return apply_playback_state(player); +} + +int gstplayer_set_looping(struct gstplayer *player, bool looping, bool gapless) { + LOG_PLAYER_DEBUG(player, "set_looping(%s, gapless: %s)\n", looping ? "true" : "false", gapless ? "true" : "false"); + + if (player->playbin_gapless && gapless) { + ASSERT_NOT_NULL(player->playbin); + + // If we're enabling (gapless) looping, + // already configure the next playback URI, + // since we don't know if the about-to-finish callback + // has already arrived or not. + if (!player->looping && looping && player->uri) { + g_object_set(player->playbin, "uri", player->uri, NULL); + } + } + + player->looping = looping; + player->gapless_looping = gapless; + + apply_playback_state(player); + + return 0; +} + +int gstplayer_set_volume(struct gstplayer *player, double volume) { + if (player->playbin) { + LOG_PLAYER_DEBUG(player, "set_volume(%f)\n", volume); + g_object_set(player->playbin, "volume", (gdouble) volume, NULL); + } else { + LOG_PLAYER_DEBUG(player, "set_volume(%f): can't set volume on pipeline video player\n", volume); + } + + return 0; +} + +int64_t gstplayer_get_position(struct gstplayer *player) { + GstState current, pending; + gboolean ok; + int64_t position; + + GstStateChangeReturn statechange = gst_element_get_state(GST_ELEMENT(player->pipeline), ¤t, &pending, 0); + if (statechange == GST_STATE_CHANGE_FAILURE) { + LOG_GST_GET_STATE_ERROR(player, player->pipeline); + return -1; + } + + if (statechange == GST_STATE_CHANGE_ASYNC) { + // we don't have position data yet. + // report the latest known (or the desired) position. + return player->fallback_position_ms; + } + + ok = gst_element_query_position(player->pipeline, GST_FORMAT_TIME, &position); + if (ok == FALSE) { + LOG_PLAYER_ERROR(player, "Could not query gstreamer position. 
(gst_element_query_position)\n"); + return 0; + } + + return GST_TIME_AS_MSECONDS(position); +} + +int64_t gstplayer_get_duration(struct gstplayer *player) { + if (!player->has_duration) { + return -1; + } else { + return player->duration; + } +} + +int gstplayer_seek_to(struct gstplayer *player, int64_t position, bool nearest_keyframe) { + LOG_PLAYER_DEBUG(player, "seek_to(%" PRId64 ")\n", position); + player->has_desired_position = true; + player->desired_position_ms = position; + player->do_fast_seeking = nearest_keyframe; + return apply_playback_state(player); +} + +int gstplayer_seek_with_completer(struct gstplayer *player, int64_t position, bool nearest_keyframe, struct async_completer completer) { + LOG_PLAYER_DEBUG(player, "seek_to(%" PRId64 ")\n", position); + player->has_desired_position = true; + player->desired_position_ms = position; + player->do_fast_seeking = nearest_keyframe; + + if (completer.on_done || completer.on_error) { + start_async(player, completer); + } + + return apply_playback_state(player); +} + +int gstplayer_set_playback_speed(struct gstplayer *player, double playback_speed) { + LOG_PLAYER_DEBUG(player, "set_playback_speed(%f)\n", playback_speed); + ASSERT_MSG(playback_speed > 0, "playback speed must be > 0."); + player->playback_rate_forward = playback_speed; + return apply_playback_state(player); +} + +int gstplayer_step_forward(struct gstplayer *player) { + gboolean gst_ok; + int ok; + + ASSERT_NOT_NULL(player); + + player->playpause_state = kStepping; + player->direction = kForward; + ok = apply_playback_state(player); + if (ok != 0) { + return ok; + } + + gst_ok = gst_element_send_event(player->pipeline, gst_event_new_step(GST_FORMAT_BUFFERS, 1, 1, TRUE, FALSE)); + if (gst_ok == FALSE) { + LOG_PLAYER_ERROR(player, "Could not send frame-step event to pipeline. (gst_element_send_event)\n"); + return EIO; + } + return 0; +} + +int gstplayer_step_backward(struct gstplayer *player) { + gboolean gst_ok; + int ok; + + ASSERT_NOT_NULL(player); + + player->playpause_state = kStepping; + player->direction = kBackward; + ok = apply_playback_state(player); + if (ok != 0) { + return ok; + } + + gst_ok = gst_element_send_event(player->pipeline, gst_event_new_step(GST_FORMAT_BUFFERS, 1, 1, TRUE, FALSE)); + if (gst_ok == FALSE) { + LOG_PLAYER_ERROR(player, "Could not send frame-step event to pipeline. (gst_element_send_event)\n"); + return EIO; + } + + return 0; +} + +void gstplayer_set_audio_balance(struct gstplayer *player, float balance) { + if (player->audiopanorama) { + g_object_set(player->audiopanorama, "panorama", (double) balance, NULL); + } +} + +float gstplayer_get_audio_balance(struct gstplayer *player) { + if (player->audiopanorama) { + gfloat balance = 0.0; + g_object_get(player->audiopanorama, "panorama", &balance, NULL); + return balance; + } else { + return 0.0; + } +} + +bool gstplayer_set_source_with_completer(struct gstplayer *p, const char *uri, struct async_completer completer) { + GstStateChangeReturn result; + const char *current_uri = NULL; + + if (!uri) { + LOG_PLAYER_ERROR(p, "Can't set source to NULL.\n"); + return false; + } + + if (!p->playbin) { + LOG_PLAYER_ERROR(p, "Can't set source for a pipeline video player.\n"); + return false; + } + + g_object_get(p->playbin, "current-uri", ¤t_uri, NULL); + + // If we're already playing back the desired uri, don't change it. 
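+    // playbin's "current-uri" property holds the URI that is currently being
+    // played back (as opposed to "uri", which is the item queued to play
+    // next), so it's the right property to compare the new uri against.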
+ if ((current_uri == uri) || (uri && current_uri && streq(current_uri, uri))) { + if (completer.on_done) { + completer.on_done(completer.userdata); + } + + return true; + } + + p->uri = strdup(uri); + + // If the playbin supports instant-uri, use it. + // if (g_object_class_find_property(G_OBJECT_GET_CLASS(p->playbin), "instant-uri")) { + // g_object_set(p->playbin, "instant-uri", TRUE, "uri", uri, NULL); + // } else { + + result = gst_element_set_state(p->playbin, GST_STATE_NULL); + if (result != GST_STATE_CHANGE_SUCCESS) { + LOG_PLAYER_ERROR(p, "Could not set pipeline to NULL state to change uri.\n"); + return false; + } + + g_object_set(p->playbin, "uri", uri, NULL); + + result = gst_element_set_state(p->playbin, GST_STATE_PAUSED); + if (result == GST_STATE_CHANGE_FAILURE) { + LOG_PLAYER_ERROR(p, "Could not set pipeline to PAUSED state to play new uri.\n"); + return false; + } else if (result == GST_STATE_CHANGE_NO_PREROLL) { + p->is_live = true; + + if (completer.on_done != NULL) { + completer.on_done(completer.userdata); + } + } else if (result == GST_STATE_CHANGE_SUCCESS) { + p->is_live = false; + + if (completer.on_done) { + completer.on_done(completer.userdata); + } + } else if (result == GST_STATE_CHANGE_ASYNC) { + /// TODO: What is is_live here? + p->is_live = false; + + if (completer.on_done || completer.on_error) { + start_async(p, completer); + } + } + + gstplayer_seek_to(p, 0, false); + + return true; +} + +bool gstplayer_set_source(struct gstplayer *p, const char *uri) { + return gstplayer_set_source_with_completer(p, uri, (struct async_completer) { + .on_done = NULL, + .on_error = NULL, + .userdata = NULL + }); +} + +struct notifier *gstplayer_get_video_info_notifier(struct gstplayer *player) { + return &player->video_info_notifier; +} + +struct notifier *gstplayer_get_duration_notifier(struct gstplayer *player) { + return &player->duration_notifier; +} + +struct notifier *gstplayer_get_seeking_info_notifier(struct gstplayer *player) { + return &player->seeking_info_notifier; +} + +struct notifier *gstplayer_get_buffering_state_notifier(struct gstplayer *player) { + return &player->buffering_state_notifier; +} + +struct notifier *gstplayer_get_error_notifier(struct gstplayer *player) { + return &player->error_notifier; +} + +struct notifier *gstplayer_get_eos_notifier(struct gstplayer *player) { + return &player->eos_notifier; +} diff --git a/src/plugins/gstplayer.h b/src/plugins/gstplayer.h new file mode 100644 index 00000000..6cbd8aca --- /dev/null +++ b/src/plugins/gstplayer.h @@ -0,0 +1,258 @@ +#ifndef _FLUTTERPI_INCLUDE_PLUGINS_GSTPLAYER_H +#define _FLUTTERPI_INCLUDE_PLUGINS_GSTPLAYER_H + +#include +#include + +#include + +#include "util/collection.h" + +#include "config.h" + +#define GSTREAMER_VER(major, minor, patch) ((((major) &0xFF) << 16) | (((minor) &0xFF) << 8) | ((patch) &0xFF)) +#define THIS_GSTREAMER_VER GSTREAMER_VER(LIBGSTREAMER_VERSION_MAJOR, LIBGSTREAMER_VERSION_MINOR, LIBGSTREAMER_VERSION_PATCH) + +enum format_hint { FORMAT_HINT_NONE, FORMAT_HINT_MPEG_DASH, FORMAT_HINT_HLS, FORMAT_HINT_SS, FORMAT_HINT_OTHER }; + +enum buffering_mode { BUFFERING_MODE_STREAM, BUFFERING_MODE_DOWNLOAD, BUFFERING_MODE_TIMESHIFT, BUFFERING_MODE_LIVE }; + +struct buffering_range { + int64_t start_ms; + int64_t stop_ms; +}; + +struct buffering_state { + // The percentage that the buffer is filled. + // If this is 100 playback will resume. + int percent; + + // The buffering mode currently used by the pipeline. 
+ enum buffering_mode mode; + + // The average input / consumption speed in bytes per second. + int avg_in, avg_out; + + // Time left till buffering finishes, in ms. + // 0 means not buffering right now. + int64_t time_left_ms; + + // The ranges of already buffered video. + // For the BUFFERING_MODE_DOWNLOAD and BUFFERING_MODE_TIMESHIFT buffering modes, this specifies the ranges + // where efficient seeking is possible. + // For the BUFFERING_MODE_STREAM and BUFFERING_MODE_LIVE buffering modes, this describes the oldest and + // newest item in the buffer. + int n_ranges; + + // Flexible array member. + // For example, if n_ranges is 2, just allocate using + // `state = malloc(sizeof(struct buffering_state) + 2*sizeof(struct buffering_range))` + // and we can use state->ranges[0] and so on. + // This is cool because we don't need to allocate two blocks of memory and we can just call + // `free` once to free the whole thing. + // More precisely, we don't need to define a new function we can give to value_notifier_init + // as the value destructor, we can just use `free`. + struct buffering_range ranges[]; +}; + +#define BUFFERING_STATE_SIZE(n_ranges) (sizeof(struct buffering_state) + (n_ranges) * sizeof(struct buffering_range)) + +struct video_info; +struct gstplayer; +struct flutterpi; +struct notifier; + +typedef struct _GstStructure GstStructure; + +struct async_completer { + void_callback_t on_done; + void (*on_error)(void *userdata, GError *error); + void *userdata; +}; + +/** + * @brief Create a gstreamer player that loads the media from a generic (file, network) URI. + * + * @arg uri The URI to the media. + * @arg format_hint A hint to the format of the video. FORMAT_HINT_NONE means there's no hint. + * @arg play_video Whether the player should play video. + * @arg play_audio Whether the player should play audio. + * @arg userdata The userdata associated with this player. + * @arg headers HTTP headers to use for the network requests. + */ +struct gstplayer *gstplayer_new( + struct flutterpi *flutterpi, + const char *uri, + void *userdata, + bool play_video, + bool play_audio, + GstStructure *headers +); + +/** + * @brief Create a gstreamer player that loads the media from a flutter asset. + * + * @arg asset_path The path of the asset inside the asset bundle. + * @arg package_name The name of the package containing the asset + * @arg play_video Whether the player should play video. + * @arg play_audio Whether the player should play audio. + * @arg userdata The userdata associated with this player + */ +struct gstplayer *gstplayer_new_from_asset(struct flutterpi *flutterpi, const char *asset_path, const char *package_name, bool play_video, bool play_audio, void *userdata); + +/** + * @brief Create a gstreamer player that loads the media from a network URI. + * + * @arg uri The URI to the media. (for example, http://, https://, rtmp://, rtsp://) + * @arg format_hint A hint to the format of the media. FORMAT_HINT_NONE means there's no hint. + * @arg play_video Whether the player should play video. + * @arg play_audio Whether the player should play audio. + * @arg userdata The userdata associated with this player. + * @arg headers HTTP headers to use for the network requests. + */ +struct gstplayer *gstplayer_new_from_network(struct flutterpi *flutterpi, const char *uri, enum format_hint format_hint, bool play_video, bool play_audio, void *userdata, GstStructure *headers); + +/** + * @brief Create a gstreamer player that loads the media from a file URI. 
+ *
+ * @arg uri The file:// URI to the audio or video file.
+ * @arg play_video Whether the player should play video.
+ * @arg play_audio Whether the player should play audio.
+ * @arg userdata The userdata associated with this player.
+ */
+struct gstplayer *gstplayer_new_from_file(struct flutterpi *flutterpi, const char *uri, bool play_video, bool play_audio, void *userdata);
+
+/**
+ * @brief Create a gstreamer player with a custom gstreamer pipeline.
+ *
+ * @arg pipeline_descr The description of the custom pipeline that should be used. Should contain an appsink called "sink".
+ * @arg userdata The userdata associated with this player.
+ */
+struct gstplayer *gstplayer_new_from_pipeline(struct flutterpi *flutterpi, const char *pipeline_descr, void *userdata);
+
+/// Destroy this gstreamer player instance and the resources
+/// associated with it. (texture, gstreamer pipeline, etc)
+///
+/// Should be called on the flutterpi main/platform thread,
+/// because otherwise destroying the gstreamer event bus listener
+/// might be a race condition.
+void gstplayer_destroy(struct gstplayer *player);
+
+/// Set the generic userdata associated with this gstreamer player instance.
+/// Overwrites the userdata set in the constructor and any userdata previously
+/// set using @ref gstplayer_set_userdata.
+/// @arg userdata The new userdata that should be associated with this player.
+void gstplayer_set_userdata(struct gstplayer *player, void *userdata);
+
+/// Get the userdata that was given to the constructor or was previously set using
+/// @ref gstplayer_set_userdata.
+/// @returns userdata associated with this player.
+void *gstplayer_get_userdata(struct gstplayer *player);
+
+/// Get the id of the flutter external texture that this player is rendering into.
+int64_t gstplayer_get_texture_id(struct gstplayer *player);
+
+/// Add an HTTP header (consisting of a string key and value) to the list of HTTP headers that
+/// gstreamer will use when playing back from an HTTP/S URI.
+/// This has no effect after @ref gstplayer_initialize was called.
+void gstplayer_put_http_header(struct gstplayer *player, const char *key, const char *value);
+
+/// Initializes the video playback, i.e. boots up the gstreamer pipeline, starts
+/// buffering the video.
+/// @returns 0 if initialization was successful, errno-style error code if an error occurred.
+int gstplayer_initialize(struct gstplayer *player);
+
+/// Set the current playback state to "playing" if that's not the case already.
+/// @returns 0 if successful, errno-style error code if an error occurred.
+int gstplayer_play(struct gstplayer *player);
+
+/// Set the current playback state to "paused" if that's not the case already.
+/// @returns 0 if successful, errno-style error code if an error occurred.
+int gstplayer_pause(struct gstplayer *player);
+
+/// Get the current playback position.
+/// @returns Current playback position, in milliseconds from the beginning of the video.
+int64_t gstplayer_get_position(struct gstplayer *player);
+
+/// Get the duration of the currently playing medium.
+/// @returns Duration of the current medium in milliseconds, -1 if the duration
+/// is not yet known, or INT64_MAX for live sources.
+int64_t gstplayer_get_duration(struct gstplayer *player);
+
+/// Set whether the video should loop.
+/// @arg looping Whether the video should start playing from the beginning when the
+/// end is reached.
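+/// @arg gapless If true, loop without a gap where possible (via a non-flushing
+/// segment seek, or playbin's about-to-finish pre-queuing); if false, loop by
+/// seeking back to the start when end-of-stream is reached.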
+int gstplayer_set_looping(struct gstplayer *player, bool looping, bool gapless); + +/// Set the playback volume. +/// @arg volume Desired volume as a value between 0 and 1. +int gstplayer_set_volume(struct gstplayer *player, double volume); + +/// Seek to a specific position in the video. +/// @arg position Position to seek to in milliseconds from the beginning of the video. +/// @arg nearest_keyframe If true, seek to the nearest keyframe instead. Might be faster but less accurate. +int gstplayer_seek_to(struct gstplayer *player, int64_t position, bool nearest_keyframe); + +/// Seek to a specific position in the video and call +/// @arg on_seek_done with @arg userdata when done. +int gstplayer_seek_with_completer(struct gstplayer *player, int64_t position, bool nearest_keyframe, struct async_completer completer); + +/// Set the playback speed of the player. +/// 1.0: normal playback speed +/// 0.5: half playback speed +/// 2.0: double playback speed +int gstplayer_set_playback_speed(struct gstplayer *player, double playback_speed); + +int gstplayer_step_forward(struct gstplayer *player); + +int gstplayer_step_backward(struct gstplayer *player); + +void gstplayer_set_audio_balance(struct gstplayer *player, float balance); + +float gstplayer_get_audio_balance(struct gstplayer *player); + +bool gstplayer_set_source(struct gstplayer *p, const char *uri); + +bool gstplayer_set_source_with_completer(struct gstplayer *p, const char *uri, struct async_completer completer); + +struct video_info { + int width, height; + + double fps; + + int64_t duration_ms; + + bool can_seek; + int64_t seek_begin_ms, seek_end_ms; +}; + +/// @brief Get the value notifier for the video info. +/// +/// Gets notified with a value of type `struct video_info*` when the video info changes. +/// The listeners will be called on an internal gstreamer thread. +/// So you need to make sure you do the proper rethreading in the listener callback. +struct notifier *gstplayer_get_video_info_notifier(struct gstplayer *player); + +struct seeking_info { + bool can_seek; + int64_t seek_begin_ms, seek_end_ms; +}; + +struct notifier *gstplayer_get_seeking_info_notifier(struct gstplayer *player); + +struct notifier *gstplayer_get_duration_notifier(struct gstplayer *player); + +struct notifier *gstplayer_get_eos_notifier(struct gstplayer *player); + +/// @brief Get the value notifier for the buffering state. +/// +/// Gets notified with a value of type `struct buffering_state*` when the buffering state changes. +/// The listeners will be called on the main flutterpi platform thread. +struct notifier *gstplayer_get_buffering_state_notifier(struct gstplayer *player); + +/// @brief Get the change notifier for errors. +/// +/// Gets notified when an error happens. 
(Not yet implemented) +struct notifier *gstplayer_get_error_notifier(struct gstplayer *player); + +#endif diff --git a/src/plugins/gstreamer_video_player.h b/src/plugins/gstreamer_video_player.h index 02e6ed25..4d087b1b 100644 --- a/src/plugins/gstreamer_video_player.h +++ b/src/plugins/gstreamer_video_player.h @@ -1,10 +1,12 @@ -#ifndef _FLUTTERPI_INCLUDE_PLUGINS_OMXPLAYER_VIDEO_PLUGIN_H -#define _FLUTTERPI_INCLUDE_PLUGINS_OMXPLAYER_VIDEO_PLUGIN_H +#ifndef _FLUTTERPI_INCLUDE_PLUGINS_GSTREAMER_VIDEO_PLAYER_H +#define _FLUTTERPI_INCLUDE_PLUGINS_GSTREAMER_VIDEO_PLAYER_H #include "util/collection.h" #include "util/lock_ops.h" #include "util/refcounting.h" +#include + #include "config.h" #if !defined(HAVE_EGL_GLES2) @@ -14,173 +16,9 @@ #include "gles.h" #endif -enum format_hint { FORMAT_HINT_NONE, FORMAT_HINT_MPEG_DASH, FORMAT_HINT_HLS, FORMAT_HINT_SS, FORMAT_HINT_OTHER }; - -enum buffering_mode { BUFFERING_MODE_STREAM, BUFFERING_MODE_DOWNLOAD, BUFFERING_MODE_TIMESHIFT, BUFFERING_MODE_LIVE }; - -struct buffering_range { - int64_t start_ms; - int64_t stop_ms; -}; - -struct buffering_state { - // The percentage that the buffer is filled. - // If this is 100 playback will resume. - int percent; - - // The buffering mode currently used by the pipeline. - enum buffering_mode mode; - - // The average input / consumption speed in bytes per second. - int avg_in, avg_out; - - // Time left till buffering finishes, in ms. - // 0 means not buffering right now. - int64_t time_left_ms; - - // The ranges of already buffered video. - // For the BUFFERING_MODE_DOWNLOAD and BUFFERING_MODE_TIMESHIFT buffering modes, this specifies the ranges - // where efficient seeking is possible. - // For the BUFFERING_MODE_STREAM and BUFFERING_MODE_LIVE buffering modes, this describes the oldest and - // newest item in the buffer. - int n_ranges; - - // Flexible array member. - // For example, if n_ranges is 2, just allocate using - // `state = malloc(sizeof(struct buffering_state) + 2*sizeof(struct buffering_range))` - // and we can use state->ranges[0] and so on. - // This is cool because we don't need to allocate two blocks of memory and we can just call - // `free` once to free the whole thing. - // More precisely, we don't need to define a new function we can give to value_notifier_init - // as the value destructor, we can just use `free`. - struct buffering_range ranges[]; -}; - -#define BUFFERING_STATE_SIZE(n_ranges) (sizeof(struct buffering_state) + (n_ranges) * sizeof(struct buffering_range)) - -struct video_info; -struct gstplayer; -struct flutterpi; - -/// Create a gstreamer video player that loads the video from a flutter asset. -/// @arg asset_path The path of the asset inside the asset bundle. -/// @arg package_name The name of the package containing the asset -/// @arg userdata The userdata associated with this player -struct gstplayer *gstplayer_new_from_asset(struct flutterpi *flutterpi, const char *asset_path, const char *package_name, void *userdata); - -/// Create a gstreamer video player that loads the video from a network URI. -/// @arg uri The URI to the video. (for example, http://, https://, rtmp://, rtsp://) -/// @arg format_hint A hint to the format of the video. FORMAT_HINT_NONE means there's no hint. -/// @arg userdata The userdata associated with this player. -struct gstplayer *gstplayer_new_from_network(struct flutterpi *flutterpi, const char *uri, enum format_hint format_hint, void *userdata); - -/// Create a gstreamer video player that loads the video from a file URI. 
-/// @arg uri The file:// URI to the video. -/// @arg userdata The userdata associated with this player. -struct gstplayer *gstplayer_new_from_file(struct flutterpi *flutterpi, const char *uri, void *userdata); - -/// Create a gstreamer video player with a custom gstreamer pipeline. -/// @arg pipeline The description of the custom pipeline that should be used. Should contain an appsink called "sink". -/// @arg userdata The userdata associated with this player. -struct gstplayer *gstplayer_new_from_pipeline(struct flutterpi *flutterpi, const char *pipeline, void *userdata); - -/// Destroy this gstreamer player instance and the resources -/// associated with it. (texture, gstreamer pipeline, etc) -/// -/// Should be called on the flutterpi main/platform thread, -/// because otherwise destroying the gstreamer event bus listener -/// might be a race condition. -void gstplayer_destroy(struct gstplayer *player); - -DECLARE_LOCK_OPS(gstplayer) - -/// Set the generic userdata associated with this gstreamer player instance. -/// Overwrites the userdata set in the constructor and any userdata previously -/// set using @ref gstplayer_set_userdata_locked. -/// @arg userdata The new userdata that should be associated with this player. -void gstplayer_set_userdata_locked(struct gstplayer *player, void *userdata); - -/// Get the userdata that was given to the constructor or was previously set using -/// @ref gstplayer_set_userdata_locked. -/// @returns userdata associated with this player. -void *gstplayer_get_userdata_locked(struct gstplayer *player); - -/// Get the id of the flutter external texture that this player is rendering into. -int64_t gstplayer_get_texture_id(struct gstplayer *player); - -//void gstplayer_set_info_callback(struct gstplayer *player, gstplayer_info_callback_t cb, void *userdata); - -//void gstplayer_set_buffering_callback(struct gstplayer *player, gstplayer_buffering_callback_t callback, void *userdata); - -/// Add a http header (consisting of a string key and value) to the list of http headers that -/// gstreamer will use when playing back from a HTTP/S URI. -/// This has no effect after @ref gstplayer_initialize was called. -void gstplayer_put_http_header(struct gstplayer *player, const char *key, const char *value); - -/// Initializes the video playback, i.e. boots up the gstreamer pipeline, starts -/// buffering the video. -/// @returns 0 if initialization was successfull, errno-style error code if an error ocurred. -int gstplayer_initialize(struct gstplayer *player); - -/// Get the video info. If the video info (format, size, etc) is already known, @arg callback will be called -/// synchronously, inside this call. If the video info is not known, @arg callback will be called on the flutter-pi -/// platform thread as soon as the info is known. -/// @returns The handle for the deferred callback. -//struct sd_event_source_generic *gstplayer_probe_video_info(struct gstplayer *player, gstplayer_info_callback_t callback, void *userdata); - -/// Set the current playback state to "playing" if that's not the case already. -/// @returns 0 if initialization was successfull, errno-style error code if an error ocurred. -int gstplayer_play(struct gstplayer *player); - -/// Sets the current playback state to "paused" if that's not the case already. -/// @returns 0 if initialization was successfull, errno-style error code if an error ocurred. -int gstplayer_pause(struct gstplayer *player); - -/// Get the current playback position. 
-/// @returns Current playback position, in milliseconds from the beginning of the video. -int64_t gstplayer_get_position(struct gstplayer *player); - -/// Set whether the video should loop. -/// @arg looping Whether the video should start playing from the beginning when the -/// end is reached. -int gstplayer_set_looping(struct gstplayer *player, bool looping); - -/// Set the playback volume. -/// @arg volume Desired volume as a value between 0 and 1. -int gstplayer_set_volume(struct gstplayer *player, double volume); - -/// Seek to a specific position in the video. -/// @arg position Position to seek to in milliseconds from the beginning of the video. -/// @arg nearest_keyframe If true, seek to the nearest keyframe instead. Might be faster but less accurate. -int gstplayer_seek_to(struct gstplayer *player, int64_t position, bool nearest_keyframe); - -/// Set the playback speed of the player. -/// 1.0: normal playback speed -/// 0.5: half playback speed -/// 2.0: double playback speed -int gstplayer_set_playback_speed(struct gstplayer *player, double playback_speed); - -int gstplayer_step_forward(struct gstplayer *player); - -int gstplayer_step_backward(struct gstplayer *player); - -/// @brief Get the value notifier for the video info. -/// -/// Gets notified with a value of type `struct video_info*` when the video info changes. -/// The listeners will be called on an internal gstreamer thread. -/// So you need to make sure you do the proper rethreading in the listener callback. -struct notifier *gstplayer_get_video_info_notifier(struct gstplayer *player); - -/// @brief Get the value notifier for the buffering state. -/// -/// Gets notified with a value of type `struct buffering_state*` when the buffering state changes. -/// The listeners will be called on the main flutterpi platform thread. -struct notifier *gstplayer_get_buffering_state_notifier(struct gstplayer *player); - -/// @brief Get the change notifier for errors. -/// -/// Gets notified when an error happens. (Not yet implemented) -struct notifier *gstplayer_get_error_notifier(struct gstplayer *player); +#if !defined(HAVE_GSTREAMER_VIDEO_PLAYER) + #error "gstreamer_video_player.h can't be used when building without gstreamer video player." 
+#endif struct video_frame; struct gl_renderer; @@ -212,19 +50,6 @@ DECLARE_REF_OPS(frame_interface) typedef struct _GstVideoInfo GstVideoInfo; typedef struct _GstVideoMeta GstVideoMeta; -struct video_info { - int width, height; - double fps; - int64_t duration_ms; - bool can_seek; - int64_t seek_begin_ms, seek_end_ms; -}; - -struct frame_info { - const GstVideoInfo *gst_info; - uint32_t drm_format; - EGLint egl_color_space; -}; struct _GstSample; @@ -238,4 +63,13 @@ struct gl_texture_frame; const struct gl_texture_frame *frame_get_gl_frame(struct video_frame *frame); +struct texture; +struct gl_renderer; +typedef struct _GstElement GstElement; +struct tracer; + +bool flutter_gl_texture_sink_patch(GstElement *element, struct texture *texture, struct gl_renderer *renderer, struct tracer *tracer); + +GstElement *flutter_gl_texture_sink_new(struct texture *texture, struct gl_renderer *renderer, struct tracer *tracer); + #endif diff --git a/src/plugins/gstreamer_video_player/flutter_texture_sink.c b/src/plugins/gstreamer_video_player/flutter_texture_sink.c new file mode 100644 index 00000000..201d9f77 --- /dev/null +++ b/src/plugins/gstreamer_video_player/flutter_texture_sink.c @@ -0,0 +1,344 @@ +#include +#include +#include +#include +#include + +#include "plugins/gstreamer_video_player.h" +#include "plugins/gstplayer.h" +#include "texture_registry.h" +#include "util/logging.h" +#include "tracer.h" + +#if !defined(HAVE_EGL_GLES2) + #error "gstreamer video player requires EGL and OpenGL ES2 support." +#else + #include "egl.h" + #include "gles.h" +#endif + +struct texture_sink { + struct texture *fl_texture; + struct frame_interface *interface; + struct tracer *tracer; +}; + +static void on_destroy_texture_frame(const struct texture_frame *texture_frame, void *userdata) { + struct video_frame *frame; + + (void) texture_frame; + + ASSERT_NOT_NULL(texture_frame); + ASSERT_NOT_NULL(userdata); + + frame = userdata; + + frame_destroy(frame); +} + +static void on_appsink_eos(GstAppSink *appsink, void *userdata) { + gboolean ok; + + ASSERT_NOT_NULL(appsink); + ASSERT_NOT_NULL(userdata); + + (void) userdata; + + LOG_DEBUG("on_appsink_eos()\n"); + + // this method is called from the streaming thread. + // we shouldn't access the player directly here, it could change while we use it. + // post a message to the gstreamer bus instead, will be handled by + // @ref on_bus_message. + ok = gst_element_post_message( + GST_ELEMENT(appsink), + gst_message_new_application(GST_OBJECT(appsink), gst_structure_new_empty("appsink-eos")) + ); + if (ok == FALSE) { + LOG_ERROR("Could not post appsink end-of-stream event to the message bus.\n"); + } +} + +static GstFlowReturn on_appsink_new_preroll(GstAppSink *appsink, void *userdata) { + struct video_frame *frame; + GstSample *sample; + + ASSERT_NOT_NULL(appsink); + ASSERT_NOT_NULL(userdata); + + struct texture_sink *meta = userdata; + + TRACER_BEGIN(meta->tracer, "on_appsink_new_preroll()"); + + TRACER_BEGIN(meta->tracer, "gst_app_sink_try_pull_preroll()"); + sample = gst_app_sink_try_pull_preroll(appsink, 0); + TRACER_END(meta->tracer, "gst_app_sink_try_pull_preroll()"); + + if (sample == NULL) { + LOG_ERROR("gstreamer returned a NULL sample.\n"); + goto fail_stop_tracing; + } + + TRACER_BEGIN(meta->tracer, "frame_new()"); + // supply video info here + frame = frame_new(meta->interface, sample, NULL); + TRACER_END(meta->tracer, "frame_new()"); + + // the frame has a reference on the sample internally. 
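+    // Because of that, dropping our reference here is safe: the sample stays
+    // alive until frame_destroy() runs, which on_destroy_texture_frame()
+    // calls once the engine is done with the pushed texture frame.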
+ gst_sample_unref(sample); + + if (frame != NULL) { + TRACER_BEGIN(meta->tracer, "texture_push_frame()"); + texture_push_frame( + meta->fl_texture, + &(struct texture_frame){ + .gl = *frame_get_gl_frame(frame), + .destroy = on_destroy_texture_frame, + .userdata = frame, + } + ); + TRACER_END(meta->tracer, "texture_push_frame()"); + } + + TRACER_END(meta->tracer, "on_appsink_new_preroll()"); + return GST_FLOW_OK; + +fail_stop_tracing: + TRACER_END(meta->tracer, "on_appsink_new_preroll()"); + return GST_FLOW_ERROR; +} + +static GstFlowReturn on_appsink_new_sample(GstAppSink *appsink, void *userdata) { + struct video_frame *frame; + GstSample *sample; + + ASSERT_NOT_NULL(appsink); + ASSERT_NOT_NULL(userdata); + + struct texture_sink *meta = userdata; + + TRACER_BEGIN(meta->tracer, "on_appsink_new_sample()"); + + TRACER_BEGIN(meta->tracer, "gst_app_sink_try_pull_sample()"); + sample = gst_app_sink_try_pull_sample(appsink, 0); + TRACER_END(meta->tracer, "gst_app_sink_try_pull_sample()"); + + if (sample == NULL) { + LOG_ERROR("gstreamer returned a NULL sample.\n"); + goto fail_stop_tracing; + } + + TRACER_BEGIN(meta->tracer, "frame_new()"); + // supply video info here + frame = frame_new(meta->interface, sample, NULL); + TRACER_END(meta->tracer, "frame_new()"); + + // the frame has a reference on the sample internally. + gst_sample_unref(sample); + + if (frame != NULL) { + TRACER_BEGIN(meta->tracer, "texture_push_frame()"); + texture_push_frame( + meta->fl_texture, + &(struct texture_frame){ + .gl = *frame_get_gl_frame(frame), + .destroy = on_destroy_texture_frame, + .userdata = frame, + } + ); + TRACER_END(meta->tracer, "texture_push_frame()"); + } + + TRACER_END(meta->tracer, "on_appsink_new_preroll()"); + return GST_FLOW_OK; + +fail_stop_tracing: + TRACER_END(meta->tracer, "on_appsink_new_preroll()"); + return GST_FLOW_ERROR; +} + +static void on_appsink_cbs_destroy(void *userdata) { + struct texture_sink *meta; + + LOG_DEBUG("on_appsink_cbs_destroy()\n"); + ASSERT_NOT_NULL(userdata); + + meta = userdata; + + // meta->texture is not owned by us. 
freed by the player + tracer_unref(meta->tracer); + frame_interface_unref(meta->interface); + free(meta); +} + +static GstCaps *caps_for_frame_interface(struct frame_interface *interface) { + GstCaps *caps = gst_caps_new_empty(); + if (caps == NULL) { + return NULL; + } + + /// TODO: Add dmabuf caps here + for_each_format_in_frame_interface(i, format, interface) { + GstVideoFormat gst_format = gst_video_format_from_drm_format(format->format); + if (gst_format == GST_VIDEO_FORMAT_UNKNOWN) { + continue; + } + + gst_caps_append(caps, gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, gst_video_format_to_string(gst_format), NULL)); + } + + return caps; +} + +#if THIS_GSTREAMER_VER >= GSTREAMER_VER(1, 20, 0) +static gboolean on_appsink_new_event(GstAppSink *appsink, gpointer userdata) { + (void) userdata; + + GstMiniObject *obj; + + do { + obj = gst_app_sink_try_pull_object(appsink, 0); + if (obj == NULL) { + return FALSE; + } + + if (!GST_IS_EVENT(obj)) { + LOG_DEBUG("Got non-event from gst_app_sink_try_pull_object.\n"); + } + } while (obj && !GST_IS_EVENT(obj)); + + // GstEvent *event = GST_EVENT_CAST(obj); + + // char *str = gst_structure_to_string(gst_event_get_structure(event)); + // LOG_DEBUG("Got event: %s\n", str); + // g_free(str); + + gst_mini_object_unref(obj); + + return FALSE; +} +#endif + +UNUSED static gboolean on_appsink_propose_allocation(GstAppSink *appsink, GstQuery *query, gpointer userdata) { + (void) appsink; + (void) userdata; + + gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL); + + return FALSE; +} + +UNUSED static GstPadProbeReturn on_query_appsink_pad(GstPad *pad, GstPadProbeInfo *info, void *userdata) { + GstQuery *query; + + (void) pad; + (void) userdata; + + query = gst_pad_probe_info_get_query(info); + if (query == NULL) { + LOG_DEBUG("Couldn't get query from pad probe info.\n"); + return GST_PAD_PROBE_OK; + } + + if (GST_QUERY_TYPE(query) != GST_QUERY_ALLOCATION) { + return GST_PAD_PROBE_OK; + } + + gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL); + + return GST_PAD_PROBE_HANDLED; +} + +bool flutter_gl_texture_sink_patch(GstElement *element, struct texture *texture, struct gl_renderer *renderer, struct tracer *tracer) { + ASSERT_NOT_NULL(element); + ASSERT_NOT_NULL(texture); + ASSERT_NOT_NULL(renderer); + + struct texture_sink *meta = calloc(1, sizeof(struct texture_sink)); + if (meta == NULL) { + return false; + } + + meta->fl_texture = texture; + + meta->interface = frame_interface_new(renderer); + if (meta->interface == NULL) { + free(meta); + return false; + } + + GstCaps *caps = caps_for_frame_interface(meta->interface); + if (caps == NULL) { + frame_interface_unref(meta->interface); + free(meta); + return false; + } + + GstBaseSink *basesink = GST_BASE_SINK(element); + GstAppSink *appsink = GST_APP_SINK(element); + + gst_base_sink_set_max_lateness(basesink, 20 * GST_MSECOND); + gst_base_sink_set_qos_enabled(basesink, TRUE); + gst_base_sink_set_sync(basesink, TRUE); + gst_app_sink_set_max_buffers(appsink, 2); + gst_app_sink_set_emit_signals(appsink, TRUE); + gst_app_sink_set_drop(appsink, FALSE); + gst_app_sink_set_caps(appsink, caps); + gst_caps_unref(caps); + + GstAppSinkCallbacks cbs; + memset(&cbs, 0, sizeof(cbs)); + + cbs.new_preroll = on_appsink_new_preroll; + cbs.new_sample = on_appsink_new_sample; + cbs.eos = on_appsink_eos; +#if THIS_GSTREAMER_VER >= GSTREAMER_VER(1, 20, 0) + cbs.new_event = on_appsink_new_event; +#endif + +#if THIS_GSTREAMER_VER >= GSTREAMER_VER(1, 24, 0) + cbs.propose_allocation = 
    on_appsink_propose_allocation;
+#endif
+
+    // Use a regular `if` instead of conditional compilation so that
+    // this fallback path is still type-checked even for GStreamer >= 1.24.0.
+    if (THIS_GSTREAMER_VER < GSTREAMER_VER(1, 24, 0)) {
+        GstPad *pad = gst_element_get_static_pad(element, "sink");
+        if (pad == NULL) {
+            LOG_ERROR("Couldn't get static pad `sink` from appsink.\n");
+            frame_interface_unref(meta->interface);
+            free(meta);
+            return false;
+        }
+
+        gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM, on_query_appsink_pad, NULL, NULL);
+    }
+
+    meta->tracer = tracer_ref(tracer);
+
+    gst_app_sink_set_callbacks(
+        GST_APP_SINK(appsink),
+        &cbs,
+        meta,
+        on_appsink_cbs_destroy
+    );
+
+    return true;
+}
+
+GstElement *flutter_gl_texture_sink_new(struct texture *texture, struct gl_renderer *renderer, struct tracer *tracer) {
+    ASSERT_NOT_NULL(texture);
+    ASSERT_NOT_NULL(renderer);
+
+    GstElement *element = gst_element_factory_make("appsink", "appsink");
+    if (element == NULL) {
+        return NULL;
+    }
+
+    if (!flutter_gl_texture_sink_patch(element, texture, renderer, tracer)) {
+        gst_object_unref(element);
+        return NULL;
+    }
+
+    return element;
+}
diff --git a/src/plugins/gstreamer_video_player/frame.c b/src/plugins/gstreamer_video_player/frame.c
index 66498ce6..a6225f90 100644
--- a/src/plugins/gstreamer_video_player/frame.c
+++ b/src/plugins/gstreamer_video_player/frame.c
@@ -16,16 +16,15 @@
 // This will error if we don't have EGL / OpenGL ES support.
 #include "gl_renderer.h"
 #include "plugins/gstreamer_video_player.h"
+#include "plugins/gstplayer.h"
 #include "util/logging.h"
 #include "util/refcounting.h"
 
 #define MAX_N_PLANES 4
 
-#define GSTREAMER_VER(major, minor, patch) ((((major) &0xFF) << 16) | (((minor) &0xFF) << 8) | ((patch) &0xFF))
-#define THIS_GSTREAMER_VER GSTREAMER_VER(LIBGSTREAMER_VERSION_MAJOR, LIBGSTREAMER_VERSION_MINOR, LIBGSTREAMER_VERSION_PATCH)
-
 #define DRM_FOURCC_FORMAT "c%c%c%c"
-#define DRM_FOURCC_ARGS(format) (format) & 0xFF, ((format) >> 8) & 0xFF, ((format) >> 16) & 0xFF, ((format) >> 24) & 0xFF
+#define DRM_FOURCC_ARGS(format) \
+    (char) ((format) & 0xFF), (char) (((format) >> 8) & 0xFF), (char) (((format) >> 16) & 0xFF), (char) (((format) >> 24) & 0xFF)
 
 struct video_frame {
     GstSample *sample;
@@ -38,7 +37,6 @@ struct video_frame {
     int dmabuf_fds[MAX_N_PLANES];
 
     EGLImageKHR image;
-    size_t width, height;
 
     struct gl_texture_frame gl_frame;
 };
@@ -122,6 +120,10 @@ static bool query_formats(
         }
     }
 
+    if (n_modified_formats == 0 || max_n_modifiers == 0) {
+        goto fail_free_formats;
+    }
+
     modified_formats = malloc(n_modified_formats * sizeof *modified_formats);
     if (modified_formats == NULL) {
         goto fail_free_formats;
@@ -142,7 +144,7 @@
         egl_ok = egl_query_dmabuf_modifiers(display, formats[i], max_n_modifiers, modifiers, external_only, &n_modifiers);
         if (egl_ok != EGL_TRUE) {
             LOG_ERROR("Could not query dmabuf formats supported by EGL.\n");
-            goto fail_free_formats;
+            goto fail_free_external_only;
         }
 
         LOG_DEBUG_UNPREFIXED("%" DRM_FOURCC_FORMAT ", ", DRM_FOURCC_ARGS(formats[i]));
@@ -164,6 +166,9 @@
     *formats_out = modified_formats;
 
     return true;
 
+fail_free_external_only:
+    free(external_only);
+
 fail_free_modifiers:
     free(modifiers);
@@ -359,14 +364,18 @@ UNUSED int dup_gst_buffer_range_as_dmabuf(struct gbm_device *gbm_device, GstBuff
         return -1;
     }
 
-    bo = gbm_bo_create(gbm_device, map_info.size, 1, GBM_FORMAT_R8, GBM_BO_USE_LINEAR);
+    // Create a square texture large enough to fit our bytes instead of one with only one huge row,
+    // because some drivers have limitations
on the row length. (Intel) + uint32_t dim = (uint32_t) ceil(sqrt(map_info.size)); + + bo = gbm_bo_create(gbm_device, dim, dim, GBM_FORMAT_R8, GBM_BO_USE_LINEAR); if (bo == NULL) { LOG_ERROR("Couldn't create GBM BO to copy video frame into.\n"); goto fail_unmap_buffer; } map_data = NULL; - map = gbm_bo_map(bo, 0, 0, map_info.size, 1, GBM_BO_TRANSFER_WRITE, &stride, &map_data); + map = gbm_bo_map(bo, 0, 0, dim, dim, GBM_BO_TRANSFER_WRITE, &stride, &map_data); if (map == NULL) { LOG_ERROR("Couldn't mmap GBM BO to copy video frame into it.\n"); goto fail_destroy_bo; @@ -415,14 +424,18 @@ UNUSED int dup_gst_memory_as_dmabuf(struct gbm_device *gbm_device, GstMemory *me return -1; } - bo = gbm_bo_create(gbm_device, map_info.size, 1, GBM_FORMAT_R8, GBM_BO_USE_LINEAR); + // Create a square texture large enough to fit our bytes instead of one with only one huge row, + // because some drivers have limitations on the row length. (Intel) + uint32_t dim = (uint32_t) ceil(sqrt(map_info.size)); + + bo = gbm_bo_create(gbm_device, dim, dim, GBM_FORMAT_R8, GBM_BO_USE_LINEAR); if (bo == NULL) { LOG_ERROR("Couldn't create GBM BO to copy video frame into.\n"); goto fail_unmap_buffer; } map_data = NULL; - map = gbm_bo_map(bo, 0, 0, map_info.size, 1, GBM_BO_TRANSFER_WRITE, &stride, &map_data); + map = gbm_bo_map(bo, 0, 0, dim, dim, GBM_BO_TRANSFER_WRITE, &stride, &map_data); if (map == NULL) { LOG_ERROR("Couldn't mmap GBM BO to copy video frame into it.\n"); goto fail_destroy_bo; @@ -558,6 +571,11 @@ get_plane_infos(GstBuffer *buffer, const GstVideoInfo *info, struct gbm_device * n_planes = GST_VIDEO_INFO_N_PLANES(info); + if (n_planes <= 0 || n_planes > MAX_N_PLANES) { + LOG_ERROR("Unsupported number of planes in video frame.\n"); + return EINVAL; + } + // There's so many ways to get the plane sizes. // 1. Preferably we should use the video meta. // 2. If that doesn't work, we'll use gst_video_info_align_full() with the video info. @@ -597,6 +615,8 @@ get_plane_infos(GstBuffer *buffer, const GstVideoInfo *info, struct gbm_device * has_plane_sizes = true; } + ASSERT_MSG(has_plane_sizes, "Couldn't determine video frame plane sizes.\n"); + for (int i = 0; i < n_planes; i++) { size_t offset_in_memory = 0; size_t offset_in_buffer = 0; @@ -619,14 +639,22 @@ get_plane_infos(GstBuffer *buffer, const GstVideoInfo *info, struct gbm_device * goto fail_close_fds; } + static bool logged_dmabuf_feedback = false; + if (n_memories != 1) { + if (!logged_dmabuf_feedback) { + LOG_DEBUG("INFO: Flutter-Pi is using manual dmabuf uploads to show video frames. This can result in poor performance.\n"); + logged_dmabuf_feedback = true; + } + ok = dup_gst_buffer_range_as_dmabuf(gbm_device, buffer, memory_index, n_memories); if (ok < 0) { - LOG_ERROR("Could not duplicate gstreamer buffer range as dmabuf.\n"); + LOG_ERROR("Could not upload gstreamer buffer range into dmabufs.\n"); ok = EIO; goto fail_close_fds; } + plane_infos[i].fd = ok; } else { memory = gst_buffer_peek_memory(buffer, memory_index); @@ -640,18 +668,23 @@ get_plane_infos(GstBuffer *buffer, const GstVideoInfo *info, struct gbm_device * ok = dup(ok); if (ok < 0) { - ok = errno; + ok = errno ? errno : EIO; LOG_ERROR("Could not dup fd. dup: %s\n", strerror(ok)); goto fail_close_fds; } plane_infos[i].fd = ok; } else { + if (!logged_dmabuf_feedback) { + LOG_DEBUG("INFO: Flutter-Pi is using manual dmabuf uploads to show video frames. 
This can result in poor performance.\n"); + logged_dmabuf_feedback = true; + } + /// TODO: When duping, duplicate all non-dmabuf memories into one /// gbm buffer instead. ok = dup_gst_memory_as_dmabuf(gbm_device, memory); if (ok < 0) { - LOG_ERROR("Could not duplicate gstreamer memory as dmabuf.\n"); + LOG_ERROR("Could not upload gstreamer memory into dmabuf.\n"); ok = EIO; goto fail_close_fds; } @@ -674,7 +707,7 @@ get_plane_infos(GstBuffer *buffer, const GstVideoInfo *info, struct gbm_device * fail_close_fds: for (int j = i - 1; j > 0; j--) { - close(plane_infos[i].fd); + close(plane_infos[j].fd); } return ok; } @@ -683,6 +716,8 @@ get_plane_infos(GstBuffer *buffer, const GstVideoInfo *info, struct gbm_device * } static uint32_t drm_format_from_gst_info(const GstVideoInfo *info) { + PRAGMA_DIAGNOSTIC_PUSH + PRAGMA_DIAGNOSTIC_IGNORED("-Wswitch-enum") switch (GST_VIDEO_INFO_FORMAT(info)) { case GST_VIDEO_FORMAT_YUY2: return DRM_FORMAT_YUYV; case GST_VIDEO_FORMAT_YVYU: return DRM_FORMAT_YVYU; @@ -716,6 +751,7 @@ static uint32_t drm_format_from_gst_info(const GstVideoInfo *info) { case GST_VIDEO_FORMAT_xBGR: return DRM_FORMAT_RGBX8888; default: return DRM_FORMAT_INVALID; } + PRAGMA_DIAGNOSTIC_POP } ATTR_CONST GstVideoFormat gst_video_format_from_drm_format(uint32_t drm_format) { @@ -802,7 +838,7 @@ static EGLint egl_vertical_chroma_siting_from_gst_info(const GstVideoInfo *info) } } -struct video_frame *frame_new(struct frame_interface *interface, GstSample *sample, const GstVideoInfo *info) { +static struct video_frame *frame_new_egl_imported(struct frame_interface *interface, GstSample *sample, const GstVideoInfo *info) { #define PUT_ATTR(_key, _value) \ do { \ assert(attr_index + 2 <= ARRAY_SIZE(attributes)); \ @@ -811,12 +847,11 @@ struct video_frame *frame_new(struct frame_interface *interface, GstSample *samp } while (false) struct video_frame *frame; struct plane_info planes[MAX_N_PLANES]; - GstVideoInfo _info; + GstVideoInfo video_info; EGLBoolean egl_ok; GstBuffer *buffer; EGLImageKHR egl_image; gboolean gst_ok; - uint32_t drm_format; GstCaps *caps; GLuint texture; GLenum gl_error; @@ -838,13 +873,13 @@ struct video_frame *frame_new(struct frame_interface *interface, GstSample *samp return NULL; } - info = &_info; - - gst_ok = gst_video_info_from_caps(&_info, caps); + gst_ok = gst_video_info_from_caps(&video_info, caps); if (gst_ok == FALSE) { - LOG_ERROR("Could not get video info from video sample caps.\n"); + LOG_ERROR("Could not get video info from caps.\n"); return NULL; } + + info = &video_info; } else { caps = NULL; } @@ -854,16 +889,17 @@ struct video_frame *frame_new(struct frame_interface *interface, GstSample *samp height = GST_VIDEO_INFO_HEIGHT(info); n_planes = GST_VIDEO_INFO_N_PLANES(info); - // query the drm format for this sample - drm_format = drm_format_from_gst_info(info); + uint64_t drm_modifier = DRM_FORMAT_MOD_LINEAR; + uint32_t drm_format = drm_format_from_gst_info(info); if (drm_format == DRM_FORMAT_INVALID) { LOG_ERROR("Video format has no EGL equivalent.\n"); return NULL; } + bool external_only; for_each_format_in_frame_interface(i, format, interface) { - if (format->format == drm_format && format->modifier == DRM_FORMAT_MOD_LINEAR) { + if (format->format == drm_format && format->modifier == drm_modifier) { external_only = format->external_only; goto format_supported; } @@ -872,7 +908,7 @@ struct video_frame *frame_new(struct frame_interface *interface, GstSample *samp LOG_ERROR( "Video format is not supported by EGL: %" DRM_FOURCC_FORMAT " (modifier: %" 
PRIu64 ").\n", DRM_FOURCC_ARGS(drm_format), - (uint64_t) DRM_FORMAT_MOD_LINEAR + (uint64_t) drm_modifier ); return NULL; @@ -1096,9 +1132,9 @@ struct video_frame *frame_new(struct frame_interface *interface, GstSample *samp frame->drm_format = drm_format; frame->n_dmabuf_fds = n_planes; frame->dmabuf_fds[0] = planes[0].fd; - frame->dmabuf_fds[1] = planes[1].fd; - frame->dmabuf_fds[2] = planes[2].fd; - frame->dmabuf_fds[3] = planes[3].fd; + frame->dmabuf_fds[1] = n_planes >= 2 ? planes[1].fd : -1; + frame->dmabuf_fds[2] = n_planes >= 3 ? planes[2].fd : -1; + frame->dmabuf_fds[3] = n_planes >= 4 ? planes[3].fd : -1; frame->image = egl_image; frame->gl_frame.target = target; frame->gl_frame.name = texture; @@ -1129,6 +1165,29 @@ struct video_frame *frame_new(struct frame_interface *interface, GstSample *samp return NULL; } +static struct video_frame *frame_new_egl_duped(struct frame_interface *interface, GstSample *sample, const GstVideoInfo *info) { + (void) interface; + (void) sample; + (void) info; + return NULL; +} + +struct video_frame *frame_new(struct frame_interface *interface, GstSample *sample, const GstVideoInfo *info) { + struct video_frame *frame; + + frame = frame_new_egl_imported(interface, sample, info); + if (frame != NULL) { + return frame; + } + + frame = frame_new_egl_duped(interface, sample, info); + if (frame != NULL) { + return frame; + } + + return NULL; +} + void frame_destroy(struct video_frame *frame) { EGLBoolean egl_ok; int ok; diff --git a/src/plugins/gstreamer_video_player/player.c b/src/plugins/gstreamer_video_player/player.c deleted file mode 100644 index a6555eec..00000000 --- a/src/plugins/gstreamer_video_player/player.c +++ /dev/null @@ -1,1337 +0,0 @@ -#define _GNU_SOURCE - -#include -#include -#include - -#include - -#include -#include -#include -#include -#include -#include -#include -#include - -#include "flutter-pi.h" -#include "notifier_listener.h" -#include "platformchannel.h" -#include "pluginregistry.h" -#include "plugins/gstreamer_video_player.h" -#include "texture_registry.h" -#include "util/collection.h" -#include "util/logging.h" - -#ifdef DEBUG - #define DEBUG_TRACE_BEGIN(player, name) trace_begin(player, name) - #define DEBUG_TRACE_END(player, name) trace_end(player, name) - #define DEBUG_TRACE_INSTANT(player, name) trace_instant(player, name) -#else - #define DEBUG_TRACE_BEGIN(player, name) \ - do { \ - } while (0) - #define DEBUG_TRACE_END(player, name) \ - do { \ - } while (0) - #define DEBUG_TRACE_INSTANT(player, name) \ - do { \ - } while (0) -#endif - -#define LOG_GST_SET_STATE_ERROR(_element) \ - LOG_ERROR( \ - "setting gstreamer playback state failed. gst_element_set_state(element name: %s): GST_STATE_CHANGE_FAILURE\n", \ - GST_ELEMENT_NAME(_element) \ - ) -#define LOG_GST_GET_STATE_ERROR(_element) \ - LOG_ERROR( \ - "last gstreamer state change failed. gst_element_get_state(element name: %s): GST_STATE_CHANGE_FAILURE\n", \ - GST_ELEMENT_NAME(_element) \ - ) - -struct incomplete_video_info { - bool has_resolution; - bool has_fps; - bool has_duration; - bool has_seeking_info; - struct video_info info; -}; - -enum playpause_state { kPaused, kPlaying, kStepping }; - -enum playback_direction { kForward, kBackward }; - -#define PLAYPAUSE_STATE_AS_STRING(playpause_state) \ - ((playpause_state) == kPaused ? "paused" : \ - (playpause_state) == kPlaying ? "playing" : \ - (playpause_state) == kStepping ? 
"stepping" : \ - "?") - -struct gstplayer { - pthread_mutex_t lock; - - struct flutterpi *flutterpi; - void *userdata; - - char *video_uri; - char *pipeline_description; - - GstStructure *headers; - - /** - * @brief The desired playback rate that should be used when @ref playpause_state is kPlayingForward. (should be > 0) - * - */ - double playback_rate_forward; - - /** - * @brief The desired playback rate that should be used when @ref playpause_state is kPlayingBackward. (should be < 0) - * - */ - double playback_rate_backward; - - /** - * @brief True if the video should seemlessly start from the beginning once the end is reached. - * - */ - atomic_bool looping; - - /** - * @brief The desired playback state. Either paused, playing, or single-frame stepping. - * - */ - enum playpause_state playpause_state; - - /** - * @brief The desired playback direction. - * - */ - enum playback_direction direction; - - /** - * @brief The actual, currently used playback rate. - * - */ - double current_playback_rate; - - /** - * @brief The position reported if gstreamer position queries fail (for example, because gstreamer is currently - * seeking to a new position. In that case, fallback_position_ms will be the seeking target position, so we report the - * new position while we're seeking to it) - */ - int64_t fallback_position_ms; - - /** - * @brief True if there's a position that apply_playback_state should seek to. - * - */ - bool has_desired_position; - - /** - * @brief True if gstplayer should seek to the nearest keyframe instead, which is a bit faster. - * - */ - bool do_fast_seeking; - - /** - * @brief The position, if any, that apply_playback_state should seek to. - * - */ - int64_t desired_position_ms; - - struct notifier video_info_notifier, buffering_state_notifier, error_notifier; - - bool is_initialized; - bool has_sent_info; - struct incomplete_video_info info; - - bool has_gst_info; - GstVideoInfo gst_info; - - struct texture *texture; - int64_t texture_id; - - struct frame_interface *frame_interface; - - GstElement *pipeline, *sink; - GstBus *bus; - sd_event_source *busfd_events; - - bool is_live; -}; - -#define MAX_N_PLANES 4 -#define MAX_N_EGL_DMABUF_IMAGE_ATTRIBUTES 6 + 6 * MAX_N_PLANES + 1 - -UNUSED static inline void lock(struct gstplayer *player) { - pthread_mutex_lock(&player->lock); -} - -UNUSED static inline void unlock(struct gstplayer *player) { - pthread_mutex_unlock(&player->lock); -} - -UNUSED static inline void trace_instant(struct gstplayer *player, const char *name) { - return flutterpi_trace_event_instant(player->flutterpi, name); -} - -UNUSED static inline void trace_begin(struct gstplayer *player, const char *name) { - return flutterpi_trace_event_begin(player->flutterpi, name); -} - -UNUSED static inline void trace_end(struct gstplayer *player, const char *name) { - return flutterpi_trace_event_end(player->flutterpi, name); -} - -static int maybe_send_info(struct gstplayer *player) { - struct video_info *duped; - - if (player->info.has_resolution && player->info.has_fps && player->info.has_duration && player->info.has_seeking_info) { - // we didn't send the info yet but we have complete video info now. - // send it! 
- duped = memdup(&(player->info.info), sizeof(player->info.info)); - if (duped == NULL) { - return ENOMEM; - } - - notifier_notify(&player->video_info_notifier, duped); - } - return 0; -} - -static void fetch_duration(struct gstplayer *player) { - gboolean ok; - int64_t duration; - - ok = gst_element_query_duration(player->pipeline, GST_FORMAT_TIME, &duration); - if (ok == FALSE) { - if (player->is_live) { - player->info.info.duration_ms = INT64_MAX; - player->info.has_duration = true; - return; - } else { - LOG_ERROR("Could not fetch duration. (gst_element_query_duration)\n"); - return; - } - } - - player->info.info.duration_ms = GST_TIME_AS_MSECONDS(duration); - player->info.has_duration = true; -} - -static void fetch_seeking(struct gstplayer *player) { - GstQuery *seeking_query; - gboolean ok, seekable; - int64_t seek_begin, seek_end; - - seeking_query = gst_query_new_seeking(GST_FORMAT_TIME); - ok = gst_element_query(player->pipeline, seeking_query); - if (ok == FALSE) { - if (player->is_live) { - player->info.info.can_seek = false; - player->info.info.seek_begin_ms = 0; - player->info.info.seek_end_ms = 0; - player->info.has_seeking_info = true; - return; - } else { - LOG_DEBUG("Could not query seeking info. (gst_element_query)\n"); - return; - } - } - - gst_query_parse_seeking(seeking_query, NULL, &seekable, &seek_begin, &seek_end); - - gst_query_unref(seeking_query); - - player->info.info.can_seek = seekable; - player->info.info.seek_begin_ms = GST_TIME_AS_MSECONDS(seek_begin); - player->info.info.seek_end_ms = GST_TIME_AS_MSECONDS(seek_end); - player->info.has_seeking_info = true; -} - -static void update_buffering_state(struct gstplayer *player) { - struct buffering_state *state; - GstBufferingMode mode; - GstQuery *query; - gboolean ok, busy; - int64_t start, stop, buffering_left; - int n_ranges, percent, avg_in, avg_out; - - query = gst_query_new_buffering(GST_FORMAT_TIME); - ok = gst_element_query(player->pipeline, query); - if (ok == FALSE) { - LOG_ERROR("Could not query buffering state. (gst_element_query)\n"); - goto fail_unref_query; - } - - gst_query_parse_buffering_percent(query, &busy, &percent); - gst_query_parse_buffering_stats(query, &mode, &avg_in, &avg_out, &buffering_left); - - n_ranges = (int) gst_query_get_n_buffering_ranges(query); - - state = malloc(sizeof(*state) + n_ranges * sizeof(struct buffering_range)); - if (state == NULL) { - goto fail_unref_query; - } - - for (int i = 0; i < n_ranges; i++) { - ok = gst_query_parse_nth_buffering_range(query, (unsigned int) i, &start, &stop); - if (ok == FALSE) { - LOG_ERROR("Could not parse %dth buffering range from buffering state. (gst_query_parse_nth_buffering_range)\n", i); - goto fail_free_state; - } - - state->ranges[i].start_ms = GST_TIME_AS_MSECONDS(start); - state->ranges[i].stop_ms = GST_TIME_AS_MSECONDS(stop); - } - - gst_query_unref(query); - - state->percent = percent; - state->mode = - (mode == GST_BUFFERING_STREAM ? BUFFERING_MODE_STREAM : - mode == GST_BUFFERING_DOWNLOAD ? BUFFERING_MODE_DOWNLOAD : - mode == GST_BUFFERING_TIMESHIFT ? BUFFERING_MODE_TIMESHIFT : - mode == GST_BUFFERING_LIVE ? 
BUFFERING_MODE_LIVE : - (assert(0), BUFFERING_MODE_STREAM)); - state->avg_in = avg_in; - state->avg_out = avg_out; - state->time_left_ms = buffering_left; - state->n_ranges = n_ranges; - - notifier_notify(&player->buffering_state_notifier, state); - return; - -fail_free_state: - free(state); - -fail_unref_query: - gst_query_unref(query); -} - -static int init(struct gstplayer *player, bool force_sw_decoders); - -static void maybe_deinit(struct gstplayer *player); - -static int apply_playback_state(struct gstplayer *player) { - GstStateChangeReturn ok; - GstState desired_state, current_state, pending_state; - double desired_rate; - int64_t position; - - desired_state = player->playpause_state == kPlaying ? GST_STATE_PLAYING : GST_STATE_PAUSED; /* use GST_STATE_PAUSED if we're stepping */ - - /// Use 1.0 if we're stepping, otherwise use the stored playback rate for the current direction. - if (player->playpause_state == kStepping) { - desired_rate = player->direction == kForward ? 1.0 : -1.0; - } else { - desired_rate = player->direction == kForward ? player->playback_rate_forward : player->playback_rate_backward; - } - - if (player->current_playback_rate != desired_rate || player->has_desired_position) { - if (player->has_desired_position) { - position = player->desired_position_ms * GST_MSECOND; - } else { - ok = gst_element_query_position(GST_ELEMENT(player->pipeline), GST_FORMAT_TIME, &position); - if (ok == FALSE) { - LOG_ERROR("Could not get the current playback position to apply the playback speed.\n"); - return EIO; - } - } - - if (player->direction == kForward) { - LOG_DEBUG( - "gst_element_seek(..., rate: %f, start: %" GST_TIME_FORMAT ", end: %" GST_TIME_FORMAT ", ...)\n", - desired_rate, - GST_TIME_ARGS(position), - GST_TIME_ARGS(GST_CLOCK_TIME_NONE) - ); - ok = gst_element_seek( - GST_ELEMENT(player->pipeline), - desired_rate, - GST_FORMAT_TIME, - GST_SEEK_FLAG_FLUSH | - (player->do_fast_seeking ? GST_SEEK_FLAG_KEY_UNIT | GST_SEEK_FLAG_SNAP_NEAREST : GST_SEEK_FLAG_ACCURATE), - GST_SEEK_TYPE_SET, - position, - GST_SEEK_TYPE_SET, - GST_CLOCK_TIME_NONE - ); - if (ok == FALSE) { - LOG_ERROR( - "Could not set the new playback speed / playback position (speed: %f, pos: %" GST_TIME_FORMAT ").\n", - desired_rate, - GST_TIME_ARGS(position) - ); - return EIO; - } - } else { - LOG_DEBUG( - "gst_element_seek(..., rate: %f, start: %" GST_TIME_FORMAT ", end: %" GST_TIME_FORMAT ", ...)\n", - desired_rate, - GST_TIME_ARGS(0), - GST_TIME_ARGS(position) - ); - ok = gst_element_seek( - GST_ELEMENT(player->pipeline), - desired_rate, - GST_FORMAT_TIME, - GST_SEEK_FLAG_FLUSH | - (player->do_fast_seeking ? GST_SEEK_FLAG_KEY_UNIT | GST_SEEK_FLAG_SNAP_NEAREST : GST_SEEK_FLAG_ACCURATE), - GST_SEEK_TYPE_SET, - 0, - GST_SEEK_TYPE_SET, - position - ); - - if (ok == FALSE) { - LOG_ERROR( - "Could not set the new playback speed / playback position (speed: %f, pos: %" GST_TIME_FORMAT ").\n", - desired_rate, - GST_TIME_ARGS(position) - ); - return EIO; - } - } - - player->current_playback_rate = desired_rate; - player->fallback_position_ms = GST_TIME_AS_MSECONDS(position); - player->has_desired_position = false; - } - - DEBUG_TRACE_BEGIN(player, "gst_element_get_state"); - ok = gst_element_get_state(player->pipeline, ¤t_state, &pending_state, 0); - DEBUG_TRACE_END(player, "gst_element_get_state"); - - if (ok == GST_STATE_CHANGE_FAILURE) { - LOG_ERROR( - "last gstreamer pipeline state change failed. 
gst_element_get_state(element name: %s): GST_STATE_CHANGE_FAILURE\n", - GST_ELEMENT_NAME(player->pipeline) - ); - DEBUG_TRACE_END(player, "apply_playback_state"); - return EIO; - } - - if (pending_state == GST_STATE_VOID_PENDING) { - if (current_state == desired_state) { - // we're already in the desired state, and we're also not changing it - // no need to do anything. - LOG_DEBUG( - "apply_playback_state(playing: %s): already in desired state and none pending\n", - PLAYPAUSE_STATE_AS_STRING(player->playpause_state) - ); - DEBUG_TRACE_END(player, "apply_playback_state"); - return 0; - } - - LOG_DEBUG( - "apply_playback_state(playing: %s): setting state to %s\n", - PLAYPAUSE_STATE_AS_STRING(player->playpause_state), - gst_element_state_get_name(desired_state) - ); - - DEBUG_TRACE_BEGIN(player, "gst_element_set_state"); - ok = gst_element_set_state(player->pipeline, desired_state); - DEBUG_TRACE_END(player, "gst_element_set_state"); - - if (ok == GST_STATE_CHANGE_FAILURE) { - LOG_GST_SET_STATE_ERROR(player->pipeline); - DEBUG_TRACE_END(player, "apply_playback_state"); - return EIO; - } - } else if (pending_state != desired_state) { - // queue to be executed when pending async state change completes - /// TODO: Implement properly - - LOG_DEBUG( - "apply_playback_state(playing: %s): async state change in progress, setting state to %s\n", - PLAYPAUSE_STATE_AS_STRING(player->playpause_state), - gst_element_state_get_name(desired_state) - ); - - DEBUG_TRACE_BEGIN(player, "gst_element_set_state"); - ok = gst_element_set_state(player->pipeline, desired_state); - DEBUG_TRACE_END(player, "gst_element_set_state"); - - if (ok == GST_STATE_CHANGE_FAILURE) { - LOG_GST_SET_STATE_ERROR(player->pipeline); - DEBUG_TRACE_END(player, "apply_playback_state"); - return EIO; - } - } - - DEBUG_TRACE_END(player, "apply_playback_state"); - return 0; -} - -static void on_bus_message(struct gstplayer *player, GstMessage *msg) { - GstState old, current, pending, requested; - GError *error; - gchar *debug_info; - - DEBUG_TRACE_BEGIN(player, "on_bus_message"); - switch (GST_MESSAGE_TYPE(msg)) { - case GST_MESSAGE_ERROR: - gst_message_parse_error(msg, &error, &debug_info); - - LOG_ERROR( - "gstreamer error: code: %d, domain: %s, msg: %s (debug info: %s)\n", - error->code, - g_quark_to_string(error->domain), - error->message, - debug_info - ); - g_clear_error(&error); - g_free(debug_info); - break; - - case GST_MESSAGE_WARNING: - gst_message_parse_warning(msg, &error, &debug_info); - LOG_ERROR("gstreamer warning: %s (debug info: %s)\n", error->message, debug_info); - g_clear_error(&error); - g_free(debug_info); - break; - - case GST_MESSAGE_INFO: - gst_message_parse_info(msg, &error, &debug_info); - LOG_DEBUG("gstreamer info: %s (debug info: %s)\n", error->message, debug_info); - g_clear_error(&error); - g_free(debug_info); - break; - - case GST_MESSAGE_BUFFERING: { - GstBufferingMode mode; - int64_t buffering_left; - int percent, avg_in, avg_out; - - gst_message_parse_buffering(msg, &percent); - gst_message_parse_buffering_stats(msg, &mode, &avg_in, &avg_out, &buffering_left); - - LOG_DEBUG( - "buffering, src: %s, percent: %d, mode: %s, avg in: %d B/s, avg out: %d B/s, %" GST_TIME_FORMAT "\n", - GST_MESSAGE_SRC_NAME(msg), - percent, - mode == GST_BUFFERING_STREAM ? "stream" : - mode == GST_BUFFERING_DOWNLOAD ? "download" : - mode == GST_BUFFERING_TIMESHIFT ? "timeshift" : - mode == GST_BUFFERING_LIVE ? 
"live" : - "?", - avg_in, - avg_out, - GST_TIME_ARGS(buffering_left * GST_MSECOND) - ); - - /// TODO: GST_MESSAGE_BUFFERING is only emitted when we actually need to wait on some buffering till we can resume the playback. - /// However, the info we send to the callback also contains information on the buffered video ranges. - /// That information is constantly changing, but we only notify the player about it when we actively wait for the buffer to be filled. - DEBUG_TRACE_BEGIN(player, "update_buffering_state"); - update_buffering_state(player); - DEBUG_TRACE_END(player, "update_buffering_state"); - - break; - }; - - case GST_MESSAGE_STATE_CHANGED: - gst_message_parse_state_changed(msg, &old, ¤t, &pending); - LOG_DEBUG( - "state-changed: src: %s, old: %s, current: %s, pending: %s\n", - GST_MESSAGE_SRC_NAME(msg), - gst_element_state_get_name(old), - gst_element_state_get_name(current), - gst_element_state_get_name(pending) - ); - - if (GST_MESSAGE_SRC(msg) == GST_OBJECT(player->pipeline)) { - if (!player->info.has_duration && (current == GST_STATE_PAUSED || current == GST_STATE_PLAYING)) { - // it's our pipeline that changed to either playing / paused, and we don't have info about our video duration yet. - // get that info now. - // technically we can already fetch the duration when the decodebin changed to PAUSED state. - DEBUG_TRACE_BEGIN(player, "fetch video info"); - fetch_duration(player); - fetch_seeking(player); - maybe_send_info(player); - DEBUG_TRACE_END(player, "fetch video info"); - } - } - break; - - case GST_MESSAGE_ASYNC_DONE: break; - - case GST_MESSAGE_LATENCY: - LOG_DEBUG("gstreamer: redistributing latency\n"); - DEBUG_TRACE_BEGIN(player, "gst_bin_recalculate_latency"); - gst_bin_recalculate_latency(GST_BIN(player->pipeline)); - DEBUG_TRACE_END(player, "gst_bin_recalculate_latency"); - break; - - case GST_MESSAGE_EOS: LOG_DEBUG("end of stream, src: %s\n", GST_MESSAGE_SRC_NAME(msg)); break; - - case GST_MESSAGE_REQUEST_STATE: - gst_message_parse_request_state(msg, &requested); - LOG_DEBUG( - "gstreamer state change to %s was requested by %s\n", - gst_element_state_get_name(requested), - GST_MESSAGE_SRC_NAME(msg) - ); - DEBUG_TRACE_BEGIN(player, "gst_element_set_state"); - gst_element_set_state(GST_ELEMENT(player->pipeline), requested); - DEBUG_TRACE_END(player, "gst_element_set_state"); - break; - - case GST_MESSAGE_APPLICATION: - if (player->looping && gst_message_has_name(msg, "appsink-eos")) { - // we have an appsink end of stream event - // and we should be looping, so seek back to start - LOG_DEBUG("appsink eos, seeking back to segment start (flushing)\n"); - DEBUG_TRACE_BEGIN(player, "gst_element_seek"); - gst_element_seek( - GST_ELEMENT(player->pipeline), - player->current_playback_rate, - GST_FORMAT_TIME, - GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, - GST_SEEK_TYPE_SET, - 0, - GST_SEEK_TYPE_SET, - GST_CLOCK_TIME_NONE - ); - DEBUG_TRACE_END(player, "gst_element_seek"); - - apply_playback_state(player); - } - break; - - default: LOG_DEBUG("gstreamer message: %s, src: %s\n", GST_MESSAGE_TYPE_NAME(msg), GST_MESSAGE_SRC_NAME(msg)); break; - } - DEBUG_TRACE_END(player, "on_bus_message"); - return; -} - -static int on_bus_fd_ready(sd_event_source *s, int fd, uint32_t revents, void *userdata) { - struct gstplayer *player; - GstMessage *msg; - - (void) s; - (void) fd; - (void) revents; - - player = userdata; - - DEBUG_TRACE_BEGIN(player, "on_bus_fd_ready"); - - msg = gst_bus_pop(player->bus); - if (msg != NULL) { - on_bus_message(player, msg); - gst_message_unref(msg); - } 
- - DEBUG_TRACE_END(player, "on_bus_fd_ready"); - - return 0; -} - -static GstPadProbeReturn on_query_appsink(GstPad *pad, GstPadProbeInfo *info, void *userdata) { - GstQuery *query; - - (void) pad; - (void) userdata; - - query = gst_pad_probe_info_get_query(info); - if (query == NULL) { - LOG_DEBUG("Couldn't get query from pad probe info.\n"); - return GST_PAD_PROBE_OK; - } - - if (GST_QUERY_TYPE(query) != GST_QUERY_ALLOCATION) { - return GST_PAD_PROBE_OK; - } - - gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL); - - return GST_PAD_PROBE_HANDLED; -} - -static void on_element_added(GstBin *bin, GstElement *element, void *userdata) { - GstElementFactory *factory; - const char *factory_name; - - (void) userdata; - (void) bin; - - factory = gst_element_get_factory(element); - factory_name = gst_plugin_feature_get_name(factory); - - if (g_str_has_prefix(factory_name, "v4l2video") && g_str_has_suffix(factory_name, "dec")) { - gst_util_set_object_arg(G_OBJECT(element), "capture-io-mode", "dmabuf"); - fprintf(stderr, "[gstreamer video player] found gstreamer V4L2 video decoder element with name \"%s\"\n", GST_OBJECT_NAME(element)); - } -} - -static GstPadProbeReturn on_probe_pad(GstPad *pad, GstPadProbeInfo *info, void *userdata) { - struct gstplayer *player; - GstEvent *event; - GstCaps *caps; - gboolean ok; - - (void) pad; - - player = userdata; - event = GST_PAD_PROBE_INFO_EVENT(info); - - if (GST_EVENT_TYPE(event) != GST_EVENT_CAPS) { - return GST_PAD_PROBE_OK; - } - - gst_event_parse_caps(event, &caps); - if (caps == NULL) { - LOG_ERROR("gstreamer: caps event without caps\n"); - return GST_PAD_PROBE_OK; - } - - ok = gst_video_info_from_caps(&player->gst_info, caps); - if (!ok) { - LOG_ERROR("gstreamer: caps event with invalid video caps\n"); - return GST_PAD_PROBE_OK; - } - - player->has_gst_info = true; - - LOG_DEBUG( - "on_probe_pad, fps: %f, res: % 4d x % 4d, format: %s\n", - (double) GST_VIDEO_INFO_FPS_N(&player->gst_info) / GST_VIDEO_INFO_FPS_D(&player->gst_info), - GST_VIDEO_INFO_WIDTH(&player->gst_info), - GST_VIDEO_INFO_HEIGHT(&player->gst_info), - gst_video_format_to_string(player->gst_info.finfo->format) - ); - - player->info.info.width = GST_VIDEO_INFO_WIDTH(&player->gst_info); - player->info.info.height = GST_VIDEO_INFO_HEIGHT(&player->gst_info); - player->info.info.fps = (double) GST_VIDEO_INFO_FPS_N(&player->gst_info) / GST_VIDEO_INFO_FPS_D(&player->gst_info); - player->info.has_resolution = true; - player->info.has_fps = true; - maybe_send_info(player); - - return GST_PAD_PROBE_OK; -} - -static void on_destroy_texture_frame(const struct texture_frame *texture_frame, void *userdata) { - struct video_frame *frame; - - (void) texture_frame; - - ASSERT_NOT_NULL(texture_frame); - ASSERT_NOT_NULL(userdata); - - frame = userdata; - - frame_destroy(frame); -} - -static void on_appsink_eos(GstAppSink *appsink, void *userdata) { - gboolean ok; - - ASSERT_NOT_NULL(appsink); - ASSERT_NOT_NULL(userdata); - - (void) userdata; - - LOG_DEBUG("on_appsink_eos()\n"); - - // this method is called from the streaming thread. - // we shouldn't access the player directly here, it could change while we use it. - // post a message to the gstreamer bus instead, will be handled by - // @ref on_bus_message. 
- ok = gst_element_post_message( - GST_ELEMENT(appsink), - gst_message_new_application(GST_OBJECT(appsink), gst_structure_new_empty("appsink-eos")) - ); - if (ok == FALSE) { - LOG_ERROR("Could not post appsink end-of-stream event to the message bus.\n"); - } -} - -static GstFlowReturn on_appsink_new_preroll(GstAppSink *appsink, void *userdata) { - struct video_frame *frame; - struct gstplayer *player; - GstSample *sample; - - ASSERT_NOT_NULL(appsink); - ASSERT_NOT_NULL(userdata); - - player = userdata; - - sample = gst_app_sink_try_pull_preroll(appsink, 0); - if (sample == NULL) { - LOG_ERROR("gstreamer returned a NULL sample.\n"); - return GST_FLOW_ERROR; - } - - /// TODO: Attempt to upload using gst_gl_upload here - frame = frame_new(player->frame_interface, sample, player->has_gst_info ? &player->gst_info : NULL); - - gst_sample_unref(sample); - - if (frame != NULL) { - texture_push_frame( - player->texture, - &(struct texture_frame){ - .gl = *frame_get_gl_frame(frame), - .destroy = on_destroy_texture_frame, - .userdata = frame, - } - ); - } - - return GST_FLOW_OK; -} - -static GstFlowReturn on_appsink_new_sample(GstAppSink *appsink, void *userdata) { - struct video_frame *frame; - struct gstplayer *player; - GstSample *sample; - - ASSERT_NOT_NULL(appsink); - ASSERT_NOT_NULL(userdata); - - player = userdata; - - /// TODO: Attempt to upload using gst_gl_upload here - sample = gst_app_sink_try_pull_sample(appsink, 0); - if (sample == NULL) { - LOG_ERROR("gstreamer returned a NULL sample.\n"); - return GST_FLOW_ERROR; - } - - frame = frame_new(player->frame_interface, sample, player->has_gst_info ? &player->gst_info : NULL); - - gst_sample_unref(sample); - - if (frame != NULL) { - texture_push_frame( - player->texture, - &(struct texture_frame){ - .gl = *frame_get_gl_frame(frame), - .destroy = on_destroy_texture_frame, - .userdata = frame, - } - ); - } - - return GST_FLOW_OK; -} - -static void on_appsink_cbs_destroy(void *userdata) { - struct gstplayer *player; - - LOG_DEBUG("on_appsink_cbs_destroy()\n"); - ASSERT_NOT_NULL(userdata); - - player = userdata; - - (void) player; -} - -void on_source_setup(GstElement *bin, GstElement *source, gpointer userdata) { - (void) bin; - - if (g_object_class_find_property(G_OBJECT_GET_CLASS(source), "extra-headers") != NULL) { - g_object_set(source, "extra-headers", (GstStructure *) userdata, NULL); - } else { - LOG_ERROR("Failed to set custom HTTP headers because gstreamer source element has no 'extra-headers' property.\n"); - } -} - -static int init(struct gstplayer *player, bool force_sw_decoders) { - GstStateChangeReturn state_change_return; - sd_event_source *busfd_event_source; - GstElement *pipeline, *sink, *src; - GstBus *bus; - GstPad *pad; - GPollFD fd; - GError *error = NULL; - int ok; - - static const char *default_pipeline_descr = "uridecodebin name=\"src\" ! video/x-raw ! 
appsink sync=true name=\"sink\""; - - const char *pipeline_descr; - if (player->pipeline_description != NULL) { - pipeline_descr = player->pipeline_description; - } else { - pipeline_descr = default_pipeline_descr; - } - - pipeline = gst_parse_launch(pipeline_descr, &error); - if (pipeline == NULL) { - LOG_ERROR("Could create GStreamer pipeline from description: %s (pipeline: `%s`)\n", error->message, pipeline_descr); - return error->code; - } - - sink = gst_bin_get_by_name(GST_BIN(pipeline), "sink"); - if (sink == NULL) { - LOG_ERROR("Couldn't find appsink in pipeline bin.\n"); - ok = EINVAL; - goto fail_unref_pipeline; - } - - pad = gst_element_get_static_pad(sink, "sink"); - if (pad == NULL) { - LOG_ERROR("Couldn't get static pad \"sink\" from video sink.\n"); - ok = EINVAL; - goto fail_unref_sink; - } - - gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM, on_query_appsink, player, NULL); - - src = gst_bin_get_by_name(GST_BIN(pipeline), "src"); - - if (player->video_uri != NULL) { - if (src != NULL) { - g_object_set(G_OBJECT(src), "uri", player->video_uri, NULL); - } else { - LOG_ERROR("Couldn't find \"src\" element to configure Video URI.\n"); - } - } - - if (force_sw_decoders) { - if (src != NULL) { - g_object_set(G_OBJECT(src), "force-sw-decoders", force_sw_decoders, NULL); - } else { - LOG_ERROR("Couldn't find \"src\" element to force sw decoding.\n"); - } - } - - if (player->headers != NULL) { - if (src != NULL) { - g_signal_connect(G_OBJECT(src), "source-setup", G_CALLBACK(on_source_setup), player->headers); - } else { - LOG_ERROR("Couldn't find \"src\" element to configure additional HTTP headers.\n"); - } - } - - gst_base_sink_set_max_lateness(GST_BASE_SINK(sink), 20 * GST_MSECOND); - gst_base_sink_set_qos_enabled(GST_BASE_SINK(sink), TRUE); - gst_base_sink_set_sync(GST_BASE_SINK(sink), TRUE); - gst_app_sink_set_max_buffers(GST_APP_SINK(sink), 2); - gst_app_sink_set_emit_signals(GST_APP_SINK(sink), TRUE); - gst_app_sink_set_drop(GST_APP_SINK(sink), FALSE); - - // configure our caps - // we only accept video formats that we can actually upload to EGL - GstCaps *caps = gst_caps_new_empty(); - for_each_format_in_frame_interface(i, format, player->frame_interface) { - GstVideoFormat gst_format = gst_video_format_from_drm_format(format->format); - if (gst_format == GST_VIDEO_FORMAT_UNKNOWN) { - continue; - } - - gst_caps_append(caps, gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, gst_video_format_to_string(gst_format), NULL)); - } - gst_app_sink_set_caps(GST_APP_SINK(sink), caps); - gst_caps_unref(caps); - - gst_app_sink_set_callbacks( - GST_APP_SINK(sink), - &(GstAppSinkCallbacks - ){ .eos = on_appsink_eos, .new_preroll = on_appsink_new_preroll, .new_sample = on_appsink_new_sample, ._gst_reserved = { 0 } }, - player, - on_appsink_cbs_destroy - ); - - gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, on_probe_pad, player, NULL); - - /// FIXME: Make this work for custom pipelines as well. 
- if (src != NULL) { - g_signal_connect(src, "element-added", G_CALLBACK(on_element_added), player); - } else { - LOG_DEBUG("Couldn't find \"src\" element to setup v4l2 'capture-io-mode' to 'dmabuf'.\n"); - } - - if (src != NULL) { - gst_object_unref(src); - src = NULL; - } - - bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline)); - - gst_bus_get_pollfd(bus, &fd); - - flutterpi_sd_event_add_io(&busfd_event_source, fd.fd, EPOLLIN, on_bus_fd_ready, player); - - LOG_DEBUG("Setting state to paused...\n"); - state_change_return = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PAUSED); - if (state_change_return == GST_STATE_CHANGE_NO_PREROLL) { - LOG_DEBUG("Is Live!\n"); - player->is_live = true; - } else { - LOG_DEBUG("Not live!\n"); - player->is_live = false; - } - - player->sink = sink; - /// FIXME: Not sure we need this here. pipeline is floating after gst_parse_launch, which - /// means we should take a reference, but the examples don't increase the refcount. - player->pipeline = pipeline; //gst_object_ref(pipeline); - player->bus = bus; - player->busfd_events = busfd_event_source; - - gst_object_unref(pad); - return 0; - -fail_unref_sink: - gst_object_unref(sink); - -fail_unref_pipeline: - gst_object_unref(pipeline); - - return ok; -} - -static void maybe_deinit(struct gstplayer *player) { - if (player->busfd_events != NULL) { - sd_event_source_unrefp(&player->busfd_events); - } - if (player->sink != NULL) { - gst_object_unref(GST_OBJECT(player->sink)); - player->sink = NULL; - } - if (player->bus != NULL) { - gst_object_unref(GST_OBJECT(player->bus)); - player->bus = NULL; - } - if (player->pipeline != NULL) { - gst_element_set_state(GST_ELEMENT(player->pipeline), GST_STATE_READY); - gst_element_set_state(GST_ELEMENT(player->pipeline), GST_STATE_NULL); - gst_object_unref(GST_OBJECT(player->pipeline)); - player->pipeline = NULL; - } -} - -DEFINE_LOCK_OPS(gstplayer, lock) - -static struct gstplayer *gstplayer_new(struct flutterpi *flutterpi, const char *uri, const char *pipeline_descr, void *userdata) { - struct frame_interface *frame_interface; - struct gstplayer *player; - struct texture *texture; - GstStructure *gst_headers; - int64_t texture_id; - char *uri_owned, *pipeline_descr_owned; - int ok; - - ASSERT_NOT_NULL(flutterpi); - assert((uri != NULL) != (pipeline_descr != NULL)); - - player = malloc(sizeof *player); - if (player == NULL) - return NULL; - - texture = flutterpi_create_texture(flutterpi); - if (texture == NULL) - goto fail_free_player; - - frame_interface = frame_interface_new(flutterpi_get_gl_renderer(flutterpi)); - if (frame_interface == NULL) - goto fail_destroy_texture; - - texture_id = texture_get_id(texture); - - if (uri != NULL) { - uri_owned = strdup(uri); - if (uri_owned == NULL) - goto fail_destroy_frame_interface; - } else { - uri_owned = NULL; - } - - if (pipeline_descr != NULL) { - pipeline_descr_owned = strdup(pipeline_descr); - if (pipeline_descr_owned == NULL) - goto fail_destroy_frame_interface; - } else { - pipeline_descr_owned = NULL; - } - - gst_headers = gst_structure_new_empty("http-headers"); - - ok = pthread_mutex_init(&player->lock, NULL); - if (ok != 0) - goto fail_free_gst_headers; - - ok = value_notifier_init(&player->video_info_notifier, NULL, free /* free(NULL) is a no-op, I checked */); - if (ok != 0) - goto fail_destroy_mutex; - - ok = value_notifier_init(&player->buffering_state_notifier, NULL, free); - if (ok != 0) - goto fail_deinit_video_info_notifier; - - ok = change_notifier_init(&player->error_notifier); - if (ok != 0) - goto 
fail_deinit_buffering_state_notifier; - - player->flutterpi = flutterpi; - player->userdata = userdata; - player->video_uri = uri_owned; - player->pipeline_description = pipeline_descr_owned; - player->headers = gst_headers; - player->playback_rate_forward = 1.0; - player->playback_rate_backward = 1.0; - player->looping = false; - player->playpause_state = kPaused; - player->direction = kForward; - player->current_playback_rate = 1.0; - player->fallback_position_ms = 0; - player->has_desired_position = false; - player->desired_position_ms = 0; - player->has_sent_info = false; - player->info.has_resolution = false; - player->info.has_fps = false; - player->info.has_duration = false; - player->info.has_seeking_info = false; - player->has_gst_info = false; - memset(&player->gst_info, 0, sizeof(player->gst_info)); - player->texture = texture; - player->texture_id = texture_id; - player->frame_interface = frame_interface; - player->pipeline = NULL; - player->sink = NULL; - player->bus = NULL; - player->busfd_events = NULL; - player->is_live = false; - return player; - - //fail_deinit_error_notifier: - //notifier_deinit(&player->error_notifier); - -fail_deinit_buffering_state_notifier: - notifier_deinit(&player->buffering_state_notifier); - -fail_deinit_video_info_notifier: - notifier_deinit(&player->video_info_notifier); - -fail_destroy_mutex: - pthread_mutex_destroy(&player->lock); - -fail_free_gst_headers: - gst_structure_free(gst_headers); - free(uri_owned); - -fail_destroy_frame_interface: - frame_interface_unref(frame_interface); - -fail_destroy_texture: - texture_destroy(texture); - -fail_free_player: - free(player); - - return NULL; -} - -struct gstplayer *gstplayer_new_from_asset(struct flutterpi *flutterpi, const char *asset_path, const char *package_name, void *userdata) { - struct gstplayer *player; - char *uri; - int ok; - - (void) package_name; - - ok = asprintf(&uri, "file://%s/%s", flutterpi_get_asset_bundle_path(flutterpi), asset_path); - if (ok < 0) { - return NULL; - } - - player = gstplayer_new(flutterpi, uri, NULL, userdata); - - free(uri); - - return player; -} - -struct gstplayer *gstplayer_new_from_network(struct flutterpi *flutterpi, const char *uri, enum format_hint format_hint, void *userdata) { - (void) format_hint; - return gstplayer_new(flutterpi, uri, NULL, userdata); -} - -struct gstplayer *gstplayer_new_from_file(struct flutterpi *flutterpi, const char *uri, void *userdata) { - return gstplayer_new(flutterpi, uri, NULL, userdata); -} - -struct gstplayer *gstplayer_new_from_content_uri(struct flutterpi *flutterpi, const char *uri, void *userdata) { - return gstplayer_new(flutterpi, uri, NULL, userdata); -} - -struct gstplayer *gstplayer_new_from_pipeline(struct flutterpi *flutterpi, const char *pipeline, void *userdata) { - return gstplayer_new(flutterpi, NULL, pipeline, userdata); -} - -void gstplayer_destroy(struct gstplayer *player) { - LOG_DEBUG("gstplayer_destroy(%p)\n", player); - notifier_deinit(&player->video_info_notifier); - notifier_deinit(&player->buffering_state_notifier); - notifier_deinit(&player->error_notifier); - maybe_deinit(player); - pthread_mutex_destroy(&player->lock); - if (player->headers != NULL) { - gst_structure_free(player->headers); - } - if (player->video_uri != NULL) { - free(player->video_uri); - } - if (player->pipeline_description != NULL) { - free(player->pipeline_description); - } - frame_interface_unref(player->frame_interface); - texture_destroy(player->texture); - free(player); -} - -int64_t gstplayer_get_texture_id(struct 
gstplayer *player) { - return player->texture_id; -} - -void gstplayer_put_http_header(struct gstplayer *player, const char *key, const char *value) { - GValue gvalue = G_VALUE_INIT; - g_value_set_string(&gvalue, value); - gst_structure_take_value(player->headers, key, &gvalue); -} - -void gstplayer_set_userdata_locked(struct gstplayer *player, void *userdata) { - player->userdata = userdata; -} - -void *gstplayer_get_userdata_locked(struct gstplayer *player) { - return player->userdata; -} - -int gstplayer_initialize(struct gstplayer *player) { - return init(player, false); -} - -int gstplayer_play(struct gstplayer *player) { - LOG_DEBUG("gstplayer_play()\n"); - player->playpause_state = kPlaying; - player->direction = kForward; - return apply_playback_state(player); -} - -int gstplayer_pause(struct gstplayer *player) { - LOG_DEBUG("gstplayer_pause()\n"); - player->playpause_state = kPaused; - player->direction = kForward; - return apply_playback_state(player); -} - -int gstplayer_set_looping(struct gstplayer *player, bool looping) { - LOG_DEBUG("gstplayer_set_looping(%s)\n", looping ? "true" : "false"); - player->looping = looping; - return 0; -} - -int gstplayer_set_volume(struct gstplayer *player, double volume) { - (void) player; - (void) volume; - LOG_DEBUG("gstplayer_set_volume(%f)\n", volume); - /// TODO: Implement - return 0; -} - -int64_t gstplayer_get_position(struct gstplayer *player) { - GstState current, pending; - gboolean ok; - int64_t position; - - GstStateChangeReturn statechange = gst_element_get_state(GST_ELEMENT(player->pipeline), ¤t, &pending, 0); - if (statechange == GST_STATE_CHANGE_FAILURE) { - LOG_GST_GET_STATE_ERROR(player->pipeline); - return -1; - } - - if (statechange == GST_STATE_CHANGE_ASYNC) { - // we don't have position data yet. - // report the latest known (or the desired) position. - return player->fallback_position_ms; - } - - DEBUG_TRACE_BEGIN(player, "gstplayer_get_position"); - DEBUG_TRACE_BEGIN(player, "gst_element_query_position"); - ok = gst_element_query_position(player->pipeline, GST_FORMAT_TIME, &position); - DEBUG_TRACE_END(player, "gst_element_query_position"); - - if (ok == FALSE) { - LOG_ERROR("Could not query gstreamer position. (gst_element_query_position)\n"); - return 0; - } - - DEBUG_TRACE_END(player, "gstplayer_get_position"); - return GST_TIME_AS_MSECONDS(position); -} - -int gstplayer_seek_to(struct gstplayer *player, int64_t position, bool nearest_keyframe) { - LOG_DEBUG("gstplayer_seek_to(%" PRId64 ")\n", position); - player->has_desired_position = true; - player->desired_position_ms = position; - player->do_fast_seeking = nearest_keyframe; - return apply_playback_state(player); -} - -int gstplayer_set_playback_speed(struct gstplayer *player, double playback_speed) { - LOG_DEBUG("gstplayer_set_playback_speed(%f)\n", playback_speed); - ASSERT_MSG(playback_speed > 0, "playback speed must be > 0."); - player->playback_rate_forward = playback_speed; - return apply_playback_state(player); -} - -int gstplayer_step_forward(struct gstplayer *player) { - gboolean gst_ok; - int ok; - - ASSERT_NOT_NULL(player); - - player->playpause_state = kStepping; - player->direction = kForward; - ok = apply_playback_state(player); - if (ok != 0) { - return ok; - } - - gst_ok = gst_element_send_event(player->pipeline, gst_event_new_step(GST_FORMAT_BUFFERS, 1, 1, TRUE, FALSE)); - if (gst_ok == FALSE) { - LOG_ERROR("Could not send frame-step event to pipeline. 
(gst_element_send_event)\n"); - return EIO; - } - return 0; -} - -int gstplayer_step_backward(struct gstplayer *player) { - gboolean gst_ok; - int ok; - - ASSERT_NOT_NULL(player); - - player->playpause_state = kStepping; - player->direction = kBackward; - ok = apply_playback_state(player); - if (ok != 0) { - return ok; - } - - gst_ok = gst_element_send_event(player->pipeline, gst_event_new_step(GST_FORMAT_BUFFERS, 1, 1, TRUE, FALSE)); - if (gst_ok == FALSE) { - LOG_ERROR("Could not send frame-step event to pipeline. (gst_element_send_event)\n"); - return EIO; - } - - return 0; -} - -struct notifier *gstplayer_get_video_info_notifier(struct gstplayer *player) { - return &player->video_info_notifier; -} - -struct notifier *gstplayer_get_buffering_state_notifier(struct gstplayer *player) { - return &player->buffering_state_notifier; -} - -struct notifier *gstplayer_get_error_notifier(struct gstplayer *player) { - return &player->error_notifier; -} diff --git a/src/plugins/gstreamer_video_player/plugin.c b/src/plugins/gstreamer_video_player/plugin.c index d1de5b49..c8d23a9d 100644 --- a/src/plugins/gstreamer_video_player/plugin.c +++ b/src/plugins/gstreamer_video_player/plugin.c @@ -15,6 +15,7 @@ #include "platformchannel.h" #include "pluginregistry.h" #include "plugins/gstreamer_video_player.h" +#include "plugins/gstplayer.h" #include "texture_registry.h" #include "util/collection.h" #include "util/list.h" @@ -54,7 +55,7 @@ static struct plugin { struct list_head players; } plugin; -DEFINE_LOCK_OPS(plugin, lock); +DEFINE_LOCK_OPS(plugin, lock) /// Add a player instance to the player collection. static void add_player(struct gstplayer_meta *meta) { @@ -97,9 +98,9 @@ static struct gstplayer *get_player_by_evch(const char *const event_channel_name /** * @brief Remove a player instance from the player list. - * + * * Assumes the plugin struct is not locked. - * + * */ static void remove_player(struct gstplayer_meta *meta) { plugin_lock(&plugin); @@ -111,17 +112,17 @@ static void remove_player(struct gstplayer_meta *meta) { /** * @brief Remove a player instance from the player list. - * + * * Assumes the plugin struct is locked. - * + * */ static void remove_player_locked(struct gstplayer_meta *meta) { - ASSERT_MUTEX_LOCKED(plugin.lock); + assert_mutex_locked(&plugin.lock); list_del(&meta->entry); } static struct gstplayer_meta *get_meta(struct gstplayer *player) { - return (struct gstplayer_meta *) gstplayer_get_userdata_locked(player); + return (struct gstplayer_meta *) gstplayer_get_userdata(player); } /// Get the player id from the given arg, which is a kStdMap. @@ -204,7 +205,7 @@ get_player_from_map_arg(struct std_value *arg, struct gstplayer **player_out, Fl return 0; } -static int ensure_initialized() { +static int ensure_initialized(void) { GError *gst_error; gboolean success; @@ -315,7 +316,7 @@ static enum listener_return on_video_info_notify(void *arg, void *userdata) { /// on_video_info_notify is called on an internal thread, /// but send_initialized_event is (should be) mt-safe send_initialized_event(meta, !info->can_seek, info->width, info->height, info->duration_ms); - + /// FIXME: Threading /// Set this to NULL here so we don't unlisten to it twice. 
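+    /// (This field is cleared here, on the notifier's internal thread, while other code may also
+    /// check and unlisten it, which is the race the FIXME above refers to.)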
meta->video_info_listener = NULL; @@ -368,7 +369,7 @@ static int on_receive_evch(char *channel, struct platch_obj *object, FlutterPlat return platch_respond_not_implemented(responsehandle); } - meta = gstplayer_get_userdata_locked(player); + meta = gstplayer_get_userdata(player); if (streq("listen", method)) { platch_respond_success_std(responsehandle, NULL); @@ -417,44 +418,32 @@ static int on_initialize(char *channel, struct platch_obj *object, FlutterPlatfo return platch_respond_success_pigeon(responsehandle, NULL); } -static int check_headers(const struct std_value *headers, FlutterPlatformMessageResponseHandle *responsehandle) { - const struct std_value *key, *value; - - if (headers == NULL || STDVALUE_IS_NULL(*headers)) { - return 0; - } else if (!STDVALUE_IS_MAP(*headers)) { - platch_respond_illegal_arg_pigeon(responsehandle, "Expected `arg['httpHeaders']` to be a map of strings or null."); - return EINVAL; - } - - for (int i = 0; i < headers->size; i++) { - key = headers->keys + i; - value = headers->values + i; - - if (STDVALUE_IS_NULL(*key) || STDVALUE_IS_NULL(*value)) { - // ignore this value - continue; - } else if (STDVALUE_IS_STRING(*key) && STDVALUE_IS_STRING(*value)) { - // valid too - continue; - } else { - platch_respond_illegal_arg_pigeon(responsehandle, "Expected `arg['httpHeaders']` to be a map of strings or null."); - return EINVAL; - } - } +static void gst_structure_put_string(GstStructure *structure, const char *key, const char *value) { + GValue gvalue = G_VALUE_INIT; + g_value_set_string(&gvalue, value); + gst_structure_take_value(structure, key, &gvalue); +} - return 0; +static void gst_structure_take_string(GstStructure *structure, const char *key, char *value) { + GValue gvalue = G_VALUE_INIT; + g_value_take_string(&gvalue, value); + gst_structure_take_value(structure, key, &gvalue); } -static int add_headers_to_player(const struct std_value *headers, struct gstplayer *player) { +static bool get_headers(const struct std_value *headers, GstStructure **structure_out, FlutterPlatformMessageResponseHandle *responsehandle) { const struct std_value *key, *value; if (headers == NULL || STDVALUE_IS_NULL(*headers)) { - return 0; + *structure_out = NULL; + return true; } else if (!STDVALUE_IS_MAP(*headers)) { - assert(false); + *structure_out = NULL; + platch_respond_illegal_arg_pigeon(responsehandle, "Expected `arg['httpHeaders']` to be a map of strings or null."); + return false; } + *structure_out = gst_structure_new_empty("http-headers"); + for (int i = 0; i < headers->size; i++) { key = headers->keys + i; value = headers->values + i; @@ -463,13 +452,17 @@ static int add_headers_to_player(const struct std_value *headers, struct gstplay // ignore this value continue; } else if (STDVALUE_IS_STRING(*key) && STDVALUE_IS_STRING(*value)) { - gstplayer_put_http_header(player, STDVALUE_AS_STRING(*key), STDVALUE_AS_STRING(*value)); + gst_structure_put_string(*structure_out, STDVALUE_AS_STRING(*key), STDVALUE_AS_STRING(*value)); } else { - assert(false); + gst_structure_free(*structure_out); + *structure_out = NULL; + + platch_respond_illegal_arg_pigeon(responsehandle, "Expected `arg['httpHeaders']` to be a map of strings or null."); + return false; } } - return 0; + return true; } /// Allocates and initializes a gstplayer_meta struct, which we @@ -612,19 +605,20 @@ static int on_create(char *channel, struct platch_obj *object, FlutterPlatformMe ); } - temp = stdmap_get_str(arg, "httpHeaders"); - - // check our headers are valid, so we don't create our player for nothing - ok 
= check_headers(temp, responsehandle); - if (ok != 0) { - return 0; - } - // create our actual player (this doesn't initialize it) if (asset != NULL) { - player = gstplayer_new_from_asset(flutterpi, asset, package_name, NULL); + player = gstplayer_new_from_asset(flutterpi, asset, package_name, /* play_video */ true, /* play_audio */ false, NULL); } else { - player = gstplayer_new_from_network(flutterpi, uri, format_hint, NULL); + temp = stdmap_get_str(arg, "httpHeaders"); + + // check our headers are valid, so we don't create our player for nothing + GstStructure *headers = NULL; + ok = get_headers(temp, &headers, responsehandle); + if (ok == false) { + return 0; + } + + player = gstplayer_new_from_network(flutterpi, uri, format_hint, /* play_video */ true, /* play_audio */ false, NULL, headers); } if (player == NULL) { LOG_ERROR("Couldn't create gstreamer video player.\n"); @@ -640,10 +634,7 @@ static int on_create(char *channel, struct platch_obj *object, FlutterPlatformMe goto fail_destroy_player; } - gstplayer_set_userdata_locked(player, meta); - - // Add all our HTTP headers to gstplayer using gstplayer_put_http_header - add_headers_to_player(temp, player); + gstplayer_set_userdata(player, meta); // add it to our player collection add_player(meta); @@ -654,17 +645,8 @@ static int on_create(char *channel, struct platch_obj *object, FlutterPlatformMe goto fail_remove_player; } - // Finally, start initializing - ok = gstplayer_initialize(player); - if (ok != 0) { - goto fail_remove_receiver; - } - return platch_respond_success_pigeon(responsehandle, &STDMAP1(STDSTRING("textureId"), STDINT64(gstplayer_get_texture_id(player)))); -fail_remove_receiver: - plugin_registry_remove_receiver(meta->event_channel_name); - fail_remove_player: remove_player(meta); destroy_meta(meta); @@ -716,7 +698,8 @@ static int on_set_looping(char *channel, struct platch_obj *object, FlutterPlatf return platch_respond_illegal_arg_ext_pigeon(responsehandle, "Expected `arg['isLooping']` to be a boolean, but was:", temp); } - gstplayer_set_looping(player, loop); + gstplayer_set_looping(player, loop, true); + return platch_respond_success_pigeon(responsehandle, NULL); } @@ -953,11 +936,18 @@ get_player_from_texture_id_with_custom_errmsg(int64_t texture_id, FlutterPlatfor plugin_lock(&plugin); int n_texture_ids = list_length(&plugin.players); - int64_t *texture_ids = alloca(sizeof(int64_t) * n_texture_ids); - int64_t *texture_ids_cursor = texture_ids; - list_for_each_entry(struct gstplayer_meta, meta, &plugin.players, entry) { - *texture_ids_cursor++ = gstplayer_get_texture_id(meta->player); + int64_t *texture_ids; + + if (n_texture_ids == 0) { + texture_ids = NULL; + } else { + texture_ids = alloca(sizeof(int64_t) * n_texture_ids); + int64_t *texture_ids_cursor = texture_ids; + + list_for_each_entry(struct gstplayer_meta, meta, &plugin.players, entry) { + *texture_ids_cursor++ = gstplayer_get_texture_id(meta->player); + } } plugin_unlock(&plugin); @@ -1050,7 +1040,6 @@ static int on_initialize_v2(const struct raw_std_value *arg, FlutterPlatformMess } static int on_create_v2(const struct raw_std_value *arg, FlutterPlatformMessageResponseHandle *responsehandle) { - const struct raw_std_value *headers; struct gstplayer_meta *meta; struct gstplayer *player; enum format_hint format_hint; @@ -1078,8 +1067,7 @@ static int on_create_v2(const struct raw_std_value *arg, FlutterPlatformMessageR } else if (raw_std_value_is_string(arg)) { asset = raw_std_string_dup(arg); if (asset == NULL) { - ok = ENOMEM; - goto 
fail_respond_error; + return platch_respond_native_error_std(responsehandle, ENOMEM); } } else { return platch_respond_illegal_arg_std(responsehandle, "Expected `arg[0]` to be a String or null."); @@ -1097,11 +1085,12 @@ static int on_create_v2(const struct raw_std_value *arg, FlutterPlatformMessageR } else if (raw_std_value_is_string(arg)) { package_name = raw_std_string_dup(arg); if (package_name == NULL) { - ok = ENOMEM; - goto fail_respond_error; + ok = platch_respond_native_error_std(responsehandle, ENOMEM); + goto fail_free_asset; } } else { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg[1]` to be a String or null."); + ok = platch_respond_illegal_arg_std(responsehandle, "Expected `arg[1]` to be a String or null."); + goto fail_free_asset; } } else { package_name = NULL; @@ -1116,11 +1105,12 @@ static int on_create_v2(const struct raw_std_value *arg, FlutterPlatformMessageR } else if (raw_std_value_is_string(arg)) { uri = raw_std_string_dup(arg); if (uri == NULL) { - ok = ENOMEM; - goto fail_respond_error; + ok = platch_respond_native_error_std(responsehandle, ENOMEM); + goto fail_free_package_name; } } else { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg[2]` to be a String or null."); + ok = platch_respond_illegal_arg_std(responsehandle, "Expected `arg[2]` to be a String or null."); + goto fail_free_package_name; } } else { uri = NULL; @@ -1146,12 +1136,15 @@ static int on_create_v2(const struct raw_std_value *arg, FlutterPlatformMessageR } } else { invalid_format_hint: - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg[3]` to be one of 'ss', 'hls', 'dash', 'other' or null."); + ok = platch_respond_illegal_arg_std(responsehandle, "Expected `arg[3]` to be one of 'ss', 'hls', 'dash', 'other' or null."); + goto fail_free_uri; } } else { format_hint = FORMAT_HINT_NONE; } + GstStructure *headers = NULL; + // arg[4]: HTTP Headers if (size >= 5) { arg = raw_std_value_after(arg); @@ -1160,14 +1153,25 @@ static int on_create_v2(const struct raw_std_value *arg, FlutterPlatformMessageR headers = NULL; } else if (raw_std_value_is_map(arg)) { for_each_entry_in_raw_std_map(key, value, arg) { - if (!raw_std_value_is_string(key) || !raw_std_value_is_string(value)) { + if (raw_std_value_is_string(key) && raw_std_value_is_string(value)) { + if (headers == NULL) { + headers = gst_structure_new_empty("http-headers"); + } + + char *key_str = raw_std_string_dup(key); + gst_structure_take_string(headers, key_str, raw_std_string_dup(value)); + free(key_str); + } else { goto invalid_headers; } } - headers = arg; } else { invalid_headers: - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg[4]` to be a map of strings or null."); + if (headers != NULL) { + gst_structure_free(headers); + } + ok = platch_respond_illegal_arg_std(responsehandle, "Expected `arg[4]` to be a map of strings or null."); + goto fail_free_uri; } } else { headers = NULL; @@ -1182,98 +1186,333 @@ static int on_create_v2(const struct raw_std_value *arg, FlutterPlatformMessageR } else if (raw_std_value_is_string(arg)) { pipeline = raw_std_string_dup(arg); } else { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg[5]` to be a string or null."); + ok = platch_respond_illegal_arg_std(responsehandle, "Expected `arg[5]` to be a string or null."); + goto fail_free_headers; } } else { pipeline = NULL; } if ((asset ? 1 : 0) + (uri ? 1 : 0) + (pipeline ? 
1 : 0) != 1) { - return platch_respond_illegal_arg_std(responsehandle, "Expected exactly one of `arg[0]`, `arg[2]` or `arg[5]` to be non-null."); + ok = platch_respond_illegal_arg_std(responsehandle, "Expected exactly one of `arg[0]`, `arg[2]` or `arg[5]` to be non-null."); + goto fail_free_pipeline; } // Create our actual player (this doesn't initialize it) if (asset != NULL) { - player = gstplayer_new_from_asset(flutterpi, asset, package_name, NULL); + player = gstplayer_new_from_asset(flutterpi, asset, package_name, /* play_video */ true, /* play_audio */ false, NULL); + } else if (uri != NULL) { + player = gstplayer_new_from_network(flutterpi, uri, format_hint, /* play_video */ true, /* play_audio */ false, NULL, headers); + + // player owns the headers now, except creation failed + if (player) { + headers = NULL; + } + } else if (pipeline != NULL) { + player = gstplayer_new_from_pipeline(flutterpi, uri, NULL); + } else { + UNREACHABLE(); + } - // gstplayer_new_from_network will construct a file:// URI out of the - // asset path internally. + if (asset != NULL) { free(asset); asset = NULL; - } else if (uri != NULL) { - player = gstplayer_new_from_network(flutterpi, uri, format_hint, NULL); + } - // gstplayer_new_from_network will dup the uri internally. + if (package_name != NULL) { + free(package_name); + package_name = NULL; + } + + if (uri != NULL) { free(uri); uri = NULL; - } else if (pipeline != NULL) { - player = gstplayer_new_from_pipeline(flutterpi, pipeline, NULL); + } - // gstplayer_new_from_network will dup the pipeline internally. + if (pipeline != NULL) { free(pipeline); pipeline = NULL; - } else { - UNREACHABLE(); } if (player == NULL) { LOG_ERROR("Couldn't create gstreamer video player.\n"); - ok = EIO; - goto fail_respond_error; + ok = platch_respond_native_error_std(responsehandle, EIO); + goto fail_destroy_player; } - + // create a meta object so we can store the event channel name // of a player with it meta = create_meta(gstplayer_get_texture_id(player), player); if (meta == NULL) { - ok = ENOMEM; + ok = platch_respond_native_error_std(responsehandle, ENOMEM); goto fail_destroy_player; } - gstplayer_set_userdata_locked(player, meta); + gstplayer_set_userdata(player, meta); + + // Add it to our player collection + add_player(meta); + + // Set a receiver on the videoEvents event channel + ok = plugin_registry_set_receiver(meta->event_channel_name, kStandardMethodCall, on_receive_evch); + if (ok != 0) { + goto fail_remove_player; + } + + return platch_respond_success_std(responsehandle, &STDINT64(gstplayer_get_texture_id(player))); + +fail_remove_player: + remove_player(meta); + destroy_meta(meta); + +fail_destroy_player: + gstplayer_destroy(player); + +fail_free_pipeline: + if (pipeline) { + free(pipeline); + pipeline = NULL; + } - // Add all the HTTP headers to gstplayer using gstplayer_put_http_header +fail_free_headers: if (headers != NULL) { - for_each_entry_in_raw_std_map(header_name, header_value, headers) { - char *header_name_duped = raw_std_string_dup(header_name); - char *header_value_duped = raw_std_string_dup(header_value); + gst_structure_free(headers); + headers = NULL; + } + +fail_free_uri: + if (uri) { + free(uri); + uri = NULL; + } + +fail_free_package_name: + if (package_name) { + free(package_name); + package_name = NULL; + } + +fail_free_asset: + if (asset) { + free(asset); + asset = NULL; + } + + return ok; +} + +static int on_create_with_audio(const struct raw_std_value *arg, FlutterPlatformMessageResponseHandle *responsehandle) { + struct 
gstplayer_meta *meta; + struct gstplayer *player; + enum format_hint format_hint; + char *asset, *uri, *package_name; + size_t size; + int ok; + + ok = ensure_initialized(); + if (ok != 0) { + return respond_init_failed_v2(responsehandle); + } + + if (!raw_std_value_is_list(arg)) { + return platch_respond_illegal_arg_std(responsehandle, "Expected `arg` to be a List."); + } + + size = raw_std_list_get_size(arg); + + // arg[0]: Asset Path + if (size >= 1) { + arg = raw_std_list_get_first_element(arg); + + if (raw_std_value_is_null(arg)) { + asset = NULL; + } else if (raw_std_value_is_string(arg)) { + asset = raw_std_string_dup(arg); + ASSERT_NOT_NULL(asset); + } else { + return platch_respond_illegal_arg_std(responsehandle, "Expected `arg[0]` to be a String or null."); + } + } else { + asset = NULL; + } + + // arg[1]: Package Name + if (size >= 2) { + arg = raw_std_value_after(arg); + + if (raw_std_value_is_null(arg)) { + package_name = NULL; + } else if (raw_std_value_is_string(arg)) { + package_name = raw_std_string_dup(arg); + ASSERT_NOT_NULL(package_name); + } else { + return platch_respond_illegal_arg_std(responsehandle, "Expected `arg[1]` to be a String or null."); + } + } else { + package_name = NULL; + } + + // arg[1]: URI + if (size >= 3) { + arg = raw_std_value_after(arg); + + if (raw_std_value_is_null(arg)) { + uri = NULL; + } else if (raw_std_value_is_string(arg)) { + uri = raw_std_string_dup(arg); + if (uri == NULL) { + ASSERT_NOT_NULL(uri); + } + } else { + return platch_respond_illegal_arg_std(responsehandle, "Expected `arg[2]` to be a String or null."); + } + } else { + uri = NULL; + } - gstplayer_put_http_header(player, header_name_duped, header_value_duped); + // arg[3]: Format Hint + if (size >= 4) { + arg = raw_std_value_after(arg); - free(header_value_duped); - free(header_name_duped); + if (raw_std_value_is_null(arg)) { + format_hint = FORMAT_HINT_NONE; + } else if (raw_std_value_is_string(arg)) { + if (raw_std_string_equals(arg, "ss")) { + format_hint = FORMAT_HINT_SS; + } else if (raw_std_string_equals(arg, "hls")) { + format_hint = FORMAT_HINT_HLS; + } else if (raw_std_string_equals(arg, "dash")) { + format_hint = FORMAT_HINT_MPEG_DASH; + } else if (raw_std_string_equals(arg, "other")) { + format_hint = FORMAT_HINT_OTHER; + } else { + goto invalid_format_hint; + } + } else { +invalid_format_hint: + return platch_respond_illegal_arg_std(responsehandle, "Expected `arg[3]` to be one of 'ss', 'hls', 'dash', 'other' or null."); } + } else { + format_hint = FORMAT_HINT_NONE; } + GstStructure *headers = NULL; + + // arg[4]: HTTP Headers + if (size >= 5) { + arg = raw_std_value_after(arg); + + if (raw_std_value_is_null(arg)) { + headers = NULL; + } else if (raw_std_value_is_map(arg)) { + for_each_entry_in_raw_std_map(key, value, arg) { + if (raw_std_value_is_string(key) && raw_std_value_is_string(value)) { + if (headers == NULL) { + headers = gst_structure_new_empty("http-headers"); + } + + char *key_str = raw_std_string_dup(key); + ASSERT_NOT_NULL(key_str); + + gst_structure_take_string(headers, key_str, raw_std_string_dup(value)); + free(key_str); + } else { + goto invalid_headers; + } + } + } else { +invalid_headers: + if (headers != NULL) { + gst_structure_free(headers); + } + return platch_respond_illegal_arg_std(responsehandle, "Expected `arg[4]` to be a map of strings or null."); + } + } else { + headers = NULL; + } + + if ((asset ? 1 : 0) + (uri ? 
1 : 0) != 1) { + platch_respond_illegal_arg_std(responsehandle, "Expected exactly one of `arg[0]` or `arg[2]` to be non-null."); + goto fail_free_headers; + } + + // Create our actual player (this doesn't initialize it) + if (asset != NULL) { + player = gstplayer_new_from_asset(flutterpi, asset, package_name, /* play_video */ true, /* play_audio */ true, NULL); + } else if (uri != NULL) { + player = gstplayer_new_from_network(flutterpi, uri, format_hint, /* play_video */ true, /* play_audio */ true, NULL, headers); + + if (player) { + headers = NULL; + } + } else { + UNREACHABLE(); + } + + if (asset != NULL) { + free(asset); + asset = NULL; + } + + if (package_name != NULL) { + free(package_name); + package_name = NULL; + } + + if (uri != NULL) { + free(uri); + uri = NULL; + } + + if (player == NULL) { + LOG_ERROR("Couldn't create gstreamer video player.\n"); + ok = platch_respond_native_error_std(responsehandle, EIO); + goto fail_free_headers; + } + + // create a meta object so we can store the event channel name + // of a player with it + meta = create_meta(gstplayer_get_texture_id(player), player); + ASSERT_NOT_NULL(meta); + + gstplayer_set_userdata(player, meta); + // Add it to our player collection add_player(meta); // Set a receiver on the videoEvents event channel ok = plugin_registry_set_receiver(meta->event_channel_name, kStandardMethodCall, on_receive_evch); if (ok != 0) { + platch_respond_native_error_std(responsehandle, ok); goto fail_remove_player; } - // Finally, start initializing - ok = gstplayer_initialize(player); - if (ok != 0) { - goto fail_remove_receiver; - } - return platch_respond_success_std(responsehandle, &STDINT64(gstplayer_get_texture_id(player))); -fail_remove_receiver: - plugin_registry_remove_receiver(meta->event_channel_name); - fail_remove_player: remove_player(meta); destroy_meta(meta); - -fail_destroy_player: gstplayer_destroy(player); -fail_respond_error: - return platch_respond_native_error_std(responsehandle, ok); +fail_free_headers: + if (headers != NULL) { + gst_structure_free(headers); + headers = NULL; + } + + if (uri != NULL) { + free(uri); + } + + if (package_name != NULL) { + free(package_name); + } + + if (asset != NULL) { + free(asset); + } + + return ok; } static int on_dispose_v2(const struct raw_std_value *arg, FlutterPlatformMessageResponseHandle *responsehandle) { @@ -1312,7 +1551,21 @@ static int on_set_looping_v2(const struct raw_std_value *arg, FlutterPlatformMes return platch_respond_illegal_arg_std(responsehandle, "Expected `arg[1]` to be a bool."); } - ok = gstplayer_set_looping(player, looping); + // For video playback, gapless looping usually works fine + // it seems. 
+ bool gapless = true; + if (raw_std_list_get_size(arg) >= 3) { + const struct raw_std_value *third = raw_std_list_get_nth_element(arg, 2); + if (raw_std_value_is_null(third)) { + // unchanged + } else if (raw_std_value_is_bool(third)) { + gapless = raw_std_value_as_bool(third); + } else { + return platch_respond_illegal_arg_std(responsehandle, "Expected `arg[2]` to be a bool or null."); + } + } + + ok = gstplayer_set_looping(player, looping, gapless); if (ok != 0) { return platch_respond_native_error_std(responsehandle, ok); } @@ -1559,6 +1812,8 @@ static int on_receive_method_channel_v2(char *channel, struct platch_obj *object return on_initialize_v2(arg, responsehandle); } else if (raw_std_string_equals(method, "create")) { return on_create_v2(arg, responsehandle); + } else if (raw_std_string_equals(method, "createWithAudio")) { + return on_create_with_audio(arg, responsehandle); } else if (raw_std_string_equals(method, "dispose")) { return on_dispose_v2(arg, responsehandle); } else if (raw_std_string_equals(method, "setLooping")) { diff --git a/src/plugins/raw_keyboard.c b/src/plugins/raw_keyboard.c index a814088a..8ace05bb 100644 --- a/src/plugins/raw_keyboard.c +++ b/src/plugins/raw_keyboard.c @@ -424,7 +424,7 @@ ATTR_CONST static uint64_t physical_key_for_evdev_keycode(uint16_t evdev_keycode ATTR_CONST static uint64_t physical_key_for_xkb_keycode(xkb_keycode_t xkb_keycode) { assert(xkb_keycode >= 8); - return physical_key_for_evdev_keycode(xkb_keycode - 8); + return physical_key_for_evdev_keycode((uint16_t) (xkb_keycode - 8)); } ATTR_CONST static char eascii_to_lower(unsigned char n) { @@ -622,7 +622,7 @@ ATTR_CONST static uint32_t logical_key_for_xkb_keysym(xkb_keysym_t keysym) { if (keysym == XKB_KEY_yen) { return apply_flutter_key_plane(0x00022); } else if (keysym < 256) { - return apply_unicode_key_plane(eascii_to_lower(keysym)); + return apply_unicode_key_plane(eascii_to_lower((int8_t) keysym)); } else if (keysym >= 0xfd06 && keysym - 0xfd06 < ARRAY_SIZE(logical_keys_1)) { logical = logical_keys_1[keysym]; } else if (keysym >= 0x1008ff02 && keysym - 0x1008ff02 < ARRAY_SIZE(logical_keys_2)) { @@ -818,6 +818,7 @@ int rawkb_on_key_event( return ok; } + // NOLINTNEXTLINE(readability-suspicious-call-argument) ok = rawkb_send_gtk_keyevent(plain_codepoint, xkb_keysym, xkb_keycode, modifiers.u32, is_down); if (ok != 0) { return ok; @@ -826,7 +827,7 @@ int rawkb_on_key_event( return 0; } -static void assert_key_modifiers_work() { +static void assert_key_modifiers_work(void) { key_modifiers_t mods; memset(&mods, 0, sizeof(mods)); diff --git a/src/plugins/sentry/sentry.c b/src/plugins/sentry/sentry.c index d18031c2..0c28a0c5 100644 --- a/src/plugins/sentry/sentry.c +++ b/src/plugins/sentry/sentry.c @@ -301,7 +301,15 @@ static sentry_value_t raw_std_value_as_sentry_value(const struct raw_std_value * case kStdInt32: return sentry_value_new_int32(raw_std_value_as_int32(arg)); case kStdInt64: return sentry_value_new_int32((int32_t) raw_std_value_as_int64(arg)); case kStdFloat64: return sentry_value_new_double(raw_std_value_as_float64(arg)); + + case kStdUInt8Array: + case kStdInt32Array: + case kStdInt64Array: + case kStdFloat64Array: return sentry_value_new_null(); + + case kStdLargeInt: case kStdString: return sentry_value_new_string_n(raw_std_string_get_nonzero_terminated(arg), raw_std_string_get_length(arg)); + case kStdMap: { sentry_value_t map = sentry_value_new_object(); for_each_entry_in_raw_std_map(key, value, arg) { @@ -328,6 +336,7 @@ static sentry_value_t 
raw_std_value_as_sentry_value(const struct raw_std_value * return list; } + case kStdFloat32Array: return sentry_value_new_null(); default: return sentry_value_new_null(); } } @@ -755,7 +764,7 @@ static void on_method_call(void *userdata, const FlutterPlatformMessage *message } } -enum plugin_init_result sentry_plugin_deinit(struct flutterpi *flutterpi, void **userdata_out) { +enum plugin_init_result sentry_plugin_init(struct flutterpi *flutterpi, void **userdata_out) { struct sentry_plugin *plugin; int ok; @@ -780,7 +789,7 @@ enum plugin_init_result sentry_plugin_deinit(struct flutterpi *flutterpi, void * return PLUGIN_INIT_RESULT_INITIALIZED; } -void sentry_plugin_init(struct flutterpi *flutterpi, void *userdata) { +void sentry_plugin_fini(struct flutterpi *flutterpi, void *userdata) { struct sentry_plugin *plugin; ASSERT_NOT_NULL(userdata); @@ -794,4 +803,4 @@ void sentry_plugin_init(struct flutterpi *flutterpi, void *userdata) { free(plugin); } -FLUTTERPI_PLUGIN("sentry", sentry_plugin_init, sentry_plugin_deinit, NULL); +FLUTTERPI_PLUGIN("sentry", sentry, sentry_plugin_init, sentry_plugin_fini) diff --git a/src/plugins/text_input.c b/src/plugins/text_input.c index c57938cf..15674cd4 100644 --- a/src/plugins/text_input.c +++ b/src/plugins/text_input.c @@ -11,6 +11,7 @@ #include "flutter-pi.h" #include "pluginregistry.h" #include "util/asserts.h" +#include "util/logging.h" struct text_input { int64_t connection_id; @@ -33,16 +34,16 @@ struct text_input { * UTF8 utility functions */ static inline uint8_t utf8_symbol_length(uint8_t c) { - if ((c & 0b11110000) == 0b11110000) { + if ((c & 240 /* 0b11110000 */) == 240 /* 0b11110000 */) { return 4; } - if ((c & 0b11100000) == 0b11100000) { + if ((c & 224 /* 0b11100000 */) == 224 /* 0b11100000 */) { return 3; } - if ((c & 0b11000000) == 0b11000000) { + if ((c & 192 /* 0b11000000 */) == 192 /* 0b11000000 */) { return 2; } - if ((c & 0b10000000) == 0b10000000) { + if ((c & 128 /* 0b10000000 */) == 128 /* 0b10000000 */) { // XXX should we return 1 and don't care here? 
ASSERT_MSG(false, "Invalid UTF-8 character"); return 0; @@ -181,6 +182,7 @@ static int on_set_client(struct platch_obj *object, FlutterPlatformMessageRespon temp2 = jsobject_get(temp, "signed"); if (temp2 == NULL || temp2->type == kJsonNull) { has_allow_signs = false; + allow_signs = true; } else if (temp2->type == kJsonTrue || temp2->type == kJsonFalse) { has_allow_signs = true; allow_signs = temp2->type == kJsonTrue; @@ -191,6 +193,7 @@ static int on_set_client(struct platch_obj *object, FlutterPlatformMessageRespon temp2 = jsobject_get(temp, "decimal"); if (temp2 == NULL || temp2->type == kJsonNull) { has_allow_decimal = false; + allow_decimal = true; } else if (temp2->type == kJsonTrue || temp2->type == kJsonFalse) { has_allow_decimal = true; allow_decimal = temp2->type == kJsonTrue; @@ -239,14 +242,18 @@ static int on_set_client(struct platch_obj *object, FlutterPlatformMessageRespon int32_t new_id = (int32_t) object->json_arg.array[0].number_value; // everything okay, apply the new text editing config + text_input.has_allow_signs = has_allow_signs; + text_input.allow_signs = allow_signs; + text_input.has_allow_decimal = has_allow_decimal; + text_input.allow_decimal = allow_decimal; text_input.connection_id = new_id; text_input.autocorrect = autocorrect; text_input.input_action = input_action; text_input.input_type = input_type; if (autocorrect && !text_input.warned_about_autocorrect) { - printf( - "[text_input] warning: flutter requested native autocorrect, which" + LOG_ERROR( + "info: flutter requested native autocorrect, which" "is not supported by flutter-pi.\n" ); text_input.warned_about_autocorrect = true; @@ -527,7 +534,9 @@ int client_perform_action(double connection_id, enum text_input_action action) { } int client_perform_private_command(double connection_id, char *action, struct json_value *data) { - if (data != NULL && data->type != kJsonNull && data->type != kJsonObject) { + if (data == NULL) { + return EINVAL; + } else if (data->type != kJsonNull && data->type != kJsonObject) { return EINVAL; } diff --git a/src/texture_registry.c b/src/texture_registry.c index a2ab95b0..2b2fd961 100644 --- a/src/texture_registry.c +++ b/src/texture_registry.c @@ -202,6 +202,7 @@ struct texture *texture_new(struct texture_registry *reg) { if (ok != 0) { pthread_mutex_destroy(&texture->lock); free(texture); + return NULL; } return texture; @@ -301,7 +302,7 @@ texture_gl_external_texture_frame_callback(struct texture *texture, size_t width if (texture->next_frame != NULL) { /// TODO: If acquiring the texture frame fails, flutter will destroy the texture frame two times. /// So we'll probably have a segfault if that happens. - frame = counted_texture_frame_ref(texture->next_frame); + frame = texture->next_frame; } else { frame = NULL; } @@ -315,14 +316,17 @@ texture_gl_external_texture_frame_callback(struct texture *texture, size_t width ok = frame->unresolved_frame.resolve(width, height, frame->unresolved_frame.userdata, &frame->frame); if (ok != 0) { LOG_ERROR("Couldn't resolve texture frame.\n"); - counted_texture_frame_unrefp(&frame); counted_texture_frame_unrefp(&texture->next_frame); + texture_unlock(texture); + return false; } frame->unresolved_frame.destroy(frame->unresolved_frame.userdata); frame->is_resolved = true; } + frame = counted_texture_frame_ref(frame); + texture_unlock(texture); // only actually fill out the frame info when we have a frame. 
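For context on the texture_registry.c hunk above: the callback now borrows texture->next_frame without taking a reference, resolves it if it is still unresolved, and bails out (unlocking and dropping next_frame) when resolving fails, so a reference is only taken once the frame is known to be valid. Below is a minimal sketch of that ordering, not part of the patch; the helper name acquire_current_frame is hypothetical, texture_lock() and the struct layouts are assumed from the texture registry internals, and the real callback reports "no frame" rather than failing when next_frame is NULL.

#include <stdbool.h>
#include <stddef.h>

#include "texture_registry.h" /* assumed to declare the types/helpers used below; in the tree some may be internal to texture_registry.c */

/* Hypothetical helper sketching the ordering established by the hunk above:
 * resolve first, take the reference only on success. */
static bool acquire_current_frame(struct texture *texture, size_t width, size_t height,
                                  struct counted_texture_frame **frame_out) {
    struct counted_texture_frame *frame;

    texture_lock(texture);

    /* borrow the pending frame; no reference is taken yet */
    frame = texture->next_frame;
    if (frame == NULL) {
        texture_unlock(texture);
        return false;
    }

    if (!frame->is_resolved) {
        /* resolve lazily; on failure no reference was ever taken, so just drop
         * the pending frame and unlock, nothing is leaked or unreffed twice */
        if (frame->unresolved_frame.resolve(width, height, frame->unresolved_frame.userdata, &frame->frame) != 0) {
            counted_texture_frame_unrefp(&texture->next_frame);
            texture_unlock(texture);
            return false;
        }

        frame->unresolved_frame.destroy(frame->unresolved_frame.userdata);
        frame->is_resolved = true;
    }

    /* only now take ownership for the caller */
    *frame_out = counted_texture_frame_ref(frame);

    texture_unlock(texture);
    return true;
}
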
diff --git a/src/tracer.c b/src/tracer.c index 9177fede..99623cc7 100644 --- a/src/tracer.c +++ b/src/tracer.c @@ -50,7 +50,7 @@ struct tracer *tracer_new_with_cbs( return NULL; } -struct tracer *tracer_new_with_stubs() { +struct tracer *tracer_new_with_stubs(void) { struct tracer *tracer; tracer = malloc(sizeof *tracer); diff --git a/src/tracer.h b/src/tracer.h index 896ee686..11ab967d 100644 --- a/src/tracer.h +++ b/src/tracer.h @@ -20,9 +20,9 @@ struct tracer *tracer_new_with_cbs( FlutterEngineTraceEventInstantFnPtr trace_instant ); -struct tracer *tracer_new_with_stubs(); +struct tracer *tracer_new_with_stubs(void); -DECLARE_REF_OPS(tracer); +DECLARE_REF_OPS(tracer) void __tracer_begin(struct tracer *tracer, const char *name); diff --git a/src/user_input.c b/src/user_input.c index 652d7df5..17531743 100644 --- a/src/user_input.c +++ b/src/user_input.c @@ -270,7 +270,7 @@ static void on_close(int fd, void *userdata) { ASSERT_NOT_NULL(userdata); input = userdata; - return input->interface.close(fd, input->userdata); + input->interface.close(fd, input->userdata); } static const struct libinput_interface libinput_interface = { .open_restricted = on_open, .close_restricted = on_close }; @@ -376,6 +376,8 @@ void user_input_destroy(struct user_input *input) { event = libinput_get_event(input->libinput); event_type = libinput_event_get_type(event); + PRAGMA_DIAGNOSTIC_PUSH + PRAGMA_DIAGNOSTIC_IGNORED("-Wswitch-enum") switch (event_type) { case LIBINPUT_EVENT_DEVICE_REMOVED: ok = on_device_removed(input, event, 0, false); @@ -383,6 +385,7 @@ void user_input_destroy(struct user_input *input) { break; default: break; } + PRAGMA_DIAGNOSTIC_POP libinput_event_destroy(event); } @@ -746,21 +749,21 @@ static int on_key_event(struct user_input *input, struct libinput_event *event) // we emit UTF8 unconditionally here, // maybe we should check if codepoint is a control character? if (isprint(codepoint)) { - utf8_character[0] = codepoint; + utf8_character[0] = (uint8_t) codepoint; } } else if (codepoint < 0x800) { - utf8_character[0] = 0xc0 | (codepoint >> 6); + utf8_character[0] = 0xc0 | (uint8_t) (codepoint >> 6); utf8_character[1] = 0x80 | (codepoint & 0x3f); } else if (codepoint < 0x10000) { // the console keyboard driver of the linux kernel checks // at this point whether `codepoint` is a UTF16 high surrogate (U+D800 to U+DFFF) // or U+FFFF and returns without emitting UTF8 in that case. // don't know whether we should do this here too - utf8_character[0] = 0xe0 | (codepoint >> 12); + utf8_character[0] = 0xe0 | (uint8_t) (codepoint >> 12); utf8_character[1] = 0x80 | ((codepoint >> 6) & 0x3f); utf8_character[2] = 0x80 | (codepoint & 0x3f); } else if (codepoint < 0x110000) { - utf8_character[0] = 0xf0 | (codepoint >> 18); + utf8_character[0] = 0xf0 | (uint8_t) (codepoint >> 18); utf8_character[1] = 0x80 | ((codepoint >> 12) & 0x3f); utf8_character[2] = 0x80 | ((codepoint >> 6) & 0x3f); utf8_character[3] = 0x80 | (codepoint & 0x3f); @@ -1356,6 +1359,11 @@ static int process_libinput_events(struct user_input *input, uint64_t timestamp) event = libinput_get_event(input->libinput); event_type = libinput_event_get_type(event); + // We explicitly don't want to handle every event type here. + // Otherwise we'd need to add a new `case` every libinput introduces + // a new event. 
+ PRAGMA_DIAGNOSTIC_PUSH + PRAGMA_DIAGNOSTIC_IGNORED("-Wswitch-enum") switch (event_type) { case LIBINPUT_EVENT_DEVICE_ADDED: ok = on_device_added(input, event, timestamp); @@ -1481,6 +1489,7 @@ static int process_libinput_events(struct user_input *input, uint64_t timestamp) #endif default: break; } + PRAGMA_DIAGNOSTIC_POP libinput_event_destroy(event); } @@ -1514,8 +1523,8 @@ int user_input_on_fd_ready(struct user_input *input) { // record cursor state before handling events cursor_enabled_before = input->n_cursor_devices > 0; - cursor_x_before = round(input->cursor_x); - cursor_y_before = round(input->cursor_y); + cursor_x_before = (int) round(input->cursor_x); + cursor_y_before = (int) round(input->cursor_y); // handle all available libinput events ok = process_libinput_events(input, timestamp); @@ -1526,8 +1535,8 @@ int user_input_on_fd_ready(struct user_input *input) { // record cursor state after handling events cursor_enabled = input->n_cursor_devices > 0; - cursor_x = round(input->cursor_x); - cursor_y = round(input->cursor_y); + cursor_x = (int) round(input->cursor_x); + cursor_y = (int) round(input->cursor_y); // make sure we've dispatched all the flutter pointer events flush_pointer_events(input); diff --git a/src/util/asserts.h b/src/util/asserts.h index b2926833..7ca0d312 100644 --- a/src/util/asserts.h +++ b/src/util/asserts.h @@ -18,18 +18,7 @@ #define ASSERT_EQUALS_MSG(__a, __b, __msg) ASSERT_MSG((__a) == (__b), __msg) #define ASSERT_EGL_TRUE(__var) assert((__var) == EGL_TRUE) #define ASSERT_EGL_TRUE_MSG(__var, __msg) ASSERT_MSG((__var) == EGL_TRUE, __msg) -#define ASSERT_MUTEX_LOCKED(__mutex) \ - assert(({ \ - bool result; \ - int r = pthread_mutex_trylock(&(__mutex)); \ - if (r == 0) { \ - pthread_mutex_unlock(&(__mutex)); \ - result = false; \ - } else { \ - result = true; \ - } \ - result; \ - })) + #define ASSERT_ZERO(__var) assert((__var) == 0) #define ASSERT_ZERO_MSG(__var, __msg) ASSERT_MSG((__var) == 0, __msg) diff --git a/src/util/collection.c b/src/util/collection.c index 323f4fb2..dcc50394 100644 --- a/src/util/collection.c +++ b/src/util/collection.c @@ -2,14 +2,14 @@ static pthread_mutexattr_t default_mutex_attrs; -static void init_default_mutex_attrs() { +static void init_default_mutex_attrs(void) { pthread_mutexattr_init(&default_mutex_attrs); #ifdef DEBUG pthread_mutexattr_settype(&default_mutex_attrs, PTHREAD_MUTEX_ERRORCHECK); #endif } -const pthread_mutexattr_t *get_default_mutex_attrs() { +const pthread_mutexattr_t *get_default_mutex_attrs(void) { static pthread_once_t init_once_ctl = PTHREAD_ONCE_INIT; pthread_once(&init_once_ctl, init_default_mutex_attrs); diff --git a/src/util/collection.h b/src/util/collection.h index bbc48d23..d466ae35 100644 --- a/src/util/collection.h +++ b/src/util/collection.h @@ -108,7 +108,7 @@ static inline void *uint32_to_ptr(const uint32_t v) { #define MAX_ALIGNMENT (__alignof__(max_align_t)) #define IS_MAX_ALIGNED(num) ((num) % MAX_ALIGNMENT == 0) -#define DOUBLE_TO_FP1616(v) ((uint32_t) ((v) *65536)) +#define DOUBLE_TO_FP1616(v) ((uint32_t) ((v) * 65536)) #define DOUBLE_TO_FP1616_ROUNDED(v) (((uint32_t) (v)) << 16) typedef void (*void_callback_t)(void *userdata); @@ -117,6 +117,6 @@ ATTR_PURE static inline bool streq(const char *a, const char *b) { return strcmp(a, b) == 0; } -const pthread_mutexattr_t *get_default_mutex_attrs(); +const pthread_mutexattr_t *get_default_mutex_attrs(void); #endif // _FLUTTERPI_SRC_UTIL_COLLECTION_H diff --git a/src/util/khash.h b/src/util/khash.h new file mode 100644 index 
00000000..a429ee9d --- /dev/null +++ b/src/util/khash.h @@ -0,0 +1,627 @@ +/* The MIT License + + Copyright (c) 2008, 2009, 2011 by Attractive Chaos + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS + BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN + ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN + CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. +*/ + +/* + An example: + +#include "khash.h" +KHASH_MAP_INIT_INT(32, char) +int main() { + int ret, is_missing; + khiter_t k; + khash_t(32) *h = kh_init(32); + k = kh_put(32, h, 5, &ret); + kh_value(h, k) = 10; + k = kh_get(32, h, 10); + is_missing = (k == kh_end(h)); + k = kh_get(32, h, 5); + kh_del(32, h, k); + for (k = kh_begin(h); k != kh_end(h); ++k) + if (kh_exist(h, k)) kh_value(h, k) = 1; + kh_destroy(32, h); + return 0; +} +*/ + +/* + 2013-05-02 (0.2.8): + + * Use quadratic probing. When the capacity is power of 2, stepping function + i*(i+1)/2 guarantees to traverse each bucket. It is better than double + hashing on cache performance and is more robust than linear probing. + + In theory, double hashing should be more robust than quadratic probing. + However, my implementation is probably not for large hash tables, because + the second hash function is closely tied to the first hash function, + which reduce the effectiveness of double hashing. + + Reference: http://research.cs.vt.edu/AVresearch/hashing/quadratic.php + + 2011-12-29 (0.2.7): + + * Minor code clean up; no actual effect. + + 2011-09-16 (0.2.6): + + * The capacity is a power of 2. This seems to dramatically improve the + speed for simple keys. Thank Zilong Tan for the suggestion. Reference: + + - http://code.google.com/p/ulib/ + - http://nothings.org/computer/judy/ + + * Allow to optionally use linear probing which usually has better + performance for random input. Double hashing is still the default as it + is more robust to certain non-random input. + + * Added Wang's integer hash function (not used by default). This hash + function is more robust to certain non-random input. + + 2011-02-14 (0.2.5): + + * Allow to declare global functions. + + 2009-09-26 (0.2.4): + + * Improve portability + + 2008-09-19 (0.2.3): + + * Corrected the example + * Improved interfaces + + 2008-09-11 (0.2.2): + + * Improved speed a little in kh_put() + + 2008-09-10 (0.2.1): + + * Added kh_clear() + * Fixed a compiling error + + 2008-09-02 (0.2.0): + + * Changed to token concatenation which increases flexibility. + + 2008-08-31 (0.1.2): + + * Fixed a bug in kh_get(), which has not been tested previously. + + 2008-08-31 (0.1.1): + + * Added destructor +*/ + + +#ifndef __AC_KHASH_H +#define __AC_KHASH_H + +/*! + @header + + Generic hash table library. 
+ */ + +#define AC_VERSION_KHASH_H "0.2.8" + +#include +#include +#include + +/* compiler specific configuration */ + +#if UINT_MAX == 0xffffffffu +typedef unsigned int khint32_t; +#elif ULONG_MAX == 0xffffffffu +typedef unsigned long khint32_t; +#endif + +#if ULONG_MAX == ULLONG_MAX +typedef unsigned long khint64_t; +#else +typedef unsigned long long khint64_t; +#endif + +#ifndef kh_inline +#ifdef _MSC_VER +#define kh_inline __inline +#else +#define kh_inline inline +#endif +#endif /* kh_inline */ + +#ifndef klib_unused +#if (defined __clang__ && __clang_major__ >= 3) || (defined __GNUC__ && __GNUC__ >= 3) +#define klib_unused __attribute__ ((__unused__)) +#else +#define klib_unused +#endif +#endif /* klib_unused */ + +typedef khint32_t khint_t; +typedef khint_t khiter_t; + +#define __ac_isempty(flag, i) ((flag[i>>4]>>((i&0xfU)<<1))&2) +#define __ac_isdel(flag, i) ((flag[i>>4]>>((i&0xfU)<<1))&1) +#define __ac_iseither(flag, i) ((flag[i>>4]>>((i&0xfU)<<1))&3) +#define __ac_set_isdel_false(flag, i) (flag[i>>4]&=~(1ul<<((i&0xfU)<<1))) +#define __ac_set_isempty_false(flag, i) (flag[i>>4]&=~(2ul<<((i&0xfU)<<1))) +#define __ac_set_isboth_false(flag, i) (flag[i>>4]&=~(3ul<<((i&0xfU)<<1))) +#define __ac_set_isdel_true(flag, i) (flag[i>>4]|=1ul<<((i&0xfU)<<1)) + +#define __ac_fsize(m) ((m) < 16? 1 : (m)>>4) + +#ifndef kroundup32 +#define kroundup32(x) (--(x), (x)|=(x)>>1, (x)|=(x)>>2, (x)|=(x)>>4, (x)|=(x)>>8, (x)|=(x)>>16, ++(x)) +#endif + +#ifndef kcalloc +#define kcalloc(N,Z) calloc(N,Z) +#endif +#ifndef kmalloc +#define kmalloc(Z) malloc(Z) +#endif +#ifndef krealloc +#define krealloc(P,Z) realloc(P,Z) +#endif +#ifndef kfree +#define kfree(P) free(P) +#endif + +static const double __ac_HASH_UPPER = 0.77; + +#define __KHASH_TYPE(name, khkey_t, khval_t) \ + typedef struct kh_##name##_s { \ + khint_t n_buckets, size, n_occupied, upper_bound; \ + khint32_t *flags; \ + khkey_t *keys; \ + khval_t *vals; \ + } kh_##name##_t; + +#define __KHASH_PROTOTYPES(name, khkey_t, khval_t) \ + extern kh_##name##_t *kh_init_##name(void); \ + extern void kh_destroy_##name(kh_##name##_t *h); \ + extern void kh_clear_##name(kh_##name##_t *h); \ + extern khint_t kh_get_##name(const kh_##name##_t *h, khkey_t key); \ + extern int kh_resize_##name(kh_##name##_t *h, khint_t new_n_buckets); \ + extern khint_t kh_put_##name(kh_##name##_t *h, khkey_t key, int *ret); \ + extern void kh_del_##name(kh_##name##_t *h, khint_t x); + +#define __KHASH_IMPL(name, SCOPE, khkey_t, khval_t, kh_is_map, __hash_func, __hash_equal) \ + SCOPE kh_##name##_t *kh_init_##name(void) { \ + return (kh_##name##_t*)kcalloc(1, sizeof(kh_##name##_t)); \ + } \ + SCOPE void kh_destroy_##name(kh_##name##_t *h) \ + { \ + if (h) { \ + kfree((void *)h->keys); kfree(h->flags); \ + kfree((void *)h->vals); \ + kfree(h); \ + } \ + } \ + SCOPE void kh_clear_##name(kh_##name##_t *h) \ + { \ + if (h && h->flags) { \ + memset(h->flags, 0xaa, __ac_fsize(h->n_buckets) * sizeof(khint32_t)); \ + h->size = h->n_occupied = 0; \ + } \ + } \ + SCOPE khint_t kh_get_##name(const kh_##name##_t *h, khkey_t key) \ + { \ + if (h->n_buckets) { \ + khint_t k, i, last, mask, step = 0; \ + mask = h->n_buckets - 1; \ + k = __hash_func(key); i = k & mask; \ + last = i; \ + while (!__ac_isempty(h->flags, i) && (__ac_isdel(h->flags, i) || !__hash_equal(h->keys[i], key))) { \ + i = (i + (++step)) & mask; \ + if (i == last) return h->n_buckets; \ + } \ + return __ac_iseither(h->flags, i)? 
h->n_buckets : i; \ + } else return 0; \ + } \ + SCOPE int kh_resize_##name(kh_##name##_t *h, khint_t new_n_buckets) \ + { /* This function uses 0.25*n_buckets bytes of working space instead of [sizeof(key_t+val_t)+.25]*n_buckets. */ \ + khint32_t *new_flags = 0; \ + khint_t j = 1; \ + { \ + kroundup32(new_n_buckets); \ + if (new_n_buckets < 4) new_n_buckets = 4; \ + if (h->size >= (khint_t)(new_n_buckets * __ac_HASH_UPPER + 0.5)) j = 0; /* requested size is too small */ \ + else { /* hash table size to be changed (shrink or expand); rehash */ \ + new_flags = (khint32_t*)kmalloc(__ac_fsize(new_n_buckets) * sizeof(khint32_t)); \ + if (!new_flags) return -1; \ + memset(new_flags, 0xaa, __ac_fsize(new_n_buckets) * sizeof(khint32_t)); \ + if (h->n_buckets < new_n_buckets) { /* expand */ \ + khkey_t *new_keys = (khkey_t*)krealloc((void *)h->keys, new_n_buckets * sizeof(khkey_t)); \ + if (!new_keys) { kfree(new_flags); return -1; } \ + h->keys = new_keys; \ + if (kh_is_map) { \ + khval_t *new_vals = (khval_t*)krealloc((void *)h->vals, new_n_buckets * sizeof(khval_t)); \ + if (!new_vals) { kfree(new_flags); return -1; } \ + h->vals = new_vals; \ + } \ + } /* otherwise shrink */ \ + } \ + } \ + if (j) { /* rehashing is needed */ \ + for (j = 0; j != h->n_buckets; ++j) { \ + if (__ac_iseither(h->flags, j) == 0) { \ + khkey_t key = h->keys[j]; \ + khval_t val; \ + khint_t new_mask; \ + new_mask = new_n_buckets - 1; \ + if (kh_is_map) val = h->vals[j]; \ + __ac_set_isdel_true(h->flags, j); \ + while (1) { /* kick-out process; sort of like in Cuckoo hashing */ \ + khint_t k, i, step = 0; \ + k = __hash_func(key); \ + i = k & new_mask; \ + while (!__ac_isempty(new_flags, i)) i = (i + (++step)) & new_mask; \ + __ac_set_isempty_false(new_flags, i); \ + if (i < h->n_buckets && __ac_iseither(h->flags, i) == 0) { /* kick out the existing element */ \ + { khkey_t tmp = h->keys[i]; h->keys[i] = key; key = tmp; } \ + if (kh_is_map) { khval_t tmp = h->vals[i]; h->vals[i] = val; val = tmp; } \ + __ac_set_isdel_true(h->flags, i); /* mark it as deleted in the old hash table */ \ + } else { /* write the element and jump out of the loop */ \ + h->keys[i] = key; \ + if (kh_is_map) h->vals[i] = val; \ + break; \ + } \ + } \ + } \ + } \ + if (h->n_buckets > new_n_buckets) { /* shrink the hash table */ \ + h->keys = (khkey_t*)krealloc((void *)h->keys, new_n_buckets * sizeof(khkey_t)); /* NOLINT(bugprone-suspicious-realloc-usage) */\ + if (kh_is_map) h->vals = (khval_t*)krealloc((void *)h->vals, new_n_buckets * sizeof(khval_t)); /* NOLINT(bugprone-suspicious-realloc-usage) */\ + } \ + kfree(h->flags); /* free the working space */ \ + h->flags = new_flags; \ + h->n_buckets = new_n_buckets; \ + h->n_occupied = h->size; \ + h->upper_bound = (khint_t)(h->n_buckets * __ac_HASH_UPPER + 0.5); \ + } \ + return 0; \ + } \ + SCOPE khint_t kh_put_##name(kh_##name##_t *h, khkey_t key, int *ret) \ + { \ + khint_t x; \ + if (h->n_occupied >= h->upper_bound) { /* update the hash table */ \ + if (h->n_buckets > (h->size<<1)) { \ + if (kh_resize_##name(h, h->n_buckets - 1) < 0) { /* clear "deleted" elements */ \ + *ret = -1; return h->n_buckets; \ + } \ + } else if (kh_resize_##name(h, h->n_buckets + 1) < 0) { /* expand the hash table */ \ + *ret = -1; return h->n_buckets; \ + } \ + } /* TODO: to implement automatically shrinking; resize() already support shrinking */ \ + { \ + khint_t k, i, site, last, mask = h->n_buckets - 1, step = 0; \ + x = site = h->n_buckets; k = __hash_func(key); i = k & mask; \ + if (__ac_isempty(h->flags, i)) x 
= i; /* for speed up */ \ + else { \ + last = i; \ + while (!__ac_isempty(h->flags, i) && (__ac_isdel(h->flags, i) || !__hash_equal(h->keys[i], key))) { \ + if (__ac_isdel(h->flags, i)) site = i; \ + i = (i + (++step)) & mask; \ + if (i == last) { x = site; break; } \ + } \ + if (x == h->n_buckets) { \ + if (__ac_isempty(h->flags, i) && site != h->n_buckets) x = site; \ + else x = i; \ + } \ + } \ + } \ + if (__ac_isempty(h->flags, x)) { /* not present at all */ \ + h->keys[x] = key; \ + __ac_set_isboth_false(h->flags, x); \ + ++h->size; ++h->n_occupied; \ + *ret = 1; \ + } else if (__ac_isdel(h->flags, x)) { /* deleted */ \ + h->keys[x] = key; \ + __ac_set_isboth_false(h->flags, x); \ + ++h->size; \ + *ret = 2; \ + } else *ret = 0; /* Don't touch h->keys[x] if present and not deleted */ \ + return x; \ + } \ + SCOPE void kh_del_##name(kh_##name##_t *h, khint_t x) \ + { \ + if (x != h->n_buckets && !__ac_iseither(h->flags, x)) { \ + __ac_set_isdel_true(h->flags, x); \ + --h->size; \ + } \ + } + +#define KHASH_DECLARE(name, khkey_t, khval_t) \ + __KHASH_TYPE(name, khkey_t, khval_t) \ + __KHASH_PROTOTYPES(name, khkey_t, khval_t) + +#define KHASH_INIT2(name, SCOPE, khkey_t, khval_t, kh_is_map, __hash_func, __hash_equal) \ + __KHASH_TYPE(name, khkey_t, khval_t) \ + __KHASH_IMPL(name, SCOPE, khkey_t, khval_t, kh_is_map, __hash_func, __hash_equal) + +#define KHASH_INIT(name, khkey_t, khval_t, kh_is_map, __hash_func, __hash_equal) \ + KHASH_INIT2(name, static kh_inline klib_unused, khkey_t, khval_t, kh_is_map, __hash_func, __hash_equal) + +/* --- BEGIN OF HASH FUNCTIONS --- */ + +/*! @function + @abstract Integer hash function + @param key The integer [khint32_t] + @return The hash value [khint_t] + */ +#define kh_int_hash_func(key) (khint32_t)(key) +/*! @function + @abstract Integer comparison function + */ +#define kh_int_hash_equal(a, b) ((a) == (b)) +/*! @function + @abstract 64-bit integer hash function + @param key The integer [khint64_t] + @return The hash value [khint_t] + */ +#define kh_int64_hash_func(key) (khint32_t)((key)>>33^(key)^(key)<<11) +/*! @function + @abstract 64-bit integer comparison function + */ +#define kh_int64_hash_equal(a, b) ((a) == (b)) +/*! @function + @abstract const char* hash function + @param s Pointer to a null terminated string + @return The hash value + */ +static kh_inline khint_t __ac_X31_hash_string(const char *s) +{ + khint_t h = (khint_t)*s; + if (h) for (++s ; *s; ++s) h = (h << 5) - h + (khint_t)*s; + return h; +} +/*! @function + @abstract Another interface to const char* hash function + @param key Pointer to a null terminated string [const char*] + @return The hash value [khint_t] + */ +#define kh_str_hash_func(key) __ac_X31_hash_string(key) +/*! @function + @abstract Const char* comparison function + */ +#define kh_str_hash_equal(a, b) (strcmp(a, b) == 0) + +static kh_inline khint_t __ac_Wang_hash(khint_t key) +{ + key += ~(key << 15); + key ^= (key >> 10); + key += (key << 3); + key ^= (key >> 6); + key += ~(key << 11); + key ^= (key >> 16); + return key; +} +#define kh_int_hash_func2(key) __ac_Wang_hash((khint_t)key) + +/* --- END OF HASH FUNCTIONS --- */ + +/* Other convenient macros... */ + +/*! + @abstract Type of the hash table. + @param name Name of the hash table [symbol] + */ +#define khash_t(name) kh_##name##_t + +/*! @function + @abstract Initiate a hash table. + @param name Name of the hash table [symbol] + @return Pointer to the hash table [khash_t(name)*] + */ +#define kh_init(name) kh_init_##name() + +/*! 
@function + @abstract Destroy a hash table. + @param name Name of the hash table [symbol] + @param h Pointer to the hash table [khash_t(name)*] + */ +#define kh_destroy(name, h) kh_destroy_##name(h) + +/*! @function + @abstract Reset a hash table without deallocating memory. + @param name Name of the hash table [symbol] + @param h Pointer to the hash table [khash_t(name)*] + */ +#define kh_clear(name, h) kh_clear_##name(h) + +/*! @function + @abstract Resize a hash table. + @param name Name of the hash table [symbol] + @param h Pointer to the hash table [khash_t(name)*] + @param s New size [khint_t] + */ +#define kh_resize(name, h, s) kh_resize_##name(h, s) + +/*! @function + @abstract Insert a key to the hash table. + @param name Name of the hash table [symbol] + @param h Pointer to the hash table [khash_t(name)*] + @param k Key [type of keys] + @param r Extra return code: -1 if the operation failed; + 0 if the key is present in the hash table; + 1 if the bucket is empty (never used); 2 if the element in + the bucket has been deleted [int*] + @return Iterator to the inserted element [khint_t] + */ +#define kh_put(name, h, k, r) kh_put_##name(h, k, r) + +/*! @function + @abstract Retrieve a key from the hash table. + @param name Name of the hash table [symbol] + @param h Pointer to the hash table [khash_t(name)*] + @param k Key [type of keys] + @return Iterator to the found element, or kh_end(h) if the element is absent [khint_t] + */ +#define kh_get(name, h, k) kh_get_##name(h, k) + +/*! @function + @abstract Remove a key from the hash table. + @param name Name of the hash table [symbol] + @param h Pointer to the hash table [khash_t(name)*] + @param k Iterator to the element to be deleted [khint_t] + */ +#define kh_del(name, h, k) kh_del_##name(h, k) + +/*! @function + @abstract Test whether a bucket contains data. + @param h Pointer to the hash table [khash_t(name)*] + @param x Iterator to the bucket [khint_t] + @return 1 if containing data; 0 otherwise [int] + */ +#define kh_exist(h, x) (!__ac_iseither((h)->flags, (x))) + +/*! @function + @abstract Get key given an iterator + @param h Pointer to the hash table [khash_t(name)*] + @param x Iterator to the bucket [khint_t] + @return Key [type of keys] + */ +#define kh_key(h, x) ((h)->keys[x]) + +/*! @function + @abstract Get value given an iterator + @param h Pointer to the hash table [khash_t(name)*] + @param x Iterator to the bucket [khint_t] + @return Value [type of values] + @discussion For hash sets, calling this results in segfault. + */ +#define kh_val(h, x) ((h)->vals[x]) + +/*! @function + @abstract Alias of kh_val() + */ +#define kh_value(h, x) ((h)->vals[x]) + +/*! @function + @abstract Get the start iterator + @param h Pointer to the hash table [khash_t(name)*] + @return The start iterator [khint_t] + */ +#define kh_begin(h) (khint_t)(0) + +/*! @function + @abstract Get the end iterator + @param h Pointer to the hash table [khash_t(name)*] + @return The end iterator [khint_t] + */ +#define kh_end(h) ((h)->n_buckets) + +/*! @function + @abstract Get the number of elements in the hash table + @param h Pointer to the hash table [khash_t(name)*] + @return Number of elements in the hash table [khint_t] + */ +#define kh_size(h) ((h)->size) + +/*! @function + @abstract Get the number of buckets in the hash table + @param h Pointer to the hash table [khash_t(name)*] + @return Number of buckets in the hash table [khint_t] + */ +#define kh_n_buckets(h) ((h)->n_buckets) + +/*! 
@function + @abstract Iterate over the entries in the hash table + @param h Pointer to the hash table [khash_t(name)*] + @param kvar Variable to which key will be assigned + @param vvar Variable to which value will be assigned + @param code Block of code to execute + */ +#define kh_foreach(h, kvar, vvar, code) { khint_t __i; \ + for (__i = kh_begin(h); __i != kh_end(h); ++__i) { \ + if (!kh_exist(h,__i)) continue; \ + (kvar) = kh_key(h,__i); \ + (vvar) = kh_val(h,__i); \ + code; \ + } } + +/*! @function + @abstract Iterate over the values in the hash table + @param h Pointer to the hash table [khash_t(name)*] + @param vvar Variable to which value will be assigned + @param code Block of code to execute + */ +#define kh_foreach_value(h, vvar, code) { khint_t __i; \ + for (__i = kh_begin(h); __i != kh_end(h); ++__i) { \ + if (!kh_exist(h,__i)) continue; \ + (vvar) = kh_val(h,__i); \ + code; \ + } } + +/* More convenient interfaces */ + +/*! @function + @abstract Instantiate a hash set containing integer keys + @param name Name of the hash table [symbol] + */ +#define KHASH_SET_INIT_INT(name) \ + KHASH_INIT(name, khint32_t, char, 0, kh_int_hash_func, kh_int_hash_equal) + +/*! @function + @abstract Instantiate a hash map containing integer keys + @param name Name of the hash table [symbol] + @param khval_t Type of values [type] + */ +#define KHASH_MAP_INIT_INT(name, khval_t) \ + KHASH_INIT(name, khint32_t, khval_t, 1, kh_int_hash_func, kh_int_hash_equal) + +/*! @function + @abstract Instantiate a hash set containing 64-bit integer keys + @param name Name of the hash table [symbol] + */ +#define KHASH_SET_INIT_INT64(name) \ + KHASH_INIT(name, khint64_t, char, 0, kh_int64_hash_func, kh_int64_hash_equal) + +/*! @function + @abstract Instantiate a hash map containing 64-bit integer keys + @param name Name of the hash table [symbol] + @param khval_t Type of values [type] + */ +#define KHASH_MAP_INIT_INT64(name, khval_t) \ + KHASH_INIT(name, khint64_t, khval_t, 1, kh_int64_hash_func, kh_int64_hash_equal) + +typedef const char *kh_cstr_t; +/*! @function + @abstract Instantiate a hash map containing const char* keys + @param name Name of the hash table [symbol] + */ +#define KHASH_SET_INIT_STR(name) \ + KHASH_INIT(name, kh_cstr_t, char, 0, kh_str_hash_func, kh_str_hash_equal) + +/*! 
@function + @abstract Instantiate a hash map containing const char* keys + @param name Name of the hash table [symbol] + @param khval_t Type of values [type] + */ +#define KHASH_MAP_INIT_STR(name, khval_t) \ + KHASH_INIT(name, kh_cstr_t, khval_t, 1, kh_str_hash_func, kh_str_hash_equal) + +#endif /* __AC_KHASH_H */ diff --git a/src/util/lock_ops.h b/src/util/lock_ops.h index dc0a31a0..648c9791 100644 --- a/src/util/lock_ops.h +++ b/src/util/lock_ops.h @@ -59,4 +59,18 @@ (void) ok; \ } +#ifdef DEBUG +static inline void assert_mutex_locked(pthread_mutex_t *mutex) { + int result = pthread_mutex_trylock(mutex); + if (result == 0) { + pthread_mutex_unlock(mutex); + ASSERT_MSG(false, "Mutex is not locked."); + } +} +#else +static inline void assert_mutex_locked(pthread_mutex_t *mutex) { + (void) mutex; +} +#endif + #endif // _FLUTTERPI_SRC_UTIL_LOCK_OPS_H diff --git a/src/util/macros.h b/src/util/macros.h index e0f7933a..8be7fc43 100644 --- a/src/util/macros.h +++ b/src/util/macros.h @@ -122,6 +122,9 @@ #if __has_attribute(noreturn) #define HAVE_FUNC_ATTRIBUTE_NORETURN #endif +#if __has_attribute(suppress) + #define HAVE_STMT_ATTRIBUTE_SUPPRESS +#endif /** * __builtin_expect macros @@ -405,6 +408,12 @@ #define ATTR_NOINLINE #endif +#ifdef HAVE_STMT_ATTRIBUTE_SUPPRESS + #define ANALYZER_SUPPRESS(stmt) __attribute__((suppress)) stmt +#else + #define ANALYZER_SUPPRESS(stmt) stmt +#endif + /** * Check that STRUCT::FIELD can hold MAXVAL. We use a lot of bitfields * in Mesa/gallium. We have to be sure they're of sufficient size to @@ -421,7 +430,7 @@ } while (0) /** Compute ceiling of integer quotient of A divided by B. */ -#define DIV_ROUND_UP(A, B) (((A) + (B) -1) / (B)) +#define DIV_ROUND_UP(A, B) (((A) + (B) - 1) / (B)) /** * Clamp X to [MIN,MAX]. Turn NaN into MIN, arbitrarily. @@ -450,10 +459,10 @@ #define MAX4(A, B, C, D) ((A) > (B) ? MAX3(A, C, D) : MAX3(B, C, D)) /** Align a value to a power of two */ -#define ALIGN_POT(x, pot_align) (((x) + (pot_align) -1) & ~((pot_align) -1)) +#define ALIGN_POT(x, pot_align) (((x) + (pot_align) - 1) & ~((pot_align) - 1)) /** Checks is a value is a power of two. Does not handle zero. 
*/ -#define IS_POT(v) (((v) & ((v) -1)) == 0) +#define IS_POT(v) (((v) & ((v) - 1)) == 0) /** Set a single bit */ #define BITFIELD_BIT(b) (1u << (b)) @@ -547,21 +556,27 @@ typedef int lock_cap_t; #if defined(__clang__) #define PRAGMA_DIAGNOSTIC_PUSH _Pragma("clang diagnostic push") #define PRAGMA_DIAGNOSTIC_POP _Pragma("clang diagnostic pop") - #define PRAGMA_DIAGNOSTIC_ERROR(X) DO_PRAGMA(clang diagnostic error #X) - #define PRAGMA_DIAGNOSTIC_WARNING(X) DO_PRAGMA(clang diagnostic warning #X) - #define PRAGMA_DIAGNOSTIC_IGNORED(X) DO_PRAGMA(clang diagnostic ignored #X) + #define PRAGMA_DIAGNOSTIC_ERROR(X) DO_PRAGMA(clang diagnostic error X) + #define PRAGMA_DIAGNOSTIC_WARNING(X) DO_PRAGMA(clang diagnostic warning X) + #define PRAGMA_DIAGNOSTIC_IGNORED(X) DO_PRAGMA(clang diagnostic ignored X) + #define PRAGMA_GCC_DIAGNOSTIC_IGNORED(X) + #define PRAGMA_CLANG_DIAGNOSTIC_IGNORED(X) DO_PRAGMA(clang diagnostic ignored X) #elif defined(__GNUC__) #define PRAGMA_DIAGNOSTIC_PUSH _Pragma("GCC diagnostic push") #define PRAGMA_DIAGNOSTIC_POP _Pragma("GCC diagnostic pop") - #define PRAGMA_DIAGNOSTIC_ERROR(X) DO_PRAGMA(GCC diagnostic error #X) - #define PRAGMA_DIAGNOSTIC_WARNING(X) DO_PRAGMA(GCC diagnostic warning #X) - #define PRAGMA_DIAGNOSTIC_IGNORED(X) DO_PRAGMA(GCC diagnostic ignored #X) + #define PRAGMA_DIAGNOSTIC_ERROR(X) DO_PRAGMA(GCC diagnostic error X) + #define PRAGMA_DIAGNOSTIC_WARNING(X) DO_PRAGMA(GCC diagnostic warning X) + #define PRAGMA_DIAGNOSTIC_IGNORED(X) DO_PRAGMA(GCC diagnostic ignored X) + #define PRAGMA_GCC_DIAGNOSTIC_IGNORED(X) DO_PRAGMA(GCC diagnostic ignored X) + #define PRAGMA_CLANG_DIAGNOSTIC_IGNORED(X) #else #define PRAGMA_DIAGNOSTIC_PUSH #define PRAGMA_DIAGNOSTIC_POP #define PRAGMA_DIAGNOSTIC_ERROR(X) #define PRAGMA_DIAGNOSTIC_WARNING(X) #define PRAGMA_DIAGNOSTIC_IGNORED(X) + #define PRAGMA_GCC_DIAGNOSTIC_IGNORED(X) + #define PRAGMA_CLANG_DIAGNOSTIC_IGNORED(X) #endif #define PASTE2(a, b) a##b @@ -588,7 +603,7 @@ typedef int lock_cap_t; #define UNIMPLEMENTED() \ do { \ - fprintf(stderr, "%s%s:%u: Unimplemented\n", __FILE__, __func__, __LINE__); \ + fprintf(stderr, "%s%s:%d: Unimplemented\n", __FILE__, __func__, __LINE__); \ TRAP(); \ } while (0) diff --git a/src/util/uuid.h b/src/util/uuid.h index 160d16d4..db2a679e 100644 --- a/src/util/uuid.h +++ b/src/util/uuid.h @@ -21,9 +21,9 @@ typedef struct { }) #define CONST_UUID(_0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15) \ - ((const uuid_t){ \ + { \ .bytes = { _0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15 }, \ - }) + } static inline bool uuid_equals(const uuid_t a, const uuid_t b) { return memcmp(&a, &b, sizeof(uuid_t)) == 0; diff --git a/src/vk_renderer.c b/src/vk_renderer.c index 4a96b810..3c2abe5a 100644 --- a/src/vk_renderer.c +++ b/src/vk_renderer.c @@ -52,7 +52,7 @@ static VkBool32 on_debug_utils_message( UNUSED void *userdata ) { LOG_DEBUG( - "[%s] (%d, %s) %s (queues: %d, cmdbufs: %d, objects: %d)\n", + "[%s] (%d, %s) %s (queues: %u, cmdbufs: %u, objects: %u)\n", severity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT ? "VERBOSE" : severity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT ? "INFO" : severity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT ? 
"WARNING" : @@ -160,7 +160,7 @@ struct vk_renderer { const char **enabled_device_extensions; }; -MUST_CHECK struct vk_renderer *vk_renderer_new() { +MUST_CHECK struct vk_renderer *vk_renderer_new(void) { PFN_vkDestroyDebugUtilsMessengerEXT destroy_debug_utils_messenger; PFN_vkCreateDebugUtilsMessengerEXT create_debug_utils_messenger; VkDebugUtilsMessengerEXT debug_utils_messenger; diff --git a/src/vk_renderer.h b/src/vk_renderer.h index 6e25b145..6ce8a61a 100644 --- a/src/vk_renderer.h +++ b/src/vk_renderer.h @@ -55,7 +55,7 @@ struct vk_renderer; * * @return New vulkan renderer instance. */ -struct vk_renderer *vk_renderer_new(); +struct vk_renderer *vk_renderer_new(void); void vk_renderer_destroy(struct vk_renderer *renderer); diff --git a/src/vulkan.c b/src/vulkan.c new file mode 100644 index 00000000..0b67b97e --- /dev/null +++ b/src/vulkan.c @@ -0,0 +1,81 @@ +#include "vulkan.h" + +#include "util/macros.h" + +const char *vk_strerror(VkResult result) { + PRAGMA_DIAGNOSTIC_PUSH + + // We'd really like to use PRAGMA_DIAGNOSTIC_WARNING for "-Wswitch-enum" here, + // but CodeChecker makes it hard to distinguish between warnings and errors + // and will always treat this an error. + // So ignore it for now. + PRAGMA_DIAGNOSTIC_IGNORED("-Wswitch-enum") + switch (result) { + case VK_SUCCESS: return "VK_SUCCESS"; + case VK_NOT_READY: return "VK_NOT_READY"; + case VK_TIMEOUT: return "VK_TIMEOUT"; + case VK_EVENT_SET: return "VK_EVENT_SET"; + case VK_EVENT_RESET: return "VK_EVENT_RESET"; + case VK_INCOMPLETE: return "VK_INCOMPLETE"; + case VK_ERROR_OUT_OF_HOST_MEMORY: return "VK_ERROR_OUT_OF_HOST_MEMORY"; + case VK_ERROR_OUT_OF_DEVICE_MEMORY: return "VK_ERROR_OUT_OF_DEVICE_MEMORY"; + case VK_ERROR_INITIALIZATION_FAILED: return "VK_ERROR_INITIALIZATION_FAILED"; + case VK_ERROR_DEVICE_LOST: return "VK_ERROR_DEVICE_LOST"; + case VK_ERROR_MEMORY_MAP_FAILED: return "VK_ERROR_MEMORY_MAP_FAILED"; + case VK_ERROR_LAYER_NOT_PRESENT: return "VK_ERROR_LAYER_NOT_PRESENT"; + case VK_ERROR_EXTENSION_NOT_PRESENT: return "VK_ERROR_EXTENSION_NOT_PRESENT"; + case VK_ERROR_FEATURE_NOT_PRESENT: return "VK_ERROR_FEATURE_NOT_PRESENT"; + case VK_ERROR_INCOMPATIBLE_DRIVER: return "VK_ERROR_INCOMPATIBLE_DRIVER"; + case VK_ERROR_TOO_MANY_OBJECTS: return "VK_ERROR_TOO_MANY_OBJECTS"; + case VK_ERROR_FORMAT_NOT_SUPPORTED: return "VK_ERROR_FORMAT_NOT_SUPPORTED"; + case VK_ERROR_FRAGMENTED_POOL: return "VK_ERROR_FRAGMENTED_POOL"; +#if VK_HEADER_VERSION >= 131 + case VK_ERROR_UNKNOWN: return "VK_ERROR_UNKNOWN"; +#endif + case VK_ERROR_OUT_OF_POOL_MEMORY: return "VK_ERROR_OUT_OF_POOL_MEMORY"; + case VK_ERROR_INVALID_EXTERNAL_HANDLE: return "VK_ERROR_INVALID_EXTERNAL_HANDLE"; +#if VK_HEADER_VERSION >= 131 + case VK_ERROR_FRAGMENTATION: return "VK_ERROR_FRAGMENTATION"; + case VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS: return "VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS"; +#endif +#if VK_HEADER_VERSION >= 204 + case VK_PIPELINE_COMPILE_REQUIRED: return "VK_PIPELINE_COMPILE_REQUIRED_EXT"; +#endif + case VK_ERROR_SURFACE_LOST_KHR: return "VK_ERROR_SURFACE_LOST_KHR"; + case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR: return "VK_ERROR_NATIVE_WINDOW_IN_USE_KHR"; + case VK_SUBOPTIMAL_KHR: return "VK_SUBOPTIMAL_KHR"; + case VK_ERROR_OUT_OF_DATE_KHR: return "VK_ERROR_OUT_OF_DATE_KHR"; + case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR: return "VK_ERROR_INCOMPATIBLE_DISPLAY_KHR"; + case VK_ERROR_VALIDATION_FAILED_EXT: return "VK_ERROR_VALIDATION_FAILED_EXT"; + case VK_ERROR_INVALID_SHADER_NV: return "VK_ERROR_INVALID_SHADER_NV"; +#if VK_HEADER_VERSION >= 218 && 
VK_ENABLE_BETA_EXTENSIONS + case VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR: return "VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR"; + case VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR: return "VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR"; + case VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR: return "VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR"; + case VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR: return "VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR"; + case VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR: return "VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR"; + case VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR: return "VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR"; +#endif +#if VK_HEADER_VERSION >= 89 + case VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT: return "VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT"; +#endif +#if VK_HEADER_VERSION >= 204 + case VK_ERROR_NOT_PERMITTED_KHR: return "VK_ERROR_NOT_PERMITTED_KHR"; +#endif +#if VK_HEADER_VERSION >= 105 + case VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT: return "VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT"; +#endif +#if VK_HEADER_VERSION >= 135 + case VK_THREAD_IDLE_KHR: return "VK_THREAD_IDLE_KHR"; + case VK_THREAD_DONE_KHR: return "VK_THREAD_DONE_KHR"; + case VK_OPERATION_DEFERRED_KHR: return "VK_OPERATION_DEFERRED_KHR"; + case VK_OPERATION_NOT_DEFERRED_KHR: return "VK_OPERATION_NOT_DEFERRED_KHR"; +#endif +#if VK_HEADER_VERSION >= 213 + case VK_ERROR_COMPRESSION_EXHAUSTED_EXT: return "VK_ERROR_COMPRESSION_EXHAUSTED_EXT"; +#endif + case VK_RESULT_MAX_ENUM: + default: return ""; + } + PRAGMA_DIAGNOSTIC_POP +} diff --git a/src/vulkan.h b/src/vulkan.h index 3821805a..e665af4c 100644 --- a/src/vulkan.h +++ b/src/vulkan.h @@ -16,75 +16,9 @@ #include -static inline const char *vk_strerror(VkResult result) { - switch (result) { - case VK_SUCCESS: return "VK_SUCCESS"; - case VK_NOT_READY: return "VK_NOT_READY"; - case VK_TIMEOUT: return "VK_TIMEOUT"; - case VK_EVENT_SET: return "VK_EVENT_SET"; - case VK_EVENT_RESET: return "VK_EVENT_RESET"; - case VK_INCOMPLETE: return "VK_INCOMPLETE"; - case VK_ERROR_OUT_OF_HOST_MEMORY: return "VK_ERROR_OUT_OF_HOST_MEMORY"; - case VK_ERROR_OUT_OF_DEVICE_MEMORY: return "VK_ERROR_OUT_OF_DEVICE_MEMORY"; - case VK_ERROR_INITIALIZATION_FAILED: return "VK_ERROR_INITIALIZATION_FAILED"; - case VK_ERROR_DEVICE_LOST: return "VK_ERROR_DEVICE_LOST"; - case VK_ERROR_MEMORY_MAP_FAILED: return "VK_ERROR_MEMORY_MAP_FAILED"; - case VK_ERROR_LAYER_NOT_PRESENT: return "VK_ERROR_LAYER_NOT_PRESENT"; - case VK_ERROR_EXTENSION_NOT_PRESENT: return "VK_ERROR_EXTENSION_NOT_PRESENT"; - case VK_ERROR_FEATURE_NOT_PRESENT: return "VK_ERROR_FEATURE_NOT_PRESENT"; - case VK_ERROR_INCOMPATIBLE_DRIVER: return "VK_ERROR_INCOMPATIBLE_DRIVER"; - case VK_ERROR_TOO_MANY_OBJECTS: return "VK_ERROR_TOO_MANY_OBJECTS"; - case VK_ERROR_FORMAT_NOT_SUPPORTED: return "VK_ERROR_FORMAT_NOT_SUPPORTED"; - case VK_ERROR_FRAGMENTED_POOL: return "VK_ERROR_FRAGMENTED_POOL"; -#if VK_HEADER_VERSION >= 131 - case VK_ERROR_UNKNOWN: return "VK_ERROR_UNKNOWN"; -#endif - case VK_ERROR_OUT_OF_POOL_MEMORY: return "VK_ERROR_OUT_OF_POOL_MEMORY"; - case VK_ERROR_INVALID_EXTERNAL_HANDLE: return "VK_ERROR_INVALID_EXTERNAL_HANDLE"; -#if VK_HEADER_VERSION >= 131 - case VK_ERROR_FRAGMENTATION: return "VK_ERROR_FRAGMENTATION"; - case VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS: return "VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS"; -#endif -#if VK_HEADER_VERSION >= 204 - case VK_PIPELINE_COMPILE_REQUIRED: return "VK_PIPELINE_COMPILE_REQUIRED_EXT"; -#endif - 
case VK_ERROR_SURFACE_LOST_KHR: return "VK_ERROR_SURFACE_LOST_KHR"; - case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR: return "VK_ERROR_NATIVE_WINDOW_IN_USE_KHR"; - case VK_SUBOPTIMAL_KHR: return "VK_SUBOPTIMAL_KHR"; - case VK_ERROR_OUT_OF_DATE_KHR: return "VK_ERROR_OUT_OF_DATE_KHR"; - case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR: return "VK_ERROR_INCOMPATIBLE_DISPLAY_KHR"; - case VK_ERROR_VALIDATION_FAILED_EXT: return "VK_ERROR_VALIDATION_FAILED_EXT"; - case VK_ERROR_INVALID_SHADER_NV: return "VK_ERROR_INVALID_SHADER_NV"; -#if VK_HEADER_VERSION >= 218 && VK_ENABLE_BETA_EXTENSIONS - case VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR: return "VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR"; - case VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR: return "VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR"; - case VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR: return "VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR"; - case VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR: return "VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR"; - case VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR: return "VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR"; - case VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR: return "VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR"; -#endif -#if VK_HEADER_VERSION >= 89 - case VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT: return "VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT"; -#endif -#if VK_HEADER_VERSION >= 204 - case VK_ERROR_NOT_PERMITTED_KHR: return "VK_ERROR_NOT_PERMITTED_KHR"; -#endif -#if VK_HEADER_VERSION >= 105 - case VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT: return "VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT"; -#endif -#if VK_HEADER_VERSION >= 135 - case VK_THREAD_IDLE_KHR: return "VK_THREAD_IDLE_KHR"; - case VK_THREAD_DONE_KHR: return "VK_THREAD_DONE_KHR"; - case VK_OPERATION_DEFERRED_KHR: return "VK_OPERATION_DEFERRED_KHR"; - case VK_OPERATION_NOT_DEFERRED_KHR: return "VK_OPERATION_NOT_DEFERRED_KHR"; -#endif -#if VK_HEADER_VERSION >= 213 - case VK_ERROR_COMPRESSION_EXHAUSTED_EXT: return "VK_ERROR_COMPRESSION_EXHAUSTED_EXT"; -#endif - default: return ""; - } -} +const char *vk_strerror(VkResult result); -#define LOG_VK_ERROR(result, fmt, ...) LOG_ERROR(fmt ": %s\n", __VA_ARGS__ vk_strerror(result)) +#define LOG_VK_ERROR_FMT(result, fmt, ...) LOG_ERROR(fmt ": %s\n", __VA_ARGS__ vk_strerror(result)) +#define LOG_VK_ERROR(result, str) LOG_ERROR(str ": %s\n", vk_strerror(result)) #endif // _FLUTTERPI_SRC_VULKAN_H diff --git a/src/window.c b/src/window.c index 687a747f..920878ee 100644 --- a/src/window.c +++ b/src/window.c @@ -73,7 +73,7 @@ struct window { * To calculate this, the physical dimensions of the display are required. If there are no physical dimensions, * this will default to 1.0. */ - double pixel_ratio; + float pixel_ratio; /** * @brief Whether we have physical screen dimensions and @ref width_mm and @ref height_mm contain usable values. 
@@ -300,7 +300,7 @@ static int window_init( // clang-format on ) { enum device_orientation original_orientation; - double pixel_ratio; + float pixel_ratio; ASSERT_NOT_NULL(window); ASSERT_NOT_NULL(tracer); @@ -317,7 +317,7 @@ static int window_init( ); pixel_ratio = 1.0; } else { - pixel_ratio = (10.0 * width) / (width_mm * 38.0); + pixel_ratio = (10.0f * width) / (width_mm * 38.0f); int horizontal_dpi = (int) (width / (width_mm / 25.4)); int vertical_dpi = (int) (height / (height_mm / 25.4)); @@ -943,9 +943,8 @@ MUST_CHECK struct window *kms_window_new( has_dimensions = true; width_mm = selected_connector->variable_state.width_mm; height_mm = selected_connector->variable_state.height_mm; - } else if (selected_connector->type == DRM_MODE_CONNECTOR_DSI - && selected_connector->variable_state.width_mm == 0 - && selected_connector->variable_state.height_mm == 0) { + } else if (selected_connector->type == DRM_MODE_CONNECTOR_DSI && selected_connector->variable_state.width_mm == 0 && + selected_connector->variable_state.height_mm == 0) { // assume this is the official Raspberry Pi DSI display. has_dimensions = true; width_mm = 155; @@ -985,7 +984,7 @@ MUST_CHECK struct window *kms_window_new( mode_get_vrefresh(selected_mode), width_mm, height_mm, - window->pixel_ratio, + (double) (window->pixel_ratio), has_forced_pixel_format ? get_pixfmt_info(forced_pixel_format)->name : "(any)" ); @@ -1219,6 +1218,7 @@ static int kms_window_push_composition_locked(struct window *window, struct fl_l req = kms_req_builder_build(builder); if (req == NULL) { + ok = EIO; goto fail_unref_builder; } @@ -1227,6 +1227,7 @@ static int kms_window_push_composition_locked(struct window *window, struct fl_l frame = malloc(sizeof *frame); if (frame == NULL) { + ok = ENOMEM; goto fail_unref_req; } @@ -1428,11 +1429,15 @@ static struct render_surface *kms_window_get_render_surface_internal(struct wind drm_plane_for_each_modified_format(plane, count_modifiers_for_pixel_format, &context); n_allowed_modifiers = context.n_modifiers; - allowed_modifiers = calloc(n_allowed_modifiers, sizeof(*context.modifiers)); - context.modifiers = allowed_modifiers; + if (n_allowed_modifiers) { + allowed_modifiers = calloc(n_allowed_modifiers, sizeof(*context.modifiers)); + context.modifiers = allowed_modifiers; - // Next, fill context.modifiers with the allowed modifiers. - drm_plane_for_each_modified_format(plane, extract_modifiers_for_pixel_format, &context); + // Next, fill context.modifiers with the allowed modifiers. 
+ drm_plane_for_each_modified_format(plane, extract_modifiers_for_pixel_format, &context); + } else { + allowed_modifiers = NULL; + } break; } } @@ -1750,6 +1755,10 @@ static EGLSurface dummy_window_get_egl_surface(struct window *window) { if (window->renderer_type == kOpenGL_RendererType) { struct render_surface *render_surface = dummy_window_get_render_surface_internal(window, false, VEC2I(0, 0)); + if (render_surface == NULL) { + return EGL_NO_SURFACE; + } + return egl_gbm_render_surface_get_egl_surface(CAST_EGL_GBM_RENDER_SURFACE(render_surface)); } else { return EGL_NO_SURFACE; diff --git a/src/window.h b/src/window.h index 2efd0196..7fe89d9b 100644 --- a/src/window.h +++ b/src/window.h @@ -27,7 +27,7 @@ struct view_geometry { struct vec2f view_size, display_size; struct mat3f display_to_view_transform; struct mat3f view_to_display_transform; - double device_pixel_ratio; + float device_pixel_ratio; }; enum renderer_type { kOpenGL_RendererType, kVulkan_RendererType }; diff --git a/test/flutterpi_test.c b/test/flutterpi_test.c index 96236b78..a23cbd90 100644 --- a/test/flutterpi_test.c +++ b/test/flutterpi_test.c @@ -1,10 +1,10 @@ #include #include -void setUp() { +void setUp(void) { } -void tearDown() { +void tearDown(void) { } #define TEST_ASSERT_EQUAL_BOOL(expected, actual) \ @@ -50,7 +50,7 @@ void expect_parsed_cmdline_args_matches(int argc, char **argv, bool expected_res TEST_ASSERT_EQUAL_INT(expected.dummy_display_size.y, actual.dummy_display_size.y); } -static struct flutterpi_cmdline_args get_default_args() { +static struct flutterpi_cmdline_args get_default_args(void) { static char *engine_argv[1] = { "flutter-pi" }; return (struct flutterpi_cmdline_args){ @@ -74,7 +74,7 @@ static struct flutterpi_cmdline_args get_default_args() { }; } -void test_parse_orientation_arg() { +void test_parse_orientation_arg(void) { struct flutterpi_cmdline_args expected = get_default_args(); // test --orientation @@ -132,7 +132,7 @@ void test_parse_orientation_arg() { ); } -void test_parse_rotation_arg() { +void test_parse_rotation_arg(void) { struct flutterpi_cmdline_args expected = get_default_args(); expected.has_rotation = true; @@ -149,7 +149,7 @@ void test_parse_rotation_arg() { expect_parsed_cmdline_args_matches(4, (char *[]){ "flutter-pi", "--rotation", "270", BUNDLE_PATH }, true, expected); } -void test_parse_physical_dimensions_arg() { +void test_parse_physical_dimensions_arg(void) { struct flutterpi_cmdline_args expected = get_default_args(); expected.bundle_path = NULL; @@ -164,7 +164,7 @@ void test_parse_physical_dimensions_arg() { expect_parsed_cmdline_args_matches(4, (char *[]){ "flutter-pi", "--dimensions", "10,10", BUNDLE_PATH }, true, expected); } -void test_parse_pixel_format_arg() { +void test_parse_pixel_format_arg(void) { struct flutterpi_cmdline_args expected = get_default_args(); expected.has_pixel_format = true; @@ -176,7 +176,7 @@ void test_parse_pixel_format_arg() { expect_parsed_cmdline_args_matches(4, (char *[]){ "flutter-pi", "--pixelformat", "RGBA8888", BUNDLE_PATH }, true, expected); } -void test_parse_runtime_mode_arg() { +void test_parse_runtime_mode_arg(void) { struct flutterpi_cmdline_args expected = get_default_args(); // test --debug, --profile, --release @@ -194,14 +194,14 @@ void test_parse_runtime_mode_arg() { expect_parsed_cmdline_args_matches(3, (char *[]){ "flutter-pi", "--release", BUNDLE_PATH }, true, expected); } -void test_parse_bundle_path_arg() { +void test_parse_bundle_path_arg(void) { struct flutterpi_cmdline_args expected = get_default_args(); 
expected.bundle_path = "/path/to/bundle/test"; expect_parsed_cmdline_args_matches(2, (char *[]){ "flutter-pi", "/path/to/bundle/test" }, true, expected); } -void test_parse_engine_arg() { +void test_parse_engine_arg(void) { struct flutterpi_cmdline_args expected = get_default_args(); expected.engine_argc = 2; @@ -210,14 +210,14 @@ void test_parse_engine_arg() { expect_parsed_cmdline_args_matches(3, (char *[]){ "flutter-pi", BUNDLE_PATH, "engine-arg" }, true, expected); } -void test_parse_vulkan_arg() { +void test_parse_vulkan_arg(void) { struct flutterpi_cmdline_args expected = get_default_args(); expected.use_vulkan = true; expect_parsed_cmdline_args_matches(3, (char *[]){ "flutter-pi", "--vulkan", BUNDLE_PATH }, true, expected); } -void test_parse_desired_videomode_arg() { +void test_parse_desired_videomode_arg(void) { struct flutterpi_cmdline_args expected = get_default_args(); expected.desired_videomode = "1920x1080"; @@ -227,7 +227,7 @@ void test_parse_desired_videomode_arg() { expect_parsed_cmdline_args_matches(4, (char *[]){ "flutter-pi", "--videomode", "1920x1080@60", BUNDLE_PATH }, true, expected); } -int main() { +int main(void) { UNITY_BEGIN(); RUN_TEST(test_parse_runtime_mode_arg); diff --git a/test/platformchannel_test.c b/test/platformchannel_test.c index 5047a082..6adb3e30 100644 --- a/test/platformchannel_test.c +++ b/test/platformchannel_test.c @@ -1,6 +1,7 @@ #define _GNU_SOURCE #include "platformchannel.h" +#include #include #include #include @@ -10,34 +11,37 @@ #define RAW_STD_BUF(...) (const struct raw_std_value *) ((const uint8_t[]){ __VA_ARGS__ }) #define AS_RAW_STD_VALUE(_value) ((const struct raw_std_value *) (_value)) +#define DBL_INFINITY ((double) INFINITY) +#define DBL_NAN ((double) NAN) + // required by Unity. -void setUp() { +void setUp(void) { } -void tearDown() { +void tearDown(void) { } -void test_raw_std_value_is_null() { +void test_raw_std_value_is_null(void) { TEST_ASSERT_TRUE(raw_std_value_is_null(RAW_STD_BUF(kStdNull))); TEST_ASSERT_FALSE(raw_std_value_is_null(RAW_STD_BUF(kStdTrue))); } -void test_raw_std_value_is_true() { +void test_raw_std_value_is_true(void) { TEST_ASSERT_TRUE(raw_std_value_is_true(RAW_STD_BUF(kStdTrue))); TEST_ASSERT_FALSE(raw_std_value_is_true(RAW_STD_BUF(kStdFalse))); } -void test_raw_std_value_is_false() { +void test_raw_std_value_is_false(void) { TEST_ASSERT_TRUE(raw_std_value_is_false(RAW_STD_BUF(kStdFalse))); TEST_ASSERT_FALSE(raw_std_value_is_false(RAW_STD_BUF(kStdTrue))); } -void test_raw_std_value_is_int32() { +void test_raw_std_value_is_int32(void) { TEST_ASSERT_TRUE(raw_std_value_is_int32(RAW_STD_BUF(kStdInt32))); TEST_ASSERT_FALSE(raw_std_value_is_int32(RAW_STD_BUF(kStdNull))); } -void test_raw_std_value_as_int32() { +void test_raw_std_value_as_int32(void) { // clang-format off alignas(16) uint8_t buffer[5] = { kStdInt32, @@ -53,12 +57,12 @@ void test_raw_std_value_as_int32() { TEST_ASSERT_EQUAL_INT32(-2003205, raw_std_value_as_int32(AS_RAW_STD_VALUE(buffer))); } -void test_raw_std_value_is_int64() { +void test_raw_std_value_is_int64(void) { TEST_ASSERT_TRUE(raw_std_value_is_int64(RAW_STD_BUF(kStdInt64))); TEST_ASSERT_FALSE(raw_std_value_is_int64(RAW_STD_BUF(kStdNull))); } -void test_raw_std_value_as_int64() { +void test_raw_std_value_as_int64(void) { // clang-format off alignas(16) uint8_t buffer[9] = { kStdInt64, @@ -74,12 +78,12 @@ void test_raw_std_value_as_int64() { TEST_ASSERT_EQUAL_INT64(-7998090352538419200, raw_std_value_as_int64(AS_RAW_STD_VALUE(buffer))); } -void test_raw_std_value_is_float64() { +void 
test_raw_std_value_is_float64(void) { TEST_ASSERT_TRUE(raw_std_value_is_float64(RAW_STD_BUF(kStdFloat64))); TEST_ASSERT_FALSE(raw_std_value_is_float64(RAW_STD_BUF(kStdNull))); } -void test_raw_std_value_as_float64() { +void test_raw_std_value_as_float64(void) { // clang-format off alignas(16) uint8_t buffer[] = { kStdFloat64, @@ -93,18 +97,18 @@ void test_raw_std_value_as_float64() { TEST_ASSERT_EQUAL_DOUBLE(M_PI, raw_std_value_as_float64(AS_RAW_STD_VALUE(buffer))); - value = INFINITY; + value = DBL_INFINITY; memcpy(buffer + 8, &value, sizeof(value)); - TEST_ASSERT_EQUAL_DOUBLE(INFINITY, raw_std_value_as_float64(AS_RAW_STD_VALUE(buffer))); + TEST_ASSERT_EQUAL_DOUBLE(DBL_INFINITY, raw_std_value_as_float64(AS_RAW_STD_VALUE(buffer))); } -void test_raw_std_value_is_string() { +void test_raw_std_value_is_string(void) { TEST_ASSERT_TRUE(raw_std_value_is_string(RAW_STD_BUF(kStdString))); TEST_ASSERT_FALSE(raw_std_value_is_string(RAW_STD_BUF(kStdNull))); } -void test_raw_std_string_dup() { +void test_raw_std_string_dup(void) { const char *str = "The quick brown fox jumps over the lazy dog."; // clang-format off @@ -129,7 +133,7 @@ void test_raw_std_string_dup() { free(str_duped); } -void test_raw_std_string_equals() { +void test_raw_std_string_equals(void) { const char *str = "The quick brown fox jumps over the lazy dog."; alignas(16) uint8_t buffer[1 + 1 + strlen(str)]; @@ -151,12 +155,12 @@ void test_raw_std_string_equals() { TEST_ASSERT_FALSE(raw_std_string_equals(AS_RAW_STD_VALUE(buffer), "anything")); } -void test_raw_std_value_is_uint8array() { +void test_raw_std_value_is_uint8array(void) { TEST_ASSERT_TRUE(raw_std_value_is_uint8array(RAW_STD_BUF(kStdUInt8Array))); TEST_ASSERT_FALSE(raw_std_value_is_uint8array(RAW_STD_BUF(kStdNull))); } -void test_raw_std_value_as_uint8array() { +void test_raw_std_value_as_uint8array(void) { // clang-format off alignas(16) uint8_t buffer[] = { kStdUInt8Array, @@ -179,12 +183,12 @@ void test_raw_std_value_as_uint8array() { TEST_ASSERT_EQUAL_UINT8_ARRAY(expected, raw_std_value_as_uint8array(AS_RAW_STD_VALUE(buffer)), 4); } -void test_raw_std_value_is_int32array() { +void test_raw_std_value_is_int32array(void) { TEST_ASSERT_TRUE(raw_std_value_is_int32array(RAW_STD_BUF(kStdInt32Array))); TEST_ASSERT_FALSE(raw_std_value_is_int32array(RAW_STD_BUF(kStdNull))); } -void test_raw_std_value_as_int32array() { +void test_raw_std_value_as_int32array(void) { // clang-format off alignas(16) uint8_t buffer[] = { // type @@ -216,12 +220,12 @@ void test_raw_std_value_as_int32array() { TEST_ASSERT_EQUAL_INT32_ARRAY(expected, raw_std_value_as_int32array(AS_RAW_STD_VALUE(buffer)), 2); } -void test_raw_std_value_is_int64array() { +void test_raw_std_value_is_int64array(void) { TEST_ASSERT_TRUE(raw_std_value_is_int64array(RAW_STD_BUF(kStdInt64Array))); TEST_ASSERT_FALSE(raw_std_value_is_int64array(RAW_STD_BUF(kStdNull))); } -void test_raw_std_value_as_int64array() { +void test_raw_std_value_as_int64array(void) { // clang-format off alignas(16) uint8_t buffer[] = { // type @@ -251,12 +255,12 @@ void test_raw_std_value_as_int64array() { TEST_ASSERT_EQUAL_INT64_ARRAY(expected, raw_std_value_as_int64array(AS_RAW_STD_VALUE(buffer)), 2); } -void test_raw_std_value_is_float64array() { +void test_raw_std_value_is_float64array(void) { TEST_ASSERT_TRUE(raw_std_value_is_float64array(RAW_STD_BUF(kStdFloat64Array))); TEST_ASSERT_FALSE(raw_std_value_is_float64array(RAW_STD_BUF(kStdNull))); } -void test_raw_std_value_as_float64array() { +void test_raw_std_value_as_float64array(void) { // 
clang-format off alignas(16) uint8_t buffer[] = { // type @@ -274,7 +278,7 @@ void test_raw_std_value_as_float64array() { // clang-format off double expected[] = { M_PI, - INFINITY, + DBL_INFINITY, }; // clang-format on @@ -288,12 +292,12 @@ void test_raw_std_value_as_float64array() { TEST_ASSERT_EQUAL_DOUBLE_ARRAY(expected, raw_std_value_as_float64array(AS_RAW_STD_VALUE(buffer)), 2); } -void test_raw_std_value_is_list() { +void test_raw_std_value_is_list(void) { TEST_ASSERT_TRUE(raw_std_value_is_list(RAW_STD_BUF(kStdList))); TEST_ASSERT_FALSE(raw_std_value_is_list(RAW_STD_BUF(kStdNull))); } -void test_raw_std_list_get_size() { +void test_raw_std_list_get_size(void) { // clang-format off alignas(16) uint8_t buffer[] = { // type @@ -325,12 +329,12 @@ void test_raw_std_list_get_size() { TEST_ASSERT_EQUAL_size_t(0xDEADBEEF, raw_std_list_get_size(AS_RAW_STD_VALUE(buffer))); } -void test_raw_std_value_is_map() { +void test_raw_std_value_is_map(void) { TEST_ASSERT_TRUE(raw_std_value_is_map(RAW_STD_BUF(kStdMap))); TEST_ASSERT_FALSE(raw_std_value_is_map(RAW_STD_BUF(kStdNull))); } -void test_raw_std_map_get_size() { +void test_raw_std_map_get_size(void) { // clang-format off alignas(16) uint8_t buffer[] = { // type @@ -362,12 +366,12 @@ void test_raw_std_map_get_size() { TEST_ASSERT_EQUAL_size_t(0xDEADBEEF, raw_std_map_get_size(AS_RAW_STD_VALUE(buffer))); } -void test_raw_std_value_is_float32array() { +void test_raw_std_value_is_float32array(void) { TEST_ASSERT_TRUE(raw_std_value_is_float32array(RAW_STD_BUF(kStdFloat32Array))); TEST_ASSERT_FALSE(raw_std_value_is_float32array(RAW_STD_BUF(kStdNull))); } -void test_raw_std_value_as_float32array() { +void test_raw_std_value_as_float32array(void) { // clang-format off alignas(16) uint8_t buffer[] = { // type @@ -385,7 +389,7 @@ void test_raw_std_value_as_float32array() { // clang-format off float expected[] = { M_PI, - INFINITY, + DBL_INFINITY, }; // clang-format on @@ -399,7 +403,7 @@ void test_raw_std_value_as_float32array() { TEST_ASSERT_EQUAL_FLOAT_ARRAY(expected, raw_std_value_as_float32array(AS_RAW_STD_VALUE(buffer)), 2); } -void test_raw_std_value_equals() { +void test_raw_std_value_equals(void) { TEST_ASSERT_TRUE(raw_std_value_equals(RAW_STD_BUF(kStdNull), RAW_STD_BUF(kStdNull))); TEST_ASSERT_FALSE(raw_std_value_equals(RAW_STD_BUF(kStdNull), RAW_STD_BUF(kStdTrue))); TEST_ASSERT_FALSE(raw_std_value_equals(RAW_STD_BUF(kStdTrue), RAW_STD_BUF(kStdFalse))); @@ -479,7 +483,7 @@ void test_raw_std_value_equals() { TEST_ASSERT_TRUE(raw_std_value_equals(AS_RAW_STD_VALUE(lhs), AS_RAW_STD_VALUE(rhs))); - f = NAN; + f = DBL_NAN; memcpy(rhs + 8, &f, sizeof(f)); TEST_ASSERT_FALSE(raw_std_value_equals(AS_RAW_STD_VALUE(lhs), AS_RAW_STD_VALUE(rhs))); @@ -689,7 +693,7 @@ void test_raw_std_value_equals() { double array[] = { M_PI, - INFINITY, + DBL_INFINITY, }; // clang-format on @@ -705,7 +709,7 @@ void test_raw_std_value_equals() { rhs[1] = 2; double array2[] = { 0.0, - INFINITY, + DBL_INFINITY, }; memcpy(rhs + 8, array2, sizeof(array2)); @@ -783,7 +787,7 @@ void test_raw_std_value_equals() { int64_t int64 = (int64_t) INT64_MIN; float floats[] = { M_PI, - INFINITY, + DBL_INFINITY, }; // clang-format on @@ -835,7 +839,7 @@ void test_raw_std_value_equals() { float array[] = { M_PI, - INFINITY, + DBL_INFINITY, }; // clang-format on @@ -852,7 +856,7 @@ void test_raw_std_value_equals() { // clang-format off float array2[] = { 0.0, - INFINITY, + DBL_INFINITY, }; // clang-format on memcpy(rhs + 4, array2, sizeof(array2)); @@ -861,18 +865,18 @@ void 
test_raw_std_value_equals() { } } -void test_raw_std_value_is_bool() { +void test_raw_std_value_is_bool(void) { TEST_ASSERT_FALSE(raw_std_value_is_bool(RAW_STD_BUF(kStdNull))); TEST_ASSERT_TRUE(raw_std_value_is_bool(RAW_STD_BUF(kStdTrue))); TEST_ASSERT_TRUE(raw_std_value_is_bool(RAW_STD_BUF(kStdFalse))); } -void test_raw_std_value_as_bool() { +void test_raw_std_value_as_bool(void) { TEST_ASSERT_TRUE(raw_std_value_as_bool(RAW_STD_BUF(kStdTrue))); TEST_ASSERT_FALSE(raw_std_value_as_bool(RAW_STD_BUF(kStdFalse))); } -void test_raw_std_value_is_int() { +void test_raw_std_value_is_int(void) { TEST_ASSERT_FALSE(raw_std_value_is_int(RAW_STD_BUF(kStdNull))); TEST_ASSERT_FALSE(raw_std_value_is_int(RAW_STD_BUF(kStdTrue))); TEST_ASSERT_FALSE(raw_std_value_is_int(RAW_STD_BUF(kStdFalse))); @@ -881,7 +885,7 @@ void test_raw_std_value_is_int() { TEST_ASSERT_FALSE(raw_std_value_is_int(RAW_STD_BUF(kStdFloat64))); } -void test_raw_std_value_as_int() { +void test_raw_std_value_as_int(void) { // clang-format off alignas(16) uint8_t buffer[9] = { kStdInt32, @@ -905,7 +909,7 @@ void test_raw_std_value_as_int() { TEST_ASSERT_EQUAL_INT64(INT32_MIN, raw_std_value_as_int(AS_RAW_STD_VALUE(buffer))); } -void test_raw_std_value_get_size() { +void test_raw_std_value_get_size(void) { // clang-format off alignas(16) uint8_t buffer[] = { // type @@ -938,7 +942,7 @@ void test_raw_std_value_get_size() { TEST_ASSERT_EQUAL_size_t(0xDEADBEEF, raw_std_value_get_size(AS_RAW_STD_VALUE(buffer))); } -void test_raw_std_value_after() { +void test_raw_std_value_after(void) { // null { // clang-format off @@ -1263,7 +1267,7 @@ void test_raw_std_value_after() { } } -void test_raw_std_list_get_first_element() { +void test_raw_std_list_get_first_element(void) { // list const char *str = "The quick brown fox jumps over the lazy dog."; @@ -1286,7 +1290,7 @@ void test_raw_std_list_get_first_element() { ); } -void test_raw_std_list_get_nth_element() { +void test_raw_std_list_get_nth_element(void) { // list const char *str = "The quick brown fox jumps over the lazy dog."; @@ -1309,7 +1313,7 @@ void test_raw_std_list_get_nth_element() { ); } -void test_raw_std_map_get_first_key() { +void test_raw_std_map_get_first_key(void) { // map // clang-format off alignas(16) uint8_t buffer[] = { @@ -1340,31 +1344,31 @@ void test_raw_std_map_get_first_key() { TEST_ASSERT_EQUAL_PTR(buffer + 1 + 1 + 4, raw_std_map_get_first_key(AS_RAW_STD_VALUE(buffer))); } -void test_raw_std_map_find() { +void test_raw_std_map_find(void) { } -void test_raw_std_map_find_str() { +void test_raw_std_map_find_str(void) { } -void test_raw_std_value_check() { +void test_raw_std_value_check(void) { } -void test_raw_std_method_call_check() { +void test_raw_std_method_call_check(void) { } -void test_raw_std_method_call_response_check() { +void test_raw_std_method_call_response_check(void) { } -void test_raw_std_event_check() { +void test_raw_std_event_check(void) { } -void test_raw_std_method_call_get_method() { +void test_raw_std_method_call_get_method(void) { } -void test_raw_std_method_call_get_method_dup() { +void test_raw_std_method_call_get_method_dup(void) { } -void test_raw_std_method_call_get_arg() { +void test_raw_std_method_call_get_arg(void) { } int main(void) { diff --git a/third_party/flutter_embedder_header/include/flutter_embedder.h b/third_party/flutter_embedder_header/include/flutter_embedder_header/flutter_embedder.h similarity index 100% rename from third_party/flutter_embedder_header/include/flutter_embedder.h rename to 
third_party/flutter_embedder_header/include/flutter_embedder_header/flutter_embedder.h diff --git a/src/util/dynarray.h b/third_party/mesa3d/include/mesa3d/dynarray.h similarity index 98% rename from src/util/dynarray.h rename to third_party/mesa3d/include/mesa3d/dynarray.h index 2d9f2101..6ca5aeb4 100644 --- a/src/util/dynarray.h +++ b/third_party/mesa3d/include/mesa3d/dynarray.h @@ -32,8 +32,6 @@ #include #include -#include "macros.h" - #ifdef __cplusplus extern "C" { #endif @@ -156,7 +154,10 @@ static inline void util_dynarray_trim(struct util_dynarray *buf) { if (buf->size != buf->capacity) { if (buf->size) { - buf->data = realloc(buf->data, buf->size); + void *new_data = realloc(buf->data, buf->size); + ASSERT_NOT_NULL(new_data); + + buf->data = new_data; buf->capacity = buf->size; } else { free(buf->data); diff --git a/third_party/sentry-native b/third_party/sentry-native index 0f1d6647..a64d5bd8 160000 --- a/third_party/sentry-native +++ b/third_party/sentry-native @@ -1 +1 @@ -Subproject commit 0f1d664759cba187a846a562f9d55f3c62dffaa3 +Subproject commit a64d5bd8ee130f2cda196b6fa7d9b65bfa6d32e2
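
The khash convenience macros vendored in this patch (KHASH_MAP_INIT_STR, kh_foreach, and friends) are used roughly as follows. This is a minimal sketch, not code from the patch: the table name "strmap", the include path, and the stored values are made up for illustration.

#include <stdio.h>

#include "util/khash.h"  // assumed location of the vendored header

KHASH_MAP_INIT_STR(strmap, int)

static void khash_usage_sketch(void) {
    khash_t(strmap) *map = kh_init(strmap);

    // Insert a key; kh_put returns the bucket, `absent` tells us if it was new.
    int absent;
    khint_t bucket = kh_put(strmap, map, "flutter-pi", &absent);
    kh_val(map, bucket) = 42;

    // Iterate over all key/value pairs with the kh_foreach macro from above.
    const char *key;
    int value;
    kh_foreach(map, key, value, { printf("%s -> %d\n", key, value); });

    kh_destroy(strmap, map);
}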
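
The DEBUG-only assert_mutex_locked() helper added to lock_ops.h is meant to document locking invariants on internal functions; in release builds it compiles to nothing. A sketch of a possible call site, assuming the include path src/util/lock_ops.h; the struct and function below are hypothetical, not part of flutter-pi.

#include <pthread.h>
#include <stddef.h>

#include "util/lock_ops.h"

struct frame_queue {
    pthread_mutex_t mutex;
    size_t length;
};

// Internal helper: the caller must already hold queue->mutex.
static size_t frame_queue_length_locked(struct frame_queue *queue) {
    assert_mutex_locked(&queue->mutex);  // no-op unless DEBUG is defined
    return queue->length;
}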
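
With vk_strerror() moved out of line, the logging macro is split into LOG_VK_ERROR (plain message) and LOG_VK_ERROR_FMT (printf-style format plus arguments). A hedged usage sketch of the plain variant, assuming the project's LOG_ERROR macro is in scope via the usual headers; the fence-creation wrapper is illustrative only.

#include <errno.h>

#include <vulkan/vulkan.h>

#include "vulkan.h"

// Hypothetical wrapper: log a failed Vulkan call and map it to an errno-style code.
static int create_fence_or_log(VkDevice device, const VkFenceCreateInfo *info, VkFence *out) {
    VkResult result = vkCreateFence(device, info, NULL, out);
    if (result != VK_SUCCESS) {
        LOG_VK_ERROR(result, "Could not create fence. vkCreateFence");
        return EIO;
    }
    return 0;
}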
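
For reference, the pixel_ratio expression in window_init() follows Flutter's convention of roughly 38 logical pixels per centimeter (about 96 logical px per inch). A worked example with made-up display values:

// Hypothetical display: 1920 px across, 340 mm wide.
static float example_pixel_ratio(void) {
    float width = 1920.0f;    // horizontal resolution in physical pixels
    float width_mm = 340.0f;  // physical width reported by the display connector

    // (10 * px) / (mm * 38)  ==  physical px / (cm * 38 logical px per cm)
    return (10.0f * width) / (width_mm * 38.0f);  // 19200 / 12920 ≈ 1.49
}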