diff --git a/.gitignore b/.gitignore index 32a43b8c637..d8f25a78da1 100644 --- a/.gitignore +++ b/.gitignore @@ -207,7 +207,6 @@ cscope.po.out *.swp # Visual Studio Code -.vscode/ *.code-workspace .history/ @@ -378,4 +377,7 @@ $RECYCLE.BIN/ *.msm *.msp *.lnk +.vscode/c_cpp_properties.json +.vscode/settings.json *.generated.props +*.ogv diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 00000000000..ade62613ca4 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,45 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Godot Editor", + "type": "cppdbg", + "request": "launch", + "program": "${workspaceFolder}/bin/godot.macos.editor.dev.arm64", + "args": [ + "--editor", + "--path", + "${workspaceFolder}/examples/camera", + ], + "stopAtEntry": false, + "externalConsole": false, + "cwd": "${workspaceFolder}", + "environment": [], + "MIMode": "lldb", + "preLaunchTask": "Build Godot" + }, + { + "name": "Godot Project", + "type": "cppdbg", + "request": "launch", + "program": "${workspaceFolder}/bin/godot.macos.editor.dev.arm64", + "args": [ + "--path", + "${workspaceFolder}/examples/camera", + "--write-movie", + "test.ogv", + "--fixed-fps", + "15" + ], + "stopAtEntry": false, + "externalConsole": false, + "cwd": "${workspaceFolder}", + "environment": [], + "MIMode": "lldb", + "preLaunchTask": "Build Godot" + } + ] +} \ No newline at end of file diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 00000000000..900db6608fe --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,218 @@ +{ + // See https://go.microsoft.com/fwlink/?LinkId=733558 + // for the documentation about the tasks.json format + "version": "2.0.0", + "tasks": [ + { + "label": "Clean Godot", + "group": "build", + "type": "shell", + "command": "scons", + "args": [ + "--clean", + "dev_build=yes" + ], + "problemMatcher": { + "base": "$gcc", + "fileLocation": [ + "autoDetect", + "${workspaceFolder}" + ] + } + }, + { + "label": "Build Godot", + "group": "build", + "type": "shell", + "command": "scons", + "args": [ + "dev_build=yes", + "debug_symbols=yes", + "progress=no", + "vulkan=yes" + ], + "problemMatcher": { + "base": "$gcc", + "fileLocation": [ + "autoDetect", + "${workspaceFolder}" + ] + } + }, + { + "label": "Build Android debug template ARM32", + "group": "build", + "type": "shell", + "command": "scons", + "args": [ + "platform=android", + "arch=arm32", + "target=template_debug", + "progress=no" + ], + "problemMatcher": { + "base": "$gcc", + "fileLocation": [ + "autoDetect", + "${workspaceFolder}" + ] + } + }, + { + "label": "Build Android debug template ARM64", + "group": "build", + "type": "shell", + "command": "scons", + "args": [ + "platform=android", + "arch=arm64", + "target=template_debug", + "progress=no" + ], + "problemMatcher": { + "base": "$gcc", + "fileLocation": [ + "autoDetect", + "${workspaceFolder}" + ] + } + }, + { + "label": "Build Android debug template X86_32", + "group": "build", + "type": "shell", + "command": "scons", + "args": [ + "platform=android", + "arch=x86_32", + "target=template_debug", + "progress=no" + ], + "problemMatcher": { + "base": "$gcc", + "fileLocation": [ + "autoDetect", + "${workspaceFolder}" + ] + } + }, + { + "label": "Build Android debug template X86_64", + "group": "build", + "type": "shell", + "command": "scons", + "args": [ + 
"platform=android", + "arch=x86_64", + "target=template_debug", + "progress=no" + ], + "problemMatcher": { + "base": "$gcc", + "fileLocation": [ + "autoDetect", + "${workspaceFolder}" + ] + } + }, + { + "label": "Build Android release template ARM32", + "group": "build", + "type": "shell", + "command": "scons", + "args": [ + "platform=android", + "arch=arm32", + "target=template_release", + "progress=no" + ], + "problemMatcher": { + "base": "$gcc", + "fileLocation": [ + "autoDetect", + "${workspaceFolder}" + ] + } + }, + { + "label": "Build Android release template ARM64", + "group": "build", + "type": "shell", + "command": "scons", + "args": [ + "platform=android", + "arch=arm64", + "target=template_release", + "progress=no" + ], + "problemMatcher": { + "base": "$gcc", + "fileLocation": [ + "autoDetect", + "${workspaceFolder}" + ] + } + }, + { + "label": "Build Android release template X86_32", + "group": "build", + "type": "shell", + "command": "scons", + "args": [ + "platform=android", + "arch=x86_32", + "target=template_release", + "progress=no" + ], + "problemMatcher": { + "base": "$gcc", + "fileLocation": [ + "autoDetect", + "${workspaceFolder}" + ] + } + }, + { + "label": "Build Android release template X86_64", + "group": "build", + "type": "shell", + "command": "scons", + "args": [ + "platform=android", + "arch=x86_64", + "target=template_release", + "progress=no" + ], + "problemMatcher": { + "base": "$gcc", + "fileLocation": [ + "autoDetect", + "${workspaceFolder}" + ] + } + }, + { + "label": "Install Android export templates", + "group": "build", + "type": "shell", + "command": "./gradlew", + "args": [ + "generateGodotTemplates", + ], + "options": { + "cwd": "${workspaceFolder}/platform/android/java" + }, + "dependsOrder": "sequence", + "dependsOn": [ + "Build Android debug template ARM32", + "Build Android debug template ARM64", + "Build Android release template ARM32", + "Build Android release template ARM64", + // "Build Android debug template X86_32", + // "Build Android debug template X86_64", + // "Build Android release template X86_32", + // "Build Android release template X86_64" + ] + } + ] +} \ No newline at end of file diff --git a/doc/classes/CameraFeed.xml b/doc/classes/CameraFeed.xml index 2b6db13906c..95ce44e47aa 100644 --- a/doc/classes/CameraFeed.xml +++ b/doc/classes/CameraFeed.xml @@ -1,21 +1,15 @@ - A camera feed gives you access to a single physical camera attached to your device. + A camera feed gives you access to a physical camera attached to your device. - A camera feed gives you access to a single physical camera attached to your device. When enabled, Godot will start capturing frames from the camera which can then be used. See also [CameraServer]. - [b]Note:[/b] Many cameras will return YCbCr images which are split into two textures and need to be combined in a shader. Godot does this automatically for you if you set the environment to show the camera image in the background. + A camera feed gives you access to a physical camera attached to your device with specific media size and format. When enabled, Godot will start capturing frames from the camera which can then be user as texture in other nodes. See also [CameraServer]. + [b]Note:[/b] Some media formats, like YUV or N12 produce two textures (one for the Y plane and one for the UV plane). These textures need be drawn using a suitable shader where the Y plane is provided as TEXTURE and the UV plane as NORMAL_TEXTURE. See the camera project in the examples folder. 
- - - - Returns feed image data type. - - @@ -34,6 +28,16 @@ Returns the position of camera on the device. + + + + Returns the width of the captured camera frames. + + + + + + Returns the height of the captured camera frames. @@ -98,18 +102,6 @@ - - No image set for the feed. - - - Feed supplies RGB images. - - - Feed supplies YCbCr images that need to be converted to RGB. - - - Feed supplies separate Y and CbCr images that need to be combined and converted to RGB. - Unspecified position. diff --git a/doc/classes/CameraServer.xml b/doc/classes/CameraServer.xml index b09010147ea..406488b9c02 100644 --- a/doc/classes/CameraServer.xml +++ b/doc/classes/CameraServer.xml @@ -6,7 +6,7 @@ The [CameraServer] keeps track of different cameras accessible in Godot. These are external cameras such as webcams or the cameras on your phone. It is notably used to provide AR modules with a video feed from the camera. - [b]Note:[/b] This class is currently only implemented on Linux, macOS, and iOS, on other platforms no [CameraFeed]s will be available. To get a [CameraFeed] on iOS, the camera plugin from [url=https://github.com/godotengine/godot-ios-plugins]godot-ios-plugins[/url] is required. + [b]Note:[/b] This class is currently only implemented on macOS, iOS, Android, and Windows. On other platforms, [CameraFeed] support is still in development. @@ -59,18 +59,4 @@ - - - The RGBA camera image. - - - The [url=https://en.wikipedia.org/wiki/YCbCr]YCbCr[/url] camera image. - - - The Y component camera image. - - - The CbCr component camera image. - - diff --git a/doc/classes/MovieWriter.xml b/doc/classes/MovieWriter.xml index e96080d6a83..b22ab580d6d 100644 --- a/doc/classes/MovieWriter.xml +++ b/doc/classes/MovieWriter.xml @@ -5,8 +5,9 @@ Godot can record videos with non-real-time simulation. Like the [code]--fixed-fps[/code] [url=$DOCS_URL/tutorials/editor/command_line_tutorial.html]command line argument[/url], this forces the reported [code]delta[/code] in [method Node._process] functions to be identical across frames, regardless of how long it actually took to render the frame. This can be used to record high-quality videos with perfect frame pacing regardless of your hardware's capabilities. - Godot has 2 built-in [MovieWriter]s: - - AVI container with MJPEG for video and uncompressed audio ([code].avi[/code] file extension). Lossy compression, medium file sizes, fast encoding. The lossy compression quality can be adjusted by changing [member ProjectSettings.editor/movie_writer/mjpeg_quality]. The resulting file can be viewed in most video players, but it must be converted to another format for viewing on the web or by Godot with [VideoStreamPlayer]. MJPEG does not support transparency. AVI output is currently limited to a file of 4 GB in size at most. + Godot has 3 built-in [MovieWriter]s: + - OGV container with Theora for video and Vorbis for audio ([code].ogv[/code] file extension). Lossy compression, medium file sizes, fast encoding. The lossy compression quality can be adjusted by changing [member ProjectSettings.editor/movie_writer/video_quality] and [member ProjectSettings.editor/movie_writer/audio_quality]. The resulting file can be viewed in Godot with [VideoStreamPlayer], as well as in most video players and web browsers. + - AVI container with MJPEG for video and uncompressed audio ([code].avi[/code] file extension). Lossy compression, medium file sizes, fast encoding. The lossy compression quality can be adjusted by changing [member ProjectSettings.editor/movie_writer/video_quality].
The resulting file can be viewed in most video players, but it must be converted to another format for viewing on the web or by Godot with [VideoStreamPlayer]. MJPEG does not support transparency. AVI output is currently limited to a file of 4 GB in size at most. - PNG image sequence for video and WAV for audio ([code].png[/code] file extension). Lossless compression, large file sizes, slow encoding. Designed to be encoded to a video file with another tool such as [url=https://ffmpeg.org/]FFmpeg[/url] after recording. Transparency is currently not supported, even if the root viewport is set to be transparent. If you need to encode to a different format or pipe a stream through third-party software, you can extend the [MovieWriter] class to create your own movie writers. This should typically be done using GDExtension for performance reasons. [b]Editor usage:[/b] A default movie file path can be specified in [member ProjectSettings.editor/movie_writer/movie_file]. Alternatively, for running single scenes, a [code]movie_file[/code] metadata can be added to the root node, specifying the path to a movie file that will be used when recording that scene. Once a path is set, click the video reel icon in the top-right corner of the editor to enable Movie Maker mode, then run any scene as usual. The engine will start recording as soon as the splash screen is finished, and it will only stop recording when the engine quits. Click the video reel icon again to disable Movie Maker mode. Note that toggling Movie Maker mode does not affect project instances that are already running. diff --git a/doc/classes/ProjectSettings.xml b/doc/classes/ProjectSettings.xml index 1e571e58a17..0f701ae0b23 100644 --- a/doc/classes/ProjectSettings.xml +++ b/doc/classes/ProjectSettings.xml @@ -962,14 +962,18 @@ The audio mix rate to use in the recorded audio when writing a movie (in Hz). This can be different from [member audio/driver/mix_rate], but this value must be divisible by [member editor/movie_writer/fps] to prevent audio from desynchronizing over time. - - The JPEG quality to use when writing a video to an AVI file, between [code]0.01[/code] and [code]1.0[/code] (inclusive). Higher [code]quality[/code] values result in better-looking output at the cost of larger file sizes. Recommended [code]quality[/code] values are between [code]0.75[/code] and [code]0.9[/code]. Even at quality [code]1.0[/code], JPEG compression remains lossy. + + The video encoding quality to use when writing a video to a file, between [code]0.0[/code] and [code]1.0[/code] (inclusive). Higher [code]quality[/code] values result in better-looking output at the cost of larger file sizes. Recommended [code]quality[/code] values are between [code]0.75[/code] and [code]0.9[/code]. Even at quality [code]1.0[/code], compression remains lossy. [b]Note:[/b] This does not affect the audio quality or writing PNG image sequences. + + The audio encoding quality to use when writing audio to a file, between [code]0.0[/code] and [code]1.0[/code] (inclusive). Higher [code]quality[/code] values result in better-sounding output at the cost of larger file sizes. + [b]Note:[/b] This does not affect the video quality. + The output path for the movie. The file extension determines the [MovieWriter] that will be used. Godot has 2 built-in [MovieWriter]s: - - AVI container with MJPEG for video and uncompressed audio ([code].avi[/code] file extension). Lossy compression, medium file sizes, fast encoding.
The lossy compression quality can be adjusted by changing [member ProjectSettings.editor/movie_writer/mjpeg_quality]. The resulting file can be viewed in most video players, but it must be converted to another format for viewing on the web or by Godot with [VideoStreamPlayer]. MJPEG does not support transparency. AVI output is currently limited to a file of 4 GB in size at most. + - AVI container with MJPEG for video and uncompressed audio ([code].avi[/code] file extension). Lossy compression, medium file sizes, fast encoding. The lossy compression quality can be adjusted by changing [member ProjectSettings.editor/movie_writer/video_quality]. The resulting file can be viewed in most video players, but it must be converted to another format for viewing on the web or by Godot with [VideoStreamPlayer]. MJPEG does not support transparency. AVI output is currently limited to a file of 4 GB in size at most. - PNG image sequence for video and WAV for audio ([code].png[/code] file extension). Lossless compression, large file sizes, slow encoding. Designed to be encoded to a video file with another tool such as [url=https://ffmpeg.org/]FFmpeg[/url] after recording. Transparency is currently not supported, even if the root viewport is set to be transparent. If you need to encode to a different format or pipe a stream through third-party software, you can extend this [MovieWriter] class to create your own movie writers. When using PNG output, the frame number will be appended at the end of the file name. It starts from 0 and is padded with 8 digits to ensure correct sorting and easier processing. For example, if the output path is [code]/tmp/hello.png[/code], the first two frames will be [code]/tmp/hello00000000.png[/code] and [code]/tmp/hello00000001.png[/code]. The audio will be saved at [code]/tmp/hello.wav[/code]. diff --git a/doc/translations/it.po b/doc/translations/it.po index 755742f6803..f3976f30f60 100644 --- a/doc/translations/it.po +++ b/doc/translations/it.po @@ -66789,7 +66789,7 @@ msgid "" "- AVI container with MJPEG for video and uncompressed audio ([code].avi[/" "code] file extension). Lossy compression, medium file sizes, fast encoding. " "The lossy compression quality can be adjusted by changing [member " -"ProjectSettings.editor/movie_writer/mjpeg_quality]. The resulting file can be " +"ProjectSettings.editor/movie_writer/video_quality]. The resulting file can be " "viewed in most video players, but it must be converted to another format for " "viewing on the web or by Godot with [VideoStreamPlayer]. MJPEG does not " "support transparency. AVI output is currently limited to a file of 4 GB in " @@ -66815,7 +66815,7 @@ msgstr "" "- Contenitore AVI con MJPEG per video e audio non compresso (estensione file " "[code].avi[/code]). Compressione con perdite, dimensioni file medie, codifica " "veloce. La qualità della compressione con perdite può essere regolata " -"modificando [member ProjectSettings.editor/movie_writer/mjpeg_quality]. È " +"modificando [member ProjectSettings.editor/movie_writer/video_quality]. È " "possibile visualizzare il file risultante nella maggior parte dei lettori " "video, ma deve essere convertito in un altro formato per la visualizzazione " "sul Web o da Godot con [VideoStreamPlayer]. 
MJPEG non supporta la " diff --git a/doc/translations/zh_CN.po b/doc/translations/zh_CN.po index ad3572f5bbd..6ad83657ca8 100644 --- a/doc/translations/zh_CN.po +++ b/doc/translations/zh_CN.po @@ -76286,7 +76286,7 @@ msgid "" "- AVI container with MJPEG for video and uncompressed audio ([code].avi[/" "code] file extension). Lossy compression, medium file sizes, fast encoding. " "The lossy compression quality can be adjusted by changing [member " -"ProjectSettings.editor/movie_writer/mjpeg_quality]. The resulting file can be " +"ProjectSettings.editor/movie_writer/video_quality]. The resulting file can be " "viewed in most video players, but it must be converted to another format for " "viewing on the web or by Godot with [VideoStreamPlayer]. MJPEG does not " "support transparency. AVI output is currently limited to a file of 4 GB in " @@ -76324,7 +76324,7 @@ msgstr "" "Godot 内置的 [MovieWriter] 有两个:\n" "- 使用 MJPEG 视频和未压缩音频的 AVI 容器(文件扩展名为 [code].avi[/code])。有" "损压缩、文件大小中等、编码速度较快。有损压缩质量可以通过修改 [member " -"ProjectSettings.editor/movie_writer/mjpeg_quality] 来调整。生成的文件可以使用" +"ProjectSettings.editor/movie_writer/video_quality] 来调整。生成的文件可以使用" "大多数视频播放器查看,但如果要在 Web 上查看或者用 Godot 的 " "[VideoStreamPlayer] 查看,则必须先进行格式的转换。MJPEG 不支持透明度。AVI 输出" "的文件目前最多为 4 GB 大小。\n" @@ -105865,7 +105865,7 @@ msgid "" "- AVI container with MJPEG for video and uncompressed audio ([code].avi[/" "code] file extension). Lossy compression, medium file sizes, fast encoding. " "The lossy compression quality can be adjusted by changing [member " -"ProjectSettings.editor/movie_writer/mjpeg_quality]. The resulting file can be " +"ProjectSettings.editor/movie_writer/video_quality]. The resulting file can be " "viewed in most video players, but it must be converted to another format for " "viewing on the web or by Godot with [VideoStreamPlayer]. MJPEG does not " "support transparency. AVI output is currently limited to a file of 4 GB in " @@ -105889,7 +105889,7 @@ msgstr "" "Godot 有两个内置的 [MovieWriter]:\n" "- AVI 容器,视频使用 MJPEG、音频未压缩(文件扩展名为 [code].avi[/code])。有损" "压缩,文件大小中等,编码较快。有损压缩质量可以通过 [member ProjectSettings." -"editor/movie_writer/mjpeg_quality] 调整。得到的文件可以使用大多数视频播放器查" +"editor/movie_writer/video_quality] 调整。得到的文件可以使用大多数视频播放器查" "看,但必须转换成其他格式才能在 Web 或 Godot 的 [VideoStreamPlayer] 中播放。" "MJPEG 不支持透明度。AVI 输出目前有单文件 4 GB 的大小限制。\n" "- 视频使用 PNG 图像序列,音频使用 WAV(文件扩展名为 [code].png[/code])。无损" diff --git a/doc/translations/zh_TW.po b/doc/translations/zh_TW.po index 7c4ddceec8a..4da07b8680e 100644 --- a/doc/translations/zh_TW.po +++ b/doc/translations/zh_TW.po @@ -54544,7 +54544,7 @@ msgid "" "- AVI container with MJPEG for video and uncompressed audio ([code].avi[/" "code] file extension). Lossy compression, medium file sizes, fast encoding. " "The lossy compression quality can be adjusted by changing [member " -"ProjectSettings.editor/movie_writer/mjpeg_quality]. The resulting file can be " +"ProjectSettings.editor/movie_writer/video_quality]. The resulting file can be " "viewed in most video players, but it must be converted to another format for " "viewing on the web or by Godot with [VideoStreamPlayer]. MJPEG does not " "support transparency. 
AVI output is currently limited to a file of 4 GB in " @@ -54582,7 +54582,7 @@ msgstr "" "Godot 內建的 [MovieWriter] 有兩個:\n" "- 使用 MJPEG 影片和未壓縮音訊的 AVI 容器(檔副檔名為 [code].avi[/code])。失真" "壓縮、檔大小中等、編碼速度較快。失真壓縮品質可以通過修改 [member " -"ProjectSettings.editor/movie_writer/mjpeg_quality] 來調整。生成的檔可以使用大" +"ProjectSettings.editor/movie_writer/video_quality] 來調整。生成的檔可以使用大" "多數影片播放機查看,但如果要在 Web 上查看或者用 Godot 的 [VideoStreamPlayer] " "查看,則必須先進行格式的轉換。MJPEG 不支援透明度。AVI 輸出的檔目前最多為 4 GB " "大小。\n" @@ -72393,7 +72393,7 @@ msgid "" "- AVI container with MJPEG for video and uncompressed audio ([code].avi[/" "code] file extension). Lossy compression, medium file sizes, fast encoding. " "The lossy compression quality can be adjusted by changing [member " -"ProjectSettings.editor/movie_writer/mjpeg_quality]. The resulting file can be " +"ProjectSettings.editor/movie_writer/video_quality]. The resulting file can be " "viewed in most video players, but it must be converted to another format for " "viewing on the web or by Godot with [VideoStreamPlayer]. MJPEG does not " "support transparency. AVI output is currently limited to a file of 4 GB in " @@ -72417,7 +72417,7 @@ msgstr "" "Godot 有兩個內建的 [MovieWriter]:\n" "- AVI 容器,影片使用 MJPEG、音訊未壓縮(檔副檔名為 [code].avi[/code])。有損壓" "縮,檔大小中等,編碼較快。失真壓縮品質可以通過 [member ProjectSettings.editor/" -"movie_writer/mjpeg_quality] 調整。得到的檔可以使用大多數影片播放機查看,但必須" +"movie_writer/video_quality] 調整。得到的檔可以使用大多數影片播放機查看,但必須" "轉換成其他格式才能在 Web 或 Godot 的 [VideoStreamPlayer] 中播放。MJPEG 不支援" "透明度。AVI 輸出目前有單檔 4 GB 的大小限制。\n" "- 影片使用 PNG 圖像序列,音訊使用 WAV(檔副檔名為 [code].png[/code])。無損壓" diff --git a/examples/camera/.gitattributes b/examples/camera/.gitattributes new file mode 100644 index 00000000000..8ad74f78d9c --- /dev/null +++ b/examples/camera/.gitattributes @@ -0,0 +1,2 @@ +# Normalize EOL for all files that Git considers text files. 
+* text=auto eol=lf diff --git a/examples/camera/.gitignore b/examples/camera/.gitignore new file mode 100644 index 00000000000..7de8ea58b70 --- /dev/null +++ b/examples/camera/.gitignore @@ -0,0 +1,3 @@ +# Godot 4+ specific ignores +.godot/ +android/ diff --git a/examples/camera/camera_nv12.gdshader b/examples/camera/camera_nv12.gdshader new file mode 100644 index 00000000000..2b353a977a1 --- /dev/null +++ b/examples/camera/camera_nv12.gdshader @@ -0,0 +1,15 @@ +shader_type canvas_item; + +void fragment() { + vec3 color; + color.r = texture(TEXTURE, UV).r; + color.gb = texture(NORMAL_TEXTURE, UV).rg - vec2(0.5, 0.5); + + color.rgb = mat3( + vec3(1.00000, 1.00000, 1.00000), + vec3(0.00000, -0.18732, 1.85560), + vec3(1.57481, -0.46813, 0.00000) + ) * color.rgb; + + COLOR = vec4(color, 1.0); +} diff --git a/examples/camera/camera_ycbcr_sep.gdshader b/examples/camera/camera_ycbcr_sep.gdshader new file mode 100644 index 00000000000..452f8059725 --- /dev/null +++ b/examples/camera/camera_ycbcr_sep.gdshader @@ -0,0 +1,16 @@ +shader_type canvas_item; + +void fragment() { + vec3 color; + color.r = texture(TEXTURE, UV).r; + color.g = texture(NORMAL_TEXTURE, UV).g - 0.5; + color.b = texture(SPECULAR_TEXTURE, UV).b - 0.5; + + color.rgb = mat3( + vec3(1.00000, 1.00000, 1.00000), + vec3(0.00000, -0.18732, 1.85560), + vec3(1.57481, -0.46813, 0.00000) + ) * color.rgb; + + COLOR = vec4(color, 1.0); +} \ No newline at end of file diff --git a/examples/camera/capture.gd b/examples/camera/capture.gd new file mode 100644 index 00000000000..b1e77562bf6 --- /dev/null +++ b/examples/camera/capture.gd @@ -0,0 +1,28 @@ +extends Button + +var feed:CameraFeed + +# Called when the node enters the scene tree for the first time. +func _ready(): + var feeds = CameraServer.feeds() + if (feeds.is_empty()): + text = "NO CAMERA" + else: + text = "START" + _on_texture_rect_property_list_changed() + + +func _toggled(toggled_on): + feed.feed_is_active = toggled_on + if (toggled_on): + text = "STOP" + else: + text = "START" + + +func _on_texture_rect_property_list_changed(): + if (feed != null && feed.feed_is_active): _toggled(false) + + var texture_rect = get_node("/root/Control/TextureRect") + var camera_id = texture_rect.texture.get_camera_feed_id() + feed = CameraServer.get_feed_by_id(camera_id) diff --git a/examples/camera/control.tscn b/examples/camera/control.tscn new file mode 100644 index 00000000000..bcfeefc1f8d --- /dev/null +++ b/examples/camera/control.tscn @@ -0,0 +1,77 @@ +[gd_scene load_steps=7 format=3 uid="uid://wcx5rx1sckqy"] + +[ext_resource type="Shader" path="res://camera_nv12.gdshader" id="2_7jche"] +[ext_resource type="Script" path="res://capture.gd" id="3_e3a7h"] + +[sub_resource type="ShaderMaterial" id="ShaderMaterial_psofl"] +shader = ExtResource("2_7jche") + +[sub_resource type="CameraTexture" id="CameraTexture_nlxrt"] +camera_feed_id = 1 + +[sub_resource type="StyleBoxFlat" id="StyleBoxFlat_iy0nj"] +content_margin_left = 8.0 +content_margin_top = 16.0 +content_margin_right = 8.0 +content_margin_bottom = 8.0 +bg_color = Color(0, 0, 0, 0.686275) +corner_radius_top_left = 24 +corner_radius_top_right = 24 + +[sub_resource type="AudioStreamMicrophone" id="AudioStreamMicrophone_qfag3"] + +[node name="Control" type="Control"] +clip_children = 1 +clip_contents = true +layout_mode = 3 +anchors_preset = 15 +anchor_right = 1.0 +anchor_bottom = 1.0 +grow_horizontal = 2 +grow_vertical = 2 + +[node name="TextureRect" type="TextureRect" parent="."] +material = SubResource("ShaderMaterial_psofl") +layout_mode = 1 
+anchors_preset = 15 +anchor_right = 1.0 +anchor_bottom = 1.0 +offset_left = -720.0 +offset_right = -720.0 +grow_horizontal = 2 +grow_vertical = 2 +scale = Vector2(3, 1) +texture = SubResource("CameraTexture_nlxrt") +expand_mode = 5 +stretch_mode = 6 +flip_h = true + +[node name="ScrollContainer" type="ScrollContainer" parent="."] +layout_mode = 1 +anchors_preset = 12 +anchor_top = 1.0 +anchor_right = 1.0 +anchor_bottom = 1.0 +offset_top = -521.0 +grow_horizontal = 2 +grow_vertical = 0 +theme_override_styles/panel = SubResource("StyleBoxFlat_iy0nj") +horizontal_scroll_mode = 0 + +[node name="VBoxContainer" type="VBoxContainer" parent="ScrollContainer"] +layout_mode = 2 +size_flags_horizontal = 3 +size_flags_vertical = 0 + +[node name="Start capture" type="Button" parent="ScrollContainer/VBoxContainer"] +layout_mode = 2 +theme_override_font_sizes/font_size = 42 +toggle_mode = true +text = "Loading..." +flat = true +script = ExtResource("3_e3a7h") + +[node name="AudioStreamPlayer" type="AudioStreamPlayer" parent="."] +stream = SubResource("AudioStreamMicrophone_qfag3") + +[connection signal="property_list_changed" from="TextureRect" to="ScrollContainer/VBoxContainer/Start capture" method="_on_texture_rect_property_list_changed"] diff --git a/examples/camera/default_bus_layout.tres b/examples/camera/default_bus_layout.tres new file mode 100644 index 00000000000..a32868e7244 --- /dev/null +++ b/examples/camera/default_bus_layout.tres @@ -0,0 +1,3 @@ +[gd_resource type="AudioBusLayout" format=3 uid="uid://d4hxcbnwr0jek"] + +[resource] diff --git a/examples/camera/export_presets.cfg b/examples/camera/export_presets.cfg new file mode 100644 index 00000000000..16f3f5e9554 --- /dev/null +++ b/examples/camera/export_presets.cfg @@ -0,0 +1,673 @@ +[preset.0] + +name="Android" +platform="Android" +runnable=true +advanced_options=false +dedicated_server=false +custom_features="" +export_filter="all_resources" +include_filter="" +exclude_filter="" +export_path="" +encryption_include_filters="" +encryption_exclude_filters="" +encrypt_pck=false +encrypt_directory=false +script_export_mode=2 + +[preset.0.options] + +custom_template/debug="/Users/penninghlhd/Code/noomi4godot/bin/android_debug.apk" +custom_template/release="/Users/penninghlhd/Code/noomi4godot/bin/android_release.apk" +gradle_build/use_gradle_build=true +gradle_build/gradle_build_directory="" +gradle_build/android_source_template="" +gradle_build/compress_native_libraries=false +gradle_build/export_format=1 +gradle_build/min_sdk="" +gradle_build/target_sdk="" +architectures/armeabi-v7a=false +architectures/arm64-v8a=true +architectures/x86=false +architectures/x86_64=false +version/code=1 +version/name="" +package/unique_name="com.illuminoo.noomi.godot" +package/name="NOOMI Test" +package/signed=true +package/app_category=2 +package/retain_data_on_uninstall=false +package/exclude_from_recents=false +package/show_in_android_tv=false +package/show_in_app_library=true +package/show_as_launcher_app=false +launcher_icons/main_192x192="" +launcher_icons/adaptive_foreground_432x432="" +launcher_icons/adaptive_background_432x432="" +graphics/opengl_debug=false +xr_features/xr_mode=0 +screen/immersive_mode=true +screen/support_small=true +screen/support_normal=true +screen/support_large=true +screen/support_xlarge=true +user_data_backup/allow=false +command_line/extra_args="" +apk_expansion/enable=false +apk_expansion/SALT="" +apk_expansion/public_key="" +permissions/custom_permissions=PackedStringArray() +permissions/access_checkin_properties=false 
+permissions/access_coarse_location=false +permissions/access_fine_location=false +permissions/access_location_extra_commands=false +permissions/access_mock_location=false +permissions/access_network_state=false +permissions/access_surface_flinger=false +permissions/access_wifi_state=false +permissions/account_manager=false +permissions/add_voicemail=false +permissions/authenticate_accounts=false +permissions/battery_stats=false +permissions/bind_accessibility_service=false +permissions/bind_appwidget=false +permissions/bind_device_admin=false +permissions/bind_input_method=false +permissions/bind_nfc_service=false +permissions/bind_notification_listener_service=false +permissions/bind_print_service=false +permissions/bind_remoteviews=false +permissions/bind_text_service=false +permissions/bind_vpn_service=false +permissions/bind_wallpaper=false +permissions/bluetooth=false +permissions/bluetooth_admin=false +permissions/bluetooth_privileged=false +permissions/brick=false +permissions/broadcast_package_removed=false +permissions/broadcast_sms=false +permissions/broadcast_sticky=false +permissions/broadcast_wap_push=false +permissions/call_phone=false +permissions/call_privileged=false +permissions/camera=true +permissions/capture_audio_output=true +permissions/capture_secure_video_output=false +permissions/capture_video_output=true +permissions/change_component_enabled_state=false +permissions/change_configuration=false +permissions/change_network_state=false +permissions/change_wifi_multicast_state=false +permissions/change_wifi_state=false +permissions/clear_app_cache=false +permissions/clear_app_user_data=false +permissions/control_location_updates=false +permissions/delete_cache_files=false +permissions/delete_packages=false +permissions/device_power=false +permissions/diagnostic=false +permissions/disable_keyguard=false +permissions/dump=false +permissions/expand_status_bar=false +permissions/factory_test=false +permissions/flashlight=false +permissions/force_back=false +permissions/get_accounts=false +permissions/get_package_size=false +permissions/get_tasks=false +permissions/get_top_activity_info=false +permissions/global_search=false +permissions/hardware_test=false +permissions/inject_events=false +permissions/install_location_provider=false +permissions/install_packages=false +permissions/install_shortcut=false +permissions/internal_system_window=false +permissions/internet=false +permissions/kill_background_processes=false +permissions/location_hardware=false +permissions/manage_accounts=false +permissions/manage_app_tokens=false +permissions/manage_documents=false +permissions/manage_external_storage=false +permissions/master_clear=false +permissions/media_content_control=false +permissions/modify_audio_settings=false +permissions/modify_phone_state=false +permissions/mount_format_filesystems=false +permissions/mount_unmount_filesystems=false +permissions/nfc=false +permissions/persistent_activity=false +permissions/post_notifications=false +permissions/process_outgoing_calls=false +permissions/read_calendar=false +permissions/read_call_log=false +permissions/read_contacts=false +permissions/read_external_storage=false +permissions/read_frame_buffer=false +permissions/read_history_bookmarks=false +permissions/read_input_state=false +permissions/read_logs=false +permissions/read_phone_state=false +permissions/read_profile=false +permissions/read_sms=false +permissions/read_social_stream=false +permissions/read_sync_settings=false +permissions/read_sync_stats=false 
+permissions/read_user_dictionary=false +permissions/reboot=false +permissions/receive_boot_completed=false +permissions/receive_mms=false +permissions/receive_sms=false +permissions/receive_wap_push=false +permissions/record_audio=true +permissions/reorder_tasks=false +permissions/restart_packages=false +permissions/send_respond_via_message=false +permissions/send_sms=false +permissions/set_activity_watcher=false +permissions/set_alarm=false +permissions/set_always_finish=false +permissions/set_animation_scale=false +permissions/set_debug_app=false +permissions/set_orientation=false +permissions/set_pointer_speed=false +permissions/set_preferred_applications=false +permissions/set_process_limit=false +permissions/set_time=false +permissions/set_time_zone=false +permissions/set_wallpaper=false +permissions/set_wallpaper_hints=false +permissions/signal_persistent_processes=false +permissions/status_bar=false +permissions/subscribed_feeds_read=false +permissions/subscribed_feeds_write=false +permissions/system_alert_window=false +permissions/transmit_ir=false +permissions/uninstall_shortcut=false +permissions/update_device_stats=false +permissions/use_credentials=false +permissions/use_sip=false +permissions/vibrate=false +permissions/wake_lock=false +permissions/write_apn_settings=false +permissions/write_calendar=false +permissions/write_call_log=false +permissions/write_contacts=false +permissions/write_external_storage=false +permissions/write_gservices=false +permissions/write_history_bookmarks=false +permissions/write_profile=false +permissions/write_secure_settings=false +permissions/write_settings=false +permissions/write_sms=false +permissions/write_social_stream=false +permissions/write_sync_settings=false +permissions/write_user_dictionary=false + +[preset.1] + +name="macOS" +platform="macOS" +runnable=true +advanced_options=false +dedicated_server=false +custom_features="" +export_filter="all_resources" +include_filter="" +exclude_filter="" +export_path="" +encryption_include_filters="" +encryption_exclude_filters="" +encrypt_pck=false +encrypt_directory=false +script_export_mode=2 + +[preset.1.options] + +export/distribution_type=1 +binary_format/architecture="universal" +custom_template/debug="" +custom_template/release="" +debug/export_console_wrapper=1 +application/icon="" +application/icon_interpolation=4 +application/bundle_identifier="" +application/signature="" +application/app_category="Games" +application/short_version="" +application/version="" +application/copyright="" +application/copyright_localized={} +application/min_macos_version="10.12" +application/export_angle=0 +display/high_res=true +application/additional_plist_content="" +xcode/platform_build="14C18" +xcode/sdk_version="13.1" +xcode/sdk_build="22C55" +xcode/sdk_name="macosx13.1" +xcode/xcode_version="1420" +xcode/xcode_build="14C18" +codesign/codesign=3 +codesign/installer_identity="" +codesign/apple_team_id="" +codesign/identity="" +codesign/entitlements/custom_file="" +codesign/entitlements/allow_jit_code_execution=false +codesign/entitlements/allow_unsigned_executable_memory=false +codesign/entitlements/allow_dyld_environment_variables=false +codesign/entitlements/disable_library_validation=false +codesign/entitlements/audio_input=false +codesign/entitlements/camera=false +codesign/entitlements/location=false +codesign/entitlements/address_book=false +codesign/entitlements/calendars=false +codesign/entitlements/photos_library=false +codesign/entitlements/apple_events=false 
+codesign/entitlements/debugging=false +codesign/entitlements/app_sandbox/enabled=false +codesign/entitlements/app_sandbox/network_server=false +codesign/entitlements/app_sandbox/network_client=false +codesign/entitlements/app_sandbox/device_usb=false +codesign/entitlements/app_sandbox/device_bluetooth=false +codesign/entitlements/app_sandbox/files_downloads=0 +codesign/entitlements/app_sandbox/files_pictures=0 +codesign/entitlements/app_sandbox/files_music=0 +codesign/entitlements/app_sandbox/files_movies=0 +codesign/entitlements/app_sandbox/files_user_selected=0 +codesign/entitlements/app_sandbox/helper_executables=[] +codesign/custom_options=PackedStringArray() +notarization/notarization=0 +privacy/microphone_usage_description="" +privacy/microphone_usage_description_localized={} +privacy/camera_usage_description="" +privacy/camera_usage_description_localized={} +privacy/location_usage_description="" +privacy/location_usage_description_localized={} +privacy/address_book_usage_description="" +privacy/address_book_usage_description_localized={} +privacy/calendar_usage_description="" +privacy/calendar_usage_description_localized={} +privacy/photos_library_usage_description="" +privacy/photos_library_usage_description_localized={} +privacy/desktop_folder_usage_description="" +privacy/desktop_folder_usage_description_localized={} +privacy/documents_folder_usage_description="" +privacy/documents_folder_usage_description_localized={} +privacy/downloads_folder_usage_description="" +privacy/downloads_folder_usage_description_localized={} +privacy/network_volumes_usage_description="" +privacy/network_volumes_usage_description_localized={} +privacy/removable_volumes_usage_description="" +privacy/removable_volumes_usage_description_localized={} +privacy/tracking_enabled=false +privacy/tracking_domains=PackedStringArray() +privacy/collected_data/name/collected=false +privacy/collected_data/name/linked_to_user=false +privacy/collected_data/name/used_for_tracking=false +privacy/collected_data/name/collection_purposes=0 +privacy/collected_data/email_address/collected=false +privacy/collected_data/email_address/linked_to_user=false +privacy/collected_data/email_address/used_for_tracking=false +privacy/collected_data/email_address/collection_purposes=0 +privacy/collected_data/phone_number/collected=false +privacy/collected_data/phone_number/linked_to_user=false +privacy/collected_data/phone_number/used_for_tracking=false +privacy/collected_data/phone_number/collection_purposes=0 +privacy/collected_data/physical_address/collected=false +privacy/collected_data/physical_address/linked_to_user=false +privacy/collected_data/physical_address/used_for_tracking=false +privacy/collected_data/physical_address/collection_purposes=0 +privacy/collected_data/other_contact_info/collected=false +privacy/collected_data/other_contact_info/linked_to_user=false +privacy/collected_data/other_contact_info/used_for_tracking=false +privacy/collected_data/other_contact_info/collection_purposes=0 +privacy/collected_data/health/collected=false +privacy/collected_data/health/linked_to_user=false +privacy/collected_data/health/used_for_tracking=false +privacy/collected_data/health/collection_purposes=0 +privacy/collected_data/fitness/collected=false +privacy/collected_data/fitness/linked_to_user=false +privacy/collected_data/fitness/used_for_tracking=false +privacy/collected_data/fitness/collection_purposes=0 +privacy/collected_data/payment_info/collected=false +privacy/collected_data/payment_info/linked_to_user=false 
+privacy/collected_data/payment_info/used_for_tracking=false +privacy/collected_data/payment_info/collection_purposes=0 +privacy/collected_data/credit_info/collected=false +privacy/collected_data/credit_info/linked_to_user=false +privacy/collected_data/credit_info/used_for_tracking=false +privacy/collected_data/credit_info/collection_purposes=0 +privacy/collected_data/other_financial_info/collected=false +privacy/collected_data/other_financial_info/linked_to_user=false +privacy/collected_data/other_financial_info/used_for_tracking=false +privacy/collected_data/other_financial_info/collection_purposes=0 +privacy/collected_data/precise_location/collected=false +privacy/collected_data/precise_location/linked_to_user=false +privacy/collected_data/precise_location/used_for_tracking=false +privacy/collected_data/precise_location/collection_purposes=0 +privacy/collected_data/coarse_location/collected=false +privacy/collected_data/coarse_location/linked_to_user=false +privacy/collected_data/coarse_location/used_for_tracking=false +privacy/collected_data/coarse_location/collection_purposes=0 +privacy/collected_data/sensitive_info/collected=false +privacy/collected_data/sensitive_info/linked_to_user=false +privacy/collected_data/sensitive_info/used_for_tracking=false +privacy/collected_data/sensitive_info/collection_purposes=0 +privacy/collected_data/contacts/collected=false +privacy/collected_data/contacts/linked_to_user=false +privacy/collected_data/contacts/used_for_tracking=false +privacy/collected_data/contacts/collection_purposes=0 +privacy/collected_data/emails_or_text_messages/collected=false +privacy/collected_data/emails_or_text_messages/linked_to_user=false +privacy/collected_data/emails_or_text_messages/used_for_tracking=false +privacy/collected_data/emails_or_text_messages/collection_purposes=0 +privacy/collected_data/photos_or_videos/collected=false +privacy/collected_data/photos_or_videos/linked_to_user=false +privacy/collected_data/photos_or_videos/used_for_tracking=false +privacy/collected_data/photos_or_videos/collection_purposes=0 +privacy/collected_data/audio_data/collected=false +privacy/collected_data/audio_data/linked_to_user=false +privacy/collected_data/audio_data/used_for_tracking=false +privacy/collected_data/audio_data/collection_purposes=0 +privacy/collected_data/gameplay_content/collected=false +privacy/collected_data/gameplay_content/linked_to_user=false +privacy/collected_data/gameplay_content/used_for_tracking=false +privacy/collected_data/gameplay_content/collection_purposes=0 +privacy/collected_data/customer_support/collected=false +privacy/collected_data/customer_support/linked_to_user=false +privacy/collected_data/customer_support/used_for_tracking=false +privacy/collected_data/customer_support/collection_purposes=0 +privacy/collected_data/other_user_content/collected=false +privacy/collected_data/other_user_content/linked_to_user=false +privacy/collected_data/other_user_content/used_for_tracking=false +privacy/collected_data/other_user_content/collection_purposes=0 +privacy/collected_data/browsing_history/collected=false +privacy/collected_data/browsing_history/linked_to_user=false +privacy/collected_data/browsing_history/used_for_tracking=false +privacy/collected_data/browsing_history/collection_purposes=0 +privacy/collected_data/search_hhistory/collected=false +privacy/collected_data/search_hhistory/linked_to_user=false +privacy/collected_data/search_hhistory/used_for_tracking=false +privacy/collected_data/search_hhistory/collection_purposes=0 
+privacy/collected_data/user_id/collected=false +privacy/collected_data/user_id/linked_to_user=false +privacy/collected_data/user_id/used_for_tracking=false +privacy/collected_data/user_id/collection_purposes=0 +privacy/collected_data/device_id/collected=false +privacy/collected_data/device_id/linked_to_user=false +privacy/collected_data/device_id/used_for_tracking=false +privacy/collected_data/device_id/collection_purposes=0 +privacy/collected_data/purchase_history/collected=false +privacy/collected_data/purchase_history/linked_to_user=false +privacy/collected_data/purchase_history/used_for_tracking=false +privacy/collected_data/purchase_history/collection_purposes=0 +privacy/collected_data/product_interaction/collected=false +privacy/collected_data/product_interaction/linked_to_user=false +privacy/collected_data/product_interaction/used_for_tracking=false +privacy/collected_data/product_interaction/collection_purposes=0 +privacy/collected_data/advertising_data/collected=false +privacy/collected_data/advertising_data/linked_to_user=false +privacy/collected_data/advertising_data/used_for_tracking=false +privacy/collected_data/advertising_data/collection_purposes=0 +privacy/collected_data/other_usage_data/collected=false +privacy/collected_data/other_usage_data/linked_to_user=false +privacy/collected_data/other_usage_data/used_for_tracking=false +privacy/collected_data/other_usage_data/collection_purposes=0 +privacy/collected_data/crash_data/collected=false +privacy/collected_data/crash_data/linked_to_user=false +privacy/collected_data/crash_data/used_for_tracking=false +privacy/collected_data/crash_data/collection_purposes=0 +privacy/collected_data/performance_data/collected=false +privacy/collected_data/performance_data/linked_to_user=false +privacy/collected_data/performance_data/used_for_tracking=false +privacy/collected_data/performance_data/collection_purposes=0 +privacy/collected_data/other_diagnostic_data/collected=false +privacy/collected_data/other_diagnostic_data/linked_to_user=false +privacy/collected_data/other_diagnostic_data/used_for_tracking=false +privacy/collected_data/other_diagnostic_data/collection_purposes=0 +privacy/collected_data/environment_scanning/collected=false +privacy/collected_data/environment_scanning/linked_to_user=false +privacy/collected_data/environment_scanning/used_for_tracking=false +privacy/collected_data/environment_scanning/collection_purposes=0 +privacy/collected_data/hands/collected=false +privacy/collected_data/hands/linked_to_user=false +privacy/collected_data/hands/used_for_tracking=false +privacy/collected_data/hands/collection_purposes=0 +privacy/collected_data/head/collected=false +privacy/collected_data/head/linked_to_user=false +privacy/collected_data/head/used_for_tracking=false +privacy/collected_data/head/collection_purposes=0 +privacy/collected_data/other_data_types/collected=false +privacy/collected_data/other_data_types/linked_to_user=false +privacy/collected_data/other_data_types/used_for_tracking=false +privacy/collected_data/other_data_types/collection_purposes=0 +ssh_remote_deploy/enabled=false +ssh_remote_deploy/host="user@host_ip" +ssh_remote_deploy/port="22" +ssh_remote_deploy/extra_args_ssh="" +ssh_remote_deploy/extra_args_scp="" +ssh_remote_deploy/run_script="#!/usr/bin/env bash +unzip -o -q \"{temp_dir}/{archive_name}\" -d \"{temp_dir}\" +open \"{temp_dir}/{exe_name}.app\" --args {cmd_args}" +ssh_remote_deploy/cleanup_script="#!/usr/bin/env bash +kill $(pgrep -x -f \"{temp_dir}/{exe_name}.app/Contents/MacOS/{exe_name} 
{cmd_args}\") +rm -rf \"{temp_dir}\"" + +[preset.2] + +name="iOS" +platform="iOS" +runnable=true +advanced_options=false +dedicated_server=false +custom_features="" +export_filter="all_resources" +include_filter="" +exclude_filter="" +export_path="" +encryption_include_filters="" +encryption_exclude_filters="" +encrypt_pck=false +encrypt_directory=false +script_export_mode=2 + +[preset.2.options] + +custom_template/debug="" +custom_template/release="" +architectures/arm64=true +application/app_store_team_id="" +application/code_sign_identity_debug="" +application/export_method_debug=1 +application/code_sign_identity_release="" +application/export_method_release=1 +application/targeted_device_family=2 +application/bundle_identifier="" +application/signature="" +application/short_version="" +application/version="" +application/min_ios_version="12.0" +application/additional_plist_content="" +application/icon_interpolation=4 +application/export_project_only=false +application/delete_old_export_files_unconditionally=false +application/generate_simulator_library_if_missing=true +capabilities/access_wifi=false +capabilities/push_notifications=false +capabilities/performance_gaming_tier=false +capabilities/performance_a12=false +user_data/accessible_from_files_app=false +user_data/accessible_from_itunes_sharing=false +privacy/camera_usage_description="" +privacy/camera_usage_description_localized={} +privacy/microphone_usage_description="" +privacy/microphone_usage_description_localized={} +privacy/photolibrary_usage_description="" +privacy/photolibrary_usage_description_localized={} +privacy/file_timestamp_access_reasons=3 +privacy/system_boot_time_access_reasons=1 +privacy/disk_space_access_reasons=3 +privacy/active_keyboard_access_reasons=0 +privacy/user_defaults_access_reasons=0 +privacy/tracking_enabled=false +privacy/tracking_domains=PackedStringArray() +privacy/collected_data/name/collected=false +privacy/collected_data/name/linked_to_user=false +privacy/collected_data/name/used_for_tracking=false +privacy/collected_data/name/collection_purposes=0 +privacy/collected_data/email_address/collected=false +privacy/collected_data/email_address/linked_to_user=false +privacy/collected_data/email_address/used_for_tracking=false +privacy/collected_data/email_address/collection_purposes=0 +privacy/collected_data/phone_number/collected=false +privacy/collected_data/phone_number/linked_to_user=false +privacy/collected_data/phone_number/used_for_tracking=false +privacy/collected_data/phone_number/collection_purposes=0 +privacy/collected_data/physical_address/collected=false +privacy/collected_data/physical_address/linked_to_user=false +privacy/collected_data/physical_address/used_for_tracking=false +privacy/collected_data/physical_address/collection_purposes=0 +privacy/collected_data/other_contact_info/collected=false +privacy/collected_data/other_contact_info/linked_to_user=false +privacy/collected_data/other_contact_info/used_for_tracking=false +privacy/collected_data/other_contact_info/collection_purposes=0 +privacy/collected_data/health/collected=false +privacy/collected_data/health/linked_to_user=false +privacy/collected_data/health/used_for_tracking=false +privacy/collected_data/health/collection_purposes=0 +privacy/collected_data/fitness/collected=false +privacy/collected_data/fitness/linked_to_user=false +privacy/collected_data/fitness/used_for_tracking=false +privacy/collected_data/fitness/collection_purposes=0 +privacy/collected_data/payment_info/collected=false 
+privacy/collected_data/payment_info/linked_to_user=false +privacy/collected_data/payment_info/used_for_tracking=false +privacy/collected_data/payment_info/collection_purposes=0 +privacy/collected_data/credit_info/collected=false +privacy/collected_data/credit_info/linked_to_user=false +privacy/collected_data/credit_info/used_for_tracking=false +privacy/collected_data/credit_info/collection_purposes=0 +privacy/collected_data/other_financial_info/collected=false +privacy/collected_data/other_financial_info/linked_to_user=false +privacy/collected_data/other_financial_info/used_for_tracking=false +privacy/collected_data/other_financial_info/collection_purposes=0 +privacy/collected_data/precise_location/collected=false +privacy/collected_data/precise_location/linked_to_user=false +privacy/collected_data/precise_location/used_for_tracking=false +privacy/collected_data/precise_location/collection_purposes=0 +privacy/collected_data/coarse_location/collected=false +privacy/collected_data/coarse_location/linked_to_user=false +privacy/collected_data/coarse_location/used_for_tracking=false +privacy/collected_data/coarse_location/collection_purposes=0 +privacy/collected_data/sensitive_info/collected=false +privacy/collected_data/sensitive_info/linked_to_user=false +privacy/collected_data/sensitive_info/used_for_tracking=false +privacy/collected_data/sensitive_info/collection_purposes=0 +privacy/collected_data/contacts/collected=false +privacy/collected_data/contacts/linked_to_user=false +privacy/collected_data/contacts/used_for_tracking=false +privacy/collected_data/contacts/collection_purposes=0 +privacy/collected_data/emails_or_text_messages/collected=false +privacy/collected_data/emails_or_text_messages/linked_to_user=false +privacy/collected_data/emails_or_text_messages/used_for_tracking=false +privacy/collected_data/emails_or_text_messages/collection_purposes=0 +privacy/collected_data/photos_or_videos/collected=false +privacy/collected_data/photos_or_videos/linked_to_user=false +privacy/collected_data/photos_or_videos/used_for_tracking=false +privacy/collected_data/photos_or_videos/collection_purposes=0 +privacy/collected_data/audio_data/collected=false +privacy/collected_data/audio_data/linked_to_user=false +privacy/collected_data/audio_data/used_for_tracking=false +privacy/collected_data/audio_data/collection_purposes=0 +privacy/collected_data/gameplay_content/collected=false +privacy/collected_data/gameplay_content/linked_to_user=false +privacy/collected_data/gameplay_content/used_for_tracking=false +privacy/collected_data/gameplay_content/collection_purposes=0 +privacy/collected_data/customer_support/collected=false +privacy/collected_data/customer_support/linked_to_user=false +privacy/collected_data/customer_support/used_for_tracking=false +privacy/collected_data/customer_support/collection_purposes=0 +privacy/collected_data/other_user_content/collected=false +privacy/collected_data/other_user_content/linked_to_user=false +privacy/collected_data/other_user_content/used_for_tracking=false +privacy/collected_data/other_user_content/collection_purposes=0 +privacy/collected_data/browsing_history/collected=false +privacy/collected_data/browsing_history/linked_to_user=false +privacy/collected_data/browsing_history/used_for_tracking=false +privacy/collected_data/browsing_history/collection_purposes=0 +privacy/collected_data/search_hhistory/collected=false +privacy/collected_data/search_hhistory/linked_to_user=false +privacy/collected_data/search_hhistory/used_for_tracking=false 
+privacy/collected_data/search_hhistory/collection_purposes=0 +privacy/collected_data/user_id/collected=false +privacy/collected_data/user_id/linked_to_user=false +privacy/collected_data/user_id/used_for_tracking=false +privacy/collected_data/user_id/collection_purposes=0 +privacy/collected_data/device_id/collected=false +privacy/collected_data/device_id/linked_to_user=false +privacy/collected_data/device_id/used_for_tracking=false +privacy/collected_data/device_id/collection_purposes=0 +privacy/collected_data/purchase_history/collected=false +privacy/collected_data/purchase_history/linked_to_user=false +privacy/collected_data/purchase_history/used_for_tracking=false +privacy/collected_data/purchase_history/collection_purposes=0 +privacy/collected_data/product_interaction/collected=false +privacy/collected_data/product_interaction/linked_to_user=false +privacy/collected_data/product_interaction/used_for_tracking=false +privacy/collected_data/product_interaction/collection_purposes=0 +privacy/collected_data/advertising_data/collected=false +privacy/collected_data/advertising_data/linked_to_user=false +privacy/collected_data/advertising_data/used_for_tracking=false +privacy/collected_data/advertising_data/collection_purposes=0 +privacy/collected_data/other_usage_data/collected=false +privacy/collected_data/other_usage_data/linked_to_user=false +privacy/collected_data/other_usage_data/used_for_tracking=false +privacy/collected_data/other_usage_data/collection_purposes=0 +privacy/collected_data/crash_data/collected=false +privacy/collected_data/crash_data/linked_to_user=false +privacy/collected_data/crash_data/used_for_tracking=false +privacy/collected_data/crash_data/collection_purposes=0 +privacy/collected_data/performance_data/collected=false +privacy/collected_data/performance_data/linked_to_user=false +privacy/collected_data/performance_data/used_for_tracking=false +privacy/collected_data/performance_data/collection_purposes=0 +privacy/collected_data/other_diagnostic_data/collected=false +privacy/collected_data/other_diagnostic_data/linked_to_user=false +privacy/collected_data/other_diagnostic_data/used_for_tracking=false +privacy/collected_data/other_diagnostic_data/collection_purposes=0 +privacy/collected_data/environment_scanning/collected=false +privacy/collected_data/environment_scanning/linked_to_user=false +privacy/collected_data/environment_scanning/used_for_tracking=false +privacy/collected_data/environment_scanning/collection_purposes=0 +privacy/collected_data/hands/collected=false +privacy/collected_data/hands/linked_to_user=false +privacy/collected_data/hands/used_for_tracking=false +privacy/collected_data/hands/collection_purposes=0 +privacy/collected_data/head/collected=false +privacy/collected_data/head/linked_to_user=false +privacy/collected_data/head/used_for_tracking=false +privacy/collected_data/head/collection_purposes=0 +privacy/collected_data/other_data_types/collected=false +privacy/collected_data/other_data_types/linked_to_user=false +privacy/collected_data/other_data_types/used_for_tracking=false +privacy/collected_data/other_data_types/collection_purposes=0 +icons/iphone_120x120="" +icons/iphone_180x180="" +icons/ipad_76x76="" +icons/ipad_152x152="" +icons/ipad_167x167="" +icons/app_store_1024x1024="" +icons/spotlight_40x40="" +icons/spotlight_80x80="" +icons/settings_58x58="" +icons/settings_87x87="" +icons/notification_40x40="" +icons/notification_60x60="" +storyboard/image_scale_mode=0 +storyboard/custom_image@2x="" +storyboard/custom_image@3x="" 
+storyboard/use_custom_bg_color=false +storyboard/custom_bg_color=Color(0, 0, 0, 1) diff --git a/examples/camera/icon.svg b/examples/camera/icon.svg new file mode 100644 index 00000000000..3fe4f4ae8c2 --- /dev/null +++ b/examples/camera/icon.svg @@ -0,0 +1 @@ + diff --git a/examples/camera/icon.svg.import b/examples/camera/icon.svg.import new file mode 100644 index 00000000000..23fef206491 --- /dev/null +++ b/examples/camera/icon.svg.import @@ -0,0 +1,37 @@ +[remap] + +importer="texture" +type="CompressedTexture2D" +uid="uid://8d4e5b3aytse" +path="res://.godot/imported/icon.svg-218a8f2b3041327d8a5756f3a245f83b.ctex" +metadata={ +"vram_texture": false +} + +[deps] + +source_file="res://icon.svg" +dest_files=["res://.godot/imported/icon.svg-218a8f2b3041327d8a5756f3a245f83b.ctex"] + +[params] + +compress/mode=0 +compress/high_quality=false +compress/lossy_quality=0.7 +compress/hdr_compression=1 +compress/normal_map=0 +compress/channel_pack=0 +mipmaps/generate=false +mipmaps/limit=-1 +roughness/mode=0 +roughness/src_normal="" +process/fix_alpha_border=true +process/premult_alpha=false +process/normal_map_invert_y=false +process/hdr_as_srgb=false +process/hdr_clamp_exposure=false +process/size_limit=0 +detect_3d/compress_to=1 +svg/scale=1.0 +editor/scale_with_editor_scale=false +editor/convert_colors_with_editor_theme=false diff --git a/examples/camera/project.godot b/examples/camera/project.godot new file mode 100644 index 00000000000..eb82b4b3254 --- /dev/null +++ b/examples/camera/project.godot @@ -0,0 +1,36 @@ +; Engine configuration file. +; It's best edited using the editor UI and not directly, +; since the parameters that go here are not all obvious. +; +; Format: +; [section] ; section goes between [] +; param=value ; assign values to parameters + +config_version=5 + +[application] + +config/name="NOOMI camera test" +run/main_scene="res://control.tscn" +config/features=PackedStringArray("4.4") +config/icon="res://icon.svg" + +[audio] + +driver/enable_input=true +driver/mix_rate=48000 + +[display] + +window/size/viewport_width=720 +window/size/viewport_height=1280 +window/handheld/orientation=1 + +[editor] + +movie_writer/movie_file="test.ogv" +movie_writer/fps=15 + +[rendering] + +textures/vram_compression/import_etc2_astc=true diff --git a/modules/camera/SCsub b/modules/camera/SCsub index aed5efd0d20..5f0aac0063a 100644 --- a/modules/camera/SCsub +++ b/modules/camera/SCsub @@ -6,16 +6,20 @@ Import("env_modules") env_camera = env_modules.Clone() -if env["platform"] in ["windows", "macos", "linuxbsd"]: - env_camera.add_source_files(env.modules_sources, "register_types.cpp") +env_camera.add_source_files(env.modules_sources, "register_types.cpp") if env["platform"] == "windows": env_camera.add_source_files(env.modules_sources, "camera_win.cpp") + env.Append(LINKFLAGS=["mf.lib", "mfplat.lib", "mfreadwrite.lib"]) -elif env["platform"] == "macos": +if env["platform"] == "macos" or env["platform"] == "ios": env_camera.add_source_files(env.modules_sources, "camera_macos.mm") +if env["platform"] == "android": + env_camera.add_source_files(env.modules_sources, "camera_android.cpp") + env.Append(LIBS=["camera2ndk", "mediandk"]) + elif env["platform"] == "linuxbsd": env_camera.add_source_files(env.modules_sources, "camera_linux.cpp") env_camera.add_source_files(env.modules_sources, "camera_feed_linux.cpp") - env_camera.add_source_files(env.modules_sources, "buffer_decoder.cpp") + env_camera.add_source_files(env.modules_sources, "buffer_decoder.cpp") \ No newline at end of file diff --git 
a/modules/camera/camera_android.cpp b/modules/camera/camera_android.cpp new file mode 100644 index 00000000000..e86a3429e28 --- /dev/null +++ b/modules/camera/camera_android.cpp @@ -0,0 +1,370 @@ +/**************************************************************************/ +/* camera_android.cpp */ +/**************************************************************************/ +/* This file is part of: */ +/* GODOT ENGINE */ +/* https://godotengine.org */ +/**************************************************************************/ +/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */ +/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */ +/* */ +/* Permission is hereby granted, free of charge, to any person obtaining */ +/* a copy of this software and associated documentation files (the */ +/* "Software"), to deal in the Software without restriction, including */ +/* without limitation the rights to use, copy, modify, merge, publish, */ +/* distribute, sublicense, and/or sell copies of the Software, and to */ +/* permit persons to whom the Software is furnished to do so, subject to */ +/* the following conditions: */ +/* */ +/* The above copyright notice and this permission notice shall be */ +/* included in all copies or substantial portions of the Software. */ +/* */ +/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */ +/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */ +/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */ +/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */ +/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */ +/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */ +/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ +/**************************************************************************/ + +#include "camera_android.h" + +////////////////////////////////////////////////////////////////////////// +// Helper functions +// +// The following code enables you to view the contents of a media type while +// debugging. 
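The Android feed below allocates two channel images for each YUV 420 frame: the Y plane at full resolution as an R8 image, and the interleaved CbCr plane at half resolution as an RG8 image. A minimal standalone sketch of that sizing, assuming tightly packed planes (real AImage planes can carry extra row stride, which the code queries via AImage_getPlaneRowStride but does not compensate for); PlaneSizes and yuv420_plane_sizes are illustrative names only, not part of this patch.

#include <cstddef>
#include <cstdio>

struct PlaneSizes {
	size_t y_bytes; // full-resolution luma, one byte per pixel (FORMAT_R8)
	size_t cbcr_bytes; // half-resolution interleaved chroma (FORMAT_RG8)
};

// Byte sizes of the two planes of a tightly packed 4:2:0 frame, matching the
// Image sizes allocated in CameraFeedAndroid::activate_feed() below.
static PlaneSizes yuv420_plane_sizes(size_t width, size_t height) {
	return { width * height, (width / 2) * (height / 2) * 2 };
}

int main() {
	PlaneSizes s = yuv420_plane_sizes(1280, 720);
	printf("Y: %zu bytes, CbCr: %zu bytes\n", s.y_bytes, s.cbcr_bytes); // 921600 and 460800
	return 0;
}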
+ +#ifndef IF_EQUAL_RETURN +#define IF_EQUAL_RETURN(param, val) \ + if (val == param) \ + return #val +#endif + +String GetFormatName(const int32_t &format) { + IF_EQUAL_RETURN(format, AIMAGE_FORMAT_YUV_420_888); + IF_EQUAL_RETURN(format, AIMAGE_FORMAT_RGB_888); + IF_EQUAL_RETURN(format, AIMAGE_FORMAT_RGBA_8888); + + return "Unsupported"; +} + +////////////////////////////////////////////////////////////////////////// +// CameraFeedAndroid - Subclass for our camera feed on Android + +CameraFeedAndroid::CameraFeedAndroid(ACameraManager *manager, const char *id, int32_t position, int32_t width, + int32_t height, int32_t format, int32_t orientation) { + this->manager = manager; + this->camera_id = id; + this->width = width; + this->height = height; + + // Name + name = vformat("%s | %d x %d", id, width, height); + + // Data type + this->format = format; + if (format == AIMAGE_FORMAT_RGB_888) { + this->datatype = FEED_RGB; + name += " | RGB"; + } + if (format == AIMAGE_FORMAT_RGBA_8888) { + this->datatype = FEED_RGBA; + name += " | RGBA"; + } + if (format == AIMAGE_FORMAT_YUV_420_888) { + this->datatype = FEED_YCBCR; + name += " | YCBCR"; + } + + // Position + if (position == ACAMERA_LENS_FACING_BACK) { + this->position = CameraFeed::FEED_BACK; + name += " | BACK"; + } + if (position == ACAMERA_LENS_FACING_FRONT) { + this->position = CameraFeed::FEED_FRONT; + name += " | FRONT"; + } + + // Orientation + int32_t imageRotation = 0; + if (position == ACAMERA_LENS_FACING_FRONT) { + imageRotation = orientation % 360; + imageRotation = (360 - imageRotation) % 360; + } else { + imageRotation = (orientation + 360) % 360; + } + transform.rotate(real_t(imageRotation) * 0.015707963267949F); +} + +CameraFeedAndroid::~CameraFeedAndroid() { + if (is_active()) { + deactivate_feed(); + }; +} + +bool CameraFeedAndroid::activate_feed() { + if (is_active()) { + deactivate_feed(); + }; + + // Request permission + if (!OS::get_singleton()->request_permission("CAMERA")) { + return false; + } + + // Open device + static ACameraDevice_stateCallbacks deviceCallbacks = { + .context = this, + .onDisconnected = onDisconnected, + .onError = onError, + }; + camera_status_t c_status = ACameraManager_openCamera(manager, camera_id.utf8(), &deviceCallbacks, &device); + if (c_status != ACAMERA_OK) { + onError(this, device, c_status); + return false; + } + + // Create image reader + media_status_t m_status = AImageReader_new(width, height, format, 1, &reader); + if (m_status != AMEDIA_OK) { + onError(this, device, m_status); + return false; + } + + // Create image buffers + set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_DIFFUSE, + Image::create_empty(width, height, false, Image::FORMAT_R8)); + set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_NORMAL, + Image::create_empty(width / 2, height / 2, false, Image::FORMAT_RG8)); + // set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_SPECULAR, + // Image::create_empty(width, height, false, Image::FORMAT_R8)); + + // Get image listener + static AImageReader_ImageListener listener{ + .context = this, + .onImageAvailable = onImage, + }; + m_status = AImageReader_setImageListener(reader, &listener); + if (m_status != AMEDIA_OK) { + onError(this, device, m_status); + return false; + } + + // Get image surface + ANativeWindow *surface; + m_status = AImageReader_getWindow(reader, &surface); + if (m_status != AMEDIA_OK) { + onError(this, device, m_status); + return false; + } + + // Prepare session outputs + ACaptureSessionOutput *output = nullptr; + c_status = 
ACaptureSessionOutput_create(surface, &output); + if (c_status != ACAMERA_OK) { + onError(this, device, c_status); + return false; + } + + ACaptureSessionOutputContainer *outputs = nullptr; + c_status = ACaptureSessionOutputContainer_create(&outputs); + if (c_status != ACAMERA_OK) { + onError(this, device, c_status); + return false; + } + + c_status = ACaptureSessionOutputContainer_add(outputs, output); + if (c_status != ACAMERA_OK) { + onError(this, device, c_status); + return false; + } + + // Create capture session + static ACameraCaptureSession_stateCallbacks sessionStateCallbacks{ + .context = this, + .onClosed = onSessionClosed, + .onReady = onSessionReady, + .onActive = onSessionActive + }; + c_status = ACameraDevice_createCaptureSession(device, outputs, &sessionStateCallbacks, &session); + if (c_status != ACAMERA_OK) { + onError(this, device, c_status); + return false; + } + + // Create capture request + c_status = ACameraDevice_createCaptureRequest(device, TEMPLATE_PREVIEW, &request); + if (c_status != ACAMERA_OK) { + onError(this, device, c_status); + return false; + } + + // Set capture target + ACameraOutputTarget *imageTarget = nullptr; + c_status = ACameraOutputTarget_create(surface, &imageTarget); + if (c_status != ACAMERA_OK) { + onError(this, device, c_status); + return false; + } + + c_status = ACaptureRequest_addTarget(request, imageTarget); + if (c_status != ACAMERA_OK) { + onError(this, device, c_status); + return false; + } + + // Start capture + c_status = ACameraCaptureSession_setRepeatingRequest(session, nullptr, 1, &request, nullptr); + if (c_status != ACAMERA_OK) { + onError(this, device, c_status); + return false; + } + + return true; +} + +void CameraFeedAndroid::onImage(void *context, AImageReader *p_reader) { + auto *feed = static_cast<CameraFeedAndroid *>(context); + + // Get image + AImage *image = nullptr; + media_status_t status = AImageReader_acquireNextImage(p_reader, &image); + ERR_FAIL_COND(status != AMEDIA_OK); + + // Get image data + uint8_t *data = nullptr; + int len = 0; + int32_t pixel_stride, row_stride; + AImage_getPlaneData(image, 0, &data, &len); + feed->set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_DIFFUSE, data, 0, len); + AImage_getPlanePixelStride(image, 1, &pixel_stride); + AImage_getPlaneRowStride(image, 1, &row_stride); + AImage_getPlaneData(image, 1, &data, &len); + feed->set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_NORMAL, data, 0, len); + // AImage_getPlaneData(image, 2, &data, &len); + // feed->set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_SPECULAR, data, 0, len); + + // Release image + AImage_delete(image); +} + +void CameraFeedAndroid::onSessionReady(void *context, ACameraCaptureSession *session) { + print_verbose("Capture session ready"); +} + +void CameraFeedAndroid::onSessionActive(void *context, ACameraCaptureSession *session) { + print_verbose("Capture session active"); +} + +void CameraFeedAndroid::onSessionClosed(void *context, ACameraCaptureSession *session) { + print_verbose("Capture session closed"); +} + +void CameraFeedAndroid::deactivate_feed() { + if (session != nullptr) { + ACameraCaptureSession_stopRepeating(session); + ACameraCaptureSession_close(session); + session = nullptr; + } + + if (request != nullptr) { + ACaptureRequest_free(request); + request = nullptr; + } + + if (reader != nullptr) { + AImageReader_delete(reader); + reader = nullptr; + } + + if (device != nullptr) { + ACameraDevice_close(device); + device = nullptr; + } +} + +void CameraFeedAndroid::onError(void *context, ACameraDevice *p_device, int error) {
+ print_error(vformat("Camera error: %d", error)); + onDisconnected(context, p_device); +} + +void CameraFeedAndroid::onDisconnected(void *context, ACameraDevice *p_device) { + print_verbose("Camera disconnected"); + auto *feed = static_cast(context); + feed->set_active(false); +} + +////////////////////////////////////////////////////////////////////////// +// CameraAndroid - Subclass for our camera server on Android + +void CameraAndroid::update_feeds() { + ACameraIdList *cameraIds = nullptr; + camera_status_t c_status = ACameraManager_getCameraIdList(cameraManager, &cameraIds); + if (c_status != ACAMERA_OK) { + ERR_PRINT("Unable to retrieve supported cameras"); + return; + } + + for (int c = 0; c < cameraIds->numCameras; ++c) { + const char *id = cameraIds->cameraIds[c]; + ACameraMetadata *metadata; + ACameraManager_getCameraCharacteristics(cameraManager, id, &metadata); + + // Get position + ACameraMetadata_const_entry lensInfo; + ACameraMetadata_getConstEntry(metadata, ACAMERA_LENS_FACING, &lensInfo); + uint8_t position = static_cast(lensInfo.data.u8[0]); + + // Get sensor orientation + ACameraMetadata_const_entry orientation; + ACameraMetadata_getConstEntry(metadata, ACAMERA_SENSOR_ORIENTATION, &orientation); + int32_t cameraOrientation = orientation.data.i32[0]; + + // Get supported formats + ACameraMetadata_const_entry formats; + ACameraMetadata_getConstEntry(metadata, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &formats); + for (uint32_t f = 0; f < formats.count; f += 4) { + // Only support output streams + int32_t input = formats.data.i32[f + 3]; + if (input) { + continue; + } + + // Get format and resolution + int32_t format = formats.data.i32[f + 0]; + if (format == AIMAGE_FORMAT_YUV_420_888 || format == AIMAGE_FORMAT_RGB_888 || + format == AIMAGE_FORMAT_RGBA_8888) { + int32_t width = formats.data.i32[f + 1]; + int32_t height = formats.data.i32[f + 2]; + Ref feed = new CameraFeedAndroid(cameraManager, id, + position, + width, + height, + format, + cameraOrientation); + add_feed(feed); + print_line("Added camera feed: ", feed->get_name()); + } + } + + ACameraMetadata_free(metadata); + } + + ACameraManager_deleteCameraIdList(cameraIds); +} + +CameraAndroid::CameraAndroid() { + cameraManager = ACameraManager_create(); + + // Update feeds + update_feeds(); +} + +CameraAndroid::~CameraAndroid() { + if (cameraManager != nullptr) { + ACameraManager_delete(cameraManager); + } +} diff --git a/modules/camera/camera_android.h b/modules/camera/camera_android.h new file mode 100644 index 00000000000..5f811ab9011 --- /dev/null +++ b/modules/camera/camera_android.h @@ -0,0 +1,82 @@ +/**************************************************************************/ +/* camera_android.h */ +/**************************************************************************/ +/* This file is part of: */ +/* GODOT ENGINE */ +/* https://godotengine.org */ +/**************************************************************************/ +/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */ +/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. 
*/ +/* */ +/* Permission is hereby granted, free of charge, to any person obtaining */ +/* a copy of this software and associated documentation files (the */ +/* "Software"), to deal in the Software without restriction, including */ +/* without limitation the rights to use, copy, modify, merge, publish, */ +/* distribute, sublicense, and/or sell copies of the Software, and to */ +/* permit persons to whom the Software is furnished to do so, subject to */ +/* the following conditions: */ +/* */ +/* The above copyright notice and this permission notice shall be */ +/* included in all copies or substantial portions of the Software. */ +/* */ +/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */ +/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */ +/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */ +/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */ +/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */ +/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */ +/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ +/**************************************************************************/ + +#ifndef CAMERA_ANDROID_H +#define CAMERA_ANDROID_H + +#include "servers/camera/camera_feed.h" +#include "servers/camera_server.h" + +#include +#include +#include +#include +#include + +class CameraFeedAndroid : public CameraFeed { +private: + String camera_id; + int32_t format; + + ACameraManager *manager = nullptr; + ACameraDevice *device = nullptr; + AImageReader *reader = nullptr; + ACameraCaptureSession *session = nullptr; + ACaptureRequest *request = nullptr; + + static void onError(void *context, ACameraDevice *p_device, int error); + static void onDisconnected(void *context, ACameraDevice *p_device); + static void onImage(void *context, AImageReader *p_reader); + static void onSessionReady(void *context, ACameraCaptureSession *session); + static void onSessionActive(void *context, ACameraCaptureSession *session); + static void onSessionClosed(void *context, ACameraCaptureSession *session); + +protected: +public: + CameraFeedAndroid(ACameraManager *manager, const char *id, int32_t position, int32_t width, int32_t height, + int32_t format, int32_t orientation); + virtual ~CameraFeedAndroid(); + + bool activate_feed(); + void deactivate_feed(); +}; + +class CameraAndroid : public CameraServer { +private: + ACameraManager *cameraManager; + + void update_feeds(); + +public: + CameraAndroid(); + ~CameraAndroid(); +}; + +#endif // CAMERA_ANDROID_H diff --git a/modules/camera/camera_macos.mm b/modules/camera/camera_macos.mm index de4f814846d..cbc21d9bdc2 100644 --- a/modules/camera/camera_macos.mm +++ b/modules/camera/camera_macos.mm @@ -42,10 +42,6 @@ @interface MyCaptureSession : AVCaptureSession { Ref feed; - size_t width[2]; - size_t height[2]; - Vector img_data[2]; - AVCaptureDeviceInput *input; AVCaptureVideoDataOutput *output; } @@ -58,10 +54,6 @@ if (self = [super init]) { NSError *error; feed = p_feed; - width[0] = 0; - height[0] = 0; - width[1] = 0; - height[1] = 0; [self beginConfiguration]; @@ -76,7 +68,11 @@ if (!output) { print_line("Couldn't get output device for camera"); } else { - NSDictionary *settings = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) }; + NSDictionary *settings = @{ + (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange), + @"Width" : @1280, + 
@"Height" : @720, + }; output.videoSettings = settings; // discard if the data output queue is blocked (as we process the still image) @@ -135,54 +131,42 @@ CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); // get our buffers - unsigned char *dataY = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0); - unsigned char *dataCbCr = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1); - if (dataY == nullptr) { - print_line("Couldn't access Y pixel buffer data"); - } else if (dataCbCr == nullptr) { - print_line("Couldn't access CbCr pixel buffer data"); - } else { - Ref img[2]; - - { - // do Y - size_t new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0); - size_t new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0); - - if ((width[0] != new_width) || (height[0] != new_height)) { - width[0] = new_width; - height[0] = new_height; - img_data[0].resize(new_width * new_height); - } - - uint8_t *w = img_data[0].ptrw(); - memcpy(w, dataY, new_width * new_height); - - img[0].instantiate(); - img[0]->set_data(new_width, new_height, 0, Image::FORMAT_R8, img_data[0]); + { + // do Y + unsigned char *dataY = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0); + if (dataY == nullptr) { + print_line("Couldn't access Y pixel buffer data"); + return; } - { - // do CbCr - size_t new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1); - size_t new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1); + size_t new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0); + size_t new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0); - if ((width[1] != new_width) || (height[1] != new_height)) { - width[1] = new_width; - height[1] = new_height; - img_data[1].resize(2 * new_width * new_height); - } + Ref image = feed->get_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_DIFFUSE); + if (image.is_null() || image->get_width() != new_width || image->get_height() != new_height) { + feed->set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_DIFFUSE, Image::create_empty(new_width, new_height, false, Image::FORMAT_R8)); + } else { + feed->set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_DIFFUSE, dataY, 0, new_width * new_height); + } + } - uint8_t *w = img_data[1].ptrw(); - memcpy(w, dataCbCr, 2 * new_width * new_height); - - ///TODO OpenGL doesn't support FORMAT_RG8, need to do some form of conversion - img[1].instantiate(); - img[1]->set_data(new_width, new_height, 0, Image::FORMAT_RG8, img_data[1]); + { + // do CbCr + unsigned char *dataCbCr = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1); + if (dataCbCr == nullptr) { + print_line("Couldn't access CbCr pixel buffer data"); + return; } - // set our texture... 
- feed->set_ycbcr_images(img[0], img[1]); + size_t new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1); + size_t new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1); + + Ref image = feed->get_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_NORMAL); + if (image.is_null() || image->get_width() != new_width || image->get_height() != new_height) { + feed->set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_NORMAL, Image::create_empty(new_width, new_height, false, Image::FORMAT_RG8)); + } else { + feed->set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_NORMAL, dataCbCr, 0, 2 * new_width * new_height); + } } // and unlock @@ -343,11 +327,6 @@ void CameraMacOS::update_feeds() { Ref newfeed; newfeed.instantiate(); newfeed->set_device(device); - - // assume display camera so inverse - Transform2D transform = Transform2D(-1.0, 0.0, 0.0, -1.0, 1.0, 1.0); - newfeed->set_transform(transform); - add_feed(newfeed); }; }; diff --git a/modules/camera/camera_win.cpp b/modules/camera/camera_win.cpp index 755642270ef..3c45c545e18 100644 --- a/modules/camera/camera_win.cpp +++ b/modules/camera/camera_win.cpp @@ -29,66 +29,517 @@ /**************************************************************************/ #include "camera_win.h" +#include -///@TODO sorry guys, I got about 80% through implementing this using DirectShow only -// to find out Microsoft deprecated half the API and its replacement is as confusing -// as they could make it. Joey suggested looking into libuvc which offers a more direct -// route to webcams over USB and this is very promising but it wouldn't compile on -// windows for me...I've gutted the classes I implemented DirectShow in just to have -// a skeleton for someone to work on, mail me for more details or if you want a copy.... +////////////////////////////////////////////////////////////////////////// +// Helper functions +// +// The following code enables you to view the contents of a media type while +// debugging. 
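The Media Foundation code below acquires a chain of COM interfaces (attributes, presentation and stream descriptors, media type handler, source reader) and releases each one through the SafeRelease helper added in camera_win.h, with failure paths funneled to a single cleanup label. A small self-contained sketch of that release idiom, using a stand-in type instead of a real IMF* interface:

// Stand-in for a COM interface such as IMFAttributes; a real COM object
// frees itself once its reference count drops to zero inside Release().
struct FakeUnknown {
	void Release() { delete this; }
};

// Same shape as the SafeRelease helper declared in camera_win.h below:
// release once, then null the pointer so repeated calls are harmless.
template <class T>
static void SafeRelease(T **ppT) {
	if (*ppT) {
		(*ppT)->Release();
		*ppT = nullptr;
	}
}

int main() {
	FakeUnknown *p = new FakeUnknown();
	SafeRelease(&p); // releases and nulls the pointer
	SafeRelease(&p); // no-op: pointer is already null
	return 0;
}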
+ +#ifndef IF_EQUAL_RETURN +#define IF_EQUAL_RETURN(param, val) \ + if (val == param) \ + return #val +#endif + +String GetGUIDNameConst(const GUID &guid) { + IF_EQUAL_RETURN(guid, MF_MT_MAJOR_TYPE); + IF_EQUAL_RETURN(guid, MF_MT_MAJOR_TYPE); + IF_EQUAL_RETURN(guid, MF_MT_SUBTYPE); + IF_EQUAL_RETURN(guid, MF_MT_ALL_SAMPLES_INDEPENDENT); + IF_EQUAL_RETURN(guid, MF_MT_FIXED_SIZE_SAMPLES); + IF_EQUAL_RETURN(guid, MF_MT_COMPRESSED); + IF_EQUAL_RETURN(guid, MF_MT_SAMPLE_SIZE); + IF_EQUAL_RETURN(guid, MF_MT_WRAPPED_TYPE); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_NUM_CHANNELS); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_SAMPLES_PER_SECOND); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_FLOAT_SAMPLES_PER_SECOND); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_AVG_BYTES_PER_SECOND); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_BLOCK_ALIGNMENT); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_BITS_PER_SAMPLE); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_VALID_BITS_PER_SAMPLE); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_SAMPLES_PER_BLOCK); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_CHANNEL_MASK); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_FOLDDOWN_MATRIX); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_PEAKREF); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_PEAKTARGET); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_AVGREF); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_AVGTARGET); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_PREFER_WAVEFORMATEX); + IF_EQUAL_RETURN(guid, MF_MT_AAC_PAYLOAD_TYPE); + IF_EQUAL_RETURN(guid, MF_MT_AAC_AUDIO_PROFILE_LEVEL_INDICATION); + IF_EQUAL_RETURN(guid, MF_MT_FRAME_SIZE); + IF_EQUAL_RETURN(guid, MF_MT_FRAME_RATE); + IF_EQUAL_RETURN(guid, MF_MT_FRAME_RATE_RANGE_MAX); + IF_EQUAL_RETURN(guid, MF_MT_FRAME_RATE_RANGE_MIN); + IF_EQUAL_RETURN(guid, MF_MT_PIXEL_ASPECT_RATIO); + IF_EQUAL_RETURN(guid, MF_MT_DRM_FLAGS); + IF_EQUAL_RETURN(guid, MF_MT_PAD_CONTROL_FLAGS); + IF_EQUAL_RETURN(guid, MF_MT_SOURCE_CONTENT_HINT); + IF_EQUAL_RETURN(guid, MF_MT_VIDEO_CHROMA_SITING); + IF_EQUAL_RETURN(guid, MF_MT_INTERLACE_MODE); + IF_EQUAL_RETURN(guid, MF_MT_TRANSFER_FUNCTION); + IF_EQUAL_RETURN(guid, MF_MT_VIDEO_PRIMARIES); + IF_EQUAL_RETURN(guid, MF_MT_CUSTOM_VIDEO_PRIMARIES); + IF_EQUAL_RETURN(guid, MF_MT_YUV_MATRIX); + IF_EQUAL_RETURN(guid, MF_MT_VIDEO_LIGHTING); + IF_EQUAL_RETURN(guid, MF_MT_VIDEO_NOMINAL_RANGE); + IF_EQUAL_RETURN(guid, MF_MT_GEOMETRIC_APERTURE); + IF_EQUAL_RETURN(guid, MF_MT_MINIMUM_DISPLAY_APERTURE); + IF_EQUAL_RETURN(guid, MF_MT_PAN_SCAN_APERTURE); + IF_EQUAL_RETURN(guid, MF_MT_PAN_SCAN_ENABLED); + IF_EQUAL_RETURN(guid, MF_MT_AVG_BITRATE); + IF_EQUAL_RETURN(guid, MF_MT_AVG_BIT_ERROR_RATE); + IF_EQUAL_RETURN(guid, MF_MT_MAX_KEYFRAME_SPACING); + IF_EQUAL_RETURN(guid, MF_MT_DEFAULT_STRIDE); + IF_EQUAL_RETURN(guid, MF_MT_PALETTE); + IF_EQUAL_RETURN(guid, MF_MT_USER_DATA); + IF_EQUAL_RETURN(guid, MF_MT_AM_FORMAT_TYPE); + IF_EQUAL_RETURN(guid, MF_MT_MPEG_START_TIME_CODE); + IF_EQUAL_RETURN(guid, MF_MT_MPEG2_PROFILE); + IF_EQUAL_RETURN(guid, MF_MT_MPEG2_LEVEL); + IF_EQUAL_RETURN(guid, MF_MT_MPEG2_FLAGS); + IF_EQUAL_RETURN(guid, MF_MT_MPEG_SEQUENCE_HEADER); + IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_SRC_PACK_0); + IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_CTRL_PACK_0); + IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_SRC_PACK_1); + IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_CTRL_PACK_1); + IF_EQUAL_RETURN(guid, MF_MT_DV_VAUX_SRC_PACK); + IF_EQUAL_RETURN(guid, MF_MT_DV_VAUX_CTRL_PACK); + IF_EQUAL_RETURN(guid, MF_MT_ARBITRARY_HEADER); + IF_EQUAL_RETURN(guid, MF_MT_ARBITRARY_FORMAT); + IF_EQUAL_RETURN(guid, MF_MT_IMAGE_LOSS_TOLERANT); + IF_EQUAL_RETURN(guid, MF_MT_MPEG4_SAMPLE_DESCRIPTION); + 
IF_EQUAL_RETURN(guid, MF_MT_MPEG4_CURRENT_SAMPLE_ENTRY); + IF_EQUAL_RETURN(guid, MF_MT_ORIGINAL_4CC); + IF_EQUAL_RETURN(guid, MF_MT_ORIGINAL_WAVE_FORMAT_TAG); + + // Media types + IF_EQUAL_RETURN(guid, MFMediaType_Audio); + IF_EQUAL_RETURN(guid, MFMediaType_Video); + IF_EQUAL_RETURN(guid, MFMediaType_Protected); + IF_EQUAL_RETURN(guid, MFMediaType_SAMI); + IF_EQUAL_RETURN(guid, MFMediaType_Script); + IF_EQUAL_RETURN(guid, MFMediaType_Image); + IF_EQUAL_RETURN(guid, MFMediaType_HTML); + IF_EQUAL_RETURN(guid, MFMediaType_Binary); + IF_EQUAL_RETURN(guid, MFMediaType_FileTransfer); + + IF_EQUAL_RETURN(guid, MFVideoFormat_AI44); // FCC('AI44') + IF_EQUAL_RETURN(guid, MFVideoFormat_ARGB32); // D3DFMT_A8R8G8B8 + IF_EQUAL_RETURN(guid, MFVideoFormat_AYUV); // FCC('AYUV') + IF_EQUAL_RETURN(guid, MFVideoFormat_DV25); // FCC('dv25') + IF_EQUAL_RETURN(guid, MFVideoFormat_DV50); // FCC('dv50') + IF_EQUAL_RETURN(guid, MFVideoFormat_DVH1); // FCC('dvh1') + IF_EQUAL_RETURN(guid, MFVideoFormat_DVSD); // FCC('dvsd') + IF_EQUAL_RETURN(guid, MFVideoFormat_DVSL); // FCC('dvsl') + IF_EQUAL_RETURN(guid, MFVideoFormat_H264); // FCC('H264') + IF_EQUAL_RETURN(guid, MFVideoFormat_I420); // FCC('I420') + IF_EQUAL_RETURN(guid, MFVideoFormat_IYUV); // FCC('IYUV') + IF_EQUAL_RETURN(guid, MFVideoFormat_M4S2); // FCC('M4S2') + IF_EQUAL_RETURN(guid, MFVideoFormat_MJPG); + IF_EQUAL_RETURN(guid, MFVideoFormat_MP43); // FCC('MP43') + IF_EQUAL_RETURN(guid, MFVideoFormat_MP4S); // FCC('MP4S') + IF_EQUAL_RETURN(guid, MFVideoFormat_MP4V); // FCC('MP4V') + IF_EQUAL_RETURN(guid, MFVideoFormat_MPG1); // FCC('MPG1') + IF_EQUAL_RETURN(guid, MFVideoFormat_MSS1); // FCC('MSS1') + IF_EQUAL_RETURN(guid, MFVideoFormat_MSS2); // FCC('MSS2') + IF_EQUAL_RETURN(guid, MFVideoFormat_NV11); // FCC('NV11') + IF_EQUAL_RETURN(guid, MFVideoFormat_NV12); // FCC('NV12') + IF_EQUAL_RETURN(guid, MFVideoFormat_P010); // FCC('P010') + IF_EQUAL_RETURN(guid, MFVideoFormat_P016); // FCC('P016') + IF_EQUAL_RETURN(guid, MFVideoFormat_P210); // FCC('P210') + IF_EQUAL_RETURN(guid, MFVideoFormat_P216); // FCC('P216') + IF_EQUAL_RETURN(guid, MFVideoFormat_RGB24); // D3DFMT_R8G8B8 + IF_EQUAL_RETURN(guid, MFVideoFormat_RGB32); // D3DFMT_X8R8G8B8 + IF_EQUAL_RETURN(guid, MFVideoFormat_RGB555); // D3DFMT_X1R5G5B5 + IF_EQUAL_RETURN(guid, MFVideoFormat_RGB565); // D3DFMT_R5G6B5 + IF_EQUAL_RETURN(guid, MFVideoFormat_RGB8); + IF_EQUAL_RETURN(guid, MFVideoFormat_UYVY); // FCC('UYVY') + IF_EQUAL_RETURN(guid, MFVideoFormat_v210); // FCC('v210') + IF_EQUAL_RETURN(guid, MFVideoFormat_v410); // FCC('v410') + IF_EQUAL_RETURN(guid, MFVideoFormat_WMV1); // FCC('WMV1') + IF_EQUAL_RETURN(guid, MFVideoFormat_WMV2); // FCC('WMV2') + IF_EQUAL_RETURN(guid, MFVideoFormat_WMV3); // FCC('WMV3') + IF_EQUAL_RETURN(guid, MFVideoFormat_WVC1); // FCC('WVC1') + IF_EQUAL_RETURN(guid, MFVideoFormat_Y210); // FCC('Y210') + IF_EQUAL_RETURN(guid, MFVideoFormat_Y216); // FCC('Y216') + IF_EQUAL_RETURN(guid, MFVideoFormat_Y410); // FCC('Y410') + IF_EQUAL_RETURN(guid, MFVideoFormat_Y416); // FCC('Y416') + IF_EQUAL_RETURN(guid, MFVideoFormat_Y41P); + IF_EQUAL_RETURN(guid, MFVideoFormat_Y41T); + IF_EQUAL_RETURN(guid, MFVideoFormat_YUY2); // FCC('YUY2') + IF_EQUAL_RETURN(guid, MFVideoFormat_YV12); // FCC('YV12') + IF_EQUAL_RETURN(guid, MFVideoFormat_YVYU); + + IF_EQUAL_RETURN(guid, MFAudioFormat_PCM); // WAVE_FORMAT_PCM + IF_EQUAL_RETURN(guid, MFAudioFormat_Float); // WAVE_FORMAT_IEEE_FLOAT + IF_EQUAL_RETURN(guid, MFAudioFormat_DTS); // WAVE_FORMAT_DTS + IF_EQUAL_RETURN(guid, MFAudioFormat_Dolby_AC3_SPDIF); // 
WAVE_FORMAT_DOLBY_AC3_SPDIF + IF_EQUAL_RETURN(guid, MFAudioFormat_DRM); // WAVE_FORMAT_DRM + IF_EQUAL_RETURN(guid, MFAudioFormat_WMAudioV8); // WAVE_FORMAT_WMAUDIO2 + IF_EQUAL_RETURN(guid, MFAudioFormat_WMAudioV9); // WAVE_FORMAT_WMAUDIO3 + IF_EQUAL_RETURN(guid, MFAudioFormat_WMAudio_Lossless); // WAVE_FORMAT_WMAUDIO_LOSSLESS + IF_EQUAL_RETURN(guid, MFAudioFormat_WMASPDIF); // WAVE_FORMAT_WMASPDIF + IF_EQUAL_RETURN(guid, MFAudioFormat_MSP1); // WAVE_FORMAT_WMAVOICE9 + IF_EQUAL_RETURN(guid, MFAudioFormat_MP3); // WAVE_FORMAT_MPEGLAYER3 + IF_EQUAL_RETURN(guid, MFAudioFormat_MPEG); // WAVE_FORMAT_MPEG + IF_EQUAL_RETURN(guid, MFAudioFormat_AAC); // WAVE_FORMAT_MPEG_HEAAC + IF_EQUAL_RETURN(guid, MFAudioFormat_ADTS); // WAVE_FORMAT_MPEG_ADTS_AAC + + return "Unknown"; +} ////////////////////////////////////////////////////////////////////////// // CameraFeedWindows - Subclass for our camera feed on windows -/// @TODO need to implement this - -class CameraFeedWindows : public CameraFeed { -private: -protected: -public: - CameraFeedWindows(); - virtual ~CameraFeedWindows(); - - bool activate_feed(); - void deactivate_feed(); -}; - -CameraFeedWindows::CameraFeedWindows() { - ///@TODO implement this, should store information about our available camera +CameraFeedWindows::CameraFeedWindows(LPCWSTR camera_id, IMFMediaType *type, String name, int width, int height, GUID format) { + this->camera_id = camera_id; + this->name = name; + this->width = width; + this->height = height; + this->type = type; + this->format = format; } CameraFeedWindows::~CameraFeedWindows() { - // make sure we stop recording if we are! if (is_active()) { deactivate_feed(); }; - ///@TODO free up anything used by this -}; + SafeRelease(&type); +} bool CameraFeedWindows::activate_feed() { - ///@TODO this should activate our camera and start the process of capturing frames + IMFAttributes *pAttributes = NULL; + HRESULT hr = MFCreateAttributes(&pAttributes, 2); + if (FAILED(hr)) { + goto done; + } + // Set the device type to video. + hr = pAttributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID); + if (FAILED(hr)) { + goto done; + } + + // Set the symbolic link. 
+ hr = pAttributes->SetString(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, camera_id); + if (FAILED(hr)) { + goto done; + } + + // Create media source + hr = MFCreateDeviceSource(pAttributes, &source); + if (FAILED(hr)) { + goto done; + } + + // Get information about device + IMFPresentationDescriptor *pPD; + hr = source->CreatePresentationDescriptor(&pPD); + if (FAILED(hr)) { + goto done; + } + + // Get information about video stream + BOOL fSelected; + IMFStreamDescriptor *pSD; + hr = pPD->GetStreamDescriptorByIndex(0, &fSelected, &pSD); + if (FAILED(hr)) { + goto done; + } + + // Get information about supported media types + IMFMediaTypeHandler *pHandler; + hr = pSD->GetMediaTypeHandler(&pHandler); + if (FAILED(hr)) { + goto done; + } + + // Set media type + hr = pHandler->SetCurrentMediaType(type); + if (FAILED(hr)) { + goto done; + } + + // Create media reader + hr = MFCreateSourceReaderFromMediaSource(source, NULL, &reader); + if (FAILED(hr)) { + goto done; + } + + // Prepare images and textures + if (format == MFVideoFormat_RGB24) { + set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_DIFFUSE, + Image::create_empty(width, height, false, Image::FORMAT_RGB8)); + } + + if (format == MFVideoFormat_NV12) { + set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_DIFFUSE, + Image::create_empty(width, height, false, Image::FORMAT_R8)); + + set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_NORMAL, + Image::create_empty(width / 2, height / 2, false, Image::FORMAT_RG8)); + } + + // Start reading + worker = memnew(std::thread(capture, this)); + +done: + SafeRelease(&pAttributes); + SafeRelease(&pPD); + SafeRelease(&pSD); + SafeRelease(&pHandler); + + if FAILED (hr) { + print_error(vformat("Unable to activate camera feed (%d)", hr)); + return false; + } return true; -}; - -///@TODO we should probably have a callback method here that is being called by the -// camera API which provides frames and call back into the CameraServer to update our texture +} void CameraFeedWindows::deactivate_feed() { - ///@TODO this should deactivate our camera and stop the process of capturing frames + if (worker != NULL) { + active = false; + worker->join(); + memdelete(worker); + worker = NULL; + } + + SafeRelease(&reader); + SafeRelease(&source); +} + +void CameraFeedWindows::capture(CameraFeedWindows *feed) { + print_verbose("Camera feed is now streaming"); + feed->active = true; + while (feed->active) { + feed->read(); + Sleep(100); + } +} + +void CameraFeedWindows::read() { + HRESULT hr = S_OK; + IMFSample *pSample = NULL; + BYTE *data; + DWORD streamIndex, flags, len; + LONGLONG llTimeStamp; + IMFMediaBuffer *buffer; + + hr = reader->ReadSample( + MF_SOURCE_READER_FIRST_VIDEO_STREAM, // Stream index. + 0, // Flags. + &streamIndex, // Receives the actual stream index. + &flags, // Receives status flags. + &llTimeStamp, // Receives the time stamp. + &pSample // Receives the sample or NULL. 
+ ); + + if (FAILED(hr)) { + return; + } + + // End of stream + if (flags & MF_SOURCE_READERF_ENDOFSTREAM) { + print_verbose("\tEnd of stream"); + active = false; + } + if (flags & MF_SOURCE_READERF_NEWSTREAM) { + print_verbose("\tNew stream"); + } + if (flags & MF_SOURCE_READERF_NATIVEMEDIATYPECHANGED) { + print_verbose("\tNative type changed"); + } + if (flags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED) { + print_verbose("\tCurrent type changed"); + } + if (flags & MF_SOURCE_READERF_STREAMTICK) { + print_verbose("\tStream tick"); + } + if (flags & MF_SOURCE_READERF_NATIVEMEDIATYPECHANGED) { + print_verbose("\tOutput format changed"); + } + + // Process sample + if (pSample) { + hr = pSample->GetBufferByIndex(0, &buffer); + if (FAILED(hr)) { + return; + } + + // Get image buffer + buffer->Lock(&data, NULL, &len); + + // Get RGB or Y plane + Ref yImage = get_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_DIFFUSE); + set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_DIFFUSE, data, 0, yImage->get_data().size()); + + // Get UV plane + if (format == MFVideoFormat_NV12) { + Ref uvImage = get_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_NORMAL); + set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_NORMAL, data, yImage->get_data().size(), uvImage->get_data().size()); + } + + buffer->Unlock(); + buffer->Release(); + pSample->Release(); + } } ////////////////////////////////////////////////////////////////////////// // CameraWindows - Subclass for our camera server on windows -void CameraWindows::add_active_cameras() { - ///@TODO scan through any active cameras and create CameraFeedWindows objects for them +void CameraWindows::update_feeds() { + // remove existing devices + for (int i = feeds.size() - 1; i >= 0; i--) { + Ref feed = (Ref)feeds[i]; + remove_feed(feed); + }; + + // Create an attribute store to hold the search criteria. + IMFAttributes *pConfig = NULL; + HRESULT hr = MFCreateAttributes(&pConfig, 1); + if (FAILED(hr)) { + goto done_all; + } + + // Request video capture devices. 
+ hr = pConfig->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID); + if (FAILED(hr)) { + goto done_all; + } + + // Process devices + UINT32 count = 0; + IMFActivate **ppDevices = NULL; + hr = MFEnumDeviceSources(pConfig, &ppDevices, &count); + if (FAILED(hr)) { + goto done_all; + } + + // Create feeds for all supported media sources + for (DWORD i = 0; i < count; i++) { + IMFActivate *pDevice = ppDevices[i]; + + // Get camera id + WCHAR *szCameraID = NULL; + UINT32 len; + hr = pDevice->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, &szCameraID, &len); + if (FAILED(hr)) { + goto done_device; + } + + // Get name + WCHAR *szFriendlyName = NULL; + hr = pDevice->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, &szFriendlyName, &len); + if (FAILED(hr)) { + goto done_device; + } + + // Get media source + IMFMediaSource *pSource = NULL; + hr = pDevice->ActivateObject(IID_PPV_ARGS(&pSource)); + if (FAILED(hr)) { + goto done_device; + } + + // Get information about device + IMFPresentationDescriptor *pPD = NULL; + hr = pSource->CreatePresentationDescriptor(&pPD); + if (FAILED(hr)) { + goto done_device; + } + + // Get information about video stream + BOOL fSelected; + IMFStreamDescriptor *pSD = NULL; + hr = pPD->GetStreamDescriptorByIndex(0, &fSelected, &pSD); + if (FAILED(hr)) { + goto done_device; + } + + // Get information about supported media types + IMFMediaTypeHandler *pHandler = NULL; + hr = pSD->GetMediaTypeHandler(&pHandler); + if (FAILED(hr)) { + goto done_device; + } + + // Get supported media types + DWORD cTypes = 0; + hr = pHandler->GetMediaTypeCount(&cTypes); + if (FAILED(hr)) { + goto done_device; + } + + for (DWORD i = 0; i < cTypes; i++) { + // Get media type + IMFMediaType *pType = NULL; + hr = pHandler->GetMediaTypeByIndex(i, &pType); + if (FAILED(hr)) { + SafeRelease(&pType); + break; + } + + // Get subtype + GUID subType; + hr = pType->GetGUID(MF_MT_SUBTYPE, &subType); + if (FAILED(hr)) { + SafeRelease(&pType); + break; + } + + // Get image size + UINT32 width, height = 0; + hr = MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height); + if (FAILED(hr)) { + SafeRelease(&pType); + break; + } + + // Add feed for supported formats + if (subType == MFVideoFormat_RGB24 || subType == MFVideoFormat_NV12) { + String format = GetGUIDNameConst(subType); + format = format.replace("MFVideoFormat_", ""); + String name = szFriendlyName + vformat(" (%d x %d, %s)", width, height, format); + Ref feed = new CameraFeedWindows(szCameraID, pType, name, width, height, subType); + add_feed(feed); + + print_line("Added camera feed: ", name); + } + } + + done_device: + SafeRelease(&pPD); + SafeRelease(&pSD); + SafeRelease(&pHandler); + SafeRelease(&pSource); + SafeRelease(&pDevice); + } + +done_all: + SafeRelease(&pConfig); + + if (FAILED(hr)) { + print_error(vformat("Error updating feeds (%d)", hr)); + } } CameraWindows::CameraWindows() { - // Find cameras active right now - add_active_cameras(); + // Initialize the Media Foundation platform. + HRESULT hr = MFStartup(MF_VERSION); + if (FAILED(hr)) { + print_error("Unable to initialize Media Foundation platform"); + return; + } - // need to add something that will react to devices being connected/removed... 
-}; + // Update feeds + update_feeds(); +} + +CameraWindows::~CameraWindows() { + MFShutdown(); +} diff --git a/modules/camera/camera_win.h b/modules/camera/camera_win.h index 8817e2decd7..2429766770c 100644 --- a/modules/camera/camera_win.h +++ b/modules/camera/camera_win.h @@ -33,14 +33,51 @@ #include "servers/camera/camera_feed.h" #include "servers/camera_server.h" +#include +#include +#include +#include +#include +#include + +class CameraFeedWindows : public CameraFeed { +private: + LPCWSTR camera_id; + IMFMediaSource *source = NULL; + IMFMediaType *type = NULL; + GUID format; + + IMFSourceReader *reader = NULL; + std::thread *worker; + + static void capture(CameraFeedWindows *feed); + void read(); + +protected: +public: + CameraFeedWindows(LPCWSTR camera_id, IMFMediaType *type, String name, int width, int height, GUID format); + virtual ~CameraFeedWindows(); + + bool activate_feed(); + void deactivate_feed(); +}; class CameraWindows : public CameraServer { private: - void add_active_cameras(); + void update_feeds(); public: CameraWindows(); - ~CameraWindows() {} + ~CameraWindows(); }; +template void SafeRelease(T **ppT) +{ + if (*ppT) + { + (*ppT)->Release(); + *ppT = NULL; + } +} + #endif // CAMERA_WIN_H diff --git a/modules/camera/config.py b/modules/camera/config.py index 7b368d21936..c094981082c 100644 --- a/modules/camera/config.py +++ b/modules/camera/config.py @@ -1,6 +1,5 @@ def can_build(env, platform): - return platform == "macos" or platform == "windows" or platform == "linuxbsd" - + return platform == "macos" or platform == "windows" or platform == "android" or platform == "ios" or platform == "linuxbsd" def configure(env): pass diff --git a/modules/camera/register_types.cpp b/modules/camera/register_types.cpp index 666ea8ba651..c56ddfe862e 100644 --- a/modules/camera/register_types.cpp +++ b/modules/camera/register_types.cpp @@ -39,6 +39,9 @@ #if defined(MACOS_ENABLED) #include "camera_macos.h" #endif +#if defined(ANDROID_ENABLED) +#include "camera_android.h" +#endif void initialize_camera_module(ModuleInitializationLevel p_level) { if (p_level != MODULE_INITIALIZATION_LEVEL_SCENE) { @@ -54,6 +57,9 @@ void initialize_camera_module(ModuleInitializationLevel p_level) { #if defined(MACOS_ENABLED) CameraServer::make_default(); #endif +#if defined(ANDROID_ENABLED) + CameraServer::make_default(); +#endif } void uninitialize_camera_module(ModuleInitializationLevel p_level) { diff --git a/modules/theora/SCsub b/modules/theora/SCsub index be557c1c24c..06be677dda9 100644 --- a/modules/theora/SCsub +++ b/modules/theora/SCsub @@ -13,7 +13,7 @@ thirdparty_obj = [] if env["builtin_libtheora"]: thirdparty_dir = "#thirdparty/libtheora/" thirdparty_sources = [ - # "analyze.c", + "analyze.c", # "apiwrapper.c", "bitpack.c", # "collect.c", @@ -22,24 +22,24 @@ if env["builtin_libtheora"]: "decode.c", "dequant.c", # "encapiwrapper.c", - # "encfrag.c", - # "encinfo.c", - # "encode.c", + "encfrag.c", + "encinfo.c", + "encode.c", # "encoder_disabled.c", - # "enquant.c", - # "fdct.c", + "enquant.c", + "fdct.c", "fragment.c", "huffdec.c", - # "huffenc.c", + "huffenc.c", "idct.c", "info.c", "internal.c", - # "mathops.c", - # "mcenc.c", + "mathops.c", + "mcenc.c", "quant.c", - # "rate.c", + "rate.c", "state.c", - # "tokenize.c", + "tokenize.c", ] thirdparty_sources_x86 = [ diff --git a/modules/vorbis/SCsub b/modules/vorbis/SCsub index f063d97fee7..3a144e7b73e 100644 --- a/modules/vorbis/SCsub +++ b/modules/vorbis/SCsub @@ -13,7 +13,7 @@ thirdparty_obj = [] if env["builtin_libvorbis"]: thirdparty_dir 
= "#thirdparty/libvorbis/" thirdparty_sources = [ - # "analysis.c", + "analysis.c", # "barkmel.c", "bitrate.c", "block.c", @@ -35,7 +35,7 @@ if env["builtin_libvorbis"]: "smallft.c", "synthesis.c", # "tone.c", - # "vorbisenc.c", + "vorbisenc.c", "vorbisfile.c", "window.c", ] diff --git a/platform/android/detect.py b/platform/android/detect.py index 0a10754e246..2a5ebb4f191 100644 --- a/platform/android/detect.py +++ b/platform/android/detect.py @@ -63,7 +63,7 @@ def get_ndk_version(): # This is kept in sync with the value in 'platform/android/java/app/config.gradle'. def get_min_target_api(): - return 21 + return 24 def get_flags(): diff --git a/platform/android/java/app/AndroidManifest.xml b/platform/android/java/app/AndroidManifest.xml index 0cc929d226c..93a3f0a0fc1 100644 --- a/platform/android/java/app/AndroidManifest.xml +++ b/platform/android/java/app/AndroidManifest.xml @@ -15,6 +15,12 @@ android:glEsVersion="0x00030000" android:required="true" /> + + + + diff --git a/platform/android/java/app/assets/.gitignore b/platform/android/java/app/assets/.gitignore deleted file mode 100644 index d6b7ef32c84..00000000000 --- a/platform/android/java/app/assets/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -* -!.gitignore diff --git a/platform/android/java/app/build.gradle b/platform/android/java/app/build.gradle index 308f126d5d1..10085823907 100644 --- a/platform/android/java/app/build.gradle +++ b/platform/android/java/app/build.gradle @@ -179,11 +179,7 @@ android { // Signing and zip-aligning are skipped for prebuilt builds, but // performed for Godot gradle builds. zipAlignEnabled shouldZipAlign() - if (shouldSign()) { - signingConfig signingConfigs.debug - } else { - signingConfig null - } + signingConfig signingConfigs.debug } dev { @@ -191,22 +187,14 @@ android { // Signing and zip-aligning are skipped for prebuilt builds, but // performed for Godot gradle builds. zipAlignEnabled shouldZipAlign() - if (shouldSign()) { - signingConfig signingConfigs.debug - } else { - signingConfig null - } + signingConfig signingConfigs.debug } release { // Signing and zip-aligning are skipped for prebuilt builds, but // performed for Godot gradle builds. zipAlignEnabled shouldZipAlign() - if (shouldSign()) { - signingConfig signingConfigs.release - } else { - signingConfig null - } + signingConfig signingConfigs.release } } @@ -266,15 +254,6 @@ task copyAndRenameBinary(type: Copy) { rename sourceFilename, exportFilename } -/** - * Used to validate the version of the Java SDK used for the Godot gradle builds. - */ -task validateJavaVersion { - if (JavaVersion.current() != versions.javaVersion) { - throw new GradleException("Invalid Java version ${JavaVersion.current()}. Version ${versions.javaVersion} is the required Java version for Godot gradle builds.") - } -} - /* When they're scheduled to run, the copy*AARToAppModule tasks generate dependencies for the 'app' module, so we're ensuring the ':app:preBuild' task is set to run after those tasks. 
diff --git a/platform/android/java/app/config.gradle b/platform/android/java/app/config.gradle index e8921e1bb1f..bf44ac37660 100644 --- a/platform/android/java/app/config.gradle +++ b/platform/android/java/app/config.gradle @@ -1,13 +1,13 @@ ext.versions = [ - androidGradlePlugin: '8.2.0', + androidGradlePlugin: '8.5.0', compileSdk : 34, // Also update 'platform/android/export/export_plugin.cpp#OPENGL_MIN_SDK_VERSION' - minSdk : 21, + minSdk : 24, // Also update 'platform/android/export/export_plugin.cpp#DEFAULT_TARGET_SDK_VERSION' targetSdk : 34, buildTools : '34.0.0', kotlinVersion : '1.9.20', - fragmentVersion : '1.7.1', + fragmentVersion : '1.8.1', nexusPublishVersion: '1.3.0', javaVersion : JavaVersion.VERSION_17, // Also update 'platform/android/detect.py#get_ndk_version()' when this is updated. diff --git a/platform/android/java/editor/src/main/AndroidManifest.xml b/platform/android/java/editor/src/main/AndroidManifest.xml index a8757458605..0a8c6c078fb 100644 --- a/platform/android/java/editor/src/main/AndroidManifest.xml +++ b/platform/android/java/editor/src/main/AndroidManifest.xml @@ -13,8 +13,11 @@ android:glEsVersion="0x00030000" android:required="true" /> - + + + #include diff --git a/scene/resources/camera_texture.cpp b/scene/resources/camera_texture.cpp index b219f89e598..543ff71e60b 100644 --- a/scene/resources/camera_texture.cpp +++ b/scene/resources/camera_texture.cpp @@ -36,14 +36,10 @@ void CameraTexture::_bind_methods() { ClassDB::bind_method(D_METHOD("set_camera_feed_id", "feed_id"), &CameraTexture::set_camera_feed_id); ClassDB::bind_method(D_METHOD("get_camera_feed_id"), &CameraTexture::get_camera_feed_id); - ClassDB::bind_method(D_METHOD("set_which_feed", "which_feed"), &CameraTexture::set_which_feed); - ClassDB::bind_method(D_METHOD("get_which_feed"), &CameraTexture::get_which_feed); - ClassDB::bind_method(D_METHOD("set_camera_active", "active"), &CameraTexture::set_camera_active); ClassDB::bind_method(D_METHOD("get_camera_active"), &CameraTexture::get_camera_active); ADD_PROPERTY(PropertyInfo(Variant::INT, "camera_feed_id"), "set_camera_feed_id", "get_camera_feed_id"); - ADD_PROPERTY(PropertyInfo(Variant::INT, "which_feed"), "set_which_feed", "get_which_feed"); ADD_PROPERTY(PropertyInfo(Variant::BOOL, "camera_is_active"), "set_camera_active", "get_camera_active"); } @@ -55,7 +51,7 @@ void CameraTexture::_on_format_changed() { int CameraTexture::get_width() const { Ref feed = CameraServer::get_singleton()->get_feed_by_id(camera_feed_id); if (feed.is_valid()) { - return feed->get_base_width(); + return feed->get_width(); } else { return 0; } @@ -64,7 +60,7 @@ int CameraTexture::get_width() const { int CameraTexture::get_height() const { Ref feed = CameraServer::get_singleton()->get_feed_by_id(camera_feed_id); if (feed.is_valid()) { - return feed->get_base_height(); + return feed->get_height(); } else { return 0; } @@ -77,7 +73,7 @@ bool CameraTexture::has_alpha() const { RID CameraTexture::get_rid() const { Ref feed = CameraServer::get_singleton()->get_feed_by_id(camera_feed_id); if (feed.is_valid()) { - return feed->get_texture(which_feed); + return feed->get_texture(); } else { if (_texture.is_null()) { _texture = RenderingServer::get_singleton()->texture_2d_placeholder_create(); @@ -113,16 +109,6 @@ int CameraTexture::get_camera_feed_id() const { return camera_feed_id; } -void CameraTexture::set_which_feed(CameraServer::FeedImage p_which) { - which_feed = p_which; - notify_property_list_changed(); - callable_mp((Resource *)this, 
&Resource::emit_changed).call_deferred(); -} - -CameraServer::FeedImage CameraTexture::get_which_feed() const { - return which_feed; -} - void CameraTexture::set_camera_active(bool p_active) { Ref<CameraFeed> feed = CameraServer::get_singleton()->get_feed_by_id(camera_feed_id); if (feed.is_valid()) { diff --git a/scene/resources/camera_texture.h b/scene/resources/camera_texture.h index dd216a72d61..e6e35f6d2cf 100644 --- a/scene/resources/camera_texture.h +++ b/scene/resources/camera_texture.h @@ -39,7 +39,6 @@ class CameraTexture : public Texture2D { private: mutable RID _texture; int camera_feed_id = 0; - CameraServer::FeedImage which_feed = CameraServer::FEED_RGBA_IMAGE; protected: static void _bind_methods(); @@ -48,17 +47,14 @@ protected: public: virtual int get_width() const override; virtual int get_height() const override; - virtual RID get_rid() const override; virtual bool has_alpha() const override; + virtual RID get_rid() const override; virtual Ref<Image> get_image() const override; void set_camera_feed_id(int p_new_id); int get_camera_feed_id() const; - void set_which_feed(CameraServer::FeedImage p_which); - CameraServer::FeedImage get_which_feed() const; - void set_camera_active(bool p_active); bool get_camera_active() const; diff --git a/servers/camera/camera_feed.cpp b/servers/camera/camera_feed.cpp index 4021d9564bf..f8729914779 100644 --- a/servers/camera/camera_feed.cpp +++ b/servers/camera/camera_feed.cpp @@ -34,28 +34,24 @@ void CameraFeed::_bind_methods() { ClassDB::bind_method(D_METHOD("get_id"), &CameraFeed::get_id); + ClassDB::bind_method(D_METHOD("get_name"), &CameraFeed::get_name); + ClassDB::bind_method(D_METHOD("get_position"), &CameraFeed::get_position); + ClassDB::bind_method(D_METHOD("get_width"), &CameraFeed::get_width); + ClassDB::bind_method(D_METHOD("get_height"), &CameraFeed::get_height); + ClassDB::bind_method(D_METHOD("get_datatype"), &CameraFeed::get_datatype); ClassDB::bind_method(D_METHOD("is_active"), &CameraFeed::is_active); ClassDB::bind_method(D_METHOD("set_active", "active"), &CameraFeed::set_active); ClassDB::bind_method(D_METHOD("get_name"), &CameraFeed::get_name); - ClassDB::bind_method(D_METHOD("set_name", "name"), &CameraFeed::set_name); - ClassDB::bind_method(D_METHOD("get_position"), &CameraFeed::get_position); - ClassDB::bind_method(D_METHOD("set_position", "position"), &CameraFeed::set_position); // Note, for transform some feeds may override what the user sets (such as ARKit) ClassDB::bind_method(D_METHOD("get_transform"), &CameraFeed::get_transform); ClassDB::bind_method(D_METHOD("set_transform", "transform"), &CameraFeed::set_transform); - ClassDB::bind_method(D_METHOD("set_rgb_image", "rgb_image"), &CameraFeed::set_rgb_image); - ClassDB::bind_method(D_METHOD("set_ycbcr_image", "ycbcr_image"), &CameraFeed::set_ycbcr_image); - ClassDB::bind_method(D_METHOD("get_datatype"), &CameraFeed::get_datatype); - ClassDB::bind_method(D_METHOD("get_formats"), &CameraFeed::get_formats); - ClassDB::bind_method(D_METHOD("set_format", "index", "parameters"), &CameraFeed::set_format); - ADD_SIGNAL(MethodInfo("frame_changed")); ADD_SIGNAL(MethodInfo("format_changed")); @@ -64,14 +60,16 @@ void CameraFeed::_bind_methods() { ADD_PROPERTY(PropertyInfo(Variant::TRANSFORM2D, "feed_transform"), "set_transform", "get_transform"); ADD_PROPERTY(PropertyInfo(Variant::ARRAY, "formats"), "", "get_formats"); - BIND_ENUM_CONSTANT(FEED_NOIMAGE); - BIND_ENUM_CONSTANT(FEED_RGB); - BIND_ENUM_CONSTANT(FEED_YCBCR); - BIND_ENUM_CONSTANT(FEED_YCBCR_SEP); -
BIND_ENUM_CONSTANT(FEED_UNSPECIFIED); BIND_ENUM_CONSTANT(FEED_FRONT); BIND_ENUM_CONSTANT(FEED_BACK); + + BIND_ENUM_CONSTANT(FEED_UNSUPPORTED); + BIND_ENUM_CONSTANT(FEED_RGB); + BIND_ENUM_CONSTANT(FEED_RGBA); + BIND_ENUM_CONSTANT(FEED_YCBCR); + BIND_ENUM_CONSTANT(FEED_YCBCR_SEP); + BIND_ENUM_CONSTANT(FEED_NV12); } int CameraFeed::get_id() const { @@ -101,28 +99,20 @@ String CameraFeed::get_name() const { return name; } -void CameraFeed::set_name(String p_name) { - name = p_name; +int CameraFeed::get_width() const { + return width; } -int CameraFeed::get_base_width() const { - return base_width; -} - -int CameraFeed::get_base_height() const { - return base_height; -} - -CameraFeed::FeedDataType CameraFeed::get_datatype() const { - return datatype; +int CameraFeed::get_height() const { + return height; } CameraFeed::FeedPosition CameraFeed::get_position() const { return position; } -void CameraFeed::set_position(CameraFeed::FeedPosition p_position) { - position = p_position; +CameraFeed::FeedDataType CameraFeed::get_datatype() const { + return datatype; } Transform2D CameraFeed::get_transform() const { @@ -133,122 +123,54 @@ void CameraFeed::set_transform(const Transform2D &p_transform) { transform = p_transform; } -RID CameraFeed::get_texture(CameraServer::FeedImage p_which) { - return texture[p_which]; +RID CameraFeed::get_texture() const { + return texture; +} + +Ref CameraFeed::get_image(RenderingServer::CanvasTextureChannel channel) { + return channel_image[channel]; +} + +void CameraFeed::set_image(RenderingServer::CanvasTextureChannel channel, const Ref &image) { + if (channel_image[channel] != image) { + channel_image[channel] = image; + RenderingServer::get_singleton()->free(channel_texture[channel]); + channel_texture[channel] = RenderingServer::get_singleton()->texture_2d_create(image); + RenderingServer::get_singleton()->canvas_texture_set_channel(texture, channel, channel_texture[channel]); + } else { + RenderingServer::get_singleton()->texture_2d_update(channel_texture[channel], image); + } +} + +void CameraFeed::set_image(RenderingServer::CanvasTextureChannel channel, uint8_t *data, size_t offset, size_t len) { + Ref image = channel_image[channel]; + ERR_FAIL_COND_MSG(image.is_null(), "Channel not initialized"); + Vector image_data = image->get_data(); + uint8_t *dest = image_data.ptrw(); + memcpy(dest, data + offset, len); + image->set_data(image->get_width(), image->get_height(), false, image->get_format(), image_data); + RenderingServer::get_singleton()->texture_2d_update(channel_texture[channel], image); } CameraFeed::CameraFeed() { // initialize our feed id = CameraServer::get_singleton()->get_free_id(); - base_width = 0; - base_height = 0; - name = "???"; + name = "?"; + width = 0; + height = 0; active = false; - datatype = CameraFeed::FEED_RGB; position = CameraFeed::FEED_UNSPECIFIED; + datatype = CameraFeed::FEED_UNSUPPORTED; transform = Transform2D(1.0, 0.0, 0.0, -1.0, 0.0, 1.0); - texture[CameraServer::FEED_Y_IMAGE] = RenderingServer::get_singleton()->texture_2d_placeholder_create(); - texture[CameraServer::FEED_CBCR_IMAGE] = RenderingServer::get_singleton()->texture_2d_placeholder_create(); -} - -CameraFeed::CameraFeed(String p_name, FeedPosition p_position) { - // initialize our feed - id = CameraServer::get_singleton()->get_free_id(); - base_width = 0; - base_height = 0; - name = p_name; - active = false; - datatype = CameraFeed::FEED_NOIMAGE; - position = p_position; - transform = Transform2D(1.0, 0.0, 0.0, -1.0, 0.0, 1.0); - texture[CameraServer::FEED_Y_IMAGE] = 
RenderingServer::get_singleton()->texture_2d_placeholder_create(); - texture[CameraServer::FEED_CBCR_IMAGE] = RenderingServer::get_singleton()->texture_2d_placeholder_create(); + texture = RenderingServer::get_singleton()->canvas_texture_create(); } CameraFeed::~CameraFeed() { // Free our textures ERR_FAIL_NULL(RenderingServer::get_singleton()); - RenderingServer::get_singleton()->free(texture[CameraServer::FEED_Y_IMAGE]); - RenderingServer::get_singleton()->free(texture[CameraServer::FEED_CBCR_IMAGE]); -} - -void CameraFeed::set_rgb_image(const Ref &p_rgb_img) { - ERR_FAIL_COND(p_rgb_img.is_null()); - if (active) { - int new_width = p_rgb_img->get_width(); - int new_height = p_rgb_img->get_height(); - - if ((base_width != new_width) || (base_height != new_height)) { - // We're assuming here that our camera image doesn't change around formats etc, allocate the whole lot... - base_width = new_width; - base_height = new_height; - - RID new_texture = RenderingServer::get_singleton()->texture_2d_create(p_rgb_img); - RenderingServer::get_singleton()->texture_replace(texture[CameraServer::FEED_RGBA_IMAGE], new_texture); - - emit_signal(SNAME("format_changed")); - } else { - RenderingServer::get_singleton()->texture_2d_update(texture[CameraServer::FEED_RGBA_IMAGE], p_rgb_img); - } - - datatype = CameraFeed::FEED_RGB; - } -} - -void CameraFeed::set_ycbcr_image(const Ref &p_ycbcr_img) { - ERR_FAIL_COND(p_ycbcr_img.is_null()); - if (active) { - int new_width = p_ycbcr_img->get_width(); - int new_height = p_ycbcr_img->get_height(); - - if ((base_width != new_width) || (base_height != new_height)) { - // We're assuming here that our camera image doesn't change around formats etc, allocate the whole lot... - base_width = new_width; - base_height = new_height; - - RID new_texture = RenderingServer::get_singleton()->texture_2d_create(p_ycbcr_img); - RenderingServer::get_singleton()->texture_replace(texture[CameraServer::FEED_RGBA_IMAGE], new_texture); - - emit_signal(SNAME("format_changed")); - } else { - RenderingServer::get_singleton()->texture_2d_update(texture[CameraServer::FEED_RGBA_IMAGE], p_ycbcr_img); - } - - datatype = CameraFeed::FEED_YCBCR; - } -} - -void CameraFeed::set_ycbcr_images(const Ref &p_y_img, const Ref &p_cbcr_img) { - ERR_FAIL_COND(p_y_img.is_null()); - ERR_FAIL_COND(p_cbcr_img.is_null()); - if (active) { - ///@TODO investigate whether we can use thirdparty/misc/yuv2rgb.h here to convert our YUV data to RGB, our shader approach is potentially faster though.. - // Wondering about including that into multiple projects, may cause issues. - // That said, if we convert to RGB, we could enable using texture resources again... - - int new_y_width = p_y_img->get_width(); - int new_y_height = p_y_img->get_height(); - - if ((base_width != new_y_width) || (base_height != new_y_height)) { - // We're assuming here that our camera image doesn't change around formats etc, allocate the whole lot... 
- base_width = new_y_width; - base_height = new_y_height; - { - RID new_texture = RenderingServer::get_singleton()->texture_2d_create(p_y_img); - RenderingServer::get_singleton()->texture_replace(texture[CameraServer::FEED_Y_IMAGE], new_texture); - } - { - RID new_texture = RenderingServer::get_singleton()->texture_2d_create(p_cbcr_img); - RenderingServer::get_singleton()->texture_replace(texture[CameraServer::FEED_CBCR_IMAGE], new_texture); - } - - emit_signal(SNAME("format_changed")); - } else { - RenderingServer::get_singleton()->texture_2d_update(texture[CameraServer::FEED_Y_IMAGE], p_y_img); - RenderingServer::get_singleton()->texture_2d_update(texture[CameraServer::FEED_CBCR_IMAGE], p_cbcr_img); - } - - datatype = CameraFeed::FEED_YCBCR_SEP; + RenderingServer::get_singleton()->free(texture); + for (size_t i = 0; i < 3; i++) { + RenderingServer::get_singleton()->free(channel_texture[i]); } } @@ -259,17 +181,4 @@ bool CameraFeed::activate_feed() { void CameraFeed::deactivate_feed() { // nothing to do here -} - -bool CameraFeed::set_format(int p_index, const Dictionary &p_parameters) { - return false; -} - -Array CameraFeed::get_formats() const { - return Array(); -} - -CameraFeed::FeedFormat CameraFeed::get_format() const { - FeedFormat feed_format = {}; - return feed_format; -} +} \ No newline at end of file diff --git a/servers/camera/camera_feed.h b/servers/camera/camera_feed.h index 492a909239e..c8a20ed3b02 100644 --- a/servers/camera/camera_feed.h +++ b/servers/camera/camera_feed.h @@ -45,21 +45,27 @@ class CameraFeed : public RefCounted { GDCLASS(CameraFeed, RefCounted); public: - enum FeedDataType { - FEED_NOIMAGE, // we don't have an image yet - FEED_RGB, // our texture will contain a normal RGB texture that can be used directly - FEED_YCBCR, // our texture will contain a YCbCr texture that needs to be converted to RGB before output - FEED_YCBCR_SEP // our camera is split into two textures, first plane contains Y data, second plane contains CbCr data - }; - enum FeedPosition { FEED_UNSPECIFIED, // we have no idea FEED_FRONT, // this is a camera on the front of the device FEED_BACK // this is a camera on the back of the device }; + enum FeedDataType { + FEED_UNSUPPORTED, // unsupported type + FEED_RGB, // TEXTURE contains RGB data + FEED_RGBA, // TEXTURE contains RGBA data + FEED_NV12, // TEXTURE contains Y data, NORMAL_TEXTURE contains CbCr data + FEED_YCBCR, // TEXTURE contains YCbCr data + FEED_YCBCR_SEP // TEXTURE contains Y data, NORMAL_TEXTURE contains Cb data, SPECULAR_TEXTURE contains Cr data + }; + private: int id; // unique id for this, for internal use in case feeds are removed + + RID texture; // layered texture + RID channel_texture[3]; // channel textures + Ref channel_image[3]; // channel images protected: struct FeedFormat { @@ -72,8 +78,11 @@ protected: }; String name; // name of our camera feed - FeedDataType datatype; // type of texture data stored + FeedDataType datatype; // type of texture data stored FeedPosition position; // position of camera on the device + int width; // width of camera frames + int height; // height of camera frames + Transform2D transform; // display transform int base_width = 0; int base_height = 0; @@ -82,47 +91,37 @@ protected: int selected_format = -1; bool active; // only when active do we actually update the camera texture each frame - RID texture[CameraServer::FEED_IMAGES]; // texture images needed for this static void _bind_methods(); public: int get_id() const; + String get_name() const; + int get_width() const; + int 
get_height() const; + FeedPosition get_position() const; + FeedDataType get_datatype() const; + + RID get_texture() const; + bool is_active() const; void set_active(bool p_is_active); - String get_name() const; - void set_name(String p_name); - - int get_base_width() const; - int get_base_height() const; - - FeedPosition get_position() const; - void set_position(FeedPosition p_position); - Transform2D get_transform() const; void set_transform(const Transform2D &p_transform); - RID get_texture(CameraServer::FeedImage p_which); + Ref get_image(RenderingServer::CanvasTextureChannel channel); + void set_image(RenderingServer::CanvasTextureChannel channel, const Ref &image); + void set_image(RenderingServer::CanvasTextureChannel channel, uint8_t *data, size_t offset, size_t len); CameraFeed(); - CameraFeed(String p_name, FeedPosition p_position = CameraFeed::FEED_UNSPECIFIED); virtual ~CameraFeed(); - FeedDataType get_datatype() const; - void set_rgb_image(const Ref &p_rgb_img); - void set_ycbcr_image(const Ref &p_ycbcr_img); - void set_ycbcr_images(const Ref &p_y_img, const Ref &p_cbcr_img); - - virtual bool set_format(int p_index, const Dictionary &p_parameters); - virtual Array get_formats() const; - virtual FeedFormat get_format() const; - virtual bool activate_feed(); virtual void deactivate_feed(); }; -VARIANT_ENUM_CAST(CameraFeed::FeedDataType); VARIANT_ENUM_CAST(CameraFeed::FeedPosition); +VARIANT_ENUM_CAST(CameraFeed::FeedDataType); #endif // CAMERA_FEED_H diff --git a/servers/camera_server.cpp b/servers/camera_server.cpp index bf698e39456..d35d928b59d 100644 --- a/servers/camera_server.cpp +++ b/servers/camera_server.cpp @@ -40,6 +40,7 @@ CameraServer::CreateFunc CameraServer::create_func = nullptr; void CameraServer::_bind_methods() { ClassDB::bind_method(D_METHOD("get_feed", "index"), &CameraServer::get_feed); + ClassDB::bind_method(D_METHOD("get_feed_by_id", "feed_id"), &CameraServer::get_feed_by_id); ClassDB::bind_method(D_METHOD("get_feed_count"), &CameraServer::get_feed_count); ClassDB::bind_method(D_METHOD("feeds"), &CameraServer::get_feeds); @@ -150,15 +151,6 @@ TypedArray CameraServer::get_feeds() { return return_feeds; }; -RID CameraServer::feed_texture(int p_id, CameraServer::FeedImage p_texture) { - int index = get_feed_index(p_id); - ERR_FAIL_COND_V(index == -1, RID()); - - Ref feed = get_feed(index); - - return feed->get_texture(p_texture); -}; - CameraServer::CameraServer() { singleton = this; }; diff --git a/servers/camera_server.h b/servers/camera_server.h index e9bcd771d2e..9ab75148b47 100644 --- a/servers/camera_server.h +++ b/servers/camera_server.h @@ -104,9 +104,6 @@ public: int get_feed_count(); TypedArray get_feeds(); - // Intended for use with custom CameraServer implementation. 
- RID feed_texture(int p_id, FeedImage p_texture); - CameraServer(); ~CameraServer(); }; diff --git a/servers/movie_writer/SCsub b/servers/movie_writer/SCsub index 98f918b2458..235a85a4c4e 100644 --- a/servers/movie_writer/SCsub +++ b/servers/movie_writer/SCsub @@ -4,3 +4,11 @@ from misc.utility.scons_hints import * Import("env") env.add_source_files(env.servers_sources, "*.cpp") + +# also requires libogg, libtheora and libvorbis +if env["builtin_libogg"]: + env.Prepend(CPPPATH=["#thirdparty/libogg"]) +if env["builtin_libtheora"]: + env.Prepend(CPPPATH=["#thirdparty/libtheora", "#thirdparty/misc"]) +if env["builtin_libvorbis"]: + env.Prepend(CPPPATH=["#thirdparty/libvorbis"]) \ No newline at end of file diff --git a/servers/movie_writer/movie_writer.cpp b/servers/movie_writer/movie_writer.cpp index 14e920d8cdb..6ea379be1b4 100644 --- a/servers/movie_writer/movie_writer.cpp +++ b/servers/movie_writer/movie_writer.cpp @@ -141,7 +141,9 @@ void MovieWriter::_bind_methods() { GLOBAL_DEF(PropertyInfo(Variant::INT, "editor/movie_writer/mix_rate", PROPERTY_HINT_RANGE, "8000,192000,1,suffix:Hz"), 48000); GLOBAL_DEF(PropertyInfo(Variant::INT, "editor/movie_writer/speaker_mode", PROPERTY_HINT_ENUM, "Stereo,3.1,5.1,7.1"), 0); - GLOBAL_DEF(PropertyInfo(Variant::FLOAT, "editor/movie_writer/mjpeg_quality", PROPERTY_HINT_RANGE, "0.01,1.0,0.01"), 0.75); + GLOBAL_DEF(PropertyInfo(Variant::FLOAT, "editor/movie_writer/video_quality", PROPERTY_HINT_RANGE, "0.0,1.0,0.01"), 0.75); + GLOBAL_DEF(PropertyInfo(Variant::FLOAT, "editor/movie_writer/audio_quality", PROPERTY_HINT_RANGE, "0.0,1.0,0.01"), 0.2); + // Used by the editor. GLOBAL_DEF_BASIC("editor/movie_writer/movie_file", ""); GLOBAL_DEF_BASIC("editor/movie_writer/disable_vsync", false); diff --git a/servers/movie_writer/movie_writer.h b/servers/movie_writer/movie_writer.h index 69d6b1ba2b4..e907b86bd95 100644 --- a/servers/movie_writer/movie_writer.h +++ b/servers/movie_writer/movie_writer.h @@ -85,6 +85,7 @@ public: void begin(const Size2i &p_movie_size, uint32_t p_fps, const String &p_base_path); void add_frame(); + void add_frame(const Ref image); static void set_extensions_hint(); diff --git a/servers/movie_writer/movie_writer_mjpeg.cpp b/servers/movie_writer/movie_writer_mjpeg.cpp index f46f7ff5f12..fab93636ff6 100644 --- a/servers/movie_writer/movie_writer_mjpeg.cpp +++ b/servers/movie_writer/movie_writer_mjpeg.cpp @@ -259,5 +259,5 @@ void MovieWriterMJPEG::write_end() { MovieWriterMJPEG::MovieWriterMJPEG() { mix_rate = GLOBAL_GET("editor/movie_writer/mix_rate"); speaker_mode = AudioServer::SpeakerMode(int(GLOBAL_GET("editor/movie_writer/speaker_mode"))); - quality = GLOBAL_GET("editor/movie_writer/mjpeg_quality"); + quality = GLOBAL_GET("editor/movie_writer/video_quality"); } diff --git a/servers/movie_writer/movie_writer_ogv.cpp b/servers/movie_writer/movie_writer_ogv.cpp new file mode 100644 index 00000000000..5865fd9d84a --- /dev/null +++ b/servers/movie_writer/movie_writer_ogv.cpp @@ -0,0 +1,426 @@ +/**************************************************************************/ +/* movie_writer_ogv.cpp */ +/**************************************************************************/ +/* This file is part of: */ +/* GODOT ENGINE */ +/* https://godotengine.org */ +/**************************************************************************/ +/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */ +/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. 
*/ +/* */ +/* Permission is hereby granted, free of charge, to any person obtaining */ +/* a copy of this software and associated documentation files (the */ +/* "Software"), to deal in the Software without restriction, including */ +/* without limitation the rights to use, copy, modify, merge, publish, */ +/* distribute, sublicense, and/or sell copies of the Software, and to */ +/* permit persons to whom the Software is furnished to do so, subject to */ +/* the following conditions: */ +/* */ +/* The above copyright notice and this permission notice shall be */ +/* included in all copies or substantial portions of the Software. */ +/* */ +/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */ +/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */ +/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */ +/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */ +/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */ +/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */ +/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ +/**************************************************************************/ + +#include "movie_writer_ogv.h" +#include "core/config/project_settings.h" +#include + +int MovieWriterOGV::encode_audio(const int32_t *p_audio_data) { + ogg_packet op; + if (ogg_stream_eos(&vo)) + return 0; + + if (p_audio_data == nullptr) { + /* end of file. this can be done implicitly, but it's + easier to see here in non-clever fashion. Tell the + library we're at end of stream so that it can handle the + last frame and mark end of stream in the output properly */ + vorbis_analysis_wrote(&vd, 0); + } else { + /* read and process more audio */ + float **vorbis_buffer = vorbis_analysis_buffer(&vd, audio_frames); + + /* uninterleave samples */ + uint32_t count = 0; + for (uint32_t i = 0; i < audio_frames; i++) { + for (uint32_t j = 0; j < audio_ch; j++) { + vorbis_buffer[j][i] = p_audio_data[count] / 2147483647.f; + count++; + } + } + + vorbis_analysis_wrote(&vd, audio_frames); + } + + while (vorbis_analysis_blockout(&vd, &vb) > 0) { + /* analysis, assume we want to use bitrate management */ + vorbis_analysis(&vb, NULL); + vorbis_bitrate_addblock(&vb); + + /* weld packets into the bitstream */ + while (vorbis_bitrate_flushpacket(&vd, &op) > 0) { + ogg_stream_packetin(&vo, &op); + } + } + + if (ogg_stream_pageout(&vo, &audiopage) > 0) + return 1; + + return 0; +} + +int MovieWriterOGV::encode_video(const Ref &p_image) { + ogg_packet op; + if (ogg_stream_eos(&to)) + return 0; + + if (p_image != nullptr) { + PackedByteArray data = p_image->get_data(); + rgb2yuv420(y, u, v, data.ptrw(), p_image->get_width(), p_image->get_height()); + + /*We submit the buffer using the size of the picture region. 
libtheora will pad the picture region out to the full frame size for us, + whether we pass in a full frame or not.*/ + ycbcr[0].width = p_image->get_width(); + ycbcr[0].height = p_image->get_height(); + ycbcr[0].stride = p_image->get_width(); + ycbcr[0].data = y; + ycbcr[1].width = p_image->get_width() / 2; + ycbcr[1].height = p_image->get_height() / 2; + ycbcr[1].stride = p_image->get_width() / 2; + ycbcr[1].data = u; + ycbcr[2].width = p_image->get_width() / 2; + ycbcr[2].height = p_image->get_height() / 2; + ycbcr[2].stride = p_image->get_width() / 2; + ycbcr[2].data = v; + th_encode_ycbcr_in(td, ycbcr); + } + + int ret = 0; + do { + ret = th_encode_packetout(td, p_image == nullptr, &op); + if (ret > 0) + ogg_stream_packetin(&to, &op); + } while (ret > 0); + + if (ogg_stream_pageout(&to, &videopage) > 0) + return 1; + + return 0; +} + +uint32_t MovieWriterOGV::get_audio_mix_rate() const { + return mix_rate; +} + +AudioServer::SpeakerMode MovieWriterOGV::get_audio_speaker_mode() const { + return speaker_mode; +} + +bool MovieWriterOGV::handles_file(const String &p_path) const { + return p_path.get_extension().to_lower() == "ogv"; +} + +void MovieWriterOGV::get_supported_extensions(List *r_extensions) const { + r_extensions->push_back("ogv"); +} + +Error MovieWriterOGV::write_begin(const Size2i &p_movie_size, uint32_t p_fps, const String &p_base_path) { + base_path = p_base_path.get_basename(); + if (base_path.is_relative_path()) { + base_path = "res://" + base_path; + } + base_path += ".ogv"; + + f = FileAccess::open(base_path, FileAccess::WRITE_READ); + ERR_FAIL_COND_V(f.is_null(), ERR_CANT_OPEN); + + fps = p_fps; + speed = 4; + + audio_ch = 2; + switch (speaker_mode) { + case AudioServer::SPEAKER_MODE_STEREO: + audio_ch = 2; + break; + case AudioServer::SPEAKER_SURROUND_31: + audio_ch = 4; + break; + case AudioServer::SPEAKER_SURROUND_51: + audio_ch = 6; + break; + case AudioServer::SPEAKER_SURROUND_71: + audio_ch = 8; + break; + } + audio_frames = mix_rate / fps; + + /* Set up Ogg output streams */ + srand(time(NULL)); + ogg_stream_init(&to, rand()); // video + ogg_stream_init(&vo, rand()); // audio + + /* Initialize Vorbis audio encoding */ + vorbis_info_init(&vi); + int ret = 0; + if (audio_r == 0) + ret = vorbis_encode_init_vbr(&vi, audio_ch, mix_rate, audio_q); + else + ret = vorbis_encode_init(&vi, audio_ch, mix_rate, -1, (int)(64870 * (ogg_int64_t)audio_r >> 16), -1); + ERR_FAIL_COND_V_MSG(ret, ERR_UNAVAILABLE, "The Vorbis encoder could not set up a mode according to the requested quality or bitrate."); + + vorbis_comment_init(&vc); + vorbis_analysis_init(&vd, &vi); + vorbis_block_init(&vd, &vb); + + /* Set up Theora encoder */ + /* Theora has a divisible-by-sixteen restriction for the encoded frame size */ + /* scale the picture size up to the nearest /16 and calculate offsets */ + int pic_w = p_movie_size.width; + int pic_h = p_movie_size.height; + int frame_w = (pic_w + 15) & ~0xF; + int frame_h = (pic_h + 15) & ~0xF; + /*Force the offsets to be even so that chroma samples line up like we + expect.*/ + int pic_x = (frame_w - pic_w) / 2 & ~1; + int pic_y = (frame_h - pic_h) / 2 & ~1; + + y = (uint8_t *)memalloc(pic_w * pic_h); + u = (uint8_t *)memalloc(pic_w * pic_h / 4); + v = (uint8_t *)memalloc(pic_w * pic_h / 4); + + th_info_init(&ti); + ti.frame_width = frame_w; + ti.frame_height = frame_h; + ti.pic_width = pic_w; + ti.pic_height = pic_h; + ti.pic_x = pic_x; + ti.pic_y = pic_y; + ti.fps_numerator = fps; + ti.fps_denominator = 1; + ti.aspect_numerator = 1; + 
ti.aspect_denominator = 1;
+	ti.colorspace = TH_CS_UNSPECIFIED;
+	/*Account for the Ogg page overhead.
+	  This is 1 byte per 255 for lacing values, plus 26 bytes per 4096 bytes for
+	  the page header, plus approximately 1/2 byte per packet (not accounted for
+	  here).*/
+	ti.target_bitrate = (int)(64870 * (ogg_int64_t)video_r >> 16);
+	ti.quality = video_q * 63;
+	ti.pixel_fmt = TH_PF_420;
+	td = th_encode_alloc(&ti);
+	th_info_clear(&ti);
+	ERR_FAIL_COND_V_MSG(td == nullptr, ERR_UNCONFIGURED, "Error: Could not create an encoder instance. Check that video parameters are valid.");
+
+	/* setting just the granule shift only allows power-of-two keyframe spacing. Set the actual requested spacing. */
+	ret = th_encode_ctl(td, TH_ENCCTL_SET_KEYFRAME_FREQUENCY_FORCE, &keyframe_frequency, sizeof(keyframe_frequency));
+	if (ret < 0)
+		ERR_PRINT("Could not set keyframe interval");
+
+	if (vp3_compatible) {
+		ret = th_encode_ctl(td, TH_ENCCTL_SET_VP3_COMPATIBLE, &vp3_compatible, sizeof(vp3_compatible));
+		if (ret < 0)
+			ERR_PRINT("Could not enable strict VP3 compatibility");
+	}
+
+	/* reverse the rate control flags to favor a 'long time' strategy */
+	if (soft_target) {
+		int arg = TH_RATECTL_CAP_UNDERFLOW;
+		ret = th_encode_ctl(td, TH_ENCCTL_SET_RATE_FLAGS, &arg, sizeof(arg));
+		if (ret < 0)
+			ERR_PRINT("Could not set encoder flags for soft-target");
+
+		if (buf_delay < 0) {
+			if ((keyframe_frequency * 7 >> 1) > 5 * fps)
+				arg = keyframe_frequency * 7 >> 1;
+			else
+				arg = 5 * fps;
+			ret = th_encode_ctl(td, TH_ENCCTL_SET_RATE_BUFFER, &arg, sizeof(arg));
+			if (ret < 0)
+				ERR_PRINT("Could not set rate control buffer for soft-target");
+		}
+	}
+
+	/* Now we can set the buffer delay if the user requested a non-default one
+	   (this has to be done after two-pass is enabled).*/
+	if (buf_delay >= 0) {
+		ret = th_encode_ctl(td, TH_ENCCTL_SET_RATE_BUFFER, &buf_delay, sizeof(buf_delay));
+		if (ret < 0)
+			WARN_PRINT("Could not set the desired buffer delay.");
+	}
+
+	/*Speed should also be set after the current encoder mode is established,
+	  since the available speed levels may change depending.*/
+	if (speed >= 0) {
+		int speed_max;
+		int ret;
+		ret = th_encode_ctl(td, TH_ENCCTL_GET_SPLEVEL_MAX, &speed_max, sizeof(speed_max));
+		if (ret < 0) {
+			WARN_PRINT("Could not determine the maximum speed level.");
+			speed_max = 0;
+		}
+		ret = th_encode_ctl(td, TH_ENCCTL_SET_SPLEVEL, &speed, sizeof(speed));
+		if (ret < 0) {
+			WARN_PRINT(vformat("Could not set speed level to %d of %d.", speed, speed_max));
+			if (speed > speed_max) {
+				WARN_PRINT(vformat("Setting it to %d instead.", speed_max));
+			}
+			ret = th_encode_ctl(td, TH_ENCCTL_SET_SPLEVEL, &speed_max, sizeof(speed_max));
+			if (ret < 0) {
+				WARN_PRINT(vformat("Could not set speed level to %d of %d.", speed_max, speed_max));
+			}
+		}
+	}
+
+	/* write the bitstream header packets with proper page interleave */
+	th_comment_init(&tc);
+	/* first packet will get its own page automatically */
+	ogg_packet op;
+	if (th_encode_flushheader(td, &tc, &op) <= 0) {
+		ERR_FAIL_V_MSG(ERR_UNCONFIGURED, "Internal Theora library error.");
+	}
+
+	ogg_stream_packetin(&to, &op);
+	if (ogg_stream_pageout(&to, &videopage) != 1) {
+		ERR_FAIL_V_MSG(ERR_UNCONFIGURED, "Internal Ogg library error.");
+	}
+	f->store_buffer(videopage.header, videopage.header_len);
+	f->store_buffer(videopage.body, videopage.body_len);
+
+	/* create the remaining theora headers */
+	for (;;) {
+		ret = th_encode_flushheader(td, &tc, &op);
+		if (ret < 0) {
+			ERR_FAIL_V_MSG(ERR_UNCONFIGURED,
"Internal Theora library error."); + } else if (ret == 0) { + break; + } + ogg_stream_packetin(&to, &op); + } + + /* vorbis streams start with three standard header packets. */ + ogg_packet id; + ogg_packet comment; + ogg_packet code; + if (vorbis_analysis_headerout(&vd, &vc, &id, &comment, &code) < 0) { + ERR_FAIL_V_MSG(ERR_UNCONFIGURED, "Internal Vorbis library error."); + } + + /* id header is automatically placed in its own page */ + ogg_stream_packetin(&vo, &id); + if (ogg_stream_pageout(&vo, &audiopage) != 1) { + ERR_FAIL_V_MSG(ERR_UNCONFIGURED, "Internal Ogg library error."); + } + f->store_buffer(audiopage.header, audiopage.header_len); + f->store_buffer(audiopage.body, audiopage.body_len); + + /* append remaining vorbis header packets */ + ogg_stream_packetin(&vo, &comment); + ogg_stream_packetin(&vo, &code); + + /* Flush the rest of our headers. This ensures the actual data in each stream will start on a new page, as per spec. */ + for (;;) { + ret = ogg_stream_flush(&to, &videopage); + if (ret < 0) { + ERR_FAIL_V_MSG(ERR_UNCONFIGURED, "Internal Ogg library error."); + } else if (ret == 0) { + break; + } + f->store_buffer(videopage.header, videopage.header_len); + f->store_buffer(videopage.body, videopage.body_len); + } + + for (;;) { + ret = ogg_stream_flush(&vo, &audiopage); + if (ret < 0) { + ERR_FAIL_V_MSG(ERR_UNCONFIGURED, "Internal Ogg library error."); + } else if (ret == 0) { + break; + } + f->store_buffer(audiopage.header, audiopage.header_len); + f->store_buffer(audiopage.body, audiopage.body_len); + } + + return OK; +} + +Error MovieWriterOGV::write_frame(const Ref &p_image, const int32_t *p_audio_data) { + ERR_FAIL_COND_V(!f.is_valid() || td == NULL, ERR_UNCONFIGURED); + + int audio_or_video = -1; + + /* is there an audio page flushed? If not, fetch one if possible */ + int audioflag = encode_audio(p_audio_data); + + /* is there a video page flushed? If not, fetch one if possible */ + int videoflag = encode_video(p_image); + + /* no pages of either? Must be end of stream. */ + if (!audioflag && !videoflag) + return OK; + + /* which is earlier; the end of the audio page or the end of the video page? Flush the earlier to stream */ + double audiotime = audioflag ? vorbis_granule_time(&vd, ogg_page_granulepos(&audiopage)) : -1; + double videotime = videoflag ? 
th_granule_time(td, ogg_page_granulepos(&videopage)) : -1; + if (!audioflag) { + audio_or_video = 1; + } else if (!videoflag) { + audio_or_video = 0; + } else { + if (audiotime < videotime) + audio_or_video = 0; + else + audio_or_video = 1; + } + + if (audio_or_video == 1) { + /* flush a video page */ + f->store_buffer(videopage.header, videopage.header_len); + f->store_buffer(videopage.body, videopage.body_len); + } else { + /* flush an audio page */ + f->store_buffer(audiopage.header, audiopage.header_len); + f->store_buffer(audiopage.body, audiopage.body_len); + } + + frame_count++; + + return OK; +} + +void MovieWriterOGV::write_end() { + write_frame(nullptr, nullptr); + + th_encode_free(td); + + ogg_stream_clear(&vo); + vorbis_block_clear(&vb); + vorbis_dsp_clear(&vd); + vorbis_comment_clear(&vc); + vorbis_info_clear(&vi); + + ogg_stream_clear(&to); + th_comment_clear(&tc); + + memfree(y); + memfree(u); + memfree(v); + + if (f.is_valid()) { + f.unref(); + } +} + +MovieWriterOGV::MovieWriterOGV() { + mix_rate = GLOBAL_GET("editor/movie_writer/mix_rate"); + speaker_mode = AudioServer::SpeakerMode(int(GLOBAL_GET("editor/movie_writer/speaker_mode"))); + video_q = GLOBAL_GET("editor/movie_writer/video_quality"); + audio_q = GLOBAL_GET("editor/movie_writer/audio_quality"); +} diff --git a/servers/movie_writer/movie_writer_ogv.h b/servers/movie_writer/movie_writer_ogv.h new file mode 100644 index 00000000000..6176e3305cd --- /dev/null +++ b/servers/movie_writer/movie_writer_ogv.h @@ -0,0 +1,146 @@ +/**************************************************************************/ +/* movie_writer_ogv.h */ +/**************************************************************************/ +/* This file is part of: */ +/* GODOT ENGINE */ +/* https://godotengine.org */ +/**************************************************************************/ +/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */ +/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */ +/* */ +/* Permission is hereby granted, free of charge, to any person obtaining */ +/* a copy of this software and associated documentation files (the */ +/* "Software"), to deal in the Software without restriction, including */ +/* without limitation the rights to use, copy, modify, merge, publish, */ +/* distribute, sublicense, and/or sell copies of the Software, and to */ +/* permit persons to whom the Software is furnished to do so, subject to */ +/* the following conditions: */ +/* */ +/* The above copyright notice and this permission notice shall be */ +/* included in all copies or substantial portions of the Software. */ +/* */ +/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */ +/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */ +/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */ +/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */ +/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */ +/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */ +/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/
+/**************************************************************************/
+
+#ifndef MOVIE_WRITER_OGV_H
+#define MOVIE_WRITER_OGV_H
+
+#include "servers/audio_server.h"
+#include "servers/movie_writer/movie_writer.h"
+
+#include <theora/theoraenc.h>
+#include <vorbis/codec.h>
+#include <vorbis/vorbisenc.h>
+
+class MovieWriterOGV : public MovieWriter {
+	GDCLASS(MovieWriterOGV, MovieWriter);
+
+	uint32_t mix_rate = 48000;
+	AudioServer::SpeakerMode speaker_mode = AudioServer::SPEAKER_MODE_STEREO;
+	String base_path;
+	uint32_t frame_count = 0;
+	uint32_t fps = 0;
+	uint32_t audio_ch = 0;
+	uint32_t audio_frames = 0;
+
+	Ref<FileAccess> f;
+
+	// Bitrate target for Vorbis audio.
+	int audio_r = 0;
+
+	// Vorbis quality from -0.1 to 1.0 (-0.1 yields the smallest files but the lowest fidelity; 1.0 yields the highest fidelity but large files. 0.2 is a reasonable default).
+	float audio_q = 0.2;
+
+	// VP3 strict compatibility.
+	int vp3_compatible = 0;
+
+	// Bitrate target for Theora video.
+	int video_r = 0;
+
+	// Theora quality selector from 0.0 to 1.0 (0.0 yields the smallest files but the lowest video quality; 1.0 yields the highest fidelity but large files).
+	float video_q = 0.75;
+
+	// Keyframe frequency for streaming video.
+	ogg_uint32_t keyframe_frequency = 64;
+
+	// Buffer delay (in frames). Longer delays allow smoother rate adaptation and provide better overall quality, but require more
+	// client-side buffering and add latency. The default value is the keyframe interval for one-pass encoding (or somewhat larger if
+	// soft-target is used).
+	int buf_delay = -1;
+
+	// Encoder speed level. Higher speed levels favor quicker encoding over better quality per bit. Depending on the encoding
+	// mode and the internal algorithms used, quality may actually improve with higher speeds, but in that case bitrate will also
+	// likely increase. The maximum value, and the meaning of each value, are implementation-specific and may change depending on the
+	// current encoding mode.
+	int speed = 4;
+
+	/* Use a large reservoir and treat the rate as a soft target; rate control is less strict but the resulting quality is usually
+	   higher/smoother overall. Soft target also allows an optional setting to specify a minimum allowed quality.
*/ + int soft_target = 1; + + // Take physical pages, weld into a logical stream of packets + ogg_stream_state to; + + // Take physical pages, weld into a logical stream of packets + ogg_stream_state vo; + + // Theora encoding context + th_enc_ctx *td; + + // Theora bitstream information + th_info ti; + + // Theora comment information + th_comment tc; + + // Vorbis bitstream information + vorbis_info vi; + + // Vorbis comment information + vorbis_comment vc; + + // central working state for the packet->PCM decoder + vorbis_dsp_state vd; + + // local working space for packet->PCM decode + vorbis_block vb; + + // Video buffer + uint8_t *y, *u, *v; + th_ycbcr_buffer ycbcr; + + ogg_page audiopage; + ogg_page videopage; + + int encode_audio(const int32_t *p_audio_data); + int encode_video(const Ref &p_image); + + inline int ilog(unsigned _v) { + int ret; + for (ret = 0; _v; ret++) + _v >>= 1; + return ret; + } + +protected: + virtual uint32_t get_audio_mix_rate() const override; + virtual AudioServer::SpeakerMode get_audio_speaker_mode() const override; + virtual void get_supported_extensions(List *r_extensions) const override; + + virtual Error write_begin(const Size2i &p_movie_size, uint32_t p_fps, const String &p_base_path) override; + virtual Error write_frame(const Ref &p_image, const int32_t *p_audio_data) override; + virtual void write_end() override; + + virtual bool handles_file(const String &p_path) const override; + +public: + MovieWriterOGV(); +}; + +#endif // MOVIE_WRITER_OGV_H diff --git a/servers/register_server_types.cpp b/servers/register_server_types.cpp index 18ee8630838..595981730ed 100644 --- a/servers/register_server_types.cpp +++ b/servers/register_server_types.cpp @@ -61,6 +61,7 @@ #include "display_server.h" #include "movie_writer/movie_writer.h" #include "movie_writer/movie_writer_mjpeg.h" +#include "movie_writer/movie_writer_ogv.h" #include "movie_writer/movie_writer_pngwav.h" #include "rendering/renderer_compositor.h" #include "rendering/renderer_rd/framebuffer_cache_rd.h" @@ -127,6 +128,7 @@ static bool has_server_feature_callback(const String &p_feature) { static MovieWriterMJPEG *writer_mjpeg = nullptr; static MovieWriterPNGWAV *writer_pngwav = nullptr; +static MovieWriterOGV *writer_ogv = nullptr; void register_server_types() { OS::get_singleton()->benchmark_begin_measure("Servers", "Register Extensions"); @@ -331,6 +333,9 @@ void register_server_types() { writer_pngwav = memnew(MovieWriterPNGWAV); MovieWriter::add_writer(writer_pngwav); + writer_ogv = memnew(MovieWriterOGV); + MovieWriter::add_writer(writer_ogv); + OS::get_singleton()->benchmark_end_measure("Servers", "Register Extensions"); } @@ -341,6 +346,7 @@ void unregister_server_types() { memdelete(shader_types); memdelete(writer_mjpeg); memdelete(writer_pngwav); + memdelete(writer_ogv); OS::get_singleton()->benchmark_end_measure("Servers", "Unregister Extensions"); } diff --git a/thirdparty/misc/rgb2yuv.h b/thirdparty/misc/rgb2yuv.h new file mode 100644 index 00000000000..6b22edc9044 --- /dev/null +++ b/thirdparty/misc/rgb2yuv.h @@ -0,0 +1,41 @@ +#ifndef RGB2YUV_H +#define RGB2YUV_H + +#include "core/typedefs.h" + +static void rgb2yuv420(uint8_t *y, uint8_t *u, uint8_t *v, uint8_t *rgb, size_t width, size_t height) { + size_t upos = 0; + size_t vpos = 0; + size_t i = 0; + + for (size_t line = 0; line < height; ++line) { + if (!(line % 2)) { + for (size_t x = 0; x < width; x += 2) { + uint8_t r = rgb[3 * i]; + uint8_t g = rgb[3 * i + 1]; + uint8_t b = rgb[3 * i + 2]; + + y[i++] = ((66 * r + 129 * g + 25 * 
b) >> 8) + 16; + + u[upos++] = ((-38 * r + -74 * g + 112 * b) >> 8) + 128; + v[vpos++] = ((112 * r + -94 * g + -18 * b) >> 8) + 128; + + r = rgb[3 * i]; + g = rgb[3 * i + 1]; + b = rgb[3 * i + 2]; + + y[i++] = ((66 * r + 129 * g + 25 * b) >> 8) + 16; + } + } else { + for (size_t x = 0; x < width; x += 1) { + uint8_t r = rgb[3 * i]; + uint8_t g = rgb[3 * i + 1]; + uint8_t b = rgb[3 * i + 2]; + + y[i++] = ((66 * r + 129 * g + 25 * b) >> 8) + 16; + } + } + } +} + +#endif // RGB2YUV_H \ No newline at end of file
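
The rgb2yuv420() helper above packs a tightly packed RGB8 frame into planar 4:2:0 using fixed-point BT.601 coefficients: the >> 8 divides by 256, the +16 offset produces limited-range luma, and the +128 offset centers the chroma planes, which are sampled once per 2x2 pixel block. A quick way to sanity-check the math outside the engine is a small host program. This is only an illustrative sketch under stated assumptions: it presumes the function body is copied next to it (the real header pulls in core/typedefs.h, so it is not usable standalone), and that width and height are even, matching how the OGV writer sizes its Y/U/V buffers.

// Minimal standalone check for rgb2yuv420() (illustrative only; assumes the
// function body from thirdparty/misc/rgb2yuv.h is available in this file).
// Buffer sizes follow the OGV writer's convention: Y is w*h bytes, U and V
// are w*h/4 bytes each.
#include <cstdint>
#include <cstdio>
#include <vector>

int main() {
	const size_t w = 4, h = 4;
	std::vector<uint8_t> rgb(w * h * 3, 0);
	// Paint the frame solid red (R=255, G=0, B=0).
	for (size_t i = 0; i < w * h; i++) {
		rgb[3 * i] = 255;
	}
	std::vector<uint8_t> y(w * h), u(w * h / 4), v(w * h / 4);
	rgb2yuv420(y.data(), u.data(), v.data(), rgb.data(), w, h);
	// With these integer coefficients, pure red should land near Y=81, U=90, V=239.
	printf("Y=%d U=%d V=%d\n", y[0], u[0], v[0]);
	return 0;
}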
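On the capture side, the reworked CameraFeed API earlier in this patch expects a platform driver to register one Image per plane with set_image() once the frame size is known, and then stream subsequent frames through the raw-pointer overload, which copies straight into the existing images. The snippet below is a minimal sketch, not part of the patch: MyPlatformCameraFeed, the NV12 callback, and the choice of FORMAT_R8/FORMAT_RG8 for the Y and CbCr planes are assumptions; only the CameraFeed members and the RenderingServer canvas-texture channel constants it calls come from the engine and this patch.

// Hypothetical driver callback in a platform CameraFeed subclass (sketch only).
// Assumes an NV12 buffer: width*height bytes of Y followed by width*height/2
// bytes of interleaved CbCr.
void MyPlatformCameraFeed::on_nv12_frame(uint8_t *p_buffer, int p_width, int p_height) {
	const size_t y_len = (size_t)p_width * p_height;
	const size_t uv_len = y_len / 2;

	if (get_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_DIFFUSE).is_null()) {
		// First frame: create one Image per plane and register it with its channel.
		width = p_width;
		height = p_height;
		Ref<Image> y_plane = Image::create_empty(p_width, p_height, false, Image::FORMAT_R8);
		Ref<Image> uv_plane = Image::create_empty(p_width / 2, p_height / 2, false, Image::FORMAT_RG8);
		set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_DIFFUSE, y_plane);
		set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_NORMAL, uv_plane);
	}

	// Every frame: copy the plane data into the registered images and textures.
	set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_DIFFUSE, p_buffer, 0, y_len);
	set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_NORMAL, p_buffer, y_len, uv_len);
}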