This commit is contained in:
Leo de Penning 2024-10-22 22:11:20 +02:00 committed by GitHub
commit 929d7f6888
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
53 changed files with 2974 additions and 413 deletions

4
.gitignore vendored
View File

@ -207,7 +207,6 @@ cscope.po.out
*.swp
# Visual Studio Code
.vscode/
*.code-workspace
.history/
@ -378,4 +377,7 @@ $RECYCLE.BIN/
*.msm
*.msp
*.lnk
.vscode/c_cpp_properties.json
.vscode/settings.json
*.generated.props
*.ogv

45
.vscode/launch.json vendored Normal file
View File

@ -0,0 +1,45 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Godot Editor",
"type": "cppdbg",
"request": "launch",
"program": "${workspaceFolder}/bin/godot.macos.editor.dev.arm64",
"args": [
"--editor",
"--path",
"${workspaceFolder}/examples/camera",
],
"stopAtEntry": false,
"externalConsole": false,
"cwd": "${workspaceFolder}",
"environment": [],
"MIMode": "lldb",
"preLaunchTask": "Build Godot"
},
{
"name": "Godot Project",
"type": "cppdbg",
"request": "launch",
"program": "${workspaceFolder}/bin/godot.macos.editor.dev.arm64",
"args": [
"--path",
"${workspaceFolder}/examples/camera",
"--write-movie",
"test.ogv",
"--fixed-fps",
"15"
],
"stopAtEntry": false,
"externalConsole": false,
"cwd": "${workspaceFolder}",
"environment": [],
"MIMode": "lldb",
"preLaunchTask": "Build Godot"
}
]
}

218
.vscode/tasks.json vendored Normal file
View File

@ -0,0 +1,218 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=733558
// for the documentation about the tasks.json format
"version": "2.0.0",
"tasks": [
{
"label": "Clean Godot",
"group": "build",
"type": "shell",
"command": "scons",
"args": [
"--clean",
"dev_build=yes"
],
"problemMatcher": {
"base": "$gcc",
"fileLocation": [
"autoDetect",
"${workspaceFolder}"
]
}
},
{
"label": "Build Godot",
"group": "build",
"type": "shell",
"command": "scons",
"args": [
"dev_build=yes",
"debug_symbols=yes",
"progress=no",
"vulkan=yes"
],
"problemMatcher": {
"base": "$gcc",
"fileLocation": [
"autoDetect",
"${workspaceFolder}"
]
}
},
{
"label": "Build Android debug template ARM32",
"group": "build",
"type": "shell",
"command": "scons",
"args": [
"platform=android",
"arch=arm32",
"target=template_debug",
"progress=no"
],
"problemMatcher": {
"base": "$gcc",
"fileLocation": [
"autoDetect",
"${workspaceFolder}"
]
}
},
{
"label": "Build Android debug template ARM64",
"group": "build",
"type": "shell",
"command": "scons",
"args": [
"platform=android",
"arch=arm64",
"target=template_debug",
"progress=no"
],
"problemMatcher": {
"base": "$gcc",
"fileLocation": [
"autoDetect",
"${workspaceFolder}"
]
}
},
{
"label": "Build Android debug template X86_32",
"group": "build",
"type": "shell",
"command": "scons",
"args": [
"platform=android",
"arch=x86_32",
"target=template_debug",
"progress=no"
],
"problemMatcher": {
"base": "$gcc",
"fileLocation": [
"autoDetect",
"${workspaceFolder}"
]
}
},
{
"label": "Build Android debug template X86_64",
"group": "build",
"type": "shell",
"command": "scons",
"args": [
"platform=android",
"arch=x86_64",
"target=template_debug",
"progress=no"
],
"problemMatcher": {
"base": "$gcc",
"fileLocation": [
"autoDetect",
"${workspaceFolder}"
]
}
},
{
"label": "Build Android release template ARM32",
"group": "build",
"type": "shell",
"command": "scons",
"args": [
"platform=android",
"arch=arm32",
"target=template_release",
"progress=no"
],
"problemMatcher": {
"base": "$gcc",
"fileLocation": [
"autoDetect",
"${workspaceFolder}"
]
}
},
{
"label": "Build Android release template ARM64",
"group": "build",
"type": "shell",
"command": "scons",
"args": [
"platform=android",
"arch=arm64",
"target=template_release",
"progress=no"
],
"problemMatcher": {
"base": "$gcc",
"fileLocation": [
"autoDetect",
"${workspaceFolder}"
]
}
},
{
"label": "Build Android release template X86_32",
"group": "build",
"type": "shell",
"command": "scons",
"args": [
"platform=android",
"arch=x86_32",
"target=template_release",
"progress=no"
],
"problemMatcher": {
"base": "$gcc",
"fileLocation": [
"autoDetect",
"${workspaceFolder}"
]
}
},
{
"label": "Build Android release template X86_64",
"group": "build",
"type": "shell",
"command": "scons",
"args": [
"platform=android",
"arch=x86_64",
"target=template_release",
"progress=no"
],
"problemMatcher": {
"base": "$gcc",
"fileLocation": [
"autoDetect",
"${workspaceFolder}"
]
}
},
{
"label": "Install Android export templates",
"group": "build",
"type": "shell",
"command": "./gradlew",
"args": [
"generateGodotTemplates",
],
"options": {
"cwd": "${workspaceFolder}/platform/android/java"
},
"dependsOrder": "sequence",
"dependsOn": [
"Build Android debug template ARM32",
"Build Android debug template ARM64",
"Build Android release template ARM32",
"Build Android release template ARM64",
// "Build Android debug template X86_32",
// "Build Android debug template X86_64",
// "Build Android release template X86_32",
// "Build Android release template X86_64"
]
}
]
}

View File

@ -1,21 +1,15 @@
<?xml version="1.0" encoding="UTF-8" ?>
<class name="CameraFeed" inherits="RefCounted" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="../class.xsd">
<brief_description>
A camera feed gives you access to a single physical camera attached to your device.
A camera feed gives you access to a physical camera attached to your device.
</brief_description>
<description>
A camera feed gives you access to a single physical camera attached to your device. When enabled, Godot will start capturing frames from the camera which can then be used. See also [CameraServer].
[b]Note:[/b] Many cameras will return YCbCr images which are split into two textures and need to be combined in a shader. Godot does this automatically for you if you set the environment to show the camera image in the background.
A camera feed gives you access to a physical camera attached to your device with a specific media size and format. When enabled, Godot will start capturing frames from the camera, which can then be used as a texture in other nodes. See also [CameraServer].
[b]Note:[/b] Some media formats, like YUV or NV12, produce two textures (one for the Y plane and one for the UV plane). These textures need to be drawn using a suitable shader where the Y plane is provided as TEXTURE and the UV plane as NORMAL_TEXTURE. See the camera project in the examples folder.
</description>
<tutorials>
</tutorials>
<methods>
<method name="get_datatype" qualifiers="const">
<return type="int" enum="CameraFeed.FeedDataType" />
<description>
Returns feed image data type.
</description>
</method>
<method name="get_id" qualifiers="const">
<return type="int" />
<description>
@ -34,6 +28,16 @@
Returns the position of camera on the device.
</description>
</method>
<method name="get_width" qualifiers="const">
<return type="int" />
<description>
Returns the width of the captured camera frames.
</description>
</method>
<method name="get_height" qualifiers="const">
<return type="int" />
<description>
Returns the height of the captured camera frames.
<method name="set_format">
<return type="bool" />
<param index="0" name="index" type="int" />
@ -98,18 +102,6 @@
</signal>
</signals>
<constants>
<constant name="FEED_NOIMAGE" value="0" enum="FeedDataType">
No image set for the feed.
</constant>
<constant name="FEED_RGB" value="1" enum="FeedDataType">
Feed supplies RGB images.
</constant>
<constant name="FEED_YCBCR" value="2" enum="FeedDataType">
Feed supplies YCbCr images that need to be converted to RGB.
</constant>
<constant name="FEED_YCBCR_SEP" value="3" enum="FeedDataType">
Feed supplies separate Y and CbCr images that need to be combined and converted to RGB.
</constant>
<constant name="FEED_UNSPECIFIED" value="0" enum="FeedPosition">
Unspecified position.
</constant>

View File

@ -6,7 +6,7 @@
<description>
The [CameraServer] keeps track of different cameras accessible in Godot. These are external cameras such as webcams or the cameras on your phone.
It is notably used to provide AR modules with a video feed from the camera.
[b]Note:[/b] This class is currently only implemented on Linux, macOS, and iOS, on other platforms no [CameraFeed]s will be available. To get a [CameraFeed] on iOS, the camera plugin from [url=https://github.com/godotengine/godot-ios-plugins]godot-ios-plugins[/url] is required.
[b]Note:[/b] This class currently only supports the macOS, iOS, Android, and Windows platforms. On other platforms, no [CameraFeed]s are available yet; support is in development.
</description>
<tutorials>
</tutorials>
@ -59,18 +59,4 @@
</description>
</signal>
</signals>
<constants>
<constant name="FEED_RGBA_IMAGE" value="0" enum="FeedImage">
The RGBA camera image.
</constant>
<constant name="FEED_YCBCR_IMAGE" value="0" enum="FeedImage">
The [url=https://en.wikipedia.org/wiki/YCbCr]YCbCr[/url] camera image.
</constant>
<constant name="FEED_Y_IMAGE" value="0" enum="FeedImage">
The Y component camera image.
</constant>
<constant name="FEED_CBCR_IMAGE" value="1" enum="FeedImage">
The CbCr component camera image.
</constant>
</constants>
</class>

View File

@ -5,8 +5,9 @@
</brief_description>
<description>
Godot can record videos with non-real-time simulation. Like the [code]--fixed-fps[/code] [url=$DOCS_URL/tutorials/editor/command_line_tutorial.html]command line argument[/url], this forces the reported [code]delta[/code] in [method Node._process] functions to be identical across frames, regardless of how long it actually took to render the frame. This can be used to record high-quality videos with perfect frame pacing regardless of your hardware's capabilities.
Godot has 2 built-in [MovieWriter]s:
- AVI container with MJPEG for video and uncompressed audio ([code].avi[/code] file extension). Lossy compression, medium file sizes, fast encoding. The lossy compression quality can be adjusted by changing [member ProjectSettings.editor/movie_writer/mjpeg_quality]. The resulting file can be viewed in most video players, but it must be converted to another format for viewing on the web or by Godot with [VideoStreamPlayer]. MJPEG does not support transparency. AVI output is currently limited to a file of 4 GB in size at most.
Godot has 3 built-in [MovieWriter]s:
- OGV container with Theora for video and Vorbis for audio ([code].ogv[/code] file extension). Lossy compression, medium file sizes, fast encoding. The lossy compression quality can be adjusted by changing [member ProjectSettings.editor/movie_writer/video_quality] and [member ProjectSettings.editor/movie_writer/audio_quality]. The resulting file can be viewed in Godot with [VideoStreamPlayer], as well as in most video players and web browsers.
- AVI container with MJPEG for video and uncompressed audio ([code].avi[/code] file extension). Lossy compression, medium file sizes, fast encoding. The lossy compression quality can be adjusted by changing [member ProjectSettings.editor/movie_writer/video_quality]. The resulting file can be viewed in most video players, but it must be converted to another format for viewing on the web or by Godot with [VideoStreamPlayer]. MJPEG does not support transparency. AVI output is currently limited to a file of 4 GB in size at most.
- PNG image sequence for video and WAV for audio ([code].png[/code] file extension). Lossless compression, large file sizes, slow encoding. Designed to be encoded to a video file with another tool such as [url=https://ffmpeg.org/]FFmpeg[/url] after recording. Transparency is currently not supported, even if the root viewport is set to be transparent.
If you need to encode to a different format or pipe a stream through third-party software, you can extend the [MovieWriter] class to create your own movie writers. This should typically be done using GDExtension for performance reasons.
[b]Editor usage:[/b] A default movie file path can be specified in [member ProjectSettings.editor/movie_writer/movie_file]. Alternatively, for running single scenes, a [code]movie_file[/code] metadata can be added to the root node, specifying the path to a movie file that will be used when recording that scene. Once a path is set, click the video reel icon in the top-right corner of the editor to enable Movie Maker mode, then run any scene as usual. The engine will start recording as soon as the splash screen is finished, and it will only stop recording when the engine quits. Click the video reel icon again to disable Movie Maker mode. Note that toggling Movie Maker mode does not affect project instances that are already running.

View File

@ -962,14 +962,18 @@
<member name="editor/movie_writer/mix_rate" type="int" setter="" getter="" default="48000">
The audio mix rate to use in the recorded audio when writing a movie (in Hz). This can be different from [member audio/driver/mix_rate], but this value must be divisible by [member editor/movie_writer/fps] to prevent audio from desynchronizing over time.
</member>
<member name="editor/movie_writer/mjpeg_quality" type="float" setter="" getter="" default="0.75">
The JPEG quality to use when writing a video to an AVI file, between [code]0.01[/code] and [code]1.0[/code] (inclusive). Higher [code]quality[/code] values result in better-looking output at the cost of larger file sizes. Recommended [code]quality[/code] values are between [code]0.75[/code] and [code]0.9[/code]. Even at quality [code]1.0[/code], JPEG compression remains lossy.
<member name="editor/movie_writer/video_quality" type="float" setter="" getter="" default="0.75">
The video encoding quality to use when writing a video to a file, between [code]0.0[/code] and [code]1.0[/code] (inclusive). Higher [code]quality[/code] values result in better-looking output at the cost of larger file sizes. Recommended [code]quality[/code] values are between [code]0.75[/code] and [code]0.9[/code]. Even at quality [code]1.0[/code], compression remains lossy.
[b]Note:[/b] This does not affect the audio quality or writing PNG image sequences.
</member>
<member name="editor/movie_writer/audio_quality" type="float" setter="" getter="" default="0.2">
The audio encoding quality to use when writing audio to a file, between [code]0.0[/code] and [code]1.0[/code] (inclusive). Higher [code]quality[/code] values result in better-sounding output at the cost of larger file sizes.
[b]Note:[/b] This does not affect the video quality.
</member>
<member name="editor/movie_writer/movie_file" type="String" setter="" getter="" default="&quot;&quot;">
The output path for the movie. The file extension determines the [MovieWriter] that will be used.
Godot has 2 built-in [MovieWriter]s:
- AVI container with MJPEG for video and uncompressed audio ([code].avi[/code] file extension). Lossy compression, medium file sizes, fast encoding. The lossy compression quality can be adjusted by changing [member ProjectSettings.editor/movie_writer/mjpeg_quality]. The resulting file can be viewed in most video players, but it must be converted to another format for viewing on the web or by Godot with [VideoStreamPlayer]. MJPEG does not support transparency. AVI output is currently limited to a file of 4 GB in size at most.
- AVI container with MJPEG for video and uncompressed audio ([code].avi[/code] file extension). Lossy compression, medium file sizes, fast encoding. The lossy compression quality can be adjusted by changing [member ProjectSettings.editor/movie_writer/video_quality]. The resulting file can be viewed in most video players, but it must be converted to another format for viewing on the web or by Godot with [VideoStreamPlayer]. MJPEG does not support transparency. AVI output is currently limited to a file of 4 GB in size at most.
- PNG image sequence for video and WAV for audio ([code].png[/code] file extension). Lossless compression, large file sizes, slow encoding. Designed to be encoded to a video file with another tool such as [url=https://ffmpeg.org/]FFmpeg[/url] after recording. Transparency is currently not supported, even if the root viewport is set to be transparent.
If you need to encode to a different format or pipe a stream through third-party software, you can extend this [MovieWriter] class to create your own movie writers.
When using PNG output, the frame number will be appended at the end of the file name. It starts from 0 and is padded with 8 digits to ensure correct sorting and easier processing. For example, if the output path is [code]/tmp/hello.png[/code], the first two frames will be [code]/tmp/hello00000000.png[/code] and [code]/tmp/hello00000001.png[/code]. The audio will be saved at [code]/tmp/hello.wav[/code].

View File

@ -66789,7 +66789,7 @@ msgid ""
"- AVI container with MJPEG for video and uncompressed audio ([code].avi[/"
"code] file extension). Lossy compression, medium file sizes, fast encoding. "
"The lossy compression quality can be adjusted by changing [member "
"ProjectSettings.editor/movie_writer/mjpeg_quality]. The resulting file can be "
"ProjectSettings.editor/movie_writer/video_quality]. The resulting file can be "
"viewed in most video players, but it must be converted to another format for "
"viewing on the web or by Godot with [VideoStreamPlayer]. MJPEG does not "
"support transparency. AVI output is currently limited to a file of 4 GB in "
@ -66815,7 +66815,7 @@ msgstr ""
"- Contenitore AVI con MJPEG per video e audio non compresso (estensione file "
"[code].avi[/code]). Compressione con perdite, dimensioni file medie, codifica "
"veloce. La qualità della compressione con perdite può essere regolata "
"modificando [member ProjectSettings.editor/movie_writer/mjpeg_quality]. È "
"modificando [member ProjectSettings.editor/movie_writer/video_quality]. È "
"possibile visualizzare il file risultante nella maggior parte dei lettori "
"video, ma deve essere convertito in un altro formato per la visualizzazione "
"sul Web o da Godot con [VideoStreamPlayer]. MJPEG non supporta la "

View File

@ -76286,7 +76286,7 @@ msgid ""
"- AVI container with MJPEG for video and uncompressed audio ([code].avi[/"
"code] file extension). Lossy compression, medium file sizes, fast encoding. "
"The lossy compression quality can be adjusted by changing [member "
"ProjectSettings.editor/movie_writer/mjpeg_quality]. The resulting file can be "
"ProjectSettings.editor/movie_writer/video_quality]. The resulting file can be "
"viewed in most video players, but it must be converted to another format for "
"viewing on the web or by Godot with [VideoStreamPlayer]. MJPEG does not "
"support transparency. AVI output is currently limited to a file of 4 GB in "
@ -76324,7 +76324,7 @@ msgstr ""
"Godot 内置的 [MovieWriter] 有两个:\n"
"- 使用 MJPEG 视频和未压缩音频的 AVI 容器(文件扩展名为 [code].avi[/code])。有"
"损压缩、文件大小中等、编码速度较快。有损压缩质量可以通过修改 [member "
"ProjectSettings.editor/movie_writer/mjpeg_quality] 来调整。生成的文件可以使用"
"ProjectSettings.editor/movie_writer/video_quality] 来调整。生成的文件可以使用"
"大多数视频播放器查看,但如果要在 Web 上查看或者用 Godot 的 "
"[VideoStreamPlayer] 查看则必须先进行格式的转换。MJPEG 不支持透明度。AVI 输出"
"的文件目前最多为 4 GB 大小。\n"
@ -105865,7 +105865,7 @@ msgid ""
"- AVI container with MJPEG for video and uncompressed audio ([code].avi[/"
"code] file extension). Lossy compression, medium file sizes, fast encoding. "
"The lossy compression quality can be adjusted by changing [member "
"ProjectSettings.editor/movie_writer/mjpeg_quality]. The resulting file can be "
"ProjectSettings.editor/movie_writer/video_quality]. The resulting file can be "
"viewed in most video players, but it must be converted to another format for "
"viewing on the web or by Godot with [VideoStreamPlayer]. MJPEG does not "
"support transparency. AVI output is currently limited to a file of 4 GB in "
@ -105889,7 +105889,7 @@ msgstr ""
"Godot 有两个内置的 [MovieWriter]\n"
"- AVI 容器,视频使用 MJPEG、音频未压缩文件扩展名为 [code].avi[/code])。有损"
"压缩,文件大小中等,编码较快。有损压缩质量可以通过 [member ProjectSettings."
"editor/movie_writer/mjpeg_quality] 调整。得到的文件可以使用大多数视频播放器查"
"editor/movie_writer/video_quality] 调整。得到的文件可以使用大多数视频播放器查"
"看,但必须转换成其他格式才能在 Web 或 Godot 的 [VideoStreamPlayer] 中播放。"
"MJPEG 不支持透明度。AVI 输出目前有单文件 4 GB 的大小限制。\n"
"- 视频使用 PNG 图像序列,音频使用 WAV文件扩展名为 [code].png[/code])。无损"

View File

@ -54544,7 +54544,7 @@ msgid ""
"- AVI container with MJPEG for video and uncompressed audio ([code].avi[/"
"code] file extension). Lossy compression, medium file sizes, fast encoding. "
"The lossy compression quality can be adjusted by changing [member "
"ProjectSettings.editor/movie_writer/mjpeg_quality]. The resulting file can be "
"ProjectSettings.editor/movie_writer/video_quality]. The resulting file can be "
"viewed in most video players, but it must be converted to another format for "
"viewing on the web or by Godot with [VideoStreamPlayer]. MJPEG does not "
"support transparency. AVI output is currently limited to a file of 4 GB in "
@ -54582,7 +54582,7 @@ msgstr ""
"Godot 內建的 [MovieWriter] 有兩個:\n"
"- 使用 MJPEG 影片和未壓縮音訊的 AVI 容器(檔副檔名為 [code].avi[/code])。失真"
"壓縮、檔大小中等、編碼速度較快。失真壓縮品質可以通過修改 [member "
"ProjectSettings.editor/movie_writer/mjpeg_quality] 來調整。生成的檔可以使用大"
"ProjectSettings.editor/movie_writer/video_quality] 來調整。生成的檔可以使用大"
"多數影片播放機查看,但如果要在 Web 上查看或者用 Godot 的 [VideoStreamPlayer] "
"查看則必須先進行格式的轉換。MJPEG 不支援透明度。AVI 輸出的檔目前最多為 4 GB "
"大小。\n"
@ -72393,7 +72393,7 @@ msgid ""
"- AVI container with MJPEG for video and uncompressed audio ([code].avi[/"
"code] file extension). Lossy compression, medium file sizes, fast encoding. "
"The lossy compression quality can be adjusted by changing [member "
"ProjectSettings.editor/movie_writer/mjpeg_quality]. The resulting file can be "
"ProjectSettings.editor/movie_writer/video_quality]. The resulting file can be "
"viewed in most video players, but it must be converted to another format for "
"viewing on the web or by Godot with [VideoStreamPlayer]. MJPEG does not "
"support transparency. AVI output is currently limited to a file of 4 GB in "
@ -72417,7 +72417,7 @@ msgstr ""
"Godot 有兩個內建的 [MovieWriter]\n"
"- AVI 容器,影片使用 MJPEG、音訊未壓縮檔副檔名為 [code].avi[/code])。有損壓"
"縮,檔大小中等,編碼較快。失真壓縮品質可以通過 [member ProjectSettings.editor/"
"movie_writer/mjpeg_quality] 調整。得到的檔可以使用大多數影片播放機查看,但必須"
"movie_writer/video_quality] 調整。得到的檔可以使用大多數影片播放機查看,但必須"
"轉換成其他格式才能在 Web 或 Godot 的 [VideoStreamPlayer] 中播放。MJPEG 不支援"
"透明度。AVI 輸出目前有單檔 4 GB 的大小限制。\n"
"- 影片使用 PNG 圖像序列,音訊使用 WAV檔副檔名為 [code].png[/code])。無損壓"

2
examples/camera/.gitattributes vendored Normal file
View File

@ -0,0 +1,2 @@
# Normalize EOL for all files that Git considers text files.
* text=auto eol=lf

3
examples/camera/.gitignore vendored Normal file
View File

@ -0,0 +1,3 @@
# Godot 4+ specific ignores
.godot/
android/

View File

@ -0,0 +1,15 @@
// Converts a two-plane YCbCr camera image (e.g. NV12: a Y plane plus an
// interleaved UV plane) to RGB for display. The Y plane is bound as TEXTURE
// and the UV plane as NORMAL_TEXTURE, matching the CameraFeed docs in this
// commit.
shader_type canvas_item;
void fragment() {
vec3 color;
// Luma (Y) sampled from the first plane.
color.r = texture(TEXTURE, UV).r;
// Chroma (Cb, Cr) sampled from the UV plane, recentered from [0, 1]
// to [-0.5, 0.5] before the matrix multiply.
color.gb = texture(NORMAL_TEXTURE, UV).rg - vec2(0.5, 0.5);
// YCbCr -> RGB conversion matrix (column-major: columns multiply Y, Cb, Cr).
// NOTE(review): coefficients look like a full-range BT.601/BT.709 variant —
// TODO confirm which standard the platform camera feeds actually use.
color.rgb = mat3(
vec3(1.00000, 1.00000, 1.00000),
vec3(0.00000, -0.18732, 1.85560),
vec3(1.57481, -0.46813, 0.00000)
) * color.rgb;
COLOR = vec4(color, 1.0);
}

View File

@ -0,0 +1,16 @@
// Converts a three-plane YCbCr camera image (separate Y, Cb and Cr planes)
// to RGB for display. The Y plane is bound as TEXTURE, the Cb plane as
// NORMAL_TEXTURE and the Cr plane as SPECULAR_TEXTURE.
shader_type canvas_item;
void fragment() {
vec3 color;
// Luma (Y) from the first plane.
color.r = texture(TEXTURE, UV).r;
// Chroma components, recentered from [0, 1] to [-0.5, 0.5].
color.g = texture(NORMAL_TEXTURE, UV).g - 0.5;
color.b = texture(SPECULAR_TEXTURE, UV).b - 0.5;
// YCbCr -> RGB conversion matrix (column-major: columns multiply Y, Cb, Cr).
// NOTE(review): coefficients look like a full-range BT.601/BT.709 variant —
// TODO confirm against the actual camera feed format.
color.rgb = mat3(
vec3(1.00000, 1.00000, 1.00000),
vec3(0.00000, -0.18732, 1.85560),
vec3(1.57481, -0.46813, 0.00000)
) * color.rgb;
COLOR = vec4(color, 1.0);
}

View File

@ -0,0 +1,28 @@
extends Button
var feed:CameraFeed
# Called when the node enters the scene tree for the first time.
func _ready():
var feeds = CameraServer.feeds()
if (feeds.is_empty()):
text = "NO CAMERA"
else:
text = "START"
_on_texture_rect_property_list_changed()
func _toggled(toggled_on):
feed.feed_is_active = toggled_on
if (toggled_on):
text = "STOP"
else:
text = "START"
func _on_texture_rect_property_list_changed():
if (feed != null && feed.feed_is_active): _toggled(false)
var texture_rect = get_node("/root/Control/TextureRect")
var camera_id = texture_rect.texture.get_camera_feed_id()
feed = CameraServer.get_feed_by_id(camera_id)

View File

@ -0,0 +1,77 @@
[gd_scene load_steps=7 format=3 uid="uid://wcx5rx1sckqy"]
[ext_resource type="Shader" path="res://camera_nv12.gdshader" id="2_7jche"]
[ext_resource type="Script" path="res://capture.gd" id="3_e3a7h"]
[sub_resource type="ShaderMaterial" id="ShaderMaterial_psofl"]
shader = ExtResource("2_7jche")
[sub_resource type="CameraTexture" id="CameraTexture_nlxrt"]
camera_feed_id = 1
[sub_resource type="StyleBoxFlat" id="StyleBoxFlat_iy0nj"]
content_margin_left = 8.0
content_margin_top = 16.0
content_margin_right = 8.0
content_margin_bottom = 8.0
bg_color = Color(0, 0, 0, 0.686275)
corner_radius_top_left = 24
corner_radius_top_right = 24
[sub_resource type="AudioStreamMicrophone" id="AudioStreamMicrophone_qfag3"]
[node name="Control" type="Control"]
clip_children = 1
clip_contents = true
layout_mode = 3
anchors_preset = 15
anchor_right = 1.0
anchor_bottom = 1.0
grow_horizontal = 2
grow_vertical = 2
[node name="TextureRect" type="TextureRect" parent="."]
material = SubResource("ShaderMaterial_psofl")
layout_mode = 1
anchors_preset = 15
anchor_right = 1.0
anchor_bottom = 1.0
offset_left = -720.0
offset_right = -720.0
grow_horizontal = 2
grow_vertical = 2
scale = Vector2(3, 1)
texture = SubResource("CameraTexture_nlxrt")
expand_mode = 5
stretch_mode = 6
flip_h = true
[node name="ScrollContainer" type="ScrollContainer" parent="."]
layout_mode = 1
anchors_preset = 12
anchor_top = 1.0
anchor_right = 1.0
anchor_bottom = 1.0
offset_top = -521.0
grow_horizontal = 2
grow_vertical = 0
theme_override_styles/panel = SubResource("StyleBoxFlat_iy0nj")
horizontal_scroll_mode = 0
[node name="VBoxContainer" type="VBoxContainer" parent="ScrollContainer"]
layout_mode = 2
size_flags_horizontal = 3
size_flags_vertical = 0
[node name="Start capture" type="Button" parent="ScrollContainer/VBoxContainer"]
layout_mode = 2
theme_override_font_sizes/font_size = 42
toggle_mode = true
text = "Loading..."
flat = true
script = ExtResource("3_e3a7h")
[node name="AudioStreamPlayer" type="AudioStreamPlayer" parent="."]
stream = SubResource("AudioStreamMicrophone_qfag3")
[connection signal="property_list_changed" from="TextureRect" to="ScrollContainer/VBoxContainer/Start capture" method="_on_texture_rect_property_list_changed"]

View File

@ -0,0 +1,3 @@
[gd_resource type="AudioBusLayout" format=3 uid="uid://d4hxcbnwr0jek"]
[resource]

View File

@ -0,0 +1,673 @@
[preset.0]
name="Android"
platform="Android"
runnable=true
advanced_options=false
dedicated_server=false
custom_features=""
export_filter="all_resources"
include_filter=""
exclude_filter=""
export_path=""
encryption_include_filters=""
encryption_exclude_filters=""
encrypt_pck=false
encrypt_directory=false
script_export_mode=2
[preset.0.options]
custom_template/debug="/Users/penninghlhd/Code/noomi4godot/bin/android_debug.apk"
custom_template/release="/Users/penninghlhd/Code/noomi4godot/bin/android_release.apk"
gradle_build/use_gradle_build=true
gradle_build/gradle_build_directory=""
gradle_build/android_source_template=""
gradle_build/compress_native_libraries=false
gradle_build/export_format=1
gradle_build/min_sdk=""
gradle_build/target_sdk=""
architectures/armeabi-v7a=false
architectures/arm64-v8a=true
architectures/x86=false
architectures/x86_64=false
version/code=1
version/name=""
package/unique_name="com.illuminoo.noomi.godot"
package/name="NOOMI Test"
package/signed=true
package/app_category=2
package/retain_data_on_uninstall=false
package/exclude_from_recents=false
package/show_in_android_tv=false
package/show_in_app_library=true
package/show_as_launcher_app=false
launcher_icons/main_192x192=""
launcher_icons/adaptive_foreground_432x432=""
launcher_icons/adaptive_background_432x432=""
graphics/opengl_debug=false
xr_features/xr_mode=0
screen/immersive_mode=true
screen/support_small=true
screen/support_normal=true
screen/support_large=true
screen/support_xlarge=true
user_data_backup/allow=false
command_line/extra_args=""
apk_expansion/enable=false
apk_expansion/SALT=""
apk_expansion/public_key=""
permissions/custom_permissions=PackedStringArray()
permissions/access_checkin_properties=false
permissions/access_coarse_location=false
permissions/access_fine_location=false
permissions/access_location_extra_commands=false
permissions/access_mock_location=false
permissions/access_network_state=false
permissions/access_surface_flinger=false
permissions/access_wifi_state=false
permissions/account_manager=false
permissions/add_voicemail=false
permissions/authenticate_accounts=false
permissions/battery_stats=false
permissions/bind_accessibility_service=false
permissions/bind_appwidget=false
permissions/bind_device_admin=false
permissions/bind_input_method=false
permissions/bind_nfc_service=false
permissions/bind_notification_listener_service=false
permissions/bind_print_service=false
permissions/bind_remoteviews=false
permissions/bind_text_service=false
permissions/bind_vpn_service=false
permissions/bind_wallpaper=false
permissions/bluetooth=false
permissions/bluetooth_admin=false
permissions/bluetooth_privileged=false
permissions/brick=false
permissions/broadcast_package_removed=false
permissions/broadcast_sms=false
permissions/broadcast_sticky=false
permissions/broadcast_wap_push=false
permissions/call_phone=false
permissions/call_privileged=false
permissions/camera=true
permissions/capture_audio_output=true
permissions/capture_secure_video_output=false
permissions/capture_video_output=true
permissions/change_component_enabled_state=false
permissions/change_configuration=false
permissions/change_network_state=false
permissions/change_wifi_multicast_state=false
permissions/change_wifi_state=false
permissions/clear_app_cache=false
permissions/clear_app_user_data=false
permissions/control_location_updates=false
permissions/delete_cache_files=false
permissions/delete_packages=false
permissions/device_power=false
permissions/diagnostic=false
permissions/disable_keyguard=false
permissions/dump=false
permissions/expand_status_bar=false
permissions/factory_test=false
permissions/flashlight=false
permissions/force_back=false
permissions/get_accounts=false
permissions/get_package_size=false
permissions/get_tasks=false
permissions/get_top_activity_info=false
permissions/global_search=false
permissions/hardware_test=false
permissions/inject_events=false
permissions/install_location_provider=false
permissions/install_packages=false
permissions/install_shortcut=false
permissions/internal_system_window=false
permissions/internet=false
permissions/kill_background_processes=false
permissions/location_hardware=false
permissions/manage_accounts=false
permissions/manage_app_tokens=false
permissions/manage_documents=false
permissions/manage_external_storage=false
permissions/master_clear=false
permissions/media_content_control=false
permissions/modify_audio_settings=false
permissions/modify_phone_state=false
permissions/mount_format_filesystems=false
permissions/mount_unmount_filesystems=false
permissions/nfc=false
permissions/persistent_activity=false
permissions/post_notifications=false
permissions/process_outgoing_calls=false
permissions/read_calendar=false
permissions/read_call_log=false
permissions/read_contacts=false
permissions/read_external_storage=false
permissions/read_frame_buffer=false
permissions/read_history_bookmarks=false
permissions/read_input_state=false
permissions/read_logs=false
permissions/read_phone_state=false
permissions/read_profile=false
permissions/read_sms=false
permissions/read_social_stream=false
permissions/read_sync_settings=false
permissions/read_sync_stats=false
permissions/read_user_dictionary=false
permissions/reboot=false
permissions/receive_boot_completed=false
permissions/receive_mms=false
permissions/receive_sms=false
permissions/receive_wap_push=false
permissions/record_audio=true
permissions/reorder_tasks=false
permissions/restart_packages=false
permissions/send_respond_via_message=false
permissions/send_sms=false
permissions/set_activity_watcher=false
permissions/set_alarm=false
permissions/set_always_finish=false
permissions/set_animation_scale=false
permissions/set_debug_app=false
permissions/set_orientation=false
permissions/set_pointer_speed=false
permissions/set_preferred_applications=false
permissions/set_process_limit=false
permissions/set_time=false
permissions/set_time_zone=false
permissions/set_wallpaper=false
permissions/set_wallpaper_hints=false
permissions/signal_persistent_processes=false
permissions/status_bar=false
permissions/subscribed_feeds_read=false
permissions/subscribed_feeds_write=false
permissions/system_alert_window=false
permissions/transmit_ir=false
permissions/uninstall_shortcut=false
permissions/update_device_stats=false
permissions/use_credentials=false
permissions/use_sip=false
permissions/vibrate=false
permissions/wake_lock=false
permissions/write_apn_settings=false
permissions/write_calendar=false
permissions/write_call_log=false
permissions/write_contacts=false
permissions/write_external_storage=false
permissions/write_gservices=false
permissions/write_history_bookmarks=false
permissions/write_profile=false
permissions/write_secure_settings=false
permissions/write_settings=false
permissions/write_sms=false
permissions/write_social_stream=false
permissions/write_sync_settings=false
permissions/write_user_dictionary=false
[preset.1]
name="macOS"
platform="macOS"
runnable=true
advanced_options=false
dedicated_server=false
custom_features=""
export_filter="all_resources"
include_filter=""
exclude_filter=""
export_path=""
encryption_include_filters=""
encryption_exclude_filters=""
encrypt_pck=false
encrypt_directory=false
script_export_mode=2
[preset.1.options]
export/distribution_type=1
binary_format/architecture="universal"
custom_template/debug=""
custom_template/release=""
debug/export_console_wrapper=1
application/icon=""
application/icon_interpolation=4
application/bundle_identifier=""
application/signature=""
application/app_category="Games"
application/short_version=""
application/version=""
application/copyright=""
application/copyright_localized={}
application/min_macos_version="10.12"
application/export_angle=0
display/high_res=true
application/additional_plist_content=""
xcode/platform_build="14C18"
xcode/sdk_version="13.1"
xcode/sdk_build="22C55"
xcode/sdk_name="macosx13.1"
xcode/xcode_version="1420"
xcode/xcode_build="14C18"
codesign/codesign=3
codesign/installer_identity=""
codesign/apple_team_id=""
codesign/identity=""
codesign/entitlements/custom_file=""
codesign/entitlements/allow_jit_code_execution=false
codesign/entitlements/allow_unsigned_executable_memory=false
codesign/entitlements/allow_dyld_environment_variables=false
codesign/entitlements/disable_library_validation=false
codesign/entitlements/audio_input=false
codesign/entitlements/camera=false
codesign/entitlements/location=false
codesign/entitlements/address_book=false
codesign/entitlements/calendars=false
codesign/entitlements/photos_library=false
codesign/entitlements/apple_events=false
codesign/entitlements/debugging=false
codesign/entitlements/app_sandbox/enabled=false
codesign/entitlements/app_sandbox/network_server=false
codesign/entitlements/app_sandbox/network_client=false
codesign/entitlements/app_sandbox/device_usb=false
codesign/entitlements/app_sandbox/device_bluetooth=false
codesign/entitlements/app_sandbox/files_downloads=0
codesign/entitlements/app_sandbox/files_pictures=0
codesign/entitlements/app_sandbox/files_music=0
codesign/entitlements/app_sandbox/files_movies=0
codesign/entitlements/app_sandbox/files_user_selected=0
codesign/entitlements/app_sandbox/helper_executables=[]
codesign/custom_options=PackedStringArray()
notarization/notarization=0
privacy/microphone_usage_description=""
privacy/microphone_usage_description_localized={}
privacy/camera_usage_description=""
privacy/camera_usage_description_localized={}
privacy/location_usage_description=""
privacy/location_usage_description_localized={}
privacy/address_book_usage_description=""
privacy/address_book_usage_description_localized={}
privacy/calendar_usage_description=""
privacy/calendar_usage_description_localized={}
privacy/photos_library_usage_description=""
privacy/photos_library_usage_description_localized={}
privacy/desktop_folder_usage_description=""
privacy/desktop_folder_usage_description_localized={}
privacy/documents_folder_usage_description=""
privacy/documents_folder_usage_description_localized={}
privacy/downloads_folder_usage_description=""
privacy/downloads_folder_usage_description_localized={}
privacy/network_volumes_usage_description=""
privacy/network_volumes_usage_description_localized={}
privacy/removable_volumes_usage_description=""
privacy/removable_volumes_usage_description_localized={}
privacy/tracking_enabled=false
privacy/tracking_domains=PackedStringArray()
privacy/collected_data/name/collected=false
privacy/collected_data/name/linked_to_user=false
privacy/collected_data/name/used_for_tracking=false
privacy/collected_data/name/collection_purposes=0
privacy/collected_data/email_address/collected=false
privacy/collected_data/email_address/linked_to_user=false
privacy/collected_data/email_address/used_for_tracking=false
privacy/collected_data/email_address/collection_purposes=0
privacy/collected_data/phone_number/collected=false
privacy/collected_data/phone_number/linked_to_user=false
privacy/collected_data/phone_number/used_for_tracking=false
privacy/collected_data/phone_number/collection_purposes=0
privacy/collected_data/physical_address/collected=false
privacy/collected_data/physical_address/linked_to_user=false
privacy/collected_data/physical_address/used_for_tracking=false
privacy/collected_data/physical_address/collection_purposes=0
privacy/collected_data/other_contact_info/collected=false
privacy/collected_data/other_contact_info/linked_to_user=false
privacy/collected_data/other_contact_info/used_for_tracking=false
privacy/collected_data/other_contact_info/collection_purposes=0
privacy/collected_data/health/collected=false
privacy/collected_data/health/linked_to_user=false
privacy/collected_data/health/used_for_tracking=false
privacy/collected_data/health/collection_purposes=0
privacy/collected_data/fitness/collected=false
privacy/collected_data/fitness/linked_to_user=false
privacy/collected_data/fitness/used_for_tracking=false
privacy/collected_data/fitness/collection_purposes=0
privacy/collected_data/payment_info/collected=false
privacy/collected_data/payment_info/linked_to_user=false
privacy/collected_data/payment_info/used_for_tracking=false
privacy/collected_data/payment_info/collection_purposes=0
privacy/collected_data/credit_info/collected=false
privacy/collected_data/credit_info/linked_to_user=false
privacy/collected_data/credit_info/used_for_tracking=false
privacy/collected_data/credit_info/collection_purposes=0
privacy/collected_data/other_financial_info/collected=false
privacy/collected_data/other_financial_info/linked_to_user=false
privacy/collected_data/other_financial_info/used_for_tracking=false
privacy/collected_data/other_financial_info/collection_purposes=0
privacy/collected_data/precise_location/collected=false
privacy/collected_data/precise_location/linked_to_user=false
privacy/collected_data/precise_location/used_for_tracking=false
privacy/collected_data/precise_location/collection_purposes=0
privacy/collected_data/coarse_location/collected=false
privacy/collected_data/coarse_location/linked_to_user=false
privacy/collected_data/coarse_location/used_for_tracking=false
privacy/collected_data/coarse_location/collection_purposes=0
privacy/collected_data/sensitive_info/collected=false
privacy/collected_data/sensitive_info/linked_to_user=false
privacy/collected_data/sensitive_info/used_for_tracking=false
privacy/collected_data/sensitive_info/collection_purposes=0
privacy/collected_data/contacts/collected=false
privacy/collected_data/contacts/linked_to_user=false
privacy/collected_data/contacts/used_for_tracking=false
privacy/collected_data/contacts/collection_purposes=0
privacy/collected_data/emails_or_text_messages/collected=false
privacy/collected_data/emails_or_text_messages/linked_to_user=false
privacy/collected_data/emails_or_text_messages/used_for_tracking=false
privacy/collected_data/emails_or_text_messages/collection_purposes=0
privacy/collected_data/photos_or_videos/collected=false
privacy/collected_data/photos_or_videos/linked_to_user=false
privacy/collected_data/photos_or_videos/used_for_tracking=false
privacy/collected_data/photos_or_videos/collection_purposes=0
privacy/collected_data/audio_data/collected=false
privacy/collected_data/audio_data/linked_to_user=false
privacy/collected_data/audio_data/used_for_tracking=false
privacy/collected_data/audio_data/collection_purposes=0
privacy/collected_data/gameplay_content/collected=false
privacy/collected_data/gameplay_content/linked_to_user=false
privacy/collected_data/gameplay_content/used_for_tracking=false
privacy/collected_data/gameplay_content/collection_purposes=0
privacy/collected_data/customer_support/collected=false
privacy/collected_data/customer_support/linked_to_user=false
privacy/collected_data/customer_support/used_for_tracking=false
privacy/collected_data/customer_support/collection_purposes=0
privacy/collected_data/other_user_content/collected=false
privacy/collected_data/other_user_content/linked_to_user=false
privacy/collected_data/other_user_content/used_for_tracking=false
privacy/collected_data/other_user_content/collection_purposes=0
privacy/collected_data/browsing_history/collected=false
privacy/collected_data/browsing_history/linked_to_user=false
privacy/collected_data/browsing_history/used_for_tracking=false
privacy/collected_data/browsing_history/collection_purposes=0
privacy/collected_data/search_hhistory/collected=false
privacy/collected_data/search_hhistory/linked_to_user=false
privacy/collected_data/search_hhistory/used_for_tracking=false
privacy/collected_data/search_hhistory/collection_purposes=0
privacy/collected_data/user_id/collected=false
privacy/collected_data/user_id/linked_to_user=false
privacy/collected_data/user_id/used_for_tracking=false
privacy/collected_data/user_id/collection_purposes=0
privacy/collected_data/device_id/collected=false
privacy/collected_data/device_id/linked_to_user=false
privacy/collected_data/device_id/used_for_tracking=false
privacy/collected_data/device_id/collection_purposes=0
privacy/collected_data/purchase_history/collected=false
privacy/collected_data/purchase_history/linked_to_user=false
privacy/collected_data/purchase_history/used_for_tracking=false
privacy/collected_data/purchase_history/collection_purposes=0
privacy/collected_data/product_interaction/collected=false
privacy/collected_data/product_interaction/linked_to_user=false
privacy/collected_data/product_interaction/used_for_tracking=false
privacy/collected_data/product_interaction/collection_purposes=0
privacy/collected_data/advertising_data/collected=false
privacy/collected_data/advertising_data/linked_to_user=false
privacy/collected_data/advertising_data/used_for_tracking=false
privacy/collected_data/advertising_data/collection_purposes=0
privacy/collected_data/other_usage_data/collected=false
privacy/collected_data/other_usage_data/linked_to_user=false
privacy/collected_data/other_usage_data/used_for_tracking=false
privacy/collected_data/other_usage_data/collection_purposes=0
privacy/collected_data/crash_data/collected=false
privacy/collected_data/crash_data/linked_to_user=false
privacy/collected_data/crash_data/used_for_tracking=false
privacy/collected_data/crash_data/collection_purposes=0
privacy/collected_data/performance_data/collected=false
privacy/collected_data/performance_data/linked_to_user=false
privacy/collected_data/performance_data/used_for_tracking=false
privacy/collected_data/performance_data/collection_purposes=0
privacy/collected_data/other_diagnostic_data/collected=false
privacy/collected_data/other_diagnostic_data/linked_to_user=false
privacy/collected_data/other_diagnostic_data/used_for_tracking=false
privacy/collected_data/other_diagnostic_data/collection_purposes=0
privacy/collected_data/environment_scanning/collected=false
privacy/collected_data/environment_scanning/linked_to_user=false
privacy/collected_data/environment_scanning/used_for_tracking=false
privacy/collected_data/environment_scanning/collection_purposes=0
privacy/collected_data/hands/collected=false
privacy/collected_data/hands/linked_to_user=false
privacy/collected_data/hands/used_for_tracking=false
privacy/collected_data/hands/collection_purposes=0
privacy/collected_data/head/collected=false
privacy/collected_data/head/linked_to_user=false
privacy/collected_data/head/used_for_tracking=false
privacy/collected_data/head/collection_purposes=0
privacy/collected_data/other_data_types/collected=false
privacy/collected_data/other_data_types/linked_to_user=false
privacy/collected_data/other_data_types/used_for_tracking=false
privacy/collected_data/other_data_types/collection_purposes=0
ssh_remote_deploy/enabled=false
ssh_remote_deploy/host="user@host_ip"
ssh_remote_deploy/port="22"
ssh_remote_deploy/extra_args_ssh=""
ssh_remote_deploy/extra_args_scp=""
ssh_remote_deploy/run_script="#!/usr/bin/env bash
unzip -o -q \"{temp_dir}/{archive_name}\" -d \"{temp_dir}\"
open \"{temp_dir}/{exe_name}.app\" --args {cmd_args}"
ssh_remote_deploy/cleanup_script="#!/usr/bin/env bash
kill $(pgrep -x -f \"{temp_dir}/{exe_name}.app/Contents/MacOS/{exe_name} {cmd_args}\")
rm -rf \"{temp_dir}\""
[preset.2]
name="iOS"
platform="iOS"
runnable=true
advanced_options=false
dedicated_server=false
custom_features=""
export_filter="all_resources"
include_filter=""
exclude_filter=""
export_path=""
encryption_include_filters=""
encryption_exclude_filters=""
encrypt_pck=false
encrypt_directory=false
script_export_mode=2
[preset.2.options]
custom_template/debug=""
custom_template/release=""
architectures/arm64=true
application/app_store_team_id=""
application/code_sign_identity_debug=""
application/export_method_debug=1
application/code_sign_identity_release=""
application/export_method_release=1
application/targeted_device_family=2
application/bundle_identifier=""
application/signature=""
application/short_version=""
application/version=""
application/min_ios_version="12.0"
application/additional_plist_content=""
application/icon_interpolation=4
application/export_project_only=false
application/delete_old_export_files_unconditionally=false
application/generate_simulator_library_if_missing=true
capabilities/access_wifi=false
capabilities/push_notifications=false
capabilities/performance_gaming_tier=false
capabilities/performance_a12=false
user_data/accessible_from_files_app=false
user_data/accessible_from_itunes_sharing=false
privacy/camera_usage_description=""
privacy/camera_usage_description_localized={}
privacy/microphone_usage_description=""
privacy/microphone_usage_description_localized={}
privacy/photolibrary_usage_description=""
privacy/photolibrary_usage_description_localized={}
privacy/file_timestamp_access_reasons=3
privacy/system_boot_time_access_reasons=1
privacy/disk_space_access_reasons=3
privacy/active_keyboard_access_reasons=0
privacy/user_defaults_access_reasons=0
privacy/tracking_enabled=false
privacy/tracking_domains=PackedStringArray()
privacy/collected_data/name/collected=false
privacy/collected_data/name/linked_to_user=false
privacy/collected_data/name/used_for_tracking=false
privacy/collected_data/name/collection_purposes=0
privacy/collected_data/email_address/collected=false
privacy/collected_data/email_address/linked_to_user=false
privacy/collected_data/email_address/used_for_tracking=false
privacy/collected_data/email_address/collection_purposes=0
privacy/collected_data/phone_number/collected=false
privacy/collected_data/phone_number/linked_to_user=false
privacy/collected_data/phone_number/used_for_tracking=false
privacy/collected_data/phone_number/collection_purposes=0
privacy/collected_data/physical_address/collected=false
privacy/collected_data/physical_address/linked_to_user=false
privacy/collected_data/physical_address/used_for_tracking=false
privacy/collected_data/physical_address/collection_purposes=0
privacy/collected_data/other_contact_info/collected=false
privacy/collected_data/other_contact_info/linked_to_user=false
privacy/collected_data/other_contact_info/used_for_tracking=false
privacy/collected_data/other_contact_info/collection_purposes=0
privacy/collected_data/health/collected=false
privacy/collected_data/health/linked_to_user=false
privacy/collected_data/health/used_for_tracking=false
privacy/collected_data/health/collection_purposes=0
privacy/collected_data/fitness/collected=false
privacy/collected_data/fitness/linked_to_user=false
privacy/collected_data/fitness/used_for_tracking=false
privacy/collected_data/fitness/collection_purposes=0
privacy/collected_data/payment_info/collected=false
privacy/collected_data/payment_info/linked_to_user=false
privacy/collected_data/payment_info/used_for_tracking=false
privacy/collected_data/payment_info/collection_purposes=0
privacy/collected_data/credit_info/collected=false
privacy/collected_data/credit_info/linked_to_user=false
privacy/collected_data/credit_info/used_for_tracking=false
privacy/collected_data/credit_info/collection_purposes=0
privacy/collected_data/other_financial_info/collected=false
privacy/collected_data/other_financial_info/linked_to_user=false
privacy/collected_data/other_financial_info/used_for_tracking=false
privacy/collected_data/other_financial_info/collection_purposes=0
privacy/collected_data/precise_location/collected=false
privacy/collected_data/precise_location/linked_to_user=false
privacy/collected_data/precise_location/used_for_tracking=false
privacy/collected_data/precise_location/collection_purposes=0
privacy/collected_data/coarse_location/collected=false
privacy/collected_data/coarse_location/linked_to_user=false
privacy/collected_data/coarse_location/used_for_tracking=false
privacy/collected_data/coarse_location/collection_purposes=0
privacy/collected_data/sensitive_info/collected=false
privacy/collected_data/sensitive_info/linked_to_user=false
privacy/collected_data/sensitive_info/used_for_tracking=false
privacy/collected_data/sensitive_info/collection_purposes=0
privacy/collected_data/contacts/collected=false
privacy/collected_data/contacts/linked_to_user=false
privacy/collected_data/contacts/used_for_tracking=false
privacy/collected_data/contacts/collection_purposes=0
privacy/collected_data/emails_or_text_messages/collected=false
privacy/collected_data/emails_or_text_messages/linked_to_user=false
privacy/collected_data/emails_or_text_messages/used_for_tracking=false
privacy/collected_data/emails_or_text_messages/collection_purposes=0
privacy/collected_data/photos_or_videos/collected=false
privacy/collected_data/photos_or_videos/linked_to_user=false
privacy/collected_data/photos_or_videos/used_for_tracking=false
privacy/collected_data/photos_or_videos/collection_purposes=0
privacy/collected_data/audio_data/collected=false
privacy/collected_data/audio_data/linked_to_user=false
privacy/collected_data/audio_data/used_for_tracking=false
privacy/collected_data/audio_data/collection_purposes=0
privacy/collected_data/gameplay_content/collected=false
privacy/collected_data/gameplay_content/linked_to_user=false
privacy/collected_data/gameplay_content/used_for_tracking=false
privacy/collected_data/gameplay_content/collection_purposes=0
privacy/collected_data/customer_support/collected=false
privacy/collected_data/customer_support/linked_to_user=false
privacy/collected_data/customer_support/used_for_tracking=false
privacy/collected_data/customer_support/collection_purposes=0
privacy/collected_data/other_user_content/collected=false
privacy/collected_data/other_user_content/linked_to_user=false
privacy/collected_data/other_user_content/used_for_tracking=false
privacy/collected_data/other_user_content/collection_purposes=0
privacy/collected_data/browsing_history/collected=false
privacy/collected_data/browsing_history/linked_to_user=false
privacy/collected_data/browsing_history/used_for_tracking=false
privacy/collected_data/browsing_history/collection_purposes=0
privacy/collected_data/search_hhistory/collected=false
privacy/collected_data/search_hhistory/linked_to_user=false
privacy/collected_data/search_hhistory/used_for_tracking=false
privacy/collected_data/search_hhistory/collection_purposes=0
privacy/collected_data/user_id/collected=false
privacy/collected_data/user_id/linked_to_user=false
privacy/collected_data/user_id/used_for_tracking=false
privacy/collected_data/user_id/collection_purposes=0
privacy/collected_data/device_id/collected=false
privacy/collected_data/device_id/linked_to_user=false
privacy/collected_data/device_id/used_for_tracking=false
privacy/collected_data/device_id/collection_purposes=0
privacy/collected_data/purchase_history/collected=false
privacy/collected_data/purchase_history/linked_to_user=false
privacy/collected_data/purchase_history/used_for_tracking=false
privacy/collected_data/purchase_history/collection_purposes=0
privacy/collected_data/product_interaction/collected=false
privacy/collected_data/product_interaction/linked_to_user=false
privacy/collected_data/product_interaction/used_for_tracking=false
privacy/collected_data/product_interaction/collection_purposes=0
privacy/collected_data/advertising_data/collected=false
privacy/collected_data/advertising_data/linked_to_user=false
privacy/collected_data/advertising_data/used_for_tracking=false
privacy/collected_data/advertising_data/collection_purposes=0
privacy/collected_data/other_usage_data/collected=false
privacy/collected_data/other_usage_data/linked_to_user=false
privacy/collected_data/other_usage_data/used_for_tracking=false
privacy/collected_data/other_usage_data/collection_purposes=0
privacy/collected_data/crash_data/collected=false
privacy/collected_data/crash_data/linked_to_user=false
privacy/collected_data/crash_data/used_for_tracking=false
privacy/collected_data/crash_data/collection_purposes=0
privacy/collected_data/performance_data/collected=false
privacy/collected_data/performance_data/linked_to_user=false
privacy/collected_data/performance_data/used_for_tracking=false
privacy/collected_data/performance_data/collection_purposes=0
privacy/collected_data/other_diagnostic_data/collected=false
privacy/collected_data/other_diagnostic_data/linked_to_user=false
privacy/collected_data/other_diagnostic_data/used_for_tracking=false
privacy/collected_data/other_diagnostic_data/collection_purposes=0
privacy/collected_data/environment_scanning/collected=false
privacy/collected_data/environment_scanning/linked_to_user=false
privacy/collected_data/environment_scanning/used_for_tracking=false
privacy/collected_data/environment_scanning/collection_purposes=0
privacy/collected_data/hands/collected=false
privacy/collected_data/hands/linked_to_user=false
privacy/collected_data/hands/used_for_tracking=false
privacy/collected_data/hands/collection_purposes=0
privacy/collected_data/head/collected=false
privacy/collected_data/head/linked_to_user=false
privacy/collected_data/head/used_for_tracking=false
privacy/collected_data/head/collection_purposes=0
privacy/collected_data/other_data_types/collected=false
privacy/collected_data/other_data_types/linked_to_user=false
privacy/collected_data/other_data_types/used_for_tracking=false
privacy/collected_data/other_data_types/collection_purposes=0
icons/iphone_120x120=""
icons/iphone_180x180=""
icons/ipad_76x76=""
icons/ipad_152x152=""
icons/ipad_167x167=""
icons/app_store_1024x1024=""
icons/spotlight_40x40=""
icons/spotlight_80x80=""
icons/settings_58x58=""
icons/settings_87x87=""
icons/notification_40x40=""
icons/notification_60x60=""
storyboard/image_scale_mode=0
storyboard/custom_image@2x=""
storyboard/custom_image@3x=""
storyboard/use_custom_bg_color=false
storyboard/custom_bg_color=Color(0, 0, 0, 1)

1
examples/camera/icon.svg Normal file
View File

@ -0,0 +1 @@
<svg height="128" width="128" xmlns="http://www.w3.org/2000/svg"><rect x="2" y="2" width="124" height="124" rx="14" fill="#363d52" stroke="#212532" stroke-width="4"/><g transform="scale(.101) translate(122 122)"><g fill="#fff"><path d="M105 673v33q407 354 814 0v-33z"/><path d="m105 673 152 14q12 1 15 14l4 67 132 10 8-61q2-11 15-15h162q13 4 15 15l8 61 132-10 4-67q3-13 15-14l152-14V427q30-39 56-81-35-59-83-108-43 20-82 47-40-37-88-64 7-51 8-102-59-28-123-42-26 43-46 89-49-7-98 0-20-46-46-89-64 14-123 42 1 51 8 102-48 27-88 64-39-27-82-47-48 49-83 108 26 42 56 81zm0 33v39c0 276 813 276 814 0v-39l-134 12-5 69q-2 10-14 13l-162 11q-12 0-16-11l-10-65H446l-10 65q-4 11-16 11l-162-11q-12-3-14-13l-5-69z" fill="#478cbf"/><path d="M483 600c0 34 58 34 58 0v-86c0-34-58-34-58 0z"/><circle cx="725" cy="526" r="90"/><circle cx="299" cy="526" r="90"/></g><g fill="#414042"><circle cx="307" cy="532" r="60"/><circle cx="717" cy="532" r="60"/></g></g></svg>

After

Width:  |  Height:  |  Size: 949 B

View File

@ -0,0 +1,37 @@
[remap]
importer="texture"
type="CompressedTexture2D"
uid="uid://8d4e5b3aytse"
path="res://.godot/imported/icon.svg-218a8f2b3041327d8a5756f3a245f83b.ctex"
metadata={
"vram_texture": false
}
[deps]
source_file="res://icon.svg"
dest_files=["res://.godot/imported/icon.svg-218a8f2b3041327d8a5756f3a245f83b.ctex"]
[params]
compress/mode=0
compress/high_quality=false
compress/lossy_quality=0.7
compress/hdr_compression=1
compress/normal_map=0
compress/channel_pack=0
mipmaps/generate=false
mipmaps/limit=-1
roughness/mode=0
roughness/src_normal=""
process/fix_alpha_border=true
process/premult_alpha=false
process/normal_map_invert_y=false
process/hdr_as_srgb=false
process/hdr_clamp_exposure=false
process/size_limit=0
detect_3d/compress_to=1
svg/scale=1.0
editor/scale_with_editor_scale=false
editor/convert_colors_with_editor_theme=false

View File

@ -0,0 +1,36 @@
; Engine configuration file.
; It's best edited using the editor UI and not directly,
; since the parameters that go here are not all obvious.
;
; Format:
; [section] ; section goes between []
; param=value ; assign values to parameters
config_version=5
[application]
config/name="NOOMI camera test"
run/main_scene="res://control.tscn"
config/features=PackedStringArray("4.4")
config/icon="res://icon.svg"
[audio]
driver/enable_input=true
driver/mix_rate=48000
[display]
window/size/viewport_width=720
window/size/viewport_height=1280
window/handheld/orientation=1
[editor]
movie_writer/movie_file="test.ogv"
movie_writer/fps=15
[rendering]
textures/vram_compression/import_etc2_astc=true

View File

@ -6,15 +6,19 @@ Import("env_modules")
env_camera = env_modules.Clone()
if env["platform"] in ["windows", "macos", "linuxbsd"]:
env_camera.add_source_files(env.modules_sources, "register_types.cpp")
env_camera.add_source_files(env.modules_sources, "register_types.cpp")
if env["platform"] == "windows":
env_camera.add_source_files(env.modules_sources, "camera_win.cpp")
env.Append(LINKFLAGS=["mf.lib", "mfplat.lib", "mfreadwrite.lib"])
elif env["platform"] == "macos":
if env["platform"] == "macos" or env["platform"] == "ios":
env_camera.add_source_files(env.modules_sources, "camera_macos.mm")
if env["platform"] == "android":
env_camera.add_source_files(env.modules_sources, "camera_android.cpp")
env.Append(LIBS=["camera2ndk", "mediandk"])
elif env["platform"] == "linuxbsd":
env_camera.add_source_files(env.modules_sources, "camera_linux.cpp")
env_camera.add_source_files(env.modules_sources, "camera_feed_linux.cpp")

View File

@ -0,0 +1,370 @@
/**************************************************************************/
/* camera_android.cpp */
/**************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/**************************************************************************/
/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/**************************************************************************/
#include "camera_android.h"
//////////////////////////////////////////////////////////////////////////
// Helper functions
//
// The following code enables you to view the contents of a media type while
// debugging.
#ifndef IF_EQUAL_RETURN
#define IF_EQUAL_RETURN(param, val) \
if (val == param) \
return #val
#endif
// Debug helper: maps a supported NDK image format constant to its identifier
// string; anything else reports as "Unsupported".
String GetFormatName(const int32_t &format) {
	switch (format) {
		case AIMAGE_FORMAT_YUV_420_888:
			return "AIMAGE_FORMAT_YUV_420_888";
		case AIMAGE_FORMAT_RGB_888:
			return "AIMAGE_FORMAT_RGB_888";
		case AIMAGE_FORMAT_RGBA_8888:
			return "AIMAGE_FORMAT_RGBA_8888";
		default:
			return "Unsupported";
	}
}
//////////////////////////////////////////////////////////////////////////
// CameraFeedAndroid - Subclass for our camera feed on Android
// Describes one (camera, resolution, format) combination exposed as a feed.
// `position` is an ACAMERA_LENS_FACING_* value, `format` an AIMAGE_FORMAT_*
// value, and `orientation` the sensor's clockwise rotation in degrees
// (ACAMERA_SENSOR_ORIENTATION: 0, 90, 180 or 270).
CameraFeedAndroid::CameraFeedAndroid(ACameraManager *manager, const char *id, int32_t position, int32_t width,
		int32_t height, int32_t format, int32_t orientation) {
	this->manager = manager;
	this->camera_id = id;
	this->width = width;
	this->height = height;

	// Human-readable name shown in the feed list.
	name = vformat("%s | %d x %d", id, width, height);

	// Data type: the format values are mutually exclusive, so chain with else-if.
	this->format = format;
	if (format == AIMAGE_FORMAT_RGB_888) {
		this->datatype = FEED_RGB;
		name += " | RGB";
	} else if (format == AIMAGE_FORMAT_RGBA_8888) {
		this->datatype = FEED_RGBA;
		name += " | RGBA";
	} else if (format == AIMAGE_FORMAT_YUV_420_888) {
		this->datatype = FEED_YCBCR;
		name += " | YCBCR";
	}

	// Position (which way the lens faces).
	if (position == ACAMERA_LENS_FACING_BACK) {
		this->position = CameraFeed::FEED_BACK;
		name += " | BACK";
	} else if (position == ACAMERA_LENS_FACING_FRONT) {
		this->position = CameraFeed::FEED_FRONT;
		name += " | FRONT";
	}

	// Orientation: front-facing sensors are mirrored, so their rotation is reversed.
	int32_t imageRotation = 0;
	if (position == ACAMERA_LENS_FACING_FRONT) {
		imageRotation = orientation % 360;
		imageRotation = (360 - imageRotation) % 360;
	} else {
		imageRotation = (orientation + 360) % 360;
	}
	// Convert degrees to radians (pi / 180). The previous constant
	// (0.015707963... == pi / 200) under-rotated every feed by 10%.
	transform.rotate(real_t(imageRotation) * real_t(0.017453292519943295));
}
// Release the capture pipeline (session, request, reader, device) before the
// feed itself is destroyed.
CameraFeedAndroid::~CameraFeedAndroid() {
	if (is_active()) {
		deactivate_feed();
	}
}
// Opens the camera device and starts a repeating preview capture whose frames
// arrive in onImage(). Returns false (after reporting through onError) if any
// NDK call fails. The NDK calls below are strictly ordered: device -> reader ->
// listener -> surface -> session outputs -> session -> request -> target ->
// repeating request.
bool CameraFeedAndroid::activate_feed() {
	if (is_active()) {
		deactivate_feed();
	};
	// Request permission; bail out if camera access is denied.
	if (!OS::get_singleton()->request_permission("CAMERA")) {
		return false;
	}
	// Open device.
	// NOTE(review): `static` means this struct is initialized once, with the
	// `this` of the first feed that ever activates; a different feed activating
	// later would receive device callbacks with the wrong context — confirm
	// whether multiple simultaneous feeds are expected. Same applies to
	// `listener` and `sessionStateCallbacks` below.
	static ACameraDevice_stateCallbacks deviceCallbacks = {
		.context = this,
		.onDisconnected = onDisconnected,
		.onError = onError,
	};
	// NOTE(review): camera_id.utf8() yields a temporary CharString — verify it
	// converts to the `const char *` parameter expected here.
	camera_status_t c_status = ACameraManager_openCamera(manager, camera_id.utf8(), &deviceCallbacks, &device);
	if (c_status != ACAMERA_OK) {
		onError(this, device, c_status);
		return false;
	}
	// Create image reader with a single buffer (maxImages = 1).
	media_status_t m_status = AImageReader_new(width, height, format, 1, &reader);
	if (m_status != AMEDIA_OK) {
		onError(this, device, m_status);
		return false;
	}
	// Create image buffers: full-resolution R8 for the Y plane, half-resolution
	// RG8 for the interleaved CbCr plane.
	set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_DIFFUSE,
			Image::create_empty(width, height, false, Image::FORMAT_R8));
	set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_NORMAL,
			Image::create_empty(width / 2, height / 2, false, Image::FORMAT_RG8));
	// set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_SPECULAR,
	// Image::create_empty(width, height, false, Image::FORMAT_R8));
	// Register the frame-available listener (see `static` caveat above).
	static AImageReader_ImageListener listener{
		.context = this,
		.onImageAvailable = onImage,
	};
	m_status = AImageReader_setImageListener(reader, &listener);
	if (m_status != AMEDIA_OK) {
		onError(this, device, m_status);
		return false;
	}
	// Get the reader's native window; frames sent to this surface land in the reader.
	ANativeWindow *surface;
	m_status = AImageReader_getWindow(reader, &surface);
	if (m_status != AMEDIA_OK) {
		onError(this, device, m_status);
		return false;
	}
	// Prepare session outputs.
	// NOTE(review): `output`, `outputs` and `imageTarget` are never freed — not
	// on the error paths below and not in deactivate_feed() — so they leak on
	// every activation.
	ACaptureSessionOutput *output = nullptr;
	c_status = ACaptureSessionOutput_create(surface, &output);
	if (c_status != ACAMERA_OK) {
		onError(this, device, c_status);
		return false;
	}
	ACaptureSessionOutputContainer *outputs = nullptr;
	c_status = ACaptureSessionOutputContainer_create(&outputs);
	if (c_status != ACAMERA_OK) {
		onError(this, device, c_status);
		return false;
	}
	c_status = ACaptureSessionOutputContainer_add(outputs, output);
	if (c_status != ACAMERA_OK) {
		onError(this, device, c_status);
		return false;
	}
	// Create capture session.
	static ACameraCaptureSession_stateCallbacks sessionStateCallbacks{
		.context = this,
		.onClosed = onSessionClosed,
		.onReady = onSessionReady,
		.onActive = onSessionActive
	};
	c_status = ACameraDevice_createCaptureSession(device, outputs, &sessionStateCallbacks, &session);
	if (c_status != ACAMERA_OK) {
		onError(this, device, c_status);
		return false;
	}
	// Create a preview-template capture request.
	c_status = ACameraDevice_createCaptureRequest(device, TEMPLATE_PREVIEW, &request);
	if (c_status != ACAMERA_OK) {
		onError(this, device, c_status);
		return false;
	}
	// Point the request's output at the reader's surface.
	ACameraOutputTarget *imageTarget = nullptr;
	c_status = ACameraOutputTarget_create(surface, &imageTarget);
	if (c_status != ACAMERA_OK) {
		onError(this, device, c_status);
		return false;
	}
	c_status = ACaptureRequest_addTarget(request, imageTarget);
	if (c_status != ACAMERA_OK) {
		onError(this, device, c_status);
		return false;
	}
	// Start repeating capture (continuous preview frames).
	c_status = ACameraCaptureSession_setRepeatingRequest(session, nullptr, 1, &request, nullptr);
	if (c_status != ACAMERA_OK) {
		onError(this, device, c_status);
		return false;
	}
	return true;
}
// AImageReader callback: a new frame is available. Copies plane 0 into the
// diffuse channel and plane 1 into the normal channel, then releases the image
// so the single-buffer reader can reuse it.
void CameraFeedAndroid::onImage(void *context, AImageReader *p_reader) {
	auto *feed = static_cast<CameraFeedAndroid *>(context);
	// Get image
	AImage *image = nullptr;
	media_status_t status = AImageReader_acquireNextImage(p_reader, &image);
	ERR_FAIL_COND(status != AMEDIA_OK);
	// Get image data
	uint8_t *data = nullptr;
	int len = 0;
	int32_t pixel_stride, row_stride;
	// Plane 0: luminance (Y) for YUV_420_888; presumably the full pixel data
	// for the RGB(A) formats — confirm against AImage plane layout.
	AImage_getPlaneData(image, 0, &data, &len);
	feed->set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_DIFFUSE, data, 0, len);
	// Plane 1: chroma. NOTE(review): pixel_stride/row_stride are queried but
	// never used — this assumes a tightly packed, interleaved CbCr plane
	// (pixel stride 2), which the NDK does not guarantee for YUV_420_888;
	// confirm on target devices. Return statuses of these calls are unchecked.
	AImage_getPlanePixelStride(image, 1, &pixel_stride);
	AImage_getPlaneRowStride(image, 1, &row_stride);
	AImage_getPlaneData(image, 1, &data, &len);
	feed->set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_NORMAL, data, 0, len);
	// AImage_getPlaneData(image, 2, &data, &len);
	// feed->set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_SPECULAR, data, 0, len);
	// Release image
	AImage_delete(image);
}
// NDK capture-session callback: the session finished processing its requests.
void CameraFeedAndroid::onSessionReady(void *context, ACameraCaptureSession *session) {
	print_verbose("Capture session ready");
}
// NDK capture-session callback: the session started processing requests.
void CameraFeedAndroid::onSessionActive(void *context, ACameraCaptureSession *session) {
	print_verbose("Capture session active");
}
// NDK capture-session callback: the session has been closed.
void CameraFeedAndroid::onSessionClosed(void *context, ACameraCaptureSession *session) {
	// Fixed copy-paste bug: this previously logged "Capture session active".
	print_verbose("Capture session closed");
}
// Tears down the capture pipeline in reverse order of creation: session,
// request, reader, then device. Safe to call when already deactivated — every
// handle is null-checked and reset to nullptr after release.
void CameraFeedAndroid::deactivate_feed() {
	if (session != nullptr) {
		// Stop the repeating preview request before closing the session.
		ACameraCaptureSession_stopRepeating(session);
		ACameraCaptureSession_close(session);
		session = nullptr;
	}
	if (request != nullptr) {
		ACaptureRequest_free(request);
		request = nullptr;
	}
	if (reader != nullptr) {
		AImageReader_delete(reader);
		reader = nullptr;
	}
	if (device != nullptr) {
		// Close the camera device last, after its dependents are released.
		ACameraDevice_close(device);
		device = nullptr;
	}
}
// NDK device error callback (also invoked manually on failed NDK calls):
// logs the error code, then treats the device as disconnected.
void CameraFeedAndroid::onError(void *context, ACameraDevice *p_device, int error) {
	print_error(vformat("Camera error: %d", error));
	onDisconnected(context, p_device);
}
// NDK device disconnect callback: marks the feed inactive. Presumably
// CameraFeed::set_active(false) triggers deactivate_feed() — confirm against
// the base class.
void CameraFeedAndroid::onDisconnected(void *context, ACameraDevice *p_device) {
	print_verbose("Camera disconnected");
	auto *feed = static_cast<CameraFeedAndroid *>(context);
	feed->set_active(false);
}
//////////////////////////////////////////////////////////////////////////
// CameraAndroid - Subclass for our camera server on Android
// Enumerates all cameras known to the NDK camera manager and registers one
// CameraFeedAndroid per supported (camera, resolution, format) combination.
void CameraAndroid::update_feeds() {
	ACameraIdList *cameraIds = nullptr;
	camera_status_t c_status = ACameraManager_getCameraIdList(cameraManager, &cameraIds);
	if (c_status != ACAMERA_OK) {
		ERR_PRINT("Unable to retrieve supported cameras");
		return;
	}
	for (int c = 0; c < cameraIds->numCameras; ++c) {
		const char *id = cameraIds->cameraIds[c];
		ACameraMetadata *metadata;
		// NOTE(review): the statuses of the metadata queries below are
		// unchecked; a failing query would leave its entry uninitialized.
		ACameraManager_getCameraCharacteristics(cameraManager, id, &metadata);
		// Get position (which way the lens faces).
		ACameraMetadata_const_entry lensInfo;
		ACameraMetadata_getConstEntry(metadata, ACAMERA_LENS_FACING, &lensInfo);
		uint8_t position = static_cast<acamera_metadata_enum_android_lens_facing_t>(lensInfo.data.u8[0]);
		// Get sensor orientation (clockwise degrees to display upright).
		ACameraMetadata_const_entry orientation;
		ACameraMetadata_getConstEntry(metadata, ACAMERA_SENSOR_ORIENTATION, &orientation);
		int32_t cameraOrientation = orientation.data.i32[0];
		// Get supported stream configurations; each entry is a 4-tuple of
		// (format, width, height, is_input).
		ACameraMetadata_const_entry formats;
		ACameraMetadata_getConstEntry(metadata, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &formats);
		for (uint32_t f = 0; f < formats.count; f += 4) {
			// Only support output streams
			int32_t input = formats.data.i32[f + 3];
			if (input) {
				continue;
			}
			// Get format and resolution
			int32_t format = formats.data.i32[f + 0];
			if (format == AIMAGE_FORMAT_YUV_420_888 || format == AIMAGE_FORMAT_RGB_888 ||
					format == AIMAGE_FORMAT_RGBA_8888) {
				int32_t width = formats.data.i32[f + 1];
				int32_t height = formats.data.i32[f + 2];
				// Allocate through Godot's memnew (not a bare `new`) so the
				// Ref<>-managed object goes through the engine allocator,
				// matching the rest of the codebase.
				Ref<CameraFeedAndroid> feed = memnew(CameraFeedAndroid(cameraManager, id,
						position,
						width,
						height,
						format,
						cameraOrientation));
				add_feed(feed);
				print_line("Added camera feed: ", feed->get_name());
			}
		}
		ACameraMetadata_free(metadata);
	}
	ACameraManager_deleteCameraIdList(cameraIds);
}
// Creates the NDK camera manager and performs the initial feed enumeration.
CameraAndroid::CameraAndroid() {
	cameraManager = ACameraManager_create();
	// Update feeds
	update_feeds();
}
// Releases the NDK camera manager created in the constructor, if any.
CameraAndroid::~CameraAndroid() {
	if (cameraManager) {
		ACameraManager_delete(cameraManager);
	}
}

View File

@ -0,0 +1,82 @@
/**************************************************************************/
/* camera_android.h */
/**************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/**************************************************************************/
/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/**************************************************************************/
#ifndef CAMERA_ANDROID_H
#define CAMERA_ANDROID_H
#include "servers/camera/camera_feed.h"
#include "servers/camera_server.h"
#include <camera/NdkCameraDevice.h>
#include <camera/NdkCameraError.h>
#include <camera/NdkCameraManager.h>
#include <camera/NdkCameraMetadataTags.h>
#include <media/NdkImageReader.h>
// One Android camera feed: a single (camera, resolution, format) combination
// exposed to Godot's CameraServer. Frames are captured through the NDK
// camera2 API and delivered via the static callbacks below, whose `context`
// is the CameraFeedAndroid instance.
class CameraFeedAndroid : public CameraFeed {
private:
	String camera_id; // NDK camera identifier (from ACameraManager_getCameraIdList).
	int32_t format; // AIMAGE_FORMAT_* constant this feed captures in.
	ACameraManager *manager = nullptr; // Owned by CameraAndroid; never freed here.
	ACameraDevice *device = nullptr; // Open camera device while the feed is active.
	AImageReader *reader = nullptr; // Receives captured frames.
	ACameraCaptureSession *session = nullptr; // Active capture session.
	ACaptureRequest *request = nullptr; // Repeating preview request.
	static void onError(void *context, ACameraDevice *p_device, int error);
	static void onDisconnected(void *context, ACameraDevice *p_device);
	static void onImage(void *context, AImageReader *p_reader);
	static void onSessionReady(void *context, ACameraCaptureSession *session);
	static void onSessionActive(void *context, ACameraCaptureSession *session);
	static void onSessionClosed(void *context, ACameraCaptureSession *session);
protected:
public:
	CameraFeedAndroid(ACameraManager *manager, const char *id, int32_t position, int32_t width, int32_t height,
			int32_t format, int32_t orientation);
	virtual ~CameraFeedAndroid();
	// NOTE(review): these presumably override virtuals declared in CameraFeed —
	// confirm and consider marking them `override`.
	bool activate_feed();
	void deactivate_feed();
};
// Android implementation of CameraServer: enumerates NDK cameras and
// registers one CameraFeedAndroid per supported configuration.
class CameraAndroid : public CameraServer {
private:
	ACameraManager *cameraManager; // Created in the constructor, freed in the destructor.
	void update_feeds();
public:
	CameraAndroid();
	~CameraAndroid();
};
#endif // CAMERA_ANDROID_H

View File

@ -42,10 +42,6 @@
@interface MyCaptureSession : AVCaptureSession <AVCaptureVideoDataOutputSampleBufferDelegate> {
Ref<CameraFeed> feed;
size_t width[2];
size_t height[2];
Vector<uint8_t> img_data[2];
AVCaptureDeviceInput *input;
AVCaptureVideoDataOutput *output;
}
@ -58,10 +54,6 @@
if (self = [super init]) {
NSError *error;
feed = p_feed;
width[0] = 0;
height[0] = 0;
width[1] = 0;
height[1] = 0;
[self beginConfiguration];
@ -76,7 +68,11 @@
if (!output) {
print_line("Couldn't get output device for camera");
} else {
NSDictionary *settings = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) };
NSDictionary *settings = @{
(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
@"Width" : @1280,
@"Height" : @720,
};
output.videoSettings = settings;
// discard if the data output queue is blocked (as we process the still image)
@ -135,54 +131,42 @@
CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
// get our buffers
unsigned char *dataY = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
unsigned char *dataCbCr = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
if (dataY == nullptr) {
print_line("Couldn't access Y pixel buffer data");
} else if (dataCbCr == nullptr) {
print_line("Couldn't access CbCr pixel buffer data");
} else {
Ref<Image> img[2];
{
// do Y
size_t new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
size_t new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
if ((width[0] != new_width) || (height[0] != new_height)) {
width[0] = new_width;
height[0] = new_height;
img_data[0].resize(new_width * new_height);
}
uint8_t *w = img_data[0].ptrw();
memcpy(w, dataY, new_width * new_height);
img[0].instantiate();
img[0]->set_data(new_width, new_height, 0, Image::FORMAT_R8, img_data[0]);
{
// do Y
unsigned char *dataY = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
if (dataY == nullptr) {
print_line("Couldn't access Y pixel buffer data");
return;
}
{
// do CbCr
size_t new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1);
size_t new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
size_t new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
size_t new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
if ((width[1] != new_width) || (height[1] != new_height)) {
width[1] = new_width;
height[1] = new_height;
img_data[1].resize(2 * new_width * new_height);
}
Ref<Image> image = feed->get_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_DIFFUSE);
if (image.is_null() || image->get_width() != new_width || image->get_height() != new_height) {
feed->set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_DIFFUSE, Image::create_empty(new_width, new_height, false, Image::FORMAT_R8));
} else {
feed->set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_DIFFUSE, dataY, 0, new_width * new_height);
}
}
uint8_t *w = img_data[1].ptrw();
memcpy(w, dataCbCr, 2 * new_width * new_height);
///TODO OpenGL doesn't support FORMAT_RG8, need to do some form of conversion
img[1].instantiate();
img[1]->set_data(new_width, new_height, 0, Image::FORMAT_RG8, img_data[1]);
{
// do CbCr
unsigned char *dataCbCr = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
if (dataCbCr == nullptr) {
print_line("Couldn't access CbCr pixel buffer data");
return;
}
// set our texture...
feed->set_ycbcr_images(img[0], img[1]);
size_t new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1);
size_t new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
Ref<Image> image = feed->get_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_NORMAL);
if (image.is_null() || image->get_width() != new_width || image->get_height() != new_height) {
feed->set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_NORMAL, Image::create_empty(new_width, new_height, false, Image::FORMAT_RG8));
} else {
feed->set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_NORMAL, dataCbCr, 0, 2 * new_width * new_height);
}
}
// and unlock
@ -343,11 +327,6 @@ void CameraMacOS::update_feeds() {
Ref<CameraFeedMacOS> newfeed;
newfeed.instantiate();
newfeed->set_device(device);
// assume display camera so inverse
Transform2D transform = Transform2D(-1.0, 0.0, 0.0, -1.0, 1.0, 1.0);
newfeed->set_transform(transform);
add_feed(newfeed);
};
};

View File

@ -29,66 +29,517 @@
/**************************************************************************/
#include "camera_win.h"
#include <strsafe.h>
///@TODO sorry guys, I got about 80% through implementing this using DirectShow only
// to find out Microsoft deprecated half the API and its replacement is as confusing
// as they could make it. Joey suggested looking into libuvc which offers a more direct
// route to webcams over USB and this is very promising but it wouldn't compile on
// windows for me...I've gutted the classes I implemented DirectShow in just to have
// a skeleton for someone to work on, mail me for more details or if you want a copy....
//////////////////////////////////////////////////////////////////////////
// Helper functions
//
// The following code enables you to view the contents of a media type while
// debugging.
#ifndef IF_EQUAL_RETURN
#define IF_EQUAL_RETURN(param, val) \
if (val == param) \
return #val
#endif
String GetGUIDNameConst(const GUID &guid) {
IF_EQUAL_RETURN(guid, MF_MT_MAJOR_TYPE);
IF_EQUAL_RETURN(guid, MF_MT_MAJOR_TYPE);
IF_EQUAL_RETURN(guid, MF_MT_SUBTYPE);
IF_EQUAL_RETURN(guid, MF_MT_ALL_SAMPLES_INDEPENDENT);
IF_EQUAL_RETURN(guid, MF_MT_FIXED_SIZE_SAMPLES);
IF_EQUAL_RETURN(guid, MF_MT_COMPRESSED);
IF_EQUAL_RETURN(guid, MF_MT_SAMPLE_SIZE);
IF_EQUAL_RETURN(guid, MF_MT_WRAPPED_TYPE);
IF_EQUAL_RETURN(guid, MF_MT_AUDIO_NUM_CHANNELS);
IF_EQUAL_RETURN(guid, MF_MT_AUDIO_SAMPLES_PER_SECOND);
IF_EQUAL_RETURN(guid, MF_MT_AUDIO_FLOAT_SAMPLES_PER_SECOND);
IF_EQUAL_RETURN(guid, MF_MT_AUDIO_AVG_BYTES_PER_SECOND);
IF_EQUAL_RETURN(guid, MF_MT_AUDIO_BLOCK_ALIGNMENT);
IF_EQUAL_RETURN(guid, MF_MT_AUDIO_BITS_PER_SAMPLE);
IF_EQUAL_RETURN(guid, MF_MT_AUDIO_VALID_BITS_PER_SAMPLE);
IF_EQUAL_RETURN(guid, MF_MT_AUDIO_SAMPLES_PER_BLOCK);
IF_EQUAL_RETURN(guid, MF_MT_AUDIO_CHANNEL_MASK);
IF_EQUAL_RETURN(guid, MF_MT_AUDIO_FOLDDOWN_MATRIX);
IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_PEAKREF);
IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_PEAKTARGET);
IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_AVGREF);
IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_AVGTARGET);
IF_EQUAL_RETURN(guid, MF_MT_AUDIO_PREFER_WAVEFORMATEX);
IF_EQUAL_RETURN(guid, MF_MT_AAC_PAYLOAD_TYPE);
IF_EQUAL_RETURN(guid, MF_MT_AAC_AUDIO_PROFILE_LEVEL_INDICATION);
IF_EQUAL_RETURN(guid, MF_MT_FRAME_SIZE);
IF_EQUAL_RETURN(guid, MF_MT_FRAME_RATE);
IF_EQUAL_RETURN(guid, MF_MT_FRAME_RATE_RANGE_MAX);
IF_EQUAL_RETURN(guid, MF_MT_FRAME_RATE_RANGE_MIN);
IF_EQUAL_RETURN(guid, MF_MT_PIXEL_ASPECT_RATIO);
IF_EQUAL_RETURN(guid, MF_MT_DRM_FLAGS);
IF_EQUAL_RETURN(guid, MF_MT_PAD_CONTROL_FLAGS);
IF_EQUAL_RETURN(guid, MF_MT_SOURCE_CONTENT_HINT);
IF_EQUAL_RETURN(guid, MF_MT_VIDEO_CHROMA_SITING);
IF_EQUAL_RETURN(guid, MF_MT_INTERLACE_MODE);
IF_EQUAL_RETURN(guid, MF_MT_TRANSFER_FUNCTION);
IF_EQUAL_RETURN(guid, MF_MT_VIDEO_PRIMARIES);
IF_EQUAL_RETURN(guid, MF_MT_CUSTOM_VIDEO_PRIMARIES);
IF_EQUAL_RETURN(guid, MF_MT_YUV_MATRIX);
IF_EQUAL_RETURN(guid, MF_MT_VIDEO_LIGHTING);
IF_EQUAL_RETURN(guid, MF_MT_VIDEO_NOMINAL_RANGE);
IF_EQUAL_RETURN(guid, MF_MT_GEOMETRIC_APERTURE);
IF_EQUAL_RETURN(guid, MF_MT_MINIMUM_DISPLAY_APERTURE);
IF_EQUAL_RETURN(guid, MF_MT_PAN_SCAN_APERTURE);
IF_EQUAL_RETURN(guid, MF_MT_PAN_SCAN_ENABLED);
IF_EQUAL_RETURN(guid, MF_MT_AVG_BITRATE);
IF_EQUAL_RETURN(guid, MF_MT_AVG_BIT_ERROR_RATE);
IF_EQUAL_RETURN(guid, MF_MT_MAX_KEYFRAME_SPACING);
IF_EQUAL_RETURN(guid, MF_MT_DEFAULT_STRIDE);
IF_EQUAL_RETURN(guid, MF_MT_PALETTE);
IF_EQUAL_RETURN(guid, MF_MT_USER_DATA);
IF_EQUAL_RETURN(guid, MF_MT_AM_FORMAT_TYPE);
IF_EQUAL_RETURN(guid, MF_MT_MPEG_START_TIME_CODE);
IF_EQUAL_RETURN(guid, MF_MT_MPEG2_PROFILE);
IF_EQUAL_RETURN(guid, MF_MT_MPEG2_LEVEL);
IF_EQUAL_RETURN(guid, MF_MT_MPEG2_FLAGS);
IF_EQUAL_RETURN(guid, MF_MT_MPEG_SEQUENCE_HEADER);
IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_SRC_PACK_0);
IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_CTRL_PACK_0);
IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_SRC_PACK_1);
IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_CTRL_PACK_1);
IF_EQUAL_RETURN(guid, MF_MT_DV_VAUX_SRC_PACK);
IF_EQUAL_RETURN(guid, MF_MT_DV_VAUX_CTRL_PACK);
IF_EQUAL_RETURN(guid, MF_MT_ARBITRARY_HEADER);
IF_EQUAL_RETURN(guid, MF_MT_ARBITRARY_FORMAT);
IF_EQUAL_RETURN(guid, MF_MT_IMAGE_LOSS_TOLERANT);
IF_EQUAL_RETURN(guid, MF_MT_MPEG4_SAMPLE_DESCRIPTION);
IF_EQUAL_RETURN(guid, MF_MT_MPEG4_CURRENT_SAMPLE_ENTRY);
IF_EQUAL_RETURN(guid, MF_MT_ORIGINAL_4CC);
IF_EQUAL_RETURN(guid, MF_MT_ORIGINAL_WAVE_FORMAT_TAG);
// Media types
IF_EQUAL_RETURN(guid, MFMediaType_Audio);
IF_EQUAL_RETURN(guid, MFMediaType_Video);
IF_EQUAL_RETURN(guid, MFMediaType_Protected);
IF_EQUAL_RETURN(guid, MFMediaType_SAMI);
IF_EQUAL_RETURN(guid, MFMediaType_Script);
IF_EQUAL_RETURN(guid, MFMediaType_Image);
IF_EQUAL_RETURN(guid, MFMediaType_HTML);
IF_EQUAL_RETURN(guid, MFMediaType_Binary);
IF_EQUAL_RETURN(guid, MFMediaType_FileTransfer);
IF_EQUAL_RETURN(guid, MFVideoFormat_AI44); // FCC('AI44')
IF_EQUAL_RETURN(guid, MFVideoFormat_ARGB32); // D3DFMT_A8R8G8B8
IF_EQUAL_RETURN(guid, MFVideoFormat_AYUV); // FCC('AYUV')
IF_EQUAL_RETURN(guid, MFVideoFormat_DV25); // FCC('dv25')
IF_EQUAL_RETURN(guid, MFVideoFormat_DV50); // FCC('dv50')
IF_EQUAL_RETURN(guid, MFVideoFormat_DVH1); // FCC('dvh1')
IF_EQUAL_RETURN(guid, MFVideoFormat_DVSD); // FCC('dvsd')
IF_EQUAL_RETURN(guid, MFVideoFormat_DVSL); // FCC('dvsl')
IF_EQUAL_RETURN(guid, MFVideoFormat_H264); // FCC('H264')
IF_EQUAL_RETURN(guid, MFVideoFormat_I420); // FCC('I420')
IF_EQUAL_RETURN(guid, MFVideoFormat_IYUV); // FCC('IYUV')
IF_EQUAL_RETURN(guid, MFVideoFormat_M4S2); // FCC('M4S2')
IF_EQUAL_RETURN(guid, MFVideoFormat_MJPG);
IF_EQUAL_RETURN(guid, MFVideoFormat_MP43); // FCC('MP43')
IF_EQUAL_RETURN(guid, MFVideoFormat_MP4S); // FCC('MP4S')
IF_EQUAL_RETURN(guid, MFVideoFormat_MP4V); // FCC('MP4V')
IF_EQUAL_RETURN(guid, MFVideoFormat_MPG1); // FCC('MPG1')
IF_EQUAL_RETURN(guid, MFVideoFormat_MSS1); // FCC('MSS1')
IF_EQUAL_RETURN(guid, MFVideoFormat_MSS2); // FCC('MSS2')
IF_EQUAL_RETURN(guid, MFVideoFormat_NV11); // FCC('NV11')
IF_EQUAL_RETURN(guid, MFVideoFormat_NV12); // FCC('NV12')
IF_EQUAL_RETURN(guid, MFVideoFormat_P010); // FCC('P010')
IF_EQUAL_RETURN(guid, MFVideoFormat_P016); // FCC('P016')
IF_EQUAL_RETURN(guid, MFVideoFormat_P210); // FCC('P210')
IF_EQUAL_RETURN(guid, MFVideoFormat_P216); // FCC('P216')
IF_EQUAL_RETURN(guid, MFVideoFormat_RGB24); // D3DFMT_R8G8B8
IF_EQUAL_RETURN(guid, MFVideoFormat_RGB32); // D3DFMT_X8R8G8B8
IF_EQUAL_RETURN(guid, MFVideoFormat_RGB555); // D3DFMT_X1R5G5B5
IF_EQUAL_RETURN(guid, MFVideoFormat_RGB565); // D3DFMT_R5G6B5
IF_EQUAL_RETURN(guid, MFVideoFormat_RGB8);
IF_EQUAL_RETURN(guid, MFVideoFormat_UYVY); // FCC('UYVY')
IF_EQUAL_RETURN(guid, MFVideoFormat_v210); // FCC('v210')
IF_EQUAL_RETURN(guid, MFVideoFormat_v410); // FCC('v410')
IF_EQUAL_RETURN(guid, MFVideoFormat_WMV1); // FCC('WMV1')
IF_EQUAL_RETURN(guid, MFVideoFormat_WMV2); // FCC('WMV2')
IF_EQUAL_RETURN(guid, MFVideoFormat_WMV3); // FCC('WMV3')
IF_EQUAL_RETURN(guid, MFVideoFormat_WVC1); // FCC('WVC1')
IF_EQUAL_RETURN(guid, MFVideoFormat_Y210); // FCC('Y210')
IF_EQUAL_RETURN(guid, MFVideoFormat_Y216); // FCC('Y216')
IF_EQUAL_RETURN(guid, MFVideoFormat_Y410); // FCC('Y410')
IF_EQUAL_RETURN(guid, MFVideoFormat_Y416); // FCC('Y416')
IF_EQUAL_RETURN(guid, MFVideoFormat_Y41P);
IF_EQUAL_RETURN(guid, MFVideoFormat_Y41T);
IF_EQUAL_RETURN(guid, MFVideoFormat_YUY2); // FCC('YUY2')
IF_EQUAL_RETURN(guid, MFVideoFormat_YV12); // FCC('YV12')
IF_EQUAL_RETURN(guid, MFVideoFormat_YVYU);
IF_EQUAL_RETURN(guid, MFAudioFormat_PCM); // WAVE_FORMAT_PCM
IF_EQUAL_RETURN(guid, MFAudioFormat_Float); // WAVE_FORMAT_IEEE_FLOAT
IF_EQUAL_RETURN(guid, MFAudioFormat_DTS); // WAVE_FORMAT_DTS
IF_EQUAL_RETURN(guid, MFAudioFormat_Dolby_AC3_SPDIF); // WAVE_FORMAT_DOLBY_AC3_SPDIF
IF_EQUAL_RETURN(guid, MFAudioFormat_DRM); // WAVE_FORMAT_DRM
IF_EQUAL_RETURN(guid, MFAudioFormat_WMAudioV8); // WAVE_FORMAT_WMAUDIO2
IF_EQUAL_RETURN(guid, MFAudioFormat_WMAudioV9); // WAVE_FORMAT_WMAUDIO3
IF_EQUAL_RETURN(guid, MFAudioFormat_WMAudio_Lossless); // WAVE_FORMAT_WMAUDIO_LOSSLESS
IF_EQUAL_RETURN(guid, MFAudioFormat_WMASPDIF); // WAVE_FORMAT_WMASPDIF
IF_EQUAL_RETURN(guid, MFAudioFormat_MSP1); // WAVE_FORMAT_WMAVOICE9
IF_EQUAL_RETURN(guid, MFAudioFormat_MP3); // WAVE_FORMAT_MPEGLAYER3
IF_EQUAL_RETURN(guid, MFAudioFormat_MPEG); // WAVE_FORMAT_MPEG
IF_EQUAL_RETURN(guid, MFAudioFormat_AAC); // WAVE_FORMAT_MPEG_HEAAC
IF_EQUAL_RETURN(guid, MFAudioFormat_ADTS); // WAVE_FORMAT_MPEG_ADTS_AAC
return "Unknown";
}
//////////////////////////////////////////////////////////////////////////
// CameraFeedWindows - Subclass for our camera feed on windows
/// @TODO need to implement this
class CameraFeedWindows : public CameraFeed {
private:
protected:
public:
CameraFeedWindows();
virtual ~CameraFeedWindows();
bool activate_feed();
void deactivate_feed();
};
CameraFeedWindows::CameraFeedWindows() {
///@TODO implement this, should store information about our available camera
CameraFeedWindows::CameraFeedWindows(LPCWSTR camera_id, IMFMediaType *type, String name, int width, int height, GUID format) {
this->camera_id = camera_id;
this->name = name;
this->width = width;
this->height = height;
this->type = type;
this->format = format;
}
CameraFeedWindows::~CameraFeedWindows() {
// make sure we stop recording if we are!
if (is_active()) {
deactivate_feed();
};
///@TODO free up anything used by this
};
SafeRelease(&type);
}
bool CameraFeedWindows::activate_feed() {
///@TODO this should activate our camera and start the process of capturing frames
IMFAttributes *pAttributes = NULL;
HRESULT hr = MFCreateAttributes(&pAttributes, 2);
if (FAILED(hr)) {
goto done;
}
// Set the device type to video.
hr = pAttributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
if (FAILED(hr)) {
goto done;
}
// Set the symbolic link.
hr = pAttributes->SetString(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, camera_id);
if (FAILED(hr)) {
goto done;
}
// Create media source
hr = MFCreateDeviceSource(pAttributes, &source);
if (FAILED(hr)) {
goto done;
}
// Get information about device
IMFPresentationDescriptor *pPD;
hr = source->CreatePresentationDescriptor(&pPD);
if (FAILED(hr)) {
goto done;
}
// Get information about video stream
BOOL fSelected;
IMFStreamDescriptor *pSD;
hr = pPD->GetStreamDescriptorByIndex(0, &fSelected, &pSD);
if (FAILED(hr)) {
goto done;
}
// Get information about supported media types
IMFMediaTypeHandler *pHandler;
hr = pSD->GetMediaTypeHandler(&pHandler);
if (FAILED(hr)) {
goto done;
}
// Set media type
hr = pHandler->SetCurrentMediaType(type);
if (FAILED(hr)) {
goto done;
}
// Create media reader
hr = MFCreateSourceReaderFromMediaSource(source, NULL, &reader);
if (FAILED(hr)) {
goto done;
}
// Prepare images and textures
if (format == MFVideoFormat_RGB24) {
set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_DIFFUSE,
Image::create_empty(width, height, false, Image::FORMAT_RGB8));
}
if (format == MFVideoFormat_NV12) {
set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_DIFFUSE,
Image::create_empty(width, height, false, Image::FORMAT_R8));
set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_NORMAL,
Image::create_empty(width / 2, height / 2, false, Image::FORMAT_RG8));
}
// Start reading
worker = memnew(std::thread(capture, this));
done:
SafeRelease(&pAttributes);
SafeRelease(&pPD);
SafeRelease(&pSD);
SafeRelease(&pHandler);
if FAILED (hr) {
print_error(vformat("Unable to activate camera feed (%d)", hr));
return false;
}
return true;
};
///@TODO we should probably have a callback method here that is being called by the
// camera API which provides frames and call back into the CameraServer to update our texture
}
void CameraFeedWindows::deactivate_feed() {
///@TODO this should deactivate our camera and stop the process of capturing frames
if (worker != NULL) {
active = false;
worker->join();
memdelete(worker);
worker = NULL;
}
SafeRelease(&reader);
SafeRelease(&source);
}
void CameraFeedWindows::capture(CameraFeedWindows *feed) {
print_verbose("Camera feed is now streaming");
feed->active = true;
while (feed->active) {
feed->read();
Sleep(100);
}
}
void CameraFeedWindows::read() {
	// Pull one sample from the source reader and copy its plane(s) into the
	// feed's channel images (RGB24 -> diffuse; NV12 -> diffuse Y + normal CbCr).
	HRESULT hr = S_OK;
	IMFSample *pSample = NULL;
	BYTE *data = NULL;
	DWORD streamIndex, flags, len;
	LONGLONG llTimeStamp;
	IMFMediaBuffer *buffer = NULL;
	hr = reader->ReadSample(
			MF_SOURCE_READER_FIRST_VIDEO_STREAM, // Stream index.
			0, // Flags.
			&streamIndex, // Receives the actual stream index.
			&flags, // Receives status flags.
			&llTimeStamp, // Receives the time stamp.
			&pSample // Receives the sample or NULL.
	);
	if (FAILED(hr)) {
		return;
	}
	// End of stream: stop the capture loop.
	if (flags & MF_SOURCE_READERF_ENDOFSTREAM) {
		print_verbose("\tEnd of stream");
		active = false;
	}
	if (flags & MF_SOURCE_READERF_NEWSTREAM) {
		print_verbose("\tNew stream");
	}
	if (flags & MF_SOURCE_READERF_NATIVEMEDIATYPECHANGED) {
		print_verbose("\tNative type changed");
	}
	if (flags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED) {
		print_verbose("\tCurrent type changed");
	}
	if (flags & MF_SOURCE_READERF_STREAMTICK) {
		print_verbose("\tStream tick");
	}
	// Process sample (may be NULL, e.g. on a stream tick).
	if (pSample) {
		hr = pSample->GetBufferByIndex(0, &buffer);
		if (FAILED(hr)) {
			pSample->Release(); // Don't leak the sample on failure.
			return;
		}
		// Lock the image buffer; bail out (releasing COM objects) if that fails.
		hr = buffer->Lock(&data, NULL, &len);
		if (FAILED(hr)) {
			buffer->Release();
			pSample->Release();
			return;
		}
		// RGB data (or the Y plane for NV12) fills the diffuse channel image.
		Ref<Image> yImage = get_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_DIFFUSE);
		set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_DIFFUSE, data, 0, yImage->get_data().size());
		// For NV12 the interleaved CbCr plane follows the Y plane in the buffer.
		if (format == MFVideoFormat_NV12) {
			Ref<Image> uvImage = get_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_NORMAL);
			set_image(RenderingServer::CANVAS_TEXTURE_CHANNEL_NORMAL, data, yImage->get_data().size(), uvImage->get_data().size());
		}
		buffer->Unlock();
		buffer->Release();
		pSample->Release();
	}
}
//////////////////////////////////////////////////////////////////////////
// CameraWindows - Subclass for our camera server on windows
void CameraWindows::add_active_cameras() {
///@TODO scan through any active cameras and create CameraFeedWindows objects for them
void CameraWindows::update_feeds() {
	// Rebuild the feed list from the capture devices currently attached.
	// Remove existing feeds first so we don't register duplicates.
	for (int i = feeds.size() - 1; i >= 0; i--) {
		Ref<CameraFeedWindows> feed = (Ref<CameraFeedWindows>)feeds[i];
		remove_feed(feed);
	};
	// Create an attribute store to hold the search criteria.
	IMFAttributes *pConfig = NULL;
	HRESULT hr = MFCreateAttributes(&pConfig, 1);
	if (FAILED(hr)) {
		goto done_all;
	}
	// Request video capture devices.
	hr = pConfig->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
	if (FAILED(hr)) {
		goto done_all;
	}
	// Enumerate matching devices.
	UINT32 count = 0;
	IMFActivate **ppDevices = NULL;
	hr = MFEnumDeviceSources(pConfig, &ppDevices, &count);
	if (FAILED(hr)) {
		goto done_all;
	}
	// Create feeds for all supported media sources.
	for (DWORD i = 0; i < count; i++) {
		IMFActivate *pDevice = ppDevices[i];
		// Get camera id (symbolic link); ownership is handed to the feed below.
		// NOTE(review): strings from GetAllocatedString() should eventually be
		// freed with CoTaskMemFree(); confirm whether CameraFeedWindows copies
		// or keeps the camera_id pointer before freeing it here.
		WCHAR *szCameraID = NULL;
		UINT32 len;
		hr = pDevice->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, &szCameraID, &len);
		if (FAILED(hr)) {
			goto done_device;
		}
		// Get human-readable device name.
		WCHAR *szFriendlyName = NULL;
		hr = pDevice->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, &szFriendlyName, &len);
		if (FAILED(hr)) {
			goto done_device;
		}
		// Get media source.
		IMFMediaSource *pSource = NULL;
		hr = pDevice->ActivateObject(IID_PPV_ARGS(&pSource));
		if (FAILED(hr)) {
			goto done_device;
		}
		// Get information about device.
		IMFPresentationDescriptor *pPD = NULL;
		hr = pSource->CreatePresentationDescriptor(&pPD);
		if (FAILED(hr)) {
			goto done_device;
		}
		// Get information about the first video stream.
		BOOL fSelected;
		IMFStreamDescriptor *pSD = NULL;
		hr = pPD->GetStreamDescriptorByIndex(0, &fSelected, &pSD);
		if (FAILED(hr)) {
			goto done_device;
		}
		// Get the handler for the supported media types.
		IMFMediaTypeHandler *pHandler = NULL;
		hr = pSD->GetMediaTypeHandler(&pHandler);
		if (FAILED(hr)) {
			goto done_device;
		}
		// Get supported media type count.
		DWORD cTypes = 0;
		hr = pHandler->GetMediaTypeCount(&cTypes);
		if (FAILED(hr)) {
			goto done_device;
		}
		// Use a distinct index name; the previous code shadowed the outer `i`.
		for (DWORD j = 0; j < cTypes; j++) {
			// Get media type.
			IMFMediaType *pType = NULL;
			hr = pHandler->GetMediaTypeByIndex(j, &pType);
			if (FAILED(hr)) {
				SafeRelease(&pType);
				break;
			}
			// Get subtype.
			GUID subType;
			hr = pType->GetGUID(MF_MT_SUBTYPE, &subType);
			if (FAILED(hr)) {
				SafeRelease(&pType);
				break;
			}
			// Get image size (initialize both outputs before the call).
			UINT32 width = 0, height = 0;
			hr = MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height);
			if (FAILED(hr)) {
				SafeRelease(&pType);
				break;
			}
			// Add a feed for each supported format; the feed takes over pType.
			if (subType == MFVideoFormat_RGB24 || subType == MFVideoFormat_NV12) {
				String format = GetGUIDNameConst(subType);
				format = format.replace("MFVideoFormat_", "");
				String name = szFriendlyName + vformat(" (%d x %d, %s)", width, height, format);
				// Godot Objects must be allocated with memnew, not raw new.
				Ref<CameraFeedWindows> feed = memnew(CameraFeedWindows(szCameraID, pType, name, width, height, subType));
				add_feed(feed);
				print_line("Added camera feed: ", name);
			} else {
				// Unsupported subtype: release the media type so it doesn't leak.
				SafeRelease(&pType);
			}
		}
	done_device:
		SafeRelease(&pPD);
		SafeRelease(&pSD);
		SafeRelease(&pHandler);
		SafeRelease(&pSource);
		SafeRelease(&pDevice);
	}
	// MFEnumDeviceSources allocates the activation array; the caller frees it.
	CoTaskMemFree(ppDevices);
done_all:
	SafeRelease(&pConfig);
	if (FAILED(hr)) {
		print_error(vformat("Error updating feeds (%d)", hr));
	}
}
CameraWindows::CameraWindows() {
	// Find cameras active right now
	add_active_cameras();
	// Initialize the Media Foundation platform.
	// NOTE(review): MFStartup() runs *after* add_active_cameras(); Media
	// Foundation device enumeration normally requires the platform to be
	// initialized first — confirm the intended call order.
	HRESULT hr = MFStartup(MF_VERSION);
	if (FAILED(hr)) {
		print_error("Unable to initialize Media Foundation platform");
		return;
	}
	// need to add something that will react to devices being connected/removed...
};
// Update feeds
update_feeds();
}
CameraWindows::~CameraWindows() {
	// Shut down the Media Foundation platform started in the constructor.
	MFShutdown();
}

View File

@ -33,14 +33,51 @@
#include "servers/camera/camera_feed.h"
#include "servers/camera_server.h"
#include <initguid.h>
#include <mfapi.h>
#include <mfidl.h>
#include <mferror.h>
#include <mfreadwrite.h>
#include <windows.h>
class CameraFeedWindows : public CameraFeed {
private:
LPCWSTR camera_id;
IMFMediaSource *source = NULL;
IMFMediaType *type = NULL;
GUID format;
IMFSourceReader *reader = NULL;
std::thread *worker;
static void capture(CameraFeedWindows *feed);
void read();
protected:
public:
CameraFeedWindows(LPCWSTR camera_id, IMFMediaType *type, String name, int width, int height, GUID format);
virtual ~CameraFeedWindows();
bool activate_feed();
void deactivate_feed();
};
class CameraWindows : public CameraServer {
private:
void add_active_cameras();
void update_feeds();
public:
CameraWindows();
~CameraWindows() {}
~CameraWindows();
};
// Releases a COM interface pointer and clears it; safe to call when the
// pointed-to pointer is already NULL.
template <class T>
void SafeRelease(T **ppT) {
	if (*ppT == NULL) {
		return;
	}
	(*ppT)->Release();
	*ppT = NULL;
}
#endif // CAMERA_WIN_H

View File

@ -1,6 +1,5 @@
def can_build(env, platform):
    # The diff collapse left two consecutive return statements here (the first
    # was the pre-commit version and unreachable); keep only the updated list
    # that adds Android and iOS support.
    return platform == "macos" or platform == "windows" or platform == "android" or platform == "ios" or platform == "linuxbsd"


def configure(env):
    pass

View File

@ -39,6 +39,9 @@
#if defined(MACOS_ENABLED)
#include "camera_macos.h"
#endif
#if defined(ANDROID_ENABLED)
#include "camera_android.h"
#endif
void initialize_camera_module(ModuleInitializationLevel p_level) {
if (p_level != MODULE_INITIALIZATION_LEVEL_SCENE) {
@ -54,6 +57,9 @@ void initialize_camera_module(ModuleInitializationLevel p_level) {
#if defined(MACOS_ENABLED)
CameraServer::make_default<CameraMacOS>();
#endif
#if defined(ANDROID_ENABLED)
CameraServer::make_default<CameraAndroid>();
#endif
}
void uninitialize_camera_module(ModuleInitializationLevel p_level) {

View File

@ -13,7 +13,7 @@ thirdparty_obj = []
if env["builtin_libtheora"]:
thirdparty_dir = "#thirdparty/libtheora/"
thirdparty_sources = [
# "analyze.c",
"analyze.c",
# "apiwrapper.c",
"bitpack.c",
# "collect.c",
@ -22,24 +22,24 @@ if env["builtin_libtheora"]:
"decode.c",
"dequant.c",
# "encapiwrapper.c",
# "encfrag.c",
# "encinfo.c",
# "encode.c",
"encfrag.c",
"encinfo.c",
"encode.c",
# "encoder_disabled.c",
# "enquant.c",
# "fdct.c",
"enquant.c",
"fdct.c",
"fragment.c",
"huffdec.c",
# "huffenc.c",
"huffenc.c",
"idct.c",
"info.c",
"internal.c",
# "mathops.c",
# "mcenc.c",
"mathops.c",
"mcenc.c",
"quant.c",
# "rate.c",
"rate.c",
"state.c",
# "tokenize.c",
"tokenize.c",
]
thirdparty_sources_x86 = [

View File

@ -13,7 +13,7 @@ thirdparty_obj = []
if env["builtin_libvorbis"]:
thirdparty_dir = "#thirdparty/libvorbis/"
thirdparty_sources = [
# "analysis.c",
"analysis.c",
# "barkmel.c",
"bitrate.c",
"block.c",
@ -35,7 +35,7 @@ if env["builtin_libvorbis"]:
"smallft.c",
"synthesis.c",
# "tone.c",
# "vorbisenc.c",
"vorbisenc.c",
"vorbisfile.c",
"window.c",
]

View File

@ -63,7 +63,7 @@ def get_ndk_version():
# This is kept in sync with the value in 'platform/android/java/app/config.gradle'.
def get_min_target_api():
return 21
return 24
def get_flags():

View File

@ -15,6 +15,12 @@
android:glEsVersion="0x00030000"
android:required="true" />
<uses-feature
android:name="android.hardware.camera"
android:required="false" />
<uses-permission android:name="android.permission.CAMERA" />
<application
android:label="@string/godot_project_name_string"
android:allowBackup="false"
@ -41,9 +47,8 @@
android:launchMode="singleInstancePerTask"
android:excludeFromRecents="false"
android:exported="true"
android:screenOrientation="landscape"
android:screenOrientation="portrait"
android:configChanges="orientation|keyboardHidden|screenSize|smallestScreenSize|density|keyboard|navigation|screenLayout|uiMode"
android:resizeableActivity="false"
tools:ignore="UnusedAttribute" >
<intent-filter>

View File

@ -1,2 +0,0 @@
*
!.gitignore

View File

@ -179,11 +179,7 @@ android {
// Signing and zip-aligning are skipped for prebuilt builds, but
// performed for Godot gradle builds.
zipAlignEnabled shouldZipAlign()
if (shouldSign()) {
signingConfig signingConfigs.debug
} else {
signingConfig null
}
signingConfig signingConfigs.debug
}
dev {
@ -191,22 +187,14 @@ android {
// Signing and zip-aligning are skipped for prebuilt builds, but
// performed for Godot gradle builds.
zipAlignEnabled shouldZipAlign()
if (shouldSign()) {
signingConfig signingConfigs.debug
} else {
signingConfig null
}
signingConfig signingConfigs.debug
}
release {
// Signing and zip-aligning are skipped for prebuilt builds, but
// performed for Godot gradle builds.
zipAlignEnabled shouldZipAlign()
if (shouldSign()) {
signingConfig signingConfigs.release
} else {
signingConfig null
}
signingConfig signingConfigs.release
}
}
@ -266,15 +254,6 @@ task copyAndRenameBinary(type: Copy) {
rename sourceFilename, exportFilename
}
/**
* Used to validate the version of the Java SDK used for the Godot gradle builds.
*/
task validateJavaVersion {
if (JavaVersion.current() != versions.javaVersion) {
throw new GradleException("Invalid Java version ${JavaVersion.current()}. Version ${versions.javaVersion} is the required Java version for Godot gradle builds.")
}
}
/*
When they're scheduled to run, the copy*AARToAppModule tasks generate dependencies for the 'app'
module, so we're ensuring the ':app:preBuild' task is set to run after those tasks.

View File

@ -1,13 +1,13 @@
ext.versions = [
androidGradlePlugin: '8.2.0',
androidGradlePlugin: '8.5.0',
compileSdk : 34,
// Also update 'platform/android/export/export_plugin.cpp#OPENGL_MIN_SDK_VERSION'
minSdk : 21,
minSdk : 24,
// Also update 'platform/android/export/export_plugin.cpp#DEFAULT_TARGET_SDK_VERSION'
targetSdk : 34,
buildTools : '34.0.0',
kotlinVersion : '1.9.20',
fragmentVersion : '1.7.1',
fragmentVersion : '1.8.1',
nexusPublishVersion: '1.3.0',
javaVersion : JavaVersion.VERSION_17,
// Also update 'platform/android/detect.py#get_ndk_version()' when this is updated.

View File

@ -13,8 +13,11 @@
android:glEsVersion="0x00030000"
android:required="true" />
<uses-permission
android:name="android.permission.MANAGE_EXTERNAL_STORAGE"
<uses-feature
android:name="android.hardware.camera"
android:required="false" />
<uses-permission android:name="android.permission.MANAGE_EXTERNAL_STORAGE"
tools:ignore="ScopedStorage" />
<uses-permission
android:name="android.permission.WRITE_EXTERNAL_STORAGE"
@ -24,6 +27,7 @@
android:maxSdkVersion="29" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.VIBRATE" />
<application

View File

@ -1,6 +1,6 @@
#Wed Jan 17 12:08:26 PST 2024
#Tue Jul 09 10:59:26 CEST 2024
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.2-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

View File

@ -28,5 +28,4 @@
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/**************************************************************************/
#include <alloca.h>
#include <malloc.h>

View File

@ -36,14 +36,10 @@ void CameraTexture::_bind_methods() {
ClassDB::bind_method(D_METHOD("set_camera_feed_id", "feed_id"), &CameraTexture::set_camera_feed_id);
ClassDB::bind_method(D_METHOD("get_camera_feed_id"), &CameraTexture::get_camera_feed_id);
ClassDB::bind_method(D_METHOD("set_which_feed", "which_feed"), &CameraTexture::set_which_feed);
ClassDB::bind_method(D_METHOD("get_which_feed"), &CameraTexture::get_which_feed);
ClassDB::bind_method(D_METHOD("set_camera_active", "active"), &CameraTexture::set_camera_active);
ClassDB::bind_method(D_METHOD("get_camera_active"), &CameraTexture::get_camera_active);
ADD_PROPERTY(PropertyInfo(Variant::INT, "camera_feed_id"), "set_camera_feed_id", "get_camera_feed_id");
ADD_PROPERTY(PropertyInfo(Variant::INT, "which_feed"), "set_which_feed", "get_which_feed");
ADD_PROPERTY(PropertyInfo(Variant::BOOL, "camera_is_active"), "set_camera_active", "get_camera_active");
}
@ -55,7 +51,7 @@ void CameraTexture::_on_format_changed() {
int CameraTexture::get_width() const {
Ref<CameraFeed> feed = CameraServer::get_singleton()->get_feed_by_id(camera_feed_id);
if (feed.is_valid()) {
return feed->get_base_width();
return feed->get_width();
} else {
return 0;
}
@ -64,7 +60,7 @@ int CameraTexture::get_width() const {
int CameraTexture::get_height() const {
Ref<CameraFeed> feed = CameraServer::get_singleton()->get_feed_by_id(camera_feed_id);
if (feed.is_valid()) {
return feed->get_base_height();
return feed->get_height();
} else {
return 0;
}
@ -77,7 +73,7 @@ bool CameraTexture::has_alpha() const {
RID CameraTexture::get_rid() const {
Ref<CameraFeed> feed = CameraServer::get_singleton()->get_feed_by_id(camera_feed_id);
if (feed.is_valid()) {
return feed->get_texture(which_feed);
return feed->get_texture();
} else {
if (_texture.is_null()) {
_texture = RenderingServer::get_singleton()->texture_2d_placeholder_create();
@ -113,16 +109,6 @@ int CameraTexture::get_camera_feed_id() const {
return camera_feed_id;
}
void CameraTexture::set_which_feed(CameraServer::FeedImage p_which) {
which_feed = p_which;
notify_property_list_changed();
callable_mp((Resource *)this, &Resource::emit_changed).call_deferred();
}
CameraServer::FeedImage CameraTexture::get_which_feed() const {
return which_feed;
}
void CameraTexture::set_camera_active(bool p_active) {
Ref<CameraFeed> feed = CameraServer::get_singleton()->get_feed_by_id(camera_feed_id);
if (feed.is_valid()) {

View File

@ -39,7 +39,6 @@ class CameraTexture : public Texture2D {
private:
mutable RID _texture;
int camera_feed_id = 0;
CameraServer::FeedImage which_feed = CameraServer::FEED_RGBA_IMAGE;
protected:
static void _bind_methods();
@ -48,17 +47,14 @@ protected:
public:
virtual int get_width() const override;
virtual int get_height() const override;
virtual RID get_rid() const override;
virtual bool has_alpha() const override;
virtual RID get_rid() const override;
virtual Ref<Image> get_image() const override;
void set_camera_feed_id(int p_new_id);
int get_camera_feed_id() const;
void set_which_feed(CameraServer::FeedImage p_which);
CameraServer::FeedImage get_which_feed() const;
void set_camera_active(bool p_active);
bool get_camera_active() const;

View File

@ -34,28 +34,24 @@
void CameraFeed::_bind_methods() {
ClassDB::bind_method(D_METHOD("get_id"), &CameraFeed::get_id);
ClassDB::bind_method(D_METHOD("get_name"), &CameraFeed::get_name);
ClassDB::bind_method(D_METHOD("get_position"), &CameraFeed::get_position);
ClassDB::bind_method(D_METHOD("get_width"), &CameraFeed::get_width);
ClassDB::bind_method(D_METHOD("get_heigth"), &CameraFeed::get_height);
ClassDB::bind_method(D_METHOD("get_datatype"), &CameraFeed::get_datatype);
ClassDB::bind_method(D_METHOD("is_active"), &CameraFeed::is_active);
ClassDB::bind_method(D_METHOD("set_active", "active"), &CameraFeed::set_active);
ClassDB::bind_method(D_METHOD("get_name"), &CameraFeed::get_name);
ClassDB::bind_method(D_METHOD("set_name", "name"), &CameraFeed::set_name);
ClassDB::bind_method(D_METHOD("get_position"), &CameraFeed::get_position);
ClassDB::bind_method(D_METHOD("set_position", "position"), &CameraFeed::set_position);
// Note, for transform some feeds may override what the user sets (such as ARKit)
ClassDB::bind_method(D_METHOD("get_transform"), &CameraFeed::get_transform);
ClassDB::bind_method(D_METHOD("set_transform", "transform"), &CameraFeed::set_transform);
ClassDB::bind_method(D_METHOD("set_rgb_image", "rgb_image"), &CameraFeed::set_rgb_image);
ClassDB::bind_method(D_METHOD("set_ycbcr_image", "ycbcr_image"), &CameraFeed::set_ycbcr_image);
ClassDB::bind_method(D_METHOD("get_datatype"), &CameraFeed::get_datatype);
ClassDB::bind_method(D_METHOD("get_formats"), &CameraFeed::get_formats);
ClassDB::bind_method(D_METHOD("set_format", "index", "parameters"), &CameraFeed::set_format);
ADD_SIGNAL(MethodInfo("frame_changed"));
ADD_SIGNAL(MethodInfo("format_changed"));
@ -64,14 +60,16 @@ void CameraFeed::_bind_methods() {
ADD_PROPERTY(PropertyInfo(Variant::TRANSFORM2D, "feed_transform"), "set_transform", "get_transform");
ADD_PROPERTY(PropertyInfo(Variant::ARRAY, "formats"), "", "get_formats");
BIND_ENUM_CONSTANT(FEED_NOIMAGE);
BIND_ENUM_CONSTANT(FEED_RGB);
BIND_ENUM_CONSTANT(FEED_YCBCR);
BIND_ENUM_CONSTANT(FEED_YCBCR_SEP);
BIND_ENUM_CONSTANT(FEED_UNSPECIFIED);
BIND_ENUM_CONSTANT(FEED_FRONT);
BIND_ENUM_CONSTANT(FEED_BACK);
BIND_ENUM_CONSTANT(FEED_UNSUPPORTED);
BIND_ENUM_CONSTANT(FEED_RGB);
BIND_ENUM_CONSTANT(FEED_RGBA);
BIND_ENUM_CONSTANT(FEED_YCBCR);
BIND_ENUM_CONSTANT(FEED_YCBCR_SEP);
BIND_ENUM_CONSTANT(FEED_NV12);
}
int CameraFeed::get_id() const {
@ -101,28 +99,20 @@ String CameraFeed::get_name() const {
return name;
}
void CameraFeed::set_name(String p_name) {
name = p_name;
int CameraFeed::get_width() const {
return width;
}
int CameraFeed::get_base_width() const {
return base_width;
}
int CameraFeed::get_base_height() const {
return base_height;
}
CameraFeed::FeedDataType CameraFeed::get_datatype() const {
return datatype;
int CameraFeed::get_height() const {
return height;
}
CameraFeed::FeedPosition CameraFeed::get_position() const {
return position;
}
void CameraFeed::set_position(CameraFeed::FeedPosition p_position) {
position = p_position;
CameraFeed::FeedDataType CameraFeed::get_datatype() const {
return datatype;
}
Transform2D CameraFeed::get_transform() const {
@ -133,122 +123,54 @@ void CameraFeed::set_transform(const Transform2D &p_transform) {
transform = p_transform;
}
RID CameraFeed::get_texture(CameraServer::FeedImage p_which) {
return texture[p_which];
RID CameraFeed::get_texture() const {
return texture;
}
Ref<Image> CameraFeed::get_image(RenderingServer::CanvasTextureChannel channel) {
return channel_image[channel];
}
void CameraFeed::set_image(RenderingServer::CanvasTextureChannel channel, const Ref<Image> &image) {
if (channel_image[channel] != image) {
channel_image[channel] = image;
RenderingServer::get_singleton()->free(channel_texture[channel]);
channel_texture[channel] = RenderingServer::get_singleton()->texture_2d_create(image);
RenderingServer::get_singleton()->canvas_texture_set_channel(texture, channel, channel_texture[channel]);
} else {
RenderingServer::get_singleton()->texture_2d_update(channel_texture[channel], image);
}
}
void CameraFeed::set_image(RenderingServer::CanvasTextureChannel channel, uint8_t *data, size_t offset, size_t len) {
Ref<Image> image = channel_image[channel];
ERR_FAIL_COND_MSG(image.is_null(), "Channel not initialized");
Vector<uint8_t> image_data = image->get_data();
uint8_t *dest = image_data.ptrw();
memcpy(dest, data + offset, len);
image->set_data(image->get_width(), image->get_height(), false, image->get_format(), image_data);
RenderingServer::get_singleton()->texture_2d_update(channel_texture[channel], image);
}
CameraFeed::CameraFeed() {
// initialize our feed
id = CameraServer::get_singleton()->get_free_id();
base_width = 0;
base_height = 0;
name = "???";
name = "?";
width = 0;
height = 0;
active = false;
datatype = CameraFeed::FEED_RGB;
position = CameraFeed::FEED_UNSPECIFIED;
datatype = CameraFeed::FEED_UNSUPPORTED;
transform = Transform2D(1.0, 0.0, 0.0, -1.0, 0.0, 1.0);
texture[CameraServer::FEED_Y_IMAGE] = RenderingServer::get_singleton()->texture_2d_placeholder_create();
texture[CameraServer::FEED_CBCR_IMAGE] = RenderingServer::get_singleton()->texture_2d_placeholder_create();
}
CameraFeed::CameraFeed(String p_name, FeedPosition p_position) {
// initialize our feed
id = CameraServer::get_singleton()->get_free_id();
base_width = 0;
base_height = 0;
name = p_name;
active = false;
datatype = CameraFeed::FEED_NOIMAGE;
position = p_position;
transform = Transform2D(1.0, 0.0, 0.0, -1.0, 0.0, 1.0);
texture[CameraServer::FEED_Y_IMAGE] = RenderingServer::get_singleton()->texture_2d_placeholder_create();
texture[CameraServer::FEED_CBCR_IMAGE] = RenderingServer::get_singleton()->texture_2d_placeholder_create();
texture = RenderingServer::get_singleton()->canvas_texture_create();
}
CameraFeed::~CameraFeed() {
// Free our textures
ERR_FAIL_NULL(RenderingServer::get_singleton());
RenderingServer::get_singleton()->free(texture[CameraServer::FEED_Y_IMAGE]);
RenderingServer::get_singleton()->free(texture[CameraServer::FEED_CBCR_IMAGE]);
}
void CameraFeed::set_rgb_image(const Ref<Image> &p_rgb_img) {
ERR_FAIL_COND(p_rgb_img.is_null());
if (active) {
int new_width = p_rgb_img->get_width();
int new_height = p_rgb_img->get_height();
if ((base_width != new_width) || (base_height != new_height)) {
// We're assuming here that our camera image doesn't change around formats etc, allocate the whole lot...
base_width = new_width;
base_height = new_height;
RID new_texture = RenderingServer::get_singleton()->texture_2d_create(p_rgb_img);
RenderingServer::get_singleton()->texture_replace(texture[CameraServer::FEED_RGBA_IMAGE], new_texture);
emit_signal(SNAME("format_changed"));
} else {
RenderingServer::get_singleton()->texture_2d_update(texture[CameraServer::FEED_RGBA_IMAGE], p_rgb_img);
}
datatype = CameraFeed::FEED_RGB;
}
}
void CameraFeed::set_ycbcr_image(const Ref<Image> &p_ycbcr_img) {
ERR_FAIL_COND(p_ycbcr_img.is_null());
if (active) {
int new_width = p_ycbcr_img->get_width();
int new_height = p_ycbcr_img->get_height();
if ((base_width != new_width) || (base_height != new_height)) {
// We're assuming here that our camera image doesn't change around formats etc, allocate the whole lot...
base_width = new_width;
base_height = new_height;
RID new_texture = RenderingServer::get_singleton()->texture_2d_create(p_ycbcr_img);
RenderingServer::get_singleton()->texture_replace(texture[CameraServer::FEED_RGBA_IMAGE], new_texture);
emit_signal(SNAME("format_changed"));
} else {
RenderingServer::get_singleton()->texture_2d_update(texture[CameraServer::FEED_RGBA_IMAGE], p_ycbcr_img);
}
datatype = CameraFeed::FEED_YCBCR;
}
}
void CameraFeed::set_ycbcr_images(const Ref<Image> &p_y_img, const Ref<Image> &p_cbcr_img) {
ERR_FAIL_COND(p_y_img.is_null());
ERR_FAIL_COND(p_cbcr_img.is_null());
if (active) {
///@TODO investigate whether we can use thirdparty/misc/yuv2rgb.h here to convert our YUV data to RGB, our shader approach is potentially faster though..
// Wondering about including that into multiple projects, may cause issues.
// That said, if we convert to RGB, we could enable using texture resources again...
int new_y_width = p_y_img->get_width();
int new_y_height = p_y_img->get_height();
if ((base_width != new_y_width) || (base_height != new_y_height)) {
// We're assuming here that our camera image doesn't change around formats etc, allocate the whole lot...
base_width = new_y_width;
base_height = new_y_height;
{
RID new_texture = RenderingServer::get_singleton()->texture_2d_create(p_y_img);
RenderingServer::get_singleton()->texture_replace(texture[CameraServer::FEED_Y_IMAGE], new_texture);
}
{
RID new_texture = RenderingServer::get_singleton()->texture_2d_create(p_cbcr_img);
RenderingServer::get_singleton()->texture_replace(texture[CameraServer::FEED_CBCR_IMAGE], new_texture);
}
emit_signal(SNAME("format_changed"));
} else {
RenderingServer::get_singleton()->texture_2d_update(texture[CameraServer::FEED_Y_IMAGE], p_y_img);
RenderingServer::get_singleton()->texture_2d_update(texture[CameraServer::FEED_CBCR_IMAGE], p_cbcr_img);
}
datatype = CameraFeed::FEED_YCBCR_SEP;
RenderingServer::get_singleton()->free(texture);
for (size_t i = 0; i < 3; i++) {
RenderingServer::get_singleton()->free(channel_texture[i]);
}
}
@ -260,16 +182,3 @@ bool CameraFeed::activate_feed() {
void CameraFeed::deactivate_feed() {
// nothing to do here
}
bool CameraFeed::set_format(int p_index, const Dictionary &p_parameters) {
return false;
}
Array CameraFeed::get_formats() const {
return Array();
}
CameraFeed::FeedFormat CameraFeed::get_format() const {
FeedFormat feed_format = {};
return feed_format;
}

View File

@ -45,22 +45,28 @@ class CameraFeed : public RefCounted {
GDCLASS(CameraFeed, RefCounted);
public:
enum FeedDataType {
FEED_NOIMAGE, // we don't have an image yet
FEED_RGB, // our texture will contain a normal RGB texture that can be used directly
FEED_YCBCR, // our texture will contain a YCbCr texture that needs to be converted to RGB before output
FEED_YCBCR_SEP // our camera is split into two textures, first plane contains Y data, second plane contains CbCr data
};
enum FeedPosition {
FEED_UNSPECIFIED, // we have no idea
FEED_FRONT, // this is a camera on the front of the device
FEED_BACK // this is a camera on the back of the device
};
enum FeedDataType {
FEED_UNSUPPORTED, // unsupported type
FEED_RGB, // TEXTURE contains RGB data
FEED_RGBA, // TEXTURE contains RGBA data
FEED_NV12, // TEXTURE contains Y data, NORMAL_TEXTURE contains CbCr data
FEED_YCBCR, // TEXTURE contains YCbCr data
FEED_YCBCR_SEP // TEXTURE contains Y data, NORMAL_TEXTURE contains Cb data, SPECULAR_TEXTURE contains Cr data
};
private:
int id; // unique id for this, for internal use in case feeds are removed
RID texture; // layered texture
RID channel_texture[3]; // channel textures
Ref<Image> channel_image[3]; // channel images
protected:
struct FeedFormat {
int width = 0;
@ -72,8 +78,11 @@ protected:
};
String name; // name of our camera feed
FeedDataType datatype; // type of texture data stored
FeedDataType datatype; // type of texture data stored
FeedPosition position; // position of camera on the device
int width; // width of camera frames
int height; // height of camera frames
Transform2D transform; // display transform
int base_width = 0;
int base_height = 0;
@ -82,47 +91,37 @@ protected:
int selected_format = -1;
bool active; // only when active do we actually update the camera texture each frame
RID texture[CameraServer::FEED_IMAGES]; // texture images needed for this
static void _bind_methods();
public:
int get_id() const;
String get_name() const;
int get_width() const;
int get_height() const;
FeedPosition get_position() const;
FeedDataType get_datatype() const;
RID get_texture() const;
bool is_active() const;
void set_active(bool p_is_active);
String get_name() const;
void set_name(String p_name);
int get_base_width() const;
int get_base_height() const;
FeedPosition get_position() const;
void set_position(FeedPosition p_position);
Transform2D get_transform() const;
void set_transform(const Transform2D &p_transform);
RID get_texture(CameraServer::FeedImage p_which);
Ref<Image> get_image(RenderingServer::CanvasTextureChannel channel);
void set_image(RenderingServer::CanvasTextureChannel channel, const Ref<Image> &image);
void set_image(RenderingServer::CanvasTextureChannel channel, uint8_t *data, size_t offset, size_t len);
CameraFeed();
CameraFeed(String p_name, FeedPosition p_position = CameraFeed::FEED_UNSPECIFIED);
virtual ~CameraFeed();
FeedDataType get_datatype() const;
void set_rgb_image(const Ref<Image> &p_rgb_img);
void set_ycbcr_image(const Ref<Image> &p_ycbcr_img);
void set_ycbcr_images(const Ref<Image> &p_y_img, const Ref<Image> &p_cbcr_img);
virtual bool set_format(int p_index, const Dictionary &p_parameters);
virtual Array get_formats() const;
virtual FeedFormat get_format() const;
virtual bool activate_feed();
virtual void deactivate_feed();
};
VARIANT_ENUM_CAST(CameraFeed::FeedDataType);
VARIANT_ENUM_CAST(CameraFeed::FeedPosition);
VARIANT_ENUM_CAST(CameraFeed::FeedDataType);
#endif // CAMERA_FEED_H

View File

@ -40,6 +40,7 @@ CameraServer::CreateFunc CameraServer::create_func = nullptr;
void CameraServer::_bind_methods() {
ClassDB::bind_method(D_METHOD("get_feed", "index"), &CameraServer::get_feed);
ClassDB::bind_method(D_METHOD("get_feed_by_id", "feed_id"), &CameraServer::get_feed_by_id);
ClassDB::bind_method(D_METHOD("get_feed_count"), &CameraServer::get_feed_count);
ClassDB::bind_method(D_METHOD("feeds"), &CameraServer::get_feeds);
@ -150,15 +151,6 @@ TypedArray<CameraFeed> CameraServer::get_feeds() {
return return_feeds;
};
RID CameraServer::feed_texture(int p_id, CameraServer::FeedImage p_texture) {
int index = get_feed_index(p_id);
ERR_FAIL_COND_V(index == -1, RID());
Ref<CameraFeed> feed = get_feed(index);
return feed->get_texture(p_texture);
};
CameraServer::CameraServer() {
singleton = this;
};

View File

@ -104,9 +104,6 @@ public:
int get_feed_count();
TypedArray<CameraFeed> get_feeds();
// Intended for use with custom CameraServer implementation.
RID feed_texture(int p_id, FeedImage p_texture);
CameraServer();
~CameraServer();
};

View File

@ -4,3 +4,11 @@ from misc.utility.scons_hints import *
Import("env")
env.add_source_files(env.servers_sources, "*.cpp")
# also requires libogg, libtheora and libvorbis
if env["builtin_libogg"]:
env.Prepend(CPPPATH=["#thirdparty/libogg"])
if env["builtin_libtheora"]:
env.Prepend(CPPPATH=["#thirdparty/libtheora", "#thirdparty/misc"])
if env["builtin_libvorbis"]:
env.Prepend(CPPPATH=["#thirdparty/libvorbis"])

View File

@ -141,7 +141,9 @@ void MovieWriter::_bind_methods() {
GLOBAL_DEF(PropertyInfo(Variant::INT, "editor/movie_writer/mix_rate", PROPERTY_HINT_RANGE, "8000,192000,1,suffix:Hz"), 48000);
GLOBAL_DEF(PropertyInfo(Variant::INT, "editor/movie_writer/speaker_mode", PROPERTY_HINT_ENUM, "Stereo,3.1,5.1,7.1"), 0);
GLOBAL_DEF(PropertyInfo(Variant::FLOAT, "editor/movie_writer/mjpeg_quality", PROPERTY_HINT_RANGE, "0.01,1.0,0.01"), 0.75);
GLOBAL_DEF(PropertyInfo(Variant::FLOAT, "editor/movie_writer/video_quality", PROPERTY_HINT_RANGE, "0.0,1.0,0.01"), 0.75);
GLOBAL_DEF(PropertyInfo(Variant::FLOAT, "editor/movie_writer/audio_quality", PROPERTY_HINT_RANGE, "0.0,1.0,0.01"), 0.2);
// Used by the editor.
GLOBAL_DEF_BASIC("editor/movie_writer/movie_file", "");
GLOBAL_DEF_BASIC("editor/movie_writer/disable_vsync", false);

View File

@ -85,6 +85,7 @@ public:
void begin(const Size2i &p_movie_size, uint32_t p_fps, const String &p_base_path);
void add_frame();
void add_frame(const Ref<Image> image);
static void set_extensions_hint();

View File

@ -259,5 +259,5 @@ void MovieWriterMJPEG::write_end() {
MovieWriterMJPEG::MovieWriterMJPEG() {
mix_rate = GLOBAL_GET("editor/movie_writer/mix_rate");
speaker_mode = AudioServer::SpeakerMode(int(GLOBAL_GET("editor/movie_writer/speaker_mode")));
quality = GLOBAL_GET("editor/movie_writer/mjpeg_quality");
quality = GLOBAL_GET("editor/movie_writer/video_quality");
}

View File

@ -0,0 +1,426 @@
/**************************************************************************/
/* movie_writer_ogv.cpp */
/**************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/**************************************************************************/
/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/**************************************************************************/
#include "movie_writer_ogv.h"
#include "core/config/project_settings.h"
#include <rgb2yuv.h>
// Feeds one frame's worth of interleaved 32-bit PCM into the Vorbis encoder
// (or signals end-of-stream when `p_audio_data` is null) and tries to pull one
// completed Ogg page into `audiopage`.
// Returns 1 when `audiopage` holds a page ready to be written, 0 otherwise.
int MovieWriterOGV::encode_audio(const int32_t *p_audio_data) {
	ogg_packet op;
	// Once the audio stream has been finalized there is nothing left to emit.
	if (ogg_stream_eos(&vo))
		return 0;
	if (p_audio_data == nullptr) {
		/* end of file. this can be done implicitly, but it's
		easier to see here in non-clever fashion. Tell the
		library we're at end of stream so that it can handle the
		last frame and mark end of stream in the output properly */
		vorbis_analysis_wrote(&vd, 0);
	} else {
		/* read and process more audio */
		float **vorbis_buffer = vorbis_analysis_buffer(&vd, audio_frames);
		/* uninterleave samples */
		uint32_t count = 0;
		for (uint32_t i = 0; i < audio_frames; i++) {
			for (uint32_t j = 0; j < audio_ch; j++) {
				// Convert full-scale int32 samples to the [-1, 1] float range
				// Vorbis expects.
				vorbis_buffer[j][i] = p_audio_data[count] / 2147483647.f;
				count++;
			}
		}
		vorbis_analysis_wrote(&vd, audio_frames);
	}
	while (vorbis_analysis_blockout(&vd, &vb) > 0) {
		/* analysis, assume we want to use bitrate management */
		vorbis_analysis(&vb, NULL);
		vorbis_bitrate_addblock(&vb);
		/* weld packets into the bitstream */
		while (vorbis_bitrate_flushpacket(&vd, &op) > 0) {
			ogg_stream_packetin(&vo, &op);
		}
	}
	// At most one page is pulled per call; any further completed pages stay
	// buffered in the stream state until the next call.
	if (ogg_stream_pageout(&vo, &audiopage) > 0)
		return 1;
	return 0;
}
// Converts one captured frame to YCbCr 4:2:0, submits it to the Theora encoder
// (or flushes the encoder when `p_image` is null, marking end-of-stream) and
// tries to pull one completed Ogg page into `videopage`.
// Returns 1 when `videopage` holds a page ready to be written, 0 otherwise.
int MovieWriterOGV::encode_video(const Ref<Image> &p_image) {
	ogg_packet op;
	// Once the video stream has been finalized there is nothing left to emit.
	if (ogg_stream_eos(&to))
		return 0;
	if (p_image != nullptr) {
		PackedByteArray data = p_image->get_data();
		// NOTE(review): rgb2yuv420() assumes tightly packed 24-bit RGB data
		// and even width/height — TODO confirm the captured Image format is
		// always FORMAT_RGB8 with even dimensions.
		rgb2yuv420(y, u, v, data.ptrw(), p_image->get_width(), p_image->get_height());
		/*We submit the buffer using the size of the picture region. libtheora will pad the picture region out to the full frame size for us,
		whether we pass in a full frame or not.*/
		ycbcr[0].width = p_image->get_width();
		ycbcr[0].height = p_image->get_height();
		ycbcr[0].stride = p_image->get_width();
		ycbcr[0].data = y;
		// Chroma planes are subsampled 2x in both directions (TH_PF_420).
		ycbcr[1].width = p_image->get_width() / 2;
		ycbcr[1].height = p_image->get_height() / 2;
		ycbcr[1].stride = p_image->get_width() / 2;
		ycbcr[1].data = u;
		ycbcr[2].width = p_image->get_width() / 2;
		ycbcr[2].height = p_image->get_height() / 2;
		ycbcr[2].stride = p_image->get_width() / 2;
		ycbcr[2].data = v;
		th_encode_ycbcr_in(td, ycbcr);
	}
	// Drain all packets produced so far; the second argument tells the encoder
	// whether this is the last call (null image == end of stream).
	int ret = 0;
	do {
		ret = th_encode_packetout(td, p_image == nullptr, &op);
		if (ret > 0)
			ogg_stream_packetin(&to, &op);
	} while (ret > 0);
	// At most one page is pulled per call; further pages stay buffered.
	if (ogg_stream_pageout(&to, &videopage) > 0)
		return 1;
	return 0;
}
// Audio sample rate the writer was configured with (project setting).
uint32_t MovieWriterOGV::get_audio_mix_rate() const {
	return mix_rate;
}
// Speaker layout the writer was configured with (project setting).
AudioServer::SpeakerMode MovieWriterOGV::get_audio_speaker_mode() const {
	return speaker_mode;
}
bool MovieWriterOGV::handles_file(const String &p_path) const {
return p_path.get_extension().to_lower() == "ogv";
}
// This writer produces Ogg/Theora files only, so it advertises one extension.
void MovieWriterOGV::get_supported_extensions(List<String> *r_extensions) const {
	r_extensions->push_back("ogv");
}
// Opens the output file and sets up the muxed Ogg container: a Theora video
// stream and a Vorbis audio stream, including all bitstream header pages.
// Returns ERR_CANT_OPEN if the file cannot be created, ERR_UNAVAILABLE if the
// Vorbis encoder rejects the requested mode, ERR_UNCONFIGURED on internal
// Theora/Vorbis/Ogg errors, OK otherwise.
Error MovieWriterOGV::write_begin(const Size2i &p_movie_size, uint32_t p_fps, const String &p_base_path) {
	// Resolve the output path and open the target file.
	base_path = p_base_path.get_basename();
	if (base_path.is_relative_path()) {
		base_path = "res://" + base_path;
	}
	base_path += ".ogv";

	f = FileAccess::open(base_path, FileAccess::WRITE_READ);
	ERR_FAIL_COND_V(f.is_null(), ERR_CANT_OPEN);

	fps = p_fps;
	speed = 4;

	// Map the configured speaker mode to a channel count.
	audio_ch = 2;
	switch (speaker_mode) {
		case AudioServer::SPEAKER_MODE_STEREO:
			audio_ch = 2;
			break;
		case AudioServer::SPEAKER_SURROUND_31:
			audio_ch = 4;
			break;
		case AudioServer::SPEAKER_SURROUND_51:
			audio_ch = 6;
			break;
		case AudioServer::SPEAKER_SURROUND_71:
			audio_ch = 8;
			break;
	}
	// PCM frames delivered per video frame.
	audio_frames = mix_rate / fps;

	/* Set up Ogg output streams; each logical stream needs a distinct serial number. */
	srand(time(nullptr));
	ogg_stream_init(&to, rand()); // video
	ogg_stream_init(&vo, rand()); // audio

	/* Initialize Vorbis audio encoding: quality-based VBR unless an explicit
	   bitrate target (audio_r, in kbps) was requested. */
	vorbis_info_init(&vi);
	int ret = 0;
	if (audio_r == 0) {
		ret = vorbis_encode_init_vbr(&vi, audio_ch, mix_rate, audio_q);
	} else {
		ret = vorbis_encode_init(&vi, audio_ch, mix_rate, -1, (int)(64870 * (ogg_int64_t)audio_r >> 16), -1);
	}
	ERR_FAIL_COND_V_MSG(ret, ERR_UNAVAILABLE, "The Vorbis encoder could not set up a mode according to the requested quality or bitrate.");

	vorbis_comment_init(&vc);
	vorbis_analysis_init(&vd, &vi);
	vorbis_block_init(&vd, &vb);

	/* Set up the Theora encoder. Theora has a divisible-by-sixteen restriction
	   for the encoded frame size, so scale the picture size up to the nearest
	   multiple of 16 and calculate offsets. */
	int pic_w = p_movie_size.width;
	int pic_h = p_movie_size.height;
	int frame_w = (pic_w + 15) & ~0xF;
	int frame_h = (pic_h + 15) & ~0xF;
	/* Force the offsets to be even so that chroma samples line up like we expect. */
	int pic_x = (frame_w - pic_w) / 2 & ~1;
	int pic_y = (frame_h - pic_h) / 2 & ~1;

	// Planar YCbCr 4:2:0 buffers. Chroma plane sizes are rounded up so odd
	// picture dimensions cannot overflow the buffers.
	// NOTE(review): these allocations (and `td` below) are leaked on the
	// ERR_FAIL error paths further down — TODO add cleanup on failure.
	y = (uint8_t *)memalloc(pic_w * pic_h);
	u = (uint8_t *)memalloc(((pic_w + 1) / 2) * ((pic_h + 1) / 2));
	v = (uint8_t *)memalloc(((pic_w + 1) / 2) * ((pic_h + 1) / 2));

	th_info_init(&ti);
	ti.frame_width = frame_w;
	ti.frame_height = frame_h;
	ti.pic_width = pic_w;
	ti.pic_height = pic_h;
	ti.pic_x = pic_x;
	ti.pic_y = pic_y;
	ti.fps_numerator = fps;
	ti.fps_denominator = 1;
	ti.aspect_numerator = 1;
	ti.aspect_denominator = 1;
	ti.colorspace = TH_CS_UNSPECIFIED;
	/*Account for the Ogg page overhead.
	This is 1 byte per 255 for lacing values, plus 26 bytes per 4096 bytes for
	the page header, plus approximately 1/2 byte per packet (not accounted for
	here).*/
	ti.target_bitrate = (int)(64870 * (ogg_int64_t)video_r >> 16);
	ti.quality = video_q * 63;
	ti.pixel_fmt = TH_PF_420;

	td = th_encode_alloc(&ti);
	th_info_clear(&ti);
	ERR_FAIL_COND_V_MSG(td == nullptr, ERR_UNCONFIGURED, "Error: Could not create an encoder instance. Check that video parameters are valid.");

	/* Setting just the granule shift only allows power-of-two keyframe spacing. Set the actual requested spacing. */
	ret = th_encode_ctl(td, TH_ENCCTL_SET_KEYFRAME_FREQUENCY_FORCE, &keyframe_frequency, sizeof(keyframe_frequency));
	if (ret < 0) {
		ERR_PRINT("Could not set keyframe interval");
	}

	if (vp3_compatible) {
		ret = th_encode_ctl(td, TH_ENCCTL_SET_VP3_COMPATIBLE, &vp3_compatible, sizeof(vp3_compatible));
		if (ret < 0) {
			ERR_PRINT("Could not enable strict VP3 compatibility");
		}
	}

	/* reverse the rate control flags to favor a 'long time' strategy */
	if (soft_target) {
		int arg = TH_RATECTL_CAP_UNDERFLOW;
		ret = th_encode_ctl(td, TH_ENCCTL_SET_RATE_FLAGS, &arg, sizeof(arg));
		if (ret < 0) {
			ERR_PRINT("Could not set encoder flags for soft-target");
		}
		if (buf_delay < 0) {
			// Default the rate buffer to 3.5x the keyframe interval, but at
			// least 5 seconds worth of frames.
			if ((keyframe_frequency * 7 >> 1) > 5 * fps) {
				arg = keyframe_frequency * 7 >> 1;
			} else {
				arg = 5 * fps;
			}
			ret = th_encode_ctl(td, TH_ENCCTL_SET_RATE_BUFFER, &arg, sizeof(arg));
			if (ret < 0) {
				ERR_PRINT("Could not set rate control buffer for soft-target");
			}
		}
	}
	/* Now we can set the buffer delay if the user requested a non-default one
	   (this has to be done after two-pass is enabled).*/
	if (buf_delay >= 0) {
		ret = th_encode_ctl(td, TH_ENCCTL_SET_RATE_BUFFER, &buf_delay, sizeof(buf_delay));
		if (ret < 0) {
			WARN_PRINT("Warning: could not set desired buffer delay");
		}
	}
	/*Speed should also be set after the current encoder mode is established,
	  since the available speed levels may change depending.*/
	if (speed >= 0) {
		int speed_max;
		ret = th_encode_ctl(td, TH_ENCCTL_GET_SPLEVEL_MAX, &speed_max, sizeof(speed_max));
		if (ret < 0) {
			WARN_PRINT("Warning: could not determine maximum speed level.");
			speed_max = 0;
		}
		ret = th_encode_ctl(td, TH_ENCCTL_SET_SPLEVEL, &speed, sizeof(speed));
		if (ret < 0) {
			// print_line() does not interpret printf-style varargs; format
			// explicitly with vformat().
			print_line(vformat("Warning: could not set speed level to %d of %d", speed, speed_max));
			if (speed > speed_max) {
				print_line(vformat("Setting it to %d instead", speed_max));
			}
			ret = th_encode_ctl(td, TH_ENCCTL_SET_SPLEVEL, &speed_max, sizeof(speed_max));
			if (ret < 0) {
				print_line(vformat("Warning: could not set speed level to %d of %d", speed_max, speed_max));
			}
		}
	}

	/* write the bitstream header packets with proper page interleave */
	th_comment_init(&tc);
	/* first packet will get its own page automatically */
	ogg_packet op;
	if (th_encode_flushheader(td, &tc, &op) <= 0) {
		ERR_FAIL_V_MSG(ERR_UNCONFIGURED, "Internal Theora library error.");
	}
	ogg_stream_packetin(&to, &op);
	if (ogg_stream_pageout(&to, &videopage) != 1) {
		ERR_FAIL_V_MSG(ERR_UNCONFIGURED, "Internal Ogg library error.");
	}
	f->store_buffer(videopage.header, videopage.header_len);
	f->store_buffer(videopage.body, videopage.body_len);

	/* create the remaining theora headers */
	for (;;) {
		ret = th_encode_flushheader(td, &tc, &op);
		if (ret < 0) {
			ERR_FAIL_V_MSG(ERR_UNCONFIGURED, "Internal Theora library error.");
		} else if (ret == 0) {
			break;
		}
		ogg_stream_packetin(&to, &op);
	}

	/* vorbis streams start with three standard header packets. */
	ogg_packet id;
	ogg_packet comment;
	ogg_packet code;
	if (vorbis_analysis_headerout(&vd, &vc, &id, &comment, &code) < 0) {
		ERR_FAIL_V_MSG(ERR_UNCONFIGURED, "Internal Vorbis library error.");
	}
	/* id header is automatically placed in its own page */
	ogg_stream_packetin(&vo, &id);
	if (ogg_stream_pageout(&vo, &audiopage) != 1) {
		ERR_FAIL_V_MSG(ERR_UNCONFIGURED, "Internal Ogg library error.");
	}
	f->store_buffer(audiopage.header, audiopage.header_len);
	f->store_buffer(audiopage.body, audiopage.body_len);

	/* append remaining vorbis header packets */
	ogg_stream_packetin(&vo, &comment);
	ogg_stream_packetin(&vo, &code);

	/* Flush the rest of our headers. This ensures the actual data in each stream will start on a new page, as per spec. */
	for (;;) {
		ret = ogg_stream_flush(&to, &videopage);
		if (ret < 0) {
			ERR_FAIL_V_MSG(ERR_UNCONFIGURED, "Internal Ogg library error.");
		} else if (ret == 0) {
			break;
		}
		f->store_buffer(videopage.header, videopage.header_len);
		f->store_buffer(videopage.body, videopage.body_len);
	}
	for (;;) {
		ret = ogg_stream_flush(&vo, &audiopage);
		if (ret < 0) {
			ERR_FAIL_V_MSG(ERR_UNCONFIGURED, "Internal Ogg library error.");
		} else if (ret == 0) {
			break;
		}
		f->store_buffer(audiopage.header, audiopage.header_len);
		f->store_buffer(audiopage.body, audiopage.body_len);
	}
	return OK;
}
// Encodes one video frame and its audio, then writes whichever stream's
// flushed page ends earlier, keeping the two streams interleaved in
// presentation order. Null arguments signal end of stream.
Error MovieWriterOGV::write_frame(const Ref<Image> &p_image, const int32_t *p_audio_data) {
	ERR_FAIL_COND_V(!f.is_valid() || td == nullptr, ERR_UNCONFIGURED);

	// Pull at most one flushed page from each encoder.
	const bool has_audio_page = encode_audio(p_audio_data) != 0;
	const bool has_video_page = encode_video(p_image) != 0;

	// No pages from either stream: nothing left to mux.
	if (!has_audio_page && !has_video_page) {
		return OK;
	}

	// Decide which page ends earlier in presentation time; that one goes to
	// the file first. When only one stream produced a page, flush that one.
	bool flush_video;
	if (!has_audio_page) {
		flush_video = true;
	} else if (!has_video_page) {
		flush_video = false;
	} else {
		const double audio_time = vorbis_granule_time(&vd, ogg_page_granulepos(&audiopage));
		const double video_time = th_granule_time(td, ogg_page_granulepos(&videopage));
		flush_video = audio_time >= video_time;
	}

	if (flush_video) {
		f->store_buffer(videopage.header, videopage.header_len);
		f->store_buffer(videopage.body, videopage.body_len);
	} else {
		f->store_buffer(audiopage.header, audiopage.header_len);
		f->store_buffer(audiopage.body, audiopage.body_len);
	}

	frame_count++;
	return OK;
}
// Finalizes the capture: signals end-of-stream to both encoders, tears down
// all Theora/Vorbis/Ogg state and releases the conversion buffers.
// Safe to call more than once after a successful write_begin(): freed
// pointers are nulled so they cannot be double-freed.
void MovieWriterOGV::write_end() {
	// Null image/audio marks EOS on both streams and flushes a final page.
	// NOTE(review): a single write_frame() call emits at most one page per
	// stream; pages still buffered at EOS may never reach the file — TODO
	// confirm on long captures and drain both streams until empty.
	write_frame(nullptr, nullptr);

	if (td != nullptr) {
		th_encode_free(td);
		td = nullptr;
	}
	ogg_stream_clear(&vo);
	vorbis_block_clear(&vb);
	vorbis_dsp_clear(&vd);
	vorbis_comment_clear(&vc);
	vorbis_info_clear(&vi);
	ogg_stream_clear(&to);
	th_comment_clear(&tc);

	// Release the YCbCr planes allocated in write_begin().
	if (y != nullptr) {
		memfree(y);
		y = nullptr;
	}
	if (u != nullptr) {
		memfree(u);
		u = nullptr;
	}
	if (v != nullptr) {
		memfree(v);
		v = nullptr;
	}

	if (f.is_valid()) {
		f.unref();
	}
}
// Pulls the encoder defaults from the project settings registered by
// MovieWriter::_bind_methods().
MovieWriterOGV::MovieWriterOGV() {
	mix_rate = GLOBAL_GET("editor/movie_writer/mix_rate");
	const int speaker_mode_setting = int(GLOBAL_GET("editor/movie_writer/speaker_mode"));
	speaker_mode = AudioServer::SpeakerMode(speaker_mode_setting);
	video_q = GLOBAL_GET("editor/movie_writer/video_quality");
	audio_q = GLOBAL_GET("editor/movie_writer/audio_quality");
}

View File

@ -0,0 +1,146 @@
/**************************************************************************/
/* movie_writer_ogv.h */
/**************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/**************************************************************************/
/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/**************************************************************************/
#ifndef MOVIE_WRITER_OGV_H
#define MOVIE_WRITER_OGV_H
#include "servers/audio_server.h"
#include "servers/movie_writer/movie_writer.h"
#include <theora/theoraenc.h>
#include <vorbis/codec.h>
#include <vorbis/vorbisenc.h>
// MovieWriter that muxes Theora video and Vorbis audio into an Ogg (.ogv)
// container. Encoder state lives between write_begin() and write_end().
class MovieWriterOGV : public MovieWriter {
	GDCLASS(MovieWriterOGV, MovieWriter)

	uint32_t mix_rate = 48000;
	AudioServer::SpeakerMode speaker_mode = AudioServer::SPEAKER_MODE_STEREO;
	String base_path;
	uint32_t frame_count = 0;
	uint32_t fps = 0;
	uint32_t audio_ch = 0;
	uint32_t audio_frames = 0;
	Ref<FileAccess> f;

	// Bitrate target for Vorbis audio (0 = use quality-based VBR instead).
	int audio_r = 0;
	// Vorbis quality -0.1 to 1 (-0.1 yields smallest files but lowest fidelity; 1 yields highest fidelity but large files. '0.2' is a reasonable default).
	float audio_q = 0.2;
	// VP3 strict compatibility.
	int vp3_compatible = 0;
	// Bitrate target for Theora video (0 = use quality-based encoding instead).
	int video_r = 0;
	// Theora quality selector from 0 to 1.0 (0 yields smallest files but lowest video quality. 1.0 yields highest fidelity but large files).
	float video_q = 0.75;
	// Keyframe interval, in frames.
	ogg_uint32_t keyframe_frequency = 64;
	// Buffer delay (in frames). Longer delays allow smoother rate adaptation and provide better overall quality, but require more
	// client side buffering and add latency. The default value is the keyframe interval for one-pass encoding (or somewhat larger if
	// soft-target is used).
	int buf_delay = -1;
	// Sets the encoder speed level. Higher speed levels favor quicker encoding over better quality per bit. Depending on the encoding
	// mode, and the internal algorithms used, quality may actually improve with higher speeds, but in this case bitrate will also
	// likely increase. The maximum value, and the meaning of each value, are implementation-specific and may change depending on the
	// current encoding mode.
	int speed = 4;
	/* Use a large reservoir and treat the rate as a soft target; rate control is less strict but resulting quality is usually
	   higher/smoother overall. Soft target also allows an optional setting to specify a minimum allowed quality. */
	int soft_target = 1;

	// Take physical pages, weld into a logical stream of packets (video).
	ogg_stream_state to;
	// Take physical pages, weld into a logical stream of packets (audio).
	ogg_stream_state vo;
	// Theora encoding context; allocated in write_begin(), null when no
	// capture is in progress. Initialized so write_frame()/write_end() do not
	// read an indeterminate pointer before the first write_begin().
	th_enc_ctx *td = nullptr;
	// Theora bitstream information.
	th_info ti;
	// Theora comment information.
	th_comment tc;
	// Vorbis bitstream information.
	vorbis_info vi;
	// Vorbis comment information.
	vorbis_comment vc;
	// Central working state for the packet->PCM decoder.
	vorbis_dsp_state vd;
	// Local working space for packet->PCM decode.
	vorbis_block vb;

	// Planar YCbCr 4:2:0 conversion buffers, allocated in write_begin() and
	// freed in write_end(); null while no capture is in progress.
	uint8_t *y = nullptr;
	uint8_t *u = nullptr;
	uint8_t *v = nullptr;
	th_ycbcr_buffer ycbcr;

	ogg_page audiopage;
	ogg_page videopage;

	int encode_audio(const int32_t *p_audio_data);
	int encode_video(const Ref<Image> &p_image);

	// Number of bits needed to represent _v (helper from the Theora examples).
	inline int ilog(unsigned _v) {
		int ret;
		for (ret = 0; _v; ret++) {
			_v >>= 1;
		}
		return ret;
	}

protected:
	virtual uint32_t get_audio_mix_rate() const override;
	virtual AudioServer::SpeakerMode get_audio_speaker_mode() const override;
	virtual void get_supported_extensions(List<String> *r_extensions) const override;
	virtual Error write_begin(const Size2i &p_movie_size, uint32_t p_fps, const String &p_base_path) override;
	virtual Error write_frame(const Ref<Image> &p_image, const int32_t *p_audio_data) override;
	virtual void write_end() override;
	virtual bool handles_file(const String &p_path) const override;

public:
	MovieWriterOGV();
};
#endif // MOVIE_WRITER_OGV_H

View File

@ -61,6 +61,7 @@
#include "display_server.h"
#include "movie_writer/movie_writer.h"
#include "movie_writer/movie_writer_mjpeg.h"
#include "movie_writer/movie_writer_ogv.h"
#include "movie_writer/movie_writer_pngwav.h"
#include "rendering/renderer_compositor.h"
#include "rendering/renderer_rd/framebuffer_cache_rd.h"
@ -127,6 +128,7 @@ static bool has_server_feature_callback(const String &p_feature) {
static MovieWriterMJPEG *writer_mjpeg = nullptr;
static MovieWriterPNGWAV *writer_pngwav = nullptr;
static MovieWriterOGV *writer_ogv = nullptr;
void register_server_types() {
OS::get_singleton()->benchmark_begin_measure("Servers", "Register Extensions");
@ -331,6 +333,9 @@ void register_server_types() {
writer_pngwav = memnew(MovieWriterPNGWAV);
MovieWriter::add_writer(writer_pngwav);
writer_ogv = memnew(MovieWriterOGV);
MovieWriter::add_writer(writer_ogv);
OS::get_singleton()->benchmark_end_measure("Servers", "Register Extensions");
}
@ -341,6 +346,7 @@ void unregister_server_types() {
memdelete(shader_types);
memdelete(writer_mjpeg);
memdelete(writer_pngwav);
memdelete(writer_ogv);
OS::get_singleton()->benchmark_end_measure("Servers", "Unregister Extensions");
}

41
thirdparty/misc/rgb2yuv.h vendored Normal file
View File

@ -0,0 +1,41 @@
#ifndef RGB2YUV_H
#define RGB2YUV_H
#include "core/typedefs.h"
/* Converts tightly packed 24-bit RGB pixels to planar YCbCr 4:2:0 (BT.601
 * "studio swing": Y in [16,235], Cb/Cr centered at 128). One chroma sample is
 * taken per 2x2 block, from its top-left pixel.
 * `y` must hold width*height bytes; `u` and `v` must each hold
 * ceil(width/2)*ceil(height/2) bytes. Odd widths/heights are handled by
 * skipping the missing second pixel of a pair (the original read one pixel
 * past the row end in that case).
 * Note: `>> 8` on the (possibly negative) chroma sums is
 * implementation-defined in C, but is an arithmetic shift on all mainstream
 * compilers. */
static void rgb2yuv420(uint8_t *y, uint8_t *u, uint8_t *v, uint8_t *rgb, size_t width, size_t height) {
	size_t upos = 0;
	size_t vpos = 0;
	size_t i = 0;

	for (size_t line = 0; line < height; ++line) {
		if (line % 2 == 0) {
			/* Even lines: emit luma for each pixel and one chroma pair per two pixels. */
			for (size_t x = 0; x < width; x += 2) {
				uint8_t r = rgb[3 * i];
				uint8_t g = rgb[3 * i + 1];
				uint8_t b = rgb[3 * i + 2];

				y[i++] = (uint8_t)(((66 * r + 129 * g + 25 * b) >> 8) + 16);
				u[upos++] = (uint8_t)(((-38 * r - 74 * g + 112 * b) >> 8) + 128);
				v[vpos++] = (uint8_t)(((112 * r - 94 * g - 18 * b) >> 8) + 128);

				/* Guard against reading past the end of the row on odd widths. */
				if (x + 1 < width) {
					r = rgb[3 * i];
					g = rgb[3 * i + 1];
					b = rgb[3 * i + 2];
					y[i++] = (uint8_t)(((66 * r + 129 * g + 25 * b) >> 8) + 16);
				}
			}
		} else {
			/* Odd lines: luma only; chroma was already sampled on the line above. */
			for (size_t x = 0; x < width; x += 1) {
				uint8_t r = rgb[3 * i];
				uint8_t g = rgb[3 * i + 1];
				uint8_t b = rgb[3 * i + 2];
				y[i++] = (uint8_t)(((66 * r + 129 * g + 25 * b) >> 8) + 16);
			}
		}
	}
}
#endif // RGB2YUV_H