move RPI Camera component into dedicated repository (#3656)

Alessandro Ros 2024-08-14 23:24:17 +02:00 committed by GitHub
parent 4f54ea8b7e
commit c5059fa7a0
GPG Key ID: B5690EEEBB952194
35 changed files with 189 additions and 1911 deletions

View File

@ -4,5 +4,4 @@
/coverage*.txt
/apidocs/*.html
/internal/servers/hls/hls.min.js
/internal/protocols/rpicamera/exe/text_font.* /internal/staticsources/rpicamera/mtxrpicam_*
/internal/protocols/rpicamera/exe/exe

.gitignore (vendored), 3 lines changed
View File

@ -3,5 +3,4 @@
/coverage*.txt
/apidocs/*.html
/internal/servers/hls/hls.min.js
/internal/protocols/rpicamera/exe/text_font.* /internal/staticsources/rpicamera/mtxrpicam_*
/internal/protocols/rpicamera/exe/exe

View File

@ -144,7 +144,6 @@ _rtsp-simple-server_ has been rebranded as _MediaMTX_. The reason is pretty obvi
* [Encryption](#encryption-1)
* [Compile from source](#compile-from-source)
* [Standard](#standard)
* [Raspberry Pi](#raspberry-pi)
* [OpenWrt](#openwrt-1)
* [Cross compile](#cross-compile)
* [Compile for all supported platforms](#compile-for-all-supported-platforms)
@ -2047,25 +2046,6 @@ CGO_ENABLED=0 go build .
The command will produce the `mediamtx` binary.
### Raspberry Pi
The server can be compiled with native support for the Raspberry Pi Camera. Compilation must be performed on a Raspberry Pi, with the following dependencies:
* Go ≥ 1.22
* `libcamera-dev`
* `libfreetype-dev`
* `xxd`
Download the repository, open a terminal in it and run:
```sh
make -C internal/protocols/rpicamera/exe -j$(nproc)
go generate ./...
go build -tags rpicamera .
```
The command will produce the `mediamtx` binary.
### OpenWrt
The compilation procedure is the same as the standard one. On the OpenWrt device, install git and Go:

View File

@ -1,57 +0,0 @@
CFLAGS = \
-Ofast \
-Werror \
-Wall \
-Wextra \
-Wno-unused-parameter \
-Wno-unused-result \
$$(pkg-config --cflags freetype2)
CXXFLAGS = \
-Ofast \
-Werror \
-Wall \
-Wextra \
-Wno-unused-parameter \
-Wno-unused-result \
-std=c++17 \
$$(pkg-config --cflags libcamera)
LDFLAGS = \
-s \
-pthread \
$$(pkg-config --libs freetype2) \
$$(pkg-config --libs libcamera)
OBJS = \
base64.o \
camera.o \
encoder.o \
main.o \
parameters.o \
pipe.o \
sensor_mode.o \
text.o \
window.o
all: exe
TEXT_FONT_URL = https://github.com/IBM/plex/raw/v6.4.2/IBM-Plex-Mono/fonts/complete/ttf/IBMPlexMono-Medium.ttf
TEXT_FONT_SHA256 = 0bede3debdea8488bbb927f8f0650d915073209734a67fe8cd5a3320b572511c
text_font.ttf:
wget -O text_font.tmp $(TEXT_FONT_URL)
H=$$(sha256sum text_font.tmp | awk '{ print $$1 }'); [ "$$H" = "$(TEXT_FONT_SHA256)" ] || { echo "hash mismatch; got $$H, expected $(TEXT_FONT_SHA256)"; exit 1; }
mv text_font.tmp $@
text_font.h: text_font.ttf
xxd --include $< > text_font.h
%.o: %.c text_font.h
$(CC) $(CFLAGS) -c $< -o $@
%.o: %.cpp
$(CXX) $(CXXFLAGS) -c $< -o $@
exe: $(OBJS)
$(CXX) $^ $(LDFLAGS) -o $@

View File

@ -1,87 +0,0 @@
#include <stddef.h>
#include <stdint.h>
#include <string.h>
#include <stdlib.h>
#include "base64.h"
static const unsigned char decoding_table[256] = {
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x3e, 0x00, 0x00, 0x00, 0x3f,
0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b,
0x3c, 0x3d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06,
0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e,
0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16,
0x17, 0x18, 0x19, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20,
0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28,
0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30,
0x31, 0x32, 0x33, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
};
char* base64_decode(const char *data) {
size_t input_length = strlen(data);
if (input_length % 4 != 0) {
return NULL;
}
size_t output_length = input_length / 4 * 3;
if (data[input_length - 1] == '=') {
(output_length)--;
}
if (data[input_length - 2] == '=') {
(output_length)--;
}
unsigned char* output = (unsigned char *)malloc(output_length + 1);
if (output == NULL) {
return NULL;
}
for (int i = 0, j = 0; i < (int)input_length;) {
uint32_t sextet_a = (data[i] == '=') ? (0 & i++) : decoding_table[(unsigned char)(data[i++])];
uint32_t sextet_b = (data[i] == '=') ? (0 & i++) : decoding_table[(unsigned char)(data[i++])];
uint32_t sextet_c = (data[i] == '=') ? (0 & i++) : decoding_table[(unsigned char)(data[i++])];
uint32_t sextet_d = (data[i] == '=') ? (0 & i++) : decoding_table[(unsigned char)(data[i++])];
uint32_t triple = (sextet_a << 3 * 6)
+ (sextet_b << 2 * 6)
+ (sextet_c << 1 * 6)
+ (sextet_d << 0 * 6);
if (j < (int)output_length) {
output[j++] = (triple >> 2 * 8) & 0xFF;
}
if (j < (int)output_length) {
output[j++] = (triple >> 1 * 8) & 0xFF;
}
if (j < (int)output_length) {
output[j++] = (triple >> 0 * 8) & 0xFF;
}
}
output[output_length] = 0x00;
return (char *)output;
};

View File

@ -1,6 +0,0 @@
#ifndef __BASE64_H__
#define __BASE64_H__
char* base64_decode(const char *data);
#endif

View File

@ -1,499 +0,0 @@
#include <stdio.h>
#include <stdarg.h>
#include <cstring>
#include <sys/mman.h>
#include <iostream>
#include <mutex>
#include <sys/ioctl.h>
#include <fcntl.h>
#include <unistd.h>
#include <libcamera/camera_manager.h>
#include <libcamera/camera.h>
#include <libcamera/formats.h>
#include <libcamera/control_ids.h>
#include <libcamera/controls.h>
#include <libcamera/framebuffer_allocator.h>
#include <libcamera/property_ids.h>
#include <linux/videodev2.h>
#include "camera.h"
using libcamera::CameraManager;
using libcamera::CameraConfiguration;
using libcamera::Camera;
using libcamera::ColorSpace;
using libcamera::ControlList;
using libcamera::FrameBufferAllocator;
using libcamera::FrameBuffer;
using libcamera::PixelFormat;
using libcamera::Rectangle;
using libcamera::Request;
using libcamera::Size;
using libcamera::Span;
using libcamera::Stream;
using libcamera::StreamRole;
using libcamera::StreamConfiguration;
using libcamera::Transform;
namespace controls = libcamera::controls;
namespace formats = libcamera::formats;
namespace properties = libcamera::properties;
static char errbuf[256];
static void set_error(const char *format, ...) {
va_list args;
va_start(args, format);
vsnprintf(errbuf, 256, format, args);
}
const char *camera_get_error() {
return errbuf;
}
// https://github.com/raspberrypi/libcamera-apps/blob/dd97618a25523c2c4aa58f87af5f23e49aa6069c/core/libcamera_app.cpp#L42
static PixelFormat mode_to_pixel_format(sensor_mode_t *mode) {
static std::vector<std::pair<std::pair<int, bool>, PixelFormat>> table = {
{ {8, false}, formats::SBGGR8 },
{ {8, true}, formats::SBGGR8 },
{ {10, false}, formats::SBGGR10 },
{ {10, true}, formats::SBGGR10_CSI2P },
{ {12, false}, formats::SBGGR12 },
{ {12, true}, formats::SBGGR12_CSI2P },
};
auto it = std::find_if(table.begin(), table.end(), [&mode] (auto &m) {
return mode->bit_depth == m.first.first && mode->packed == m.first.second; });
if (it != table.end()) {
return it->second;
}
return formats::SBGGR12_CSI2P;
}
struct CameraPriv {
const parameters_t *params;
camera_frame_cb frame_cb;
std::unique_ptr<CameraManager> camera_manager;
std::shared_ptr<Camera> camera;
Stream *video_stream;
std::unique_ptr<FrameBufferAllocator> allocator;
std::vector<std::unique_ptr<Request>> requests;
std::mutex ctrls_mutex;
std::unique_ptr<ControlList> ctrls;
std::map<FrameBuffer *, uint8_t *> mapped_buffers;
};
static int get_v4l2_colorspace(std::optional<ColorSpace> const &cs) {
if (cs == ColorSpace::Rec709) {
return V4L2_COLORSPACE_REC709;
}
return V4L2_COLORSPACE_SMPTE170M;
}
// https://github.com/raspberrypi/libcamera-apps/blob/a5b5506a132056ac48ba22bc581cc394456da339/core/libcamera_app.cpp#L824
static uint8_t *map_buffer(FrameBuffer *buffer) {
size_t buffer_size = 0;
for (unsigned i = 0; i < buffer->planes().size(); i++) {
const FrameBuffer::Plane &plane = buffer->planes()[i];
buffer_size += plane.length;
if (i == buffer->planes().size() - 1 || plane.fd.get() != buffer->planes()[i + 1].fd.get()) {
return (uint8_t *)mmap(NULL, buffer_size, PROT_READ | PROT_WRITE, MAP_SHARED, plane.fd.get(), 0);
}
}
return NULL;
}
// https://github.com/raspberrypi/libcamera-apps/blob/a6267d51949d0602eedf60f3ddf8c6685f652812/core/options.cpp#L101
static void set_hdr(bool hdr) {
bool ok = false;
for (int i = 0; i < 4 && !ok; i++)
{
std::string dev("/dev/v4l-subdev");
dev += (char)('0' + i);
int fd = open(dev.c_str(), O_RDWR, 0);
if (fd < 0)
continue;
v4l2_control ctrl { V4L2_CID_WIDE_DYNAMIC_RANGE, hdr };
ok = !ioctl(fd, VIDIOC_S_CTRL, &ctrl);
close(fd);
}
}
bool camera_create(const parameters_t *params, camera_frame_cb frame_cb, camera_t **cam) {
std::unique_ptr<CameraPriv> camp = std::make_unique<CameraPriv>();
set_hdr(params->hdr);
if (strcmp(params->log_level, "debug") == 0) {
setenv("LIBCAMERA_LOG_LEVELS", "*:DEBUG", 1);
} else if (strcmp(params->log_level, "info") == 0) {
setenv("LIBCAMERA_LOG_LEVELS", "*:INFO", 1);
} else if (strcmp(params->log_level, "warn") == 0) {
setenv("LIBCAMERA_LOG_LEVELS", "*:WARN", 1);
} else { // error
setenv("LIBCAMERA_LOG_LEVELS", "*:ERROR", 1);
}
// We make sure to set the environment variable before libcamera init
setenv("LIBCAMERA_RPI_TUNING_FILE", params->tuning_file, 1);
camp->camera_manager = std::make_unique<CameraManager>();
int ret = camp->camera_manager->start();
if (ret != 0) {
set_error("CameraManager.start() failed");
return false;
}
std::vector<std::shared_ptr<Camera>> cameras = camp->camera_manager->cameras();
auto rem = std::remove_if(cameras.begin(), cameras.end(),
[](auto &cam) { return cam->id().find("/usb") != std::string::npos; });
cameras.erase(rem, cameras.end());
if (params->camera_id >= cameras.size()){
set_error("selected camera is not available");
return false;
}
camp->camera = camp->camera_manager->get(cameras[params->camera_id]->id());
if (camp->camera == NULL) {
set_error("CameraManager.get() failed");
return false;
}
ret = camp->camera->acquire();
if (ret != 0) {
set_error("Camera.acquire() failed");
return false;
}
std::vector<libcamera::StreamRole> stream_roles = { StreamRole::VideoRecording };
if (params->mode != NULL) {
stream_roles.push_back(StreamRole::Raw);
}
std::unique_ptr<CameraConfiguration> conf = camp->camera->generateConfiguration(stream_roles);
if (conf == NULL) {
set_error("Camera.generateConfiguration() failed");
return false;
}
StreamConfiguration &video_stream_conf = conf->at(0);
video_stream_conf.size = libcamera::Size(params->width, params->height);
video_stream_conf.pixelFormat = formats::YUV420;
video_stream_conf.bufferCount = params->buffer_count;
if (params->width >= 1280 || params->height >= 720) {
video_stream_conf.colorSpace = ColorSpace::Rec709;
} else {
video_stream_conf.colorSpace = ColorSpace::Smpte170m;
}
if (params->mode != NULL) {
StreamConfiguration &raw_stream_conf = conf->at(1);
raw_stream_conf.size = Size(params->mode->width, params->mode->height);
raw_stream_conf.pixelFormat = mode_to_pixel_format(params->mode);
raw_stream_conf.bufferCount = video_stream_conf.bufferCount;
}
conf->transform = Transform::Identity;
if (params->h_flip) {
conf->transform = Transform::HFlip * conf->transform;
}
if (params->v_flip) {
conf->transform = Transform::VFlip * conf->transform;
}
CameraConfiguration::Status vstatus = conf->validate();
if (vstatus == CameraConfiguration::Invalid) {
set_error("StreamConfiguration.validate() failed");
return false;
}
int res = camp->camera->configure(conf.get());
if (res != 0) {
set_error("Camera.configure() failed");
return false;
}
camp->video_stream = video_stream_conf.stream();
for (unsigned int i = 0; i < params->buffer_count; i++) {
std::unique_ptr<Request> request = camp->camera->createRequest((uint64_t)camp.get());
if (request == NULL) {
set_error("createRequest() failed");
return false;
}
camp->requests.push_back(std::move(request));
}
camp->allocator = std::make_unique<FrameBufferAllocator>(camp->camera);
for (StreamConfiguration &stream_conf : *conf) {
Stream *stream = stream_conf.stream();
res = camp->allocator->allocate(stream);
if (res < 0) {
set_error("allocate() failed");
return false;
}
int i = 0;
for (const std::unique_ptr<FrameBuffer> &buffer : camp->allocator->buffers(stream)) {
// map buffer of the video stream only
if (stream == video_stream_conf.stream()) {
camp->mapped_buffers[buffer.get()] = map_buffer(buffer.get());
}
res = camp->requests.at(i++)->addBuffer(stream, buffer.get());
if (res != 0) {
set_error("addBuffer() failed");
return false;
}
}
}
camp->params = params;
camp->frame_cb = frame_cb;
*cam = camp.release();
return true;
}
static int buffer_size(const std::vector<FrameBuffer::Plane> &planes) {
int size = 0;
for (const FrameBuffer::Plane &plane : planes) {
size += plane.length;
}
return size;
}
static void on_request_complete(Request *request) {
if (request->status() == Request::RequestCancelled) {
return;
}
CameraPriv *camp = (CameraPriv *)request->cookie();
FrameBuffer *buffer = request->buffers().at(camp->video_stream);
camp->frame_cb(
camp->mapped_buffers.at(buffer),
camp->video_stream->configuration().stride,
camp->video_stream->configuration().size.height,
buffer->planes()[0].fd.get(),
buffer_size(buffer->planes()),
buffer->metadata().timestamp / 1000);
request->reuse(Request::ReuseFlag::ReuseBuffers);
{
std::lock_guard<std::mutex> lock(camp->ctrls_mutex);
request->controls() = *camp->ctrls;
camp->ctrls->clear();
}
camp->camera->queueRequest(request);
}
int camera_get_mode_stride(camera_t *cam) {
CameraPriv *camp = (CameraPriv *)cam;
return camp->video_stream->configuration().stride;
}
int camera_get_mode_colorspace(camera_t *cam) {
CameraPriv *camp = (CameraPriv *)cam;
return get_v4l2_colorspace(camp->video_stream->configuration().colorSpace);
}
static void fill_dynamic_controls(ControlList *ctrls, const parameters_t *params) {
ctrls->set(controls::Brightness, params->brightness);
ctrls->set(controls::Contrast, params->contrast);
ctrls->set(controls::Saturation, params->saturation);
ctrls->set(controls::Sharpness, params->sharpness);
int exposure_mode;
if (strcmp(params->exposure, "short") == 0) {
exposure_mode = controls::ExposureShort;
} else if (strcmp(params->exposure, "long") == 0) {
exposure_mode = controls::ExposureLong;
} else if (strcmp(params->exposure, "custom") == 0) {
exposure_mode = controls::ExposureCustom;
} else {
exposure_mode = controls::ExposureNormal;
}
ctrls->set(controls::AeExposureMode, exposure_mode);
int awb_mode;
if (strcmp(params->awb, "incandescent") == 0) {
awb_mode = controls::AwbIncandescent;
} else if (strcmp(params->awb, "tungsten") == 0) {
awb_mode = controls::AwbTungsten;
} else if (strcmp(params->awb, "fluorescent") == 0) {
awb_mode = controls::AwbFluorescent;
} else if (strcmp(params->awb, "indoor") == 0) {
awb_mode = controls::AwbIndoor;
} else if (strcmp(params->awb, "daylight") == 0) {
awb_mode = controls::AwbDaylight;
} else if (strcmp(params->awb, "cloudy") == 0) {
awb_mode = controls::AwbCloudy;
} else if (strcmp(params->awb, "custom") == 0) {
awb_mode = controls::AwbCustom;
} else {
awb_mode = controls::AwbAuto;
}
ctrls->set(controls::AwbMode, awb_mode);
if (params->awb_gain_red > 0 && params->awb_gain_blue > 0) {
ctrls->set(controls::ColourGains,
Span<const float, 2>({params->awb_gain_red, params->awb_gain_blue}));
}
int denoise_mode;
if (strcmp(params->denoise, "cdn_off") == 0) {
denoise_mode = controls::draft::NoiseReductionModeMinimal;
} else if (strcmp(params->denoise, "cdn_hq") == 0) {
denoise_mode = controls::draft::NoiseReductionModeHighQuality;
} else if (strcmp(params->denoise, "cdn_fast") == 0) {
denoise_mode = controls::draft::NoiseReductionModeFast;
} else {
denoise_mode = controls::draft::NoiseReductionModeOff;
}
ctrls->set(controls::draft::NoiseReductionMode, denoise_mode);
ctrls->set(controls::ExposureTime, params->shutter);
int metering_mode;
if (strcmp(params->metering, "spot") == 0) {
metering_mode = controls::MeteringSpot;
} else if (strcmp(params->metering, "matrix") == 0) {
metering_mode = controls::MeteringMatrix;
} else if (strcmp(params->metering, "custom") == 0) {
metering_mode = controls::MeteringCustom;
} else {
metering_mode = controls::MeteringCentreWeighted;
}
ctrls->set(controls::AeMeteringMode, metering_mode);
ctrls->set(controls::AnalogueGain, params->gain);
ctrls->set(controls::ExposureValue, params->ev);
int64_t frame_time = (int64_t)(((float)1000000) / params->fps);
ctrls->set(controls::FrameDurationLimits, Span<const int64_t, 2>({ frame_time, frame_time }));
}
bool camera_start(camera_t *cam) {
CameraPriv *camp = (CameraPriv *)cam;
camp->ctrls = std::make_unique<ControlList>(controls::controls);
fill_dynamic_controls(camp->ctrls.get(), camp->params);
if (camp->camera->controls().count(&controls::AfMode) > 0) {
if (camp->params->af_window != NULL) {
std::optional<Rectangle> opt = camp->camera->properties().get(properties::ScalerCropMaximum);
Rectangle sensor_area;
try {
sensor_area = opt.value();
} catch(const std::bad_optional_access& exc) {
set_error("get(ScalerCropMaximum) failed");
return false;
}
Rectangle afwindows_rectangle[1];
afwindows_rectangle[0] = Rectangle(
camp->params->af_window->x * sensor_area.width,
camp->params->af_window->y * sensor_area.height,
camp->params->af_window->width * sensor_area.width,
camp->params->af_window->height * sensor_area.height);
afwindows_rectangle[0].translateBy(sensor_area.topLeft());
camp->ctrls->set(controls::AfMetering, controls::AfMeteringWindows);
camp->ctrls->set(controls::AfWindows, afwindows_rectangle);
}
int af_mode;
if (strcmp(camp->params->af_mode, "manual") == 0) {
af_mode = controls::AfModeManual;
} else if (strcmp(camp->params->af_mode, "continuous") == 0) {
af_mode = controls::AfModeContinuous;
} else {
af_mode = controls::AfModeAuto;
}
camp->ctrls->set(controls::AfMode, af_mode);
int af_range;
if (strcmp(camp->params->af_range, "macro") == 0) {
af_range = controls::AfRangeMacro;
} else if (strcmp(camp->params->af_range, "full") == 0) {
af_range = controls::AfRangeFull;
} else {
af_range = controls::AfRangeNormal;
}
camp->ctrls->set(controls::AfRange, af_range);
int af_speed;
if (strcmp(camp->params->af_range, "fast") == 0) {
af_speed = controls::AfSpeedFast;
} else {
af_speed = controls::AfSpeedNormal;
}
camp->ctrls->set(controls::AfSpeed, af_speed);
if (strcmp(camp->params->af_mode, "auto") == 0) {
camp->ctrls->set(controls::AfTrigger, controls::AfTriggerStart);
} else if (strcmp(camp->params->af_mode, "manual") == 0) {
camp->ctrls->set(controls::LensPosition, camp->params->lens_position);
}
}
if (camp->params->roi != NULL) {
std::optional<Rectangle> opt = camp->camera->properties().get(properties::ScalerCropMaximum);
Rectangle sensor_area;
try {
sensor_area = opt.value();
} catch(const std::bad_optional_access& exc) {
set_error("get(ScalerCropMaximum) failed");
return false;
}
Rectangle crop(
camp->params->roi->x * sensor_area.width,
camp->params->roi->y * sensor_area.height,
camp->params->roi->width * sensor_area.width,
camp->params->roi->height * sensor_area.height);
crop.translateBy(sensor_area.topLeft());
camp->ctrls->set(controls::ScalerCrop, crop);
}
int res = camp->camera->start(camp->ctrls.get());
if (res != 0) {
set_error("Camera.start() failed");
return false;
}
camp->ctrls->clear();
camp->camera->requestCompleted.connect(on_request_complete);
for (std::unique_ptr<Request> &request : camp->requests) {
int res = camp->camera->queueRequest(request.get());
if (res != 0) {
set_error("Camera.queueRequest() failed");
return false;
}
}
return true;
}
void camera_reload_params(camera_t *cam, const parameters_t *params) {
CameraPriv *camp = (CameraPriv *)cam;
std::lock_guard<std::mutex> lock(camp->ctrls_mutex);
fill_dynamic_controls(camp->ctrls.get(), params);
}

View File

@ -1,31 +0,0 @@
#ifndef __CAMERA_H__
#define __CAMERA_H__
#include "parameters.h"
typedef void camera_t;
typedef void (*camera_frame_cb)(
uint8_t *mapped_buffer,
int stride,
int height,
int buffer_fd,
uint64_t size,
uint64_t timestamp);
#ifdef __cplusplus
extern "C" {
#endif
const char *camera_get_error();
bool camera_create(const parameters_t *params, camera_frame_cb frame_cb, camera_t **cam);
int camera_get_mode_stride(camera_t *cam);
int camera_get_mode_colorspace(camera_t *cam);
bool camera_start(camera_t *cam);
void camera_reload_params(camera_t *cam, const parameters_t *params);
#ifdef __cplusplus
}
#endif
#endif

View File

@ -1,337 +0,0 @@
#include <stdbool.h>
#include <stdio.h>
#include <stdarg.h>
#include <stdlib.h>
#include <stdint.h>
#include <fcntl.h>
#include <unistd.h>
#include <string.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <errno.h>
#include <poll.h>
#include <pthread.h>
#include <linux/videodev2.h>
#include "encoder.h"
#define DEVICE "/dev/video11"
#define POLL_TIMEOUT_MS 200
static char errbuf[256];
static void set_error(const char *format, ...) {
va_list args;
va_start(args, format);
vsnprintf(errbuf, 256, format, args);
}
const char *encoder_get_error() {
return errbuf;
}
typedef struct {
const parameters_t *params;
int fd;
void **capture_buffers;
int cur_buffer;
encoder_output_cb output_cb;
pthread_t output_thread;
bool ts_initialized;
uint64_t start_ts;
} encoder_priv_t;
static void *output_thread(void *userdata) {
encoder_priv_t *encp = (encoder_priv_t *)userdata;
while (true) {
struct pollfd p = { encp->fd, POLLIN, 0 };
int res = poll(&p, 1, POLL_TIMEOUT_MS);
if (res == -1) {
fprintf(stderr, "output_thread(): poll() failed\n");
exit(1);
}
if (p.revents & POLLIN) {
struct v4l2_buffer buf = {0};
struct v4l2_plane planes[VIDEO_MAX_PLANES] = {0};
buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
buf.memory = V4L2_MEMORY_DMABUF;
buf.length = 1;
buf.m.planes = planes;
int res = ioctl(encp->fd, VIDIOC_DQBUF, &buf);
if (res != 0) {
fprintf(stderr, "output_thread(): ioctl(VIDIOC_DQBUF) failed\n");
exit(1);
}
memset(&buf, 0, sizeof(buf));
memset(planes, 0, sizeof(planes));
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
buf.memory = V4L2_MEMORY_MMAP;
buf.length = 1;
buf.m.planes = planes;
res = ioctl(encp->fd, VIDIOC_DQBUF, &buf);
if (res == 0) {
uint64_t ts = ((uint64_t)buf.timestamp.tv_sec * (uint64_t)1000000) + (uint64_t)buf.timestamp.tv_usec;
if (!encp->ts_initialized) {
encp->ts_initialized = true;
encp->start_ts = ts;
}
ts -= encp->start_ts;
const uint8_t *bufmem = (const uint8_t *)encp->capture_buffers[buf.index];
int bufsize = buf.m.planes[0].bytesused;
encp->output_cb(ts, bufmem, bufsize);
int index = buf.index;
int length = buf.m.planes[0].length;
struct v4l2_buffer buf = {0};
struct v4l2_plane planes[VIDEO_MAX_PLANES] = {0};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = index;
buf.length = 1;
buf.m.planes = planes;
buf.m.planes[0].bytesused = 0;
buf.m.planes[0].length = length;
int res = ioctl(encp->fd, VIDIOC_QBUF, &buf);
if (res < 0) {
fprintf(stderr, "output_thread(): ioctl(VIDIOC_QBUF) failed\n");
exit(1);
}
}
}
}
return NULL;
}
static bool fill_dynamic_params(int fd, const parameters_t *params) {
struct v4l2_control ctrl = {0};
ctrl.id = V4L2_CID_MPEG_VIDEO_H264_I_PERIOD;
ctrl.value = params->idr_period;
int res = ioctl(fd, VIDIOC_S_CTRL, &ctrl);
if (res != 0) {
set_error("unable to set IDR period");
return false;
}
ctrl.id = V4L2_CID_MPEG_VIDEO_BITRATE;
ctrl.value = params->bitrate;
res = ioctl(fd, VIDIOC_S_CTRL, &ctrl);
if (res != 0) {
set_error("unable to set bitrate");
return false;
}
return true;
}
bool encoder_create(const parameters_t *params, int stride, int colorspace, encoder_output_cb output_cb, encoder_t **enc) {
*enc = malloc(sizeof(encoder_priv_t));
encoder_priv_t *encp = (encoder_priv_t *)(*enc);
memset(encp, 0, sizeof(encoder_priv_t));
encp->fd = open(DEVICE, O_RDWR, 0);
if (encp->fd < 0) {
set_error("unable to open device");
goto failed;
}
bool res2 = fill_dynamic_params(encp->fd, params);
if (!res2) {
goto failed;
}
struct v4l2_control ctrl = {0};
ctrl.id = V4L2_CID_MPEG_VIDEO_H264_PROFILE;
ctrl.value = params->profile;
int res = ioctl(encp->fd, VIDIOC_S_CTRL, &ctrl);
if (res != 0) {
set_error("unable to set profile");
goto failed;
}
ctrl.id = V4L2_CID_MPEG_VIDEO_H264_LEVEL;
ctrl.value = params->level;
res = ioctl(encp->fd, VIDIOC_S_CTRL, &ctrl);
if (res != 0) {
set_error("unable to set level");
goto failed;
}
ctrl.id = V4L2_CID_MPEG_VIDEO_REPEAT_SEQ_HEADER;
ctrl.value = 0;
res = ioctl(encp->fd, VIDIOC_S_CTRL, &ctrl);
if (res != 0) {
set_error("unable to set REPEAT_SEQ_HEADER");
goto failed;
}
struct v4l2_format fmt = {0};
fmt.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
fmt.fmt.pix_mp.width = params->width;
fmt.fmt.pix_mp.height = params->height;
fmt.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_YUV420;
fmt.fmt.pix_mp.plane_fmt[0].bytesperline = stride;
fmt.fmt.pix_mp.field = V4L2_FIELD_ANY;
fmt.fmt.pix_mp.colorspace = colorspace;
fmt.fmt.pix_mp.num_planes = 1;
res = ioctl(encp->fd, VIDIOC_S_FMT, &fmt);
if (res != 0) {
set_error("unable to set output format");
goto failed;
}
memset(&fmt, 0, sizeof(fmt));
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
fmt.fmt.pix_mp.width = params->width;
fmt.fmt.pix_mp.height = params->height;
fmt.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_H264;
fmt.fmt.pix_mp.field = V4L2_FIELD_ANY;
fmt.fmt.pix_mp.colorspace = V4L2_COLORSPACE_DEFAULT;
fmt.fmt.pix_mp.num_planes = 1;
fmt.fmt.pix_mp.plane_fmt[0].bytesperline = 0;
fmt.fmt.pix_mp.plane_fmt[0].sizeimage = 512 << 10;
res = ioctl(encp->fd, VIDIOC_S_FMT, &fmt);
if (res != 0) {
set_error("unable to set capture format");
goto failed;
}
struct v4l2_streamparm parm = {0};
parm.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
parm.parm.output.timeperframe.numerator = 1;
parm.parm.output.timeperframe.denominator = params->fps;
res = ioctl(encp->fd, VIDIOC_S_PARM, &parm);
if (res != 0) {
set_error("unable to set fps");
goto failed;
}
struct v4l2_requestbuffers reqbufs = {0};
reqbufs.count = params->buffer_count;
reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
reqbufs.memory = V4L2_MEMORY_DMABUF;
res = ioctl(encp->fd, VIDIOC_REQBUFS, &reqbufs);
if (res != 0) {
set_error("unable to set output buffers");
goto failed;
}
memset(&reqbufs, 0, sizeof(reqbufs));
reqbufs.count = params->capture_buffer_count;
reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
reqbufs.memory = V4L2_MEMORY_MMAP;
res = ioctl(encp->fd, VIDIOC_REQBUFS, &reqbufs);
if (res != 0) {
set_error("unable to set capture buffers");
goto failed;
}
encp->capture_buffers = malloc(sizeof(void *) * reqbufs.count);
for (unsigned int i = 0; i < reqbufs.count; i++) {
struct v4l2_plane planes[VIDEO_MAX_PLANES];
struct v4l2_buffer buffer = {0};
buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
buffer.memory = V4L2_MEMORY_MMAP;
buffer.index = i;
buffer.length = 1;
buffer.m.planes = planes;
int res = ioctl(encp->fd, VIDIOC_QUERYBUF, &buffer);
if (res != 0) {
set_error("unable to query buffer");
goto failed;
}
encp->capture_buffers[i] = mmap(
0,
buffer.m.planes[0].length,
PROT_READ | PROT_WRITE, MAP_SHARED,
encp->fd,
buffer.m.planes[0].m.mem_offset);
if (encp->capture_buffers[i] == MAP_FAILED) {
set_error("mmap() failed");
goto failed;
}
res = ioctl(encp->fd, VIDIOC_QBUF, &buffer);
if (res != 0) {
set_error("ioctl(VIDIOC_QBUF) failed");
goto failed;
}
}
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
res = ioctl(encp->fd, VIDIOC_STREAMON, &type);
if (res != 0) {
set_error("unable to activate output stream");
goto failed;
}
type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
res = ioctl(encp->fd, VIDIOC_STREAMON, &type);
if (res != 0) {
set_error("unable to activate capture stream");
}
encp->params = params;
encp->cur_buffer = 0;
encp->output_cb = output_cb;
encp->ts_initialized = false;
pthread_create(&encp->output_thread, NULL, output_thread, encp);
return true;
failed:
if (encp->capture_buffers != NULL) {
free(encp->capture_buffers);
}
if (encp->fd >= 0) {
close(encp->fd);
}
free(encp);
return false;
}
void encoder_encode(encoder_t *enc, int buffer_fd, size_t size, int64_t timestamp_us) {
encoder_priv_t *encp = (encoder_priv_t *)enc;
int index = encp->cur_buffer++;
encp->cur_buffer %= encp->params->buffer_count;
struct v4l2_buffer buf = {0};
struct v4l2_plane planes[VIDEO_MAX_PLANES] = {0};
buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
buf.index = index;
buf.field = V4L2_FIELD_NONE;
buf.memory = V4L2_MEMORY_DMABUF;
buf.length = 1;
buf.timestamp.tv_sec = timestamp_us / 1000000;
buf.timestamp.tv_usec = timestamp_us % 1000000;
buf.m.planes = planes;
buf.m.planes[0].m.fd = buffer_fd;
buf.m.planes[0].bytesused = size;
buf.m.planes[0].length = size;
int res = ioctl(encp->fd, VIDIOC_QBUF, &buf);
if (res != 0) {
fprintf(stderr, "encoder_encode(): ioctl(VIDIOC_QBUF) failed\n");
// it happens when the raspberry is under pressure. do not exit.
}
}
void encoder_reload_params(encoder_t *enc, const parameters_t *params) {
encoder_priv_t *encp = (encoder_priv_t *)enc;
fill_dynamic_params(encp->fd, params);
}

View File

@ -1,15 +0,0 @@
#ifndef __ENCODER_H__
#define __ENCODER_H__
#include "parameters.h"
typedef void encoder_t;
typedef void (*encoder_output_cb)(uint64_t ts, const uint8_t *buf, uint64_t size);
const char *encoder_get_error();
bool encoder_create(const parameters_t *params, int stride, int colorspace, encoder_output_cb output_cb, encoder_t **enc);
void encoder_encode(encoder_t *enc, int buffer_fd, size_t size, int64_t timestamp_us);
void encoder_reload_params(encoder_t *enc, const parameters_t *params);
#endif

View File

@ -1,116 +0,0 @@
#include <stdio.h>
#include <stdbool.h>
#include <stdarg.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <pthread.h>
#include "parameters.h"
#include "pipe.h"
#include "camera.h"
#include "text.h"
#include "encoder.h"
static int pipe_video_fd;
static pthread_mutex_t pipe_video_mutex;
static text_t *text;
static encoder_t *enc;
static void on_frame(
uint8_t *mapped_buffer,
int stride,
int height,
int buffer_fd,
uint64_t size,
uint64_t timestamp) {
text_draw(text, mapped_buffer, stride, height);
encoder_encode(enc, buffer_fd, size, timestamp);
}
static void on_encoder_output(uint64_t ts, const uint8_t *buf, uint64_t size) {
pthread_mutex_lock(&pipe_video_mutex);
pipe_write_buf(pipe_video_fd, ts, buf, size);
pthread_mutex_unlock(&pipe_video_mutex);
}
int main() {
int pipe_conf_fd = atoi(getenv("PIPE_CONF_FD"));
pipe_video_fd = atoi(getenv("PIPE_VIDEO_FD"));
uint8_t *buf;
uint32_t n = pipe_read(pipe_conf_fd, &buf);
parameters_t params;
bool ok = parameters_unserialize(&params, &buf[1], n-1);
free(buf);
if (!ok) {
pipe_write_error(pipe_video_fd, "parameters_unserialize(): %s", parameters_get_error());
return 5;
}
pthread_mutex_init(&pipe_video_mutex, NULL);
pthread_mutex_lock(&pipe_video_mutex);
camera_t *cam;
ok = camera_create(
&params,
on_frame,
&cam);
if (!ok) {
pipe_write_error(pipe_video_fd, "camera_create(): %s", camera_get_error());
return 5;
}
ok = text_create(&params, &text);
if (!ok) {
pipe_write_error(pipe_video_fd, "text_create(): %s", text_get_error());
return 5;
}
ok = encoder_create(
&params,
camera_get_mode_stride(cam),
camera_get_mode_colorspace(cam),
on_encoder_output,
&enc);
if (!ok) {
pipe_write_error(pipe_video_fd, "encoder_create(): %s", encoder_get_error());
return 5;
}
ok = camera_start(cam);
if (!ok) {
pipe_write_error(pipe_video_fd, "camera_start(): %s", camera_get_error());
return 5;
}
pipe_write_ready(pipe_video_fd);
pthread_mutex_unlock(&pipe_video_mutex);
while (true) {
uint8_t *buf;
uint32_t n = pipe_read(pipe_conf_fd, &buf);
switch (buf[0]) {
case 'e':
return 0;
case 'c':
{
parameters_t params;
bool ok = parameters_unserialize(&params, &buf[1], n-1);
free(buf);
if (!ok) {
printf("skipping reloading parameters since they are invalid: %s\n", parameters_get_error());
continue;
}
camera_reload_params(cam, &params);
encoder_reload_params(enc, &params);
parameters_destroy(&params);
}
}
}
return 0;
}

View File

@ -1,210 +0,0 @@
#include <stdlib.h>
#include <string.h>
#include <stdbool.h>
#include <stdarg.h>
#include <stdio.h>
#include <linux/videodev2.h>
#include "base64.h"
#include "parameters.h"
static char errbuf[256];
static void set_error(const char *format, ...) {
va_list args;
va_start(args, format);
vsnprintf(errbuf, 256, format, args);
}
const char *parameters_get_error() {
return errbuf;
}
bool parameters_unserialize(parameters_t *params, const uint8_t *buf, size_t buf_size) {
memset(params, 0, sizeof(parameters_t));
char *tmp = malloc(buf_size + 1);
memcpy(tmp, buf, buf_size);
tmp[buf_size] = 0x00;
while (true) {
char *entry = strsep(&tmp, " ");
if (entry == NULL) {
break;
}
char *key = strsep(&entry, ":");
char *val = strsep(&entry, ":");
if (strcmp(key, "LogLevel") == 0) {
params->log_level = base64_decode(val);
} else if (strcmp(key, "CameraID") == 0) {
params->camera_id = atoi(val);
} else if (strcmp(key, "Width") == 0) {
params->width = atoi(val);
} else if (strcmp(key, "Height") == 0) {
params->height = atoi(val);
} else if (strcmp(key, "HFlip") == 0) {
params->h_flip = (strcmp(val, "1") == 0);
} else if (strcmp(key, "VFlip") == 0) {
params->v_flip = (strcmp(val, "1") == 0);
} else if (strcmp(key, "Brightness") == 0) {
params->brightness = atof(val);
} else if (strcmp(key, "Contrast") == 0) {
params->contrast = atof(val);
} else if (strcmp(key, "Saturation") == 0) {
params->saturation = atof(val);
} else if (strcmp(key, "Sharpness") == 0) {
params->sharpness = atof(val);
} else if (strcmp(key, "Exposure") == 0) {
params->exposure = base64_decode(val);
} else if (strcmp(key, "AWB") == 0) {
params->awb = base64_decode(val);
} else if (strcmp(key, "AWBGainRed") == 0) {
params->awb_gain_red = atof(val);
} else if (strcmp(key, "AWBGainBlue") == 0) {
params->awb_gain_blue = atof(val);
} else if (strcmp(key, "Denoise") == 0) {
params->denoise = base64_decode(val);
} else if (strcmp(key, "Shutter") == 0) {
params->shutter = atoi(val);
} else if (strcmp(key, "Metering") == 0) {
params->metering = base64_decode(val);
} else if (strcmp(key, "Gain") == 0) {
params->gain = atof(val);
} else if (strcmp(key, "EV") == 0) {
params->ev = atof(val);
} else if (strcmp(key, "ROI") == 0) {
char *decoded_val = base64_decode(val);
if (strlen(decoded_val) != 0) {
params->roi = malloc(sizeof(window_t));
bool ok = window_load(decoded_val, params->roi);
if (!ok) {
set_error("invalid ROI");
free(decoded_val);
goto failed;
}
}
free(decoded_val);
} else if (strcmp(key, "HDR") == 0) {
params->hdr = (strcmp(val, "1") == 0);
} else if (strcmp(key, "TuningFile") == 0) {
params->tuning_file = base64_decode(val);
} else if (strcmp(key, "Mode") == 0) {
char *decoded_val = base64_decode(val);
if (strlen(decoded_val) != 0) {
params->mode = malloc(sizeof(sensor_mode_t));
bool ok = sensor_mode_load(decoded_val, params->mode);
if (!ok) {
set_error("invalid sensor mode");
free(decoded_val);
goto failed;
}
}
free(decoded_val);
} else if (strcmp(key, "FPS") == 0) {
params->fps = atof(val);
} else if (strcmp(key, "IDRPeriod") == 0) {
params->idr_period = atoi(val);
} else if (strcmp(key, "Bitrate") == 0) {
params->bitrate = atoi(val);
} else if (strcmp(key, "Profile") == 0) {
char *decoded_val = base64_decode(val);
if (strcmp(decoded_val, "baseline") == 0) {
params->profile = V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE;
} else if (strcmp(decoded_val, "main") == 0) {
params->profile = V4L2_MPEG_VIDEO_H264_PROFILE_MAIN;
} else {
params->profile = V4L2_MPEG_VIDEO_H264_PROFILE_HIGH;
}
free(decoded_val);
} else if (strcmp(key, "Level") == 0) {
char *decoded_val = base64_decode(val);
if (strcmp(decoded_val, "4.0") == 0) {
params->level = V4L2_MPEG_VIDEO_H264_LEVEL_4_0;
} else if (strcmp(decoded_val, "4.1") == 0) {
params->level = V4L2_MPEG_VIDEO_H264_LEVEL_4_1;
} else {
params->level = V4L2_MPEG_VIDEO_H264_LEVEL_4_2;
}
free(decoded_val);
} else if (strcmp(key, "AfMode") == 0) {
params->af_mode = base64_decode(val);
} else if (strcmp(key, "AfRange") == 0) {
params->af_range = base64_decode(val);
} else if (strcmp(key, "AfSpeed") == 0) {
params->af_speed = base64_decode(val);
} else if (strcmp(key, "LensPosition") == 0) {
params->lens_position = atof(val);
} else if (strcmp(key, "AfWindow") == 0) {
char *decoded_val = base64_decode(val);
if (strlen(decoded_val) != 0) {
params->af_window = malloc(sizeof(window_t));
bool ok = window_load(decoded_val, params->af_window);
if (!ok) {
set_error("invalid AfWindow");
free(decoded_val);
goto failed;
}
}
free(decoded_val);
} else if (strcmp(key, "TextOverlayEnable") == 0) {
params->text_overlay_enable = (strcmp(val, "1") == 0);
} else if (strcmp(key, "TextOverlay") == 0) {
params->text_overlay = base64_decode(val);
}
}
free(tmp);
params->buffer_count = 6;
params->capture_buffer_count = params->buffer_count * 2;
return true;
failed:
free(tmp);
parameters_destroy(params);
return false;
}
void parameters_destroy(parameters_t *params) {
if (params->exposure != NULL) {
free(params->exposure);
}
if (params->awb != NULL) {
free(params->awb);
}
if (params->denoise != NULL) {
free(params->denoise);
}
if (params->metering != NULL) {
free(params->metering);
}
if (params->roi != NULL) {
free(params->roi);
}
if (params->tuning_file != NULL) {
free(params->tuning_file);
}
if (params->mode != NULL) {
free(params->mode);
}
if (params->af_mode != NULL) {
free(params->af_mode);
}
if (params->af_range != NULL) {
free(params->af_range);
}
if (params->af_speed != NULL) {
free(params->af_speed);
}
if (params->af_window != NULL) {
free(params->af_window);
}
if (params->text_overlay != NULL) {
free(params->text_overlay);
}
}

View File

@ -1,64 +0,0 @@
#ifndef __PARAMETERS_H__
#define __PARAMETERS_H__
#include <stdint.h>
#include <stdbool.h>
#include "window.h"
#include "sensor_mode.h"
typedef struct {
char *log_level;
unsigned int camera_id;
unsigned int width;
unsigned int height;
bool h_flip;
bool v_flip;
float brightness;
float contrast;
float saturation;
float sharpness;
char *exposure;
char *awb;
float awb_gain_red;
float awb_gain_blue;
char *denoise;
unsigned int shutter;
char *metering;
float gain;
float ev;
window_t *roi;
bool hdr;
char *tuning_file;
sensor_mode_t *mode;
float fps;
unsigned int idr_period;
unsigned int bitrate;
unsigned int profile;
unsigned int level;
char *af_mode;
char *af_range;
char *af_speed;
float lens_position;
window_t *af_window;
bool text_overlay_enable;
char *text_overlay;
// private
unsigned int buffer_count;
unsigned int capture_buffer_count;
} parameters_t;
#ifdef __cplusplus
extern "C" {
#endif
const char *parameters_get_error();
bool parameters_unserialize(parameters_t *params, const uint8_t *buf, size_t buf_size);
void parameters_destroy(parameters_t *params);
#ifdef __cplusplus
}
#endif
#endif

View File

@ -1,44 +0,0 @@
#include <stdarg.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include "pipe.h"
void pipe_write_error(int fd, const char *format, ...) {
char buf[256];
buf[0] = 'e';
va_list args;
va_start(args, format);
vsnprintf(&buf[1], 255, format, args);
uint32_t n = strlen(buf);
write(fd, &n, 4);
write(fd, buf, n);
}
void pipe_write_ready(int fd) {
char buf[] = {'r'};
uint32_t n = 1;
write(fd, &n, 4);
write(fd, buf, n);
}
void pipe_write_buf(int fd, uint64_t ts, const uint8_t *buf, uint32_t n) {
char head[] = {'b'};
n += 1 + sizeof(uint64_t);
write(fd, &n, 4);
write(fd, head, 1);
write(fd, &ts, sizeof(uint64_t));
write(fd, buf, n - 1 - sizeof(uint64_t));
}
uint32_t pipe_read(int fd, uint8_t **pbuf) {
uint32_t n;
read(fd, &n, 4);
*pbuf = malloc(n);
read(fd, *pbuf, n);
return n;
}

View File

@ -1,12 +0,0 @@
#ifndef __PIPE_H__
#define __PIPE_H__
#include <stdbool.h>
#include <stdint.h>
void pipe_write_error(int fd, const char *format, ...);
void pipe_write_ready(int fd);
void pipe_write_buf(int fd, uint64_t ts, const uint8_t *buf, uint32_t n);
uint32_t pipe_read(int fd, uint8_t **pbuf);
#endif

View File

@ -1,27 +0,0 @@
#include <stdio.h>
#include <ctype.h>
#include <stdlib.h>
#include "sensor_mode.h"
bool sensor_mode_load(const char *encoded, sensor_mode_t *mode) {
char p;
int n = sscanf(encoded, "%u:%u:%u:%c", &(mode->width), &(mode->height), &(mode->bit_depth), &p);
if (n < 2) {
return false;
}
if (n < 4) {
mode->packed = true;
} else if (toupper(p) == 'P') {
mode->packed = true;
} else if (toupper(p) == 'U') {
mode->packed = false;
}
if (n < 3) {
mode->bit_depth = 12;
}
return true;
}

View File

@ -1,15 +0,0 @@
#ifndef __SENSOR_MODE_H__
#define __SENSOR_MODE_H__
#include <stdbool.h>
typedef struct {
int width;
int height;
int bit_depth;
bool packed;
} sensor_mode_t;
bool sensor_mode_load(const char *encoded, sensor_mode_t *mode);
#endif

View File

@ -1,171 +0,0 @@
#include <time.h>
#include <ft2build.h>
#include FT_FREETYPE_H
#include "text_font.h"
#include "text.h"
static char errbuf[256];
static void set_error(const char *format, ...) {
va_list args;
va_start(args, format);
vsnprintf(errbuf, 256, format, args);
}
const char *text_get_error() {
return errbuf;
}
typedef struct {
bool enabled;
char *text_overlay;
FT_Library library;
FT_Face face;
} text_priv_t;
bool text_create(const parameters_t *params, text_t **text) {
*text = malloc(sizeof(text_priv_t));
text_priv_t *textp = (text_priv_t *)(*text);
memset(textp, 0, sizeof(text_priv_t));
textp->enabled = params->text_overlay_enable;
textp->text_overlay = strdup(params->text_overlay);
if (textp->enabled) {
int error = FT_Init_FreeType(&textp->library);
if (error) {
set_error("FT_Init_FreeType() failed");
goto failed;
}
error = FT_New_Memory_Face(
textp->library,
text_font_ttf,
sizeof(text_font_ttf),
0,
&textp->face);
if (error) {
set_error("FT_New_Memory_Face() failed");
goto failed;
}
error = FT_Set_Pixel_Sizes(
textp->face,
25,
25);
if (error) {
set_error("FT_Set_Pixel_Sizes() failed");
goto failed;
}
}
return true;
failed:
free(textp);
return false;
}
static void draw_rect(uint8_t *buf, int stride, int height, int x, int y, unsigned int rect_width, unsigned int rect_height) {
uint8_t *Y = buf;
uint8_t *U = Y + stride * height;
uint8_t *V = U + (stride / 2) * (height / 2);
const uint8_t color[3] = {0, 128, 128};
uint32_t opacity = 45;
for (unsigned int src_y = 0; src_y < rect_height; src_y++) {
for (unsigned int src_x = 0; src_x < rect_width; src_x++) {
unsigned int dest_x = x + src_x;
unsigned int dest_y = y + src_y;
int i1 = dest_y*stride + dest_x;
int i2 = dest_y/2*stride/2 + dest_x/2;
Y[i1] = ((color[0] * opacity) + (uint32_t)Y[i1] * (255 - opacity)) / 255;
U[i2] = ((color[1] * opacity) + (uint32_t)U[i2] * (255 - opacity)) / 255;
V[i2] = ((color[2] * opacity) + (uint32_t)V[i2] * (255 - opacity)) / 255;
}
}
}
static void draw_bitmap(uint8_t *buf, int stride, int height, const FT_Bitmap *bitmap, int x, int y) {
uint8_t *Y = buf;
uint8_t *U = Y + stride * height;
uint8_t *V = U + (stride / 2) * (height / 2);
for (unsigned int src_y = 0; src_y < bitmap->rows; src_y++) {
for (unsigned int src_x = 0; src_x < bitmap->width; src_x++) {
uint8_t v = bitmap->buffer[src_y*bitmap->pitch + src_x];
if (v != 0) {
unsigned int dest_x = x + src_x;
unsigned int dest_y = y + src_y;
int i1 = dest_y*stride + dest_x;
int i2 = dest_y/2*stride/2 + dest_x/2;
uint32_t opacity = (uint32_t)v;
Y[i1] = (uint8_t)(((uint32_t)v * opacity + (uint32_t)Y[i1] * (255 - opacity)) / 255);
U[i2] = (uint8_t)((128 * opacity + (uint32_t)U[i2] * (255 - opacity)) / 255);
V[i2] = (uint8_t)((128 * opacity + (uint32_t)V[i2] * (255 - opacity)) / 255);
}
}
}
}
static int get_text_width(FT_Face face, const char *text) {
int ret = 0;
for (const char *ptr = text; *ptr != 0x00; ptr++) {
int error = FT_Load_Char(face, *ptr, FT_LOAD_RENDER);
if (error) {
continue;
}
ret += face->glyph->advance.x >> 6;
}
return ret;
}
void text_draw(text_t *text, uint8_t *buf, int stride, int height) {
text_priv_t *textp = (text_priv_t *)text;
if (textp->enabled) {
time_t timer = time(NULL);
struct tm *tm_info = localtime(&timer);
char buffer[255];
memset(buffer, 0, sizeof(buffer));
strftime(buffer, 255, textp->text_overlay, tm_info);
draw_rect(
buf,
stride,
height,
7,
7,
get_text_width(textp->face, buffer) + 10,
34);
int x = 12;
int y = 33;
for (const char *ptr = buffer; *ptr != 0x00; ptr++) {
int error = FT_Load_Char(textp->face, *ptr, FT_LOAD_RENDER);
if (error) {
continue;
}
draw_bitmap(
buf,
stride,
height,
&textp->face->glyph->bitmap,
x + textp->face->glyph->bitmap_left,
y - textp->face->glyph->bitmap_top);
x += textp->face->glyph->advance.x >> 6;
}
}
}

View File

@ -1,15 +0,0 @@
#ifndef __TEXT_H__
#define __TEXT_H__
#include <stdint.h>
#include <stdbool.h>
#include "parameters.h"
typedef void text_t;
const char *text_get_error();
bool text_create(const parameters_t *params, text_t **text);
void text_draw(text_t *text, uint8_t *buf, int stride, int height);
#endif

View File

@ -1,30 +0,0 @@
#include <stdlib.h>
#include <string.h>
#include "window.h"
bool window_load(const char *encoded, window_t *window) {
float vals[4];
int i = 0;
char *token = strtok((char *)encoded, ",");
while (token != NULL) {
vals[i] = atof(token);
if (vals[i] < 0 || vals[i] > 1) {
return false;
}
i++;
token = strtok(NULL, ",");
}
if (i != 4) {
return false;
}
window->x = vals[0];
window->y = vals[1];
window->width = vals[2];
window->height = vals[3];
return true;
}

View File

@ -1,15 +0,0 @@
#ifndef __WINDOW_H__
#define __WINDOW_H__
#include <stdbool.h>
typedef struct {
float x;
float y;
float width;
float height;
} window_t;
bool window_load(const char *encoded, window_t *window);
#endif

View File

@ -1,33 +0,0 @@
//go:build !rpicamera
// +build !rpicamera
// Package rpicamera allows to interact with a Raspberry Pi Camera.
package rpicamera
import (
"fmt"
"time"
)
// Cleanup cleanups files created by the camera implementation.
func Cleanup() {
}
// RPICamera is a RPI Camera reader.
type RPICamera struct {
Params Params
OnData func(time.Duration, [][]byte)
}
// Initialize initializes a RPICamera.
func (c *RPICamera) Initialize() error {
return fmt.Errorf("server was compiled without support for the Raspberry Pi Camera")
}
// Close closes a RPICamera.
func (c *RPICamera) Close() {
}
// ReloadParams reloads the camera parameters.
func (c *RPICamera) ReloadParams(_ Params) {
}

View File

@ -20,7 +20,6 @@ func do() error {
if err != nil {
return err
}
version := strings.TrimSpace(string(buf))
log.Printf("downloading hls.js version %s...", version)
@ -40,12 +39,11 @@ func do() error {
return err
}
hashBuf, err := os.ReadFile("./hlsjsdownloader/HASH") buf, err = os.ReadFile("./hlsjsdownloader/HASH")
if err != nil {
return err
}
str := strings.TrimSpace(string(buf))
str := strings.TrimSpace(string(hashBuf))
hash, err := hex.DecodeString(str)
if err != nil {

View File

@ -1,11 +1,10 @@
//go:build rpicamera //go:build (linux && arm) || (linux && arm64)
// +build rpicamera // +build linux,arm linux,arm64
package rpicamera
import (
"debug/elf"
_ "embed"
"fmt" "fmt"
"os" "os"
"os/exec" "os/exec"
@ -19,12 +18,9 @@ import (
)
const (
tempPathPrefix = "/dev/shm/rtspss-embeddedexe-" tempPathPrefix = "/dev/shm/mediamtx-rpicamera-"
)
//go:embed exe/exe
var exeContent []byte
func startEmbeddedExe(content []byte, env []string) (*exec.Cmd, error) {
tempPath := tempPathPrefix + strconv.FormatInt(time.Now().UnixNano(), 10)
@ -111,9 +107,8 @@ func checkLibraries64Bit() error {
return nil
}
// RPICamera is a RPI Camera reader. type camera struct {
type RPICamera struct { Params params
Params Params
OnData func(time.Duration, [][]byte)
cmd *exec.Cmd
@ -124,8 +119,7 @@ type RPICamera struct {
readerDone chan error
}
// Initialize initializes a RPICamera. func (c *camera) initialize() error {
func (c *RPICamera) Initialize() error {
if runtime.GOARCH == "arm" {
err := checkLibraries64Bit()
if err != nil {
@ -150,7 +144,7 @@ func (c *RPICamera) Initialize() error {
"PIPE_VIDEO_FD=" + strconv.FormatInt(int64(c.pipeVideo.writeFD), 10), "PIPE_VIDEO_FD=" + strconv.FormatInt(int64(c.pipeVideo.writeFD), 10),
} }
c.cmd, err = startEmbeddedExe(exeContent, env) c.cmd, err = startEmbeddedExe(component, env)
if err != nil {
c.pipeConf.close()
c.pipeVideo.close()
@ -194,7 +188,7 @@ func (c *RPICamera) Initialize() error {
return nil
}
func (c *RPICamera) Close() { func (c *camera) close() {
c.pipeConf.write([]byte{'e'})
<-c.waitDone
c.pipeConf.close()
@ -202,11 +196,11 @@ func (c *RPICamera) Close() {
<-c.readerDone
}
func (c *RPICamera) ReloadParams(params Params) { func (c *camera) reloadParams(params params) {
c.pipeConf.write(append([]byte{'c'}, params.serialize()...))
}
func (c *RPICamera) readReady() error { func (c *camera) readReady() error {
buf, err := c.pipeVideo.read()
if err != nil {
return err
@ -224,7 +218,7 @@ func (c *RPICamera) readReady() error {
}
}
func (c *RPICamera) readData() error { func (c *camera) readData() error {
for {
buf, err := c.pipeVideo.read()
if err != nil {

View File

@ -0,0 +1,24 @@
//go:build !linux || (!arm && !arm64)
// +build !linux !arm,!arm64
package rpicamera
import (
"fmt"
"time"
)
type camera struct {
Params params
OnData func(time.Duration, [][]byte)
}
func (c *camera) initialize() error {
return fmt.Errorf("server was compiled without support for the Raspberry Pi Camera")
}
func (c *camera) close() {
}
func (c *camera) reloadParams(_ params) {
}

View File

@ -0,0 +1,3 @@
package rpicamera
//go:generate go run ./mtxrpicamdownloader

View File

@ -0,0 +1,11 @@
//go:build linux && arm
// +build linux,arm
package rpicamera
import (
_ "embed"
)
//go:embed mtxrpicam_32
var component []byte

View File

@ -0,0 +1,11 @@
//go:build linux && arm64
// +build linux,arm64
package rpicamera
import (
_ "embed"
)
//go:embed mtxrpicam_64
var component []byte
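
On 32-bit and 64-bit ARM Linux the prebuilt mtxrpicam component is embedded into the server via go:embed and executed at runtime. The diff above only shows fragments of startEmbeddedExe, so the following is a minimal, hypothetical sketch of that write-to-/dev/shm-and-exec pattern (simplified names and error handling, not the exact implementation):

```go
// Sketch only: dump an embedded executable to a unique temporary path and
// start it. The real startEmbeddedExe also passes PIPE_CONF_FD/PIPE_VIDEO_FD
// through the environment so the child process can talk back over the pipes.
package rpicamera

import (
	"os"
	"os/exec"
	"strconv"
	"time"
)

func runEmbedded(content []byte, env []string) (*exec.Cmd, error) {
	tempPath := "/dev/shm/mediamtx-rpicamera-" + strconv.FormatInt(time.Now().UnixNano(), 10)

	if err := os.WriteFile(tempPath, content, 0o755); err != nil {
		return nil, err
	}

	cmd := exec.Command(tempPath)
	cmd.Env = append(os.Environ(), env...)
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr

	if err := cmd.Start(); err != nil {
		os.Remove(tempPath)
		return nil, err
	}

	// Once the process is running, the file can be unlinked; the kernel keeps
	// the executable mapping alive until the process exits.
	os.Remove(tempPath)
	return cmd, nil
}
```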

View File

@ -0,0 +1 @@
v1.0.0

View File

@ -0,0 +1,53 @@
// Package main contains a utility to download mediamtx-rpicamera
package main
import (
"fmt"
"io"
"log"
"net/http"
"os"
"strings"
)
func do() error {
buf, err := os.ReadFile("./mtxrpicamdownloader/VERSION")
if err != nil {
return err
}
version := strings.TrimSpace(string(buf))
log.Printf("downloading mediamtx-rpicamera version %s...", version)
for _, f := range []string{"mtxrpicam_32", "mtxrpicam_64"} {
res, err := http.Get("https://github.com/bluenviron/mediamtx-rpicamera/releases/download/" + version + "/" + f)
if err != nil {
return err
}
defer res.Body.Close()
if res.StatusCode != http.StatusOK {
return fmt.Errorf("bad status code: %v", res.StatusCode)
}
buf, err := io.ReadAll(res.Body)
if err != nil {
return err
}
if err = os.WriteFile(f, buf, 0o644); err != nil {
return err
}
}
log.Println("ok")
return nil
}
func main() {
err := do()
if err != nil {
log.Printf("ERR: %v", err)
os.Exit(1)
}
}

View File

@ -1,14 +1,6 @@
package rpicamera
import ( type params struct {
"encoding/base64"
"reflect"
"strconv"
"strings"
)
// Params is a set of camera parameters.
type Params struct {
LogLevel string
CameraID int
Width int
@ -45,40 +37,3 @@ type Params struct {
TextOverlayEnable bool
TextOverlay string
}
func (p Params) serialize() []byte { //nolint:unused
rv := reflect.ValueOf(p)
rt := rv.Type()
nf := rv.NumField()
ret := make([]string, nf)
for i := 0; i < nf; i++ {
entry := rt.Field(i).Name + ":"
f := rv.Field(i)
switch f.Kind() {
case reflect.Int:
entry += strconv.FormatInt(f.Int(), 10)
case reflect.Float64:
entry += strconv.FormatFloat(f.Float(), 'f', -1, 64)
case reflect.String:
entry += base64.StdEncoding.EncodeToString([]byte(f.String()))
case reflect.Bool:
if f.Bool() {
entry += "1"
} else {
entry += "0"
}
default:
panic("unhandled type")
}
ret[i] = entry
}
return []byte(strings.Join(ret, " "))
}

View File

@ -0,0 +1,48 @@
//go:build (linux && arm) || (linux && arm64)
// +build linux,arm linux,arm64
package rpicamera
import (
"encoding/base64"
"reflect"
"strconv"
"strings"
)
func (p params) serialize() []byte {
rv := reflect.ValueOf(p)
rt := rv.Type()
nf := rv.NumField()
ret := make([]string, nf)
for i := 0; i < nf; i++ {
entry := rt.Field(i).Name + ":"
f := rv.Field(i)
switch f.Kind() {
case reflect.Int:
entry += strconv.FormatInt(f.Int(), 10)
case reflect.Float64:
entry += strconv.FormatFloat(f.Float(), 'f', -1, 64)
case reflect.String:
entry += base64.StdEncoding.EncodeToString([]byte(f.String()))
case reflect.Bool:
if f.Bool() {
entry += "1"
} else {
entry += "0"
}
default:
panic("unhandled type")
}
ret[i] = entry
}
return []byte(strings.Join(ret, " "))
}
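
serialize() flattens the params struct into space-separated "Name:value" entries, with strings base64-encoded, numbers printed as decimal text, and booleans as "0"/"1"; parameters_unserialize() on the C side splits the same format back apart. A small, self-contained illustration of that wire format (hypothetical values, not taken from the repository):

```go
// Illustrative only: builds a fragment of the "Name:value" string that
// params.serialize() produces and the C-side parameters_unserialize() parses.
package main

import (
	"encoding/base64"
	"fmt"
	"strings"
)

func main() {
	entries := []string{
		"LogLevel:" + base64.StdEncoding.EncodeToString([]byte("info")), // strings are base64-encoded
		"Width:1920",  // integers are written as decimal text
		"Height:1080",
		"HFlip:0", // booleans become "0" or "1"
	}
	fmt.Println(strings.Join(entries, " "))
	// Prints: LogLevel:aW5mbw== Width:1920 Height:1080 HFlip:0
}
```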

View File

@ -1,5 +1,5 @@
//go:build rpicamera //go:build (linux && arm) || (linux && arm64)
// +build rpicamera // +build linux,arm linux,arm64
package rpicamera

View File

@ -10,13 +10,12 @@ import (
"github.com/bluenviron/mediamtx/internal/conf" "github.com/bluenviron/mediamtx/internal/conf"
"github.com/bluenviron/mediamtx/internal/defs" "github.com/bluenviron/mediamtx/internal/defs"
"github.com/bluenviron/mediamtx/internal/logger" "github.com/bluenviron/mediamtx/internal/logger"
"github.com/bluenviron/mediamtx/internal/protocols/rpicamera"
"github.com/bluenviron/mediamtx/internal/stream" "github.com/bluenviron/mediamtx/internal/stream"
"github.com/bluenviron/mediamtx/internal/unit" "github.com/bluenviron/mediamtx/internal/unit"
) )
func paramsFromConf(logLevel conf.LogLevel, cnf *conf.Path) rpicamera.Params { func paramsFromConf(logLevel conf.LogLevel, cnf *conf.Path) params {
return rpicamera.Params{ return params{
LogLevel: func() string {
switch logLevel {
case conf.LogLevel(logger.Debug):
@ -110,15 +109,15 @@ func (s *Source) Run(params defs.StaticSourceRunParams) error {
})
}
cam := &rpicamera.RPICamera{ cam := &camera{
Params: paramsFromConf(s.LogLevel, params.Conf),
OnData: onData,
}
err := cam.Initialize() err := cam.initialize()
if err != nil {
return err
}
defer cam.Close() defer cam.close()
defer func() {
if stream != nil {
@ -129,7 +128,7 @@ func (s *Source) Run(params defs.StaticSourceRunParams) error {
for {
select {
case cnf := <-params.ReloadConf:
cam.ReloadParams(paramsFromConf(s.LogLevel, cnf)) cam.reloadParams(paramsFromConf(s.LogLevel, cnf))
case <-params.Context.Done():
return nil

View File

@ -1,20 +1,6 @@
BINARY_NAME = mediamtx
define DOCKERFILE_BINARIES define DOCKERFILE_BINARIES
FROM $(RPI32_IMAGE) AS rpicamera32
RUN ["cross-build-start"]
RUN apt update && apt install -y --no-install-recommends g++ pkg-config make libcamera-dev libfreetype-dev xxd wget
WORKDIR /s/internal/protocols/rpicamera/exe
COPY internal/protocols/rpicamera/exe .
RUN make -j$$(nproc)
FROM $(RPI64_IMAGE) AS rpicamera64
RUN ["cross-build-start"]
RUN apt update && apt install -y --no-install-recommends g++ pkg-config make libcamera-dev libfreetype-dev xxd wget
WORKDIR /s/internal/protocols/rpicamera/exe
COPY internal/protocols/rpicamera/exe .
RUN make -j$$(nproc)
FROM $(BASE_IMAGE) AS build-base
RUN apk add --no-cache zip make git tar
WORKDIR /s
@ -29,38 +15,39 @@ RUN cp mediamtx.yml LICENSE tmp/
RUN go generate ./...
FROM build-base AS build-windows-amd64
RUN GOOS=windows GOARCH=amd64 go build -ldflags "-X github.com/bluenviron/mediamtx/internal/core.version=$$VERSION" -o tmp/$(BINARY_NAME).exe ENV GOOS=windows GOARCH=amd64
RUN go build -ldflags "-X github.com/bluenviron/mediamtx/internal/core.version=$$VERSION" -o tmp/$(BINARY_NAME).exe
RUN cd tmp && zip -q ../binaries/$(BINARY_NAME)_$${VERSION}_windows_amd64.zip $(BINARY_NAME).exe mediamtx.yml LICENSE
FROM build-base AS build-linux-amd64
RUN GOOS=linux GOARCH=amd64 go build -ldflags "-X github.com/bluenviron/mediamtx/internal/core.version=$$VERSION" -o tmp/$(BINARY_NAME) ENV GOOS=linux GOARCH=amd64
RUN go build -ldflags "-X github.com/bluenviron/mediamtx/internal/core.version=$$VERSION" -o tmp/$(BINARY_NAME)
RUN tar -C tmp -czf binaries/$(BINARY_NAME)_$${VERSION}_linux_amd64.tar.gz --owner=0 --group=0 $(BINARY_NAME) mediamtx.yml LICENSE
FROM build-base AS build-darwin-amd64
RUN GOOS=darwin GOARCH=amd64 go build -ldflags "-X github.com/bluenviron/mediamtx/internal/core.version=$$VERSION" -o tmp/$(BINARY_NAME) ENV GOOS=darwin GOARCH=amd64
RUN go build -ldflags "-X github.com/bluenviron/mediamtx/internal/core.version=$$VERSION" -o tmp/$(BINARY_NAME)
RUN tar -C tmp -czf binaries/$(BINARY_NAME)_$${VERSION}_darwin_amd64.tar.gz --owner=0 --group=0 $(BINARY_NAME) mediamtx.yml LICENSE
FROM build-base AS build-darwin-arm64
RUN GOOS=darwin GOARCH=arm64 go build -ldflags "-X github.com/bluenviron/mediamtx/internal/core.version=$$VERSION" -o tmp/$(BINARY_NAME) ENV GOOS=darwin GOARCH=arm64
RUN go build -ldflags "-X github.com/bluenviron/mediamtx/internal/core.version=$$VERSION" -o tmp/$(BINARY_NAME)
RUN tar -C tmp -czf binaries/$(BINARY_NAME)_$${VERSION}_darwin_arm64.tar.gz --owner=0 --group=0 $(BINARY_NAME) mediamtx.yml LICENSE
FROM build-base AS build-linux-armv6
COPY --from=rpicamera32 /s/internal/protocols/rpicamera/exe/exe internal/protocols/rpicamera/exe/ ENV GOOS=linux GOARCH=arm GOARM=6
RUN GOOS=linux GOARCH=arm GOARM=6 go build -ldflags "-X github.com/bluenviron/mediamtx/internal/core.version=$$VERSION" -o tmp/$(BINARY_NAME) -tags rpicamera RUN go build -ldflags "-X github.com/bluenviron/mediamtx/internal/core.version=$$VERSION" -o tmp/$(BINARY_NAME)
RUN tar -C tmp -czf binaries/$(BINARY_NAME)_$${VERSION}_linux_armv6.tar.gz --owner=0 --group=0 $(BINARY_NAME) mediamtx.yml LICENSE
RUN rm internal/protocols/rpicamera/exe/exe
FROM build-base AS build-linux-armv7
COPY --from=rpicamera32 /s/internal/protocols/rpicamera/exe/exe internal/protocols/rpicamera/exe/ ENV GOOS=linux GOARCH=arm GOARM=7
RUN GOOS=linux GOARCH=arm GOARM=7 go build -ldflags "-X github.com/bluenviron/mediamtx/internal/core.version=$$VERSION" -o tmp/$(BINARY_NAME) -tags rpicamera RUN go build -ldflags "-X github.com/bluenviron/mediamtx/internal/core.version=$$VERSION" -o tmp/$(BINARY_NAME)
RUN tar -C tmp -czf binaries/$(BINARY_NAME)_$${VERSION}_linux_armv7.tar.gz --owner=0 --group=0 $(BINARY_NAME) mediamtx.yml LICENSE
RUN rm internal/protocols/rpicamera/exe/exe
FROM build-base AS build-linux-arm64
COPY --from=rpicamera64 /s/internal/protocols/rpicamera/exe/exe internal/protocols/rpicamera/exe/ ENV GOOS=linux GOARCH=arm64
RUN GOOS=linux GOARCH=arm64 go build -ldflags "-X github.com/bluenviron/mediamtx/internal/core.version=$$VERSION" -o tmp/$(BINARY_NAME) -tags rpicamera RUN go build -ldflags "-X github.com/bluenviron/mediamtx/internal/core.version=$$VERSION" -o tmp/$(BINARY_NAME)
RUN tar -C tmp -czf binaries/$(BINARY_NAME)_$${VERSION}_linux_arm64v8.tar.gz --owner=0 --group=0 $(BINARY_NAME) mediamtx.yml LICENSE
RUN rm internal/protocols/rpicamera/exe/exe
FROM $(BASE_IMAGE)
COPY --from=build-windows-amd64 /s/binaries /s/binaries