#include "process_pipeline.h"
|
|
|
|
#include "gles2_debayer.h"
|
|
#include "io_pipeline.h"
|
|
#include "main.h"
|
|
#include "pipeline.h"
|
|
#include "state.h"
|
|
#include "zbar_pipeline.h"
|
|
#include <assert.h>
|
|
#include <gtk/gtk.h>
|
|
#include <math.h>
|
|
#ifndef SYSCONFDIR
|
|
#include "config.h"
|
|
#endif
|
|
#include "medianame.h"
|
|
|
|
#include "dcp.h"
|
|
#include "gl_util.h"
|
|
#include "libdng.h"
|
|
#include <jpeglib.h>
|
|
#include <sys/mman.h>
|
|
#include <sys/prctl.h>
|
|
#include <sys/time.h>
|
|
|
|
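// Standard CIE XYZ (D65) to linear sRGB conversion matrix, stored row-major.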
static const float colormatrix_srgb[] = { 3.2409f, -1.5373f, -0.4986f,
					  -0.9692f, 1.8759f, 0.0415f,
					  0.0556f,  -0.2039f, 1.0569f };

static MPPipeline *pipeline;
mp_state_proc state_proc;

static char burst_dir[255];

static volatile bool is_capturing = false;
static volatile int frames_processed = 0;
static volatile int frames_received = 0;

libmegapixels_camera *pr_camera;

static int output_buffer_width = -1;
static int output_buffer_height = -1;

static bool flash_enabled;
static int framecounter = 0;

static char capture_fname[255], movie_script[255];

static GSettings *settings;

void
mp_process_find_all_processors(GtkListStore *store)
{
        GtkTreeIter iter;
        char buffer[512];
        // Find all the original postprocess.sh locations

        // Check postprocess.sh in the current working directory
        if (access("./data/postprocess.sh", F_OK) != -1) {
                gtk_list_store_insert(store, &iter, -1);
                gtk_list_store_set(store,
                                   &iter,
                                   0,
                                   "./data/postprocess.sh",
                                   1,
                                   "(cwd) postprocess.sh",
                                   -1);
        }

        // Check for a script in XDG_CONFIG_HOME
        sprintf(buffer, "%s/megapixels/postprocess.sh", g_get_user_config_dir());
        if (access(buffer, F_OK) != -1) {
                gtk_list_store_insert(store, &iter, -1);
                gtk_list_store_set(
                        store, &iter, 0, buffer, 1, "(user) postprocess.sh", -1);
        }

        // Check user overridden /etc/megapixels/postprocess.sh
        sprintf(buffer, "%s/megapixels/postprocess.sh", SYSCONFDIR);
        if (access(buffer, F_OK) != -1) {
                gtk_list_store_insert(store, &iter, -1);
                gtk_list_store_set(
                        store, &iter, 0, buffer, 1, "(system) postprocess.sh", -1);
        }

        // Check packaged /usr/share/megapixels/postprocess.sh
        sprintf(buffer, "%s/megapixels/postprocess.sh", DATADIR);
        if (access(buffer, F_OK) != -1) {
                gtk_list_store_insert(store, &iter, -1);
                gtk_list_store_set(
                        store, &iter, 0, buffer, 1, "(built-in) postprocess.sh", -1);
        }

        // Find extra packaged postprocessor scripts
        // These should be packaged in
        // /usr/share/megapixels/postprocessor.d/executable
        sprintf(buffer, "%s/megapixels/postprocessor.d", DATADIR);
        DIR *d;
        struct dirent *dir;
        d = opendir(buffer);
        if (d) {
                while ((dir = readdir(d)) != NULL) {
                        if (dir->d_name[0] == '.') {
                                continue;
                        }
                        sprintf(buffer,
                                "%s/megapixels/postprocessor.d/%s",
                                DATADIR,
                                dir->d_name);
                        gtk_list_store_insert(store, &iter, -1);
                        gtk_list_store_set(
                                store, &iter, 0, buffer, 1, dir->d_name, -1);
                }
                closedir(d);
        }
}

bool
mp_process_find_processor(char *script, char *filename)
{
        // Check postprocess.sh in the current working directory
        sprintf(script, "./data/%s", filename);
        if (access(script, F_OK) != -1) {
                sprintf(script, "./data/%s", filename);
                printf("Found postprocessor script at %s\n", script);
                return true;
        }

        // Check for a script in XDG_CONFIG_HOME
        sprintf(script, "%s/megapixels/%s", g_get_user_config_dir(), filename);
        if (access(script, F_OK) != -1) {
                printf("Found postprocessor script at %s\n", script);
                return true;
        }

        // Check user overridden /etc/megapixels/postprocessor.sh
        sprintf(script, "%s/megapixels/%s", SYSCONFDIR, filename);
        if (access(script, F_OK) != -1) {
                printf("Found postprocessor script at %s\n", script);
                return true;
        }

        // Check packaged /usr/share/megapixels/postprocessor.sh
        sprintf(script, "%s/megapixels/%s", DATADIR, filename);
        if (access(script, F_OK) != -1) {
                printf("Found postprocessor script at %s\n", script);
                return true;
        }

        return false;
}

static void setup_capture(void)
{
        char template[] = "/tmp/megapixels.XXXXXX";
        char *tempdir;
        tempdir = mkdtemp(template);

        if (tempdir == NULL) {
                g_printerr("Could not make capture directory %s\n", template);
                exit(EXIT_FAILURE);
        }

        strcpy(burst_dir, tempdir);
}

static void
setup(MPPipeline *pipeline, const void *data)
{
        libdng_init();
        settings = g_settings_new(APP_ID);
        prctl(PR_SET_NAME, "megapixels-pr", NULL, NULL, NULL);

        state_proc.mode_balance = AAA_BY_POST;
        state_proc.mode_exposure = AAA_BY_V4L2_CONTROLS;
        state_proc.mode_focus = AAA_DISABLED;

        if (!mp_process_find_processor(movie_script, "movie.sh")) {
                printf("movie.sh not found\n");
                exit(1);
        }
        setup_capture();
}

void
mp_process_pipeline_start()
{
        pipeline = mp_pipeline_new();
        mp_pipeline_invoke(pipeline, setup, NULL, 0);
        mp_zbar_pipeline_start();
}

void
mp_process_pipeline_stop()
{
        mp_pipeline_free(pipeline);
        mp_zbar_pipeline_stop();
}

void
mp_process_pipeline_sync()
{
        mp_pipeline_sync(pipeline);
}

#define NUM_BUFFERS 4

struct _MPProcessPipelineBuffer {
        GLuint texture_id;

        _Atomic(int) refcount;
};
static MPProcessPipelineBuffer output_buffers[NUM_BUFFERS];

void
mp_process_pipeline_buffer_ref(MPProcessPipelineBuffer *buf)
{
        ++buf->refcount;
}

void
mp_process_pipeline_buffer_unref(MPProcessPipelineBuffer *buf)
{
        --buf->refcount;
}

uint32_t
mp_process_pipeline_buffer_get_texture_id(MPProcessPipelineBuffer *buf)
{
        return buf->texture_id;
}

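// Repack a 10-bit raw frame from the sensor layout (4 pixels in 5 bytes with
// the low bits collected in the fifth byte, plus per-row padding) into a
// contiguous MSB-first 10-bit stream with the row padding stripped.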
static void
repack_image_sequencial(const uint8_t *src_buf,
                        uint8_t *dst_buf,
                        libmegapixels_mode *mode)
{
        uint16_t pixels[4];
        uint32_t row_length =
                libmegapixels_mode_width_to_bytes(mode->format, mode->width);
        uint32_t padding_bytes =
                libmegapixels_mode_width_to_padding(mode->format, mode->width);
        size_t si = 0;

        // Image data must be 10-bit packed
        assert(libmegapixels_format_bits_per_pixel(mode->format) == 10);

        /*
         * Repack 40 bits stored in sensor format into sequential format
         *
         * src_buf: 11111111 22222222 33333333 44444444 11223344 ...
         * dst_buf: 11111111 11222222 22223333 33333344 44444444 ...
         */
        for (size_t i = 0; i < row_length * mode->height; i += 5) {
                // Skip padding bytes in source buffer
                if (i && i % row_length == 0)
                        si += padding_bytes;

                /* Extract pixels from packed sensor format */
                pixels[0] = (src_buf[si] << 2) | (src_buf[si + 4] >> 6);
                pixels[1] = (src_buf[si + 1] << 2) | (src_buf[si + 4] >> 4 & 0x03);
                pixels[2] = (src_buf[si + 2] << 2) | (src_buf[si + 4] >> 2 & 0x03);
                pixels[3] = (src_buf[si + 3] << 2) | (src_buf[si + 4] & 0x03);

                /* Pack pixels into sequential format */
                dst_buf[i] = (pixels[0] >> 2 & 0xff);
                dst_buf[i + 1] = (pixels[0] << 6 & 0xff) | (pixels[1] >> 4 & 0x3f);
                dst_buf[i + 2] = (pixels[1] << 4 & 0xff) | (pixels[2] >> 6 & 0x0f);
                dst_buf[i + 3] = (pixels[2] << 2 & 0xff) | (pixels[3] >> 8 & 0x03);
                dst_buf[i + 4] = (pixels[3] & 0xff);

                si += 5;
        }
}

static GLES2Debayer *gles2_debayer = NULL;

static GdkGLContext *context;

// #define RENDERDOC

#ifdef RENDERDOC
#include <renderdoc/app.h>
extern RENDERDOC_API_1_1_2 *rdoc_api;
#endif

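// Runs on the processing thread: creates a GLES 2.0 context for the preview
// surface, allocates the NUM_BUFFERS preview output textures (sized later in
// on_output_changed()) and reports the GL version that was actually obtained.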
static void
init_gl(MPPipeline *pipeline, GdkSurface **surface)
{
        GError *error = NULL;
        context = gdk_surface_create_gl_context(*surface, &error);
        if (context == NULL) {
                printf("Failed to initialize OpenGL context: %s\n", error->message);
                g_clear_error(&error);
                return;
        }

        gdk_gl_context_set_use_es(context, true);
        gdk_gl_context_set_required_version(context, 2, 0);
        gdk_gl_context_set_forward_compatible(context, false);
#ifdef DEBUG
        gdk_gl_context_set_debug_enabled(context, true);
#else
        gdk_gl_context_set_debug_enabled(context, false);
#endif

        gdk_gl_context_realize(context, &error);
        if (error != NULL) {
                printf("Failed to create OpenGL context: %s\n", error->message);
                g_clear_object(&context);
                g_clear_error(&error);
                return;
        }

        gdk_gl_context_make_current(context);
        check_gl();

        // Make a VAO for OpenGL
        if (!gdk_gl_context_get_use_es(context)) {
                GLuint vao;
                glGenVertexArrays(1, &vao);
                glBindVertexArray(vao);
                check_gl();
        }

        glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
        check_gl();

        for (size_t i = 0; i < NUM_BUFFERS; ++i) {
                glGenTextures(1, &output_buffers[i].texture_id);
                glBindTexture(GL_TEXTURE_2D, output_buffers[i].texture_id);
                glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
                glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
        }

        glBindTexture(GL_TEXTURE_2D, 0);

        gboolean is_es = gdk_gl_context_get_use_es(context);
        int major, minor;
        gdk_gl_context_get_version(context, &major, &minor);

        printf("Initialized %s %d.%d\n",
               is_es ? "OpenGL ES" : "OpenGL",
               major,
               minor);
}

void
mp_process_pipeline_init_gl(GdkSurface *surface)
{
        mp_pipeline_invoke(pipeline,
                           (MPPipelineCallback)init_gl,
                           &surface,
                           sizeof(GdkSurface *));
}

float
clamp_float(float value, float min, float max)
{
        if (value > max)
                return max;

        if (value < min)
                return min;

        return value;
}

static void
clamp_control(controlstate *control)
{
        if (control->value_req > control->max) {
                control->value_req = control->max;
        }
}

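// Software 3A pass, run every few preview frames: reads back the middle third
// of the debayered preview, lets libmegapixels compute statistics on it, then
// nudges the black level, the exposure/gain controls (steps of roughly a
// quarter of the current value, minimum 4) and the R/B white-balance gains
// applied by the debayer shader.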
static void
process_aaa()
{
        bool auto_exposure =
                !state_proc.exposure.manual && state_proc.exposure.auto_control == 0;
        bool auto_focus =
                !state_proc.focus.manual && state_proc.focus.auto_control == 0;
        bool auto_balance = TRUE;
        if (!auto_exposure && !auto_focus && !auto_balance) {
                return;
        }

        int width = output_buffer_width;
        int height = output_buffer_height / 3;
        uint32_t *center = g_malloc_n(width * height * sizeof(uint32_t), 1);
        glReadPixels(0, height, width, height, GL_RGBA, GL_UNSIGNED_BYTE, center);

        libmegapixels_aaa_set_matrix(&state_proc.stats,
                                     state_proc.calibration.color_matrix_1,
                                     state_proc.calibration.color_matrix_2);
        libmegapixels_aaa_software_statistics(
                &state_proc.stats, center, width, height);
        g_free(center);

        state_proc.blacklevel -= (float)state_proc.stats.blacklevel * 0.001f;
        state_proc.blacklevel = clamp_float(state_proc.blacklevel, 0.0f, 0.07f);

        if (auto_exposure) {
                int direction = state_proc.stats.exposure;
                int step = 0;
                if (direction > 0) {
                        // Preview is too dark

                        // Try raising the exposure time first
                        if (state_proc.exposure.value < state_proc.exposure.max) {
                                step = state_proc.exposure.value / 4;
                                step = step < 4 ? 4 : step;
                                state_proc.exposure.value_req =
                                        state_proc.exposure.value +
                                        (step * direction);
                                printf("Expose + %d\n",
                                       state_proc.exposure.value_req);
                        } else {
                                // Raise sensor gain if exposure limit is hit
                                step = state_proc.gain.value / 4;
                                step = step < 4 ? 4 : step;
                                state_proc.gain.value_req =
                                        state_proc.gain.value + (step * direction);
                                printf("Gain + %d\n", state_proc.gain.value_req);
                        }
                } else if (direction < 0) {
                        // Preview is too bright

                        // Lower the sensor gain first to have less noise
                        if (state_proc.gain.value > 0) {
                                step = state_proc.gain.value / 4;
                                state_proc.gain.value_req =
                                        state_proc.gain.value + (step * direction);
                                printf("Gain - %d\n", state_proc.gain.value_req);
                        } else {
                                // Shorten the exposure time to go even darker
                                step = state_proc.exposure.value / 4;
                                state_proc.exposure.value_req =
                                        state_proc.exposure.value +
                                        (step * direction);
                                printf("Expose - %d\n",
                                       state_proc.exposure.value_req);
                        }
                }

                clamp_control(&state_proc.gain);
                clamp_control(&state_proc.exposure);
                mp_io_pipeline_set_control_int32(state_proc.gain.control,
                                                 state_proc.gain.value_req);
                mp_io_pipeline_set_control_int32(state_proc.exposure.control,
                                                 state_proc.exposure.value_req);
                state_proc.gain.value = state_proc.gain.value_req;
                state_proc.exposure.value = state_proc.exposure.value_req;
        }

        if (auto_balance) {
                float r = state_proc.stats.avg_r;
                float g = state_proc.stats.avg_g;
                float b = state_proc.stats.avg_b;

                // Revert the current gains set on the preview
                r /= state_proc.red;
                b /= state_proc.blue;

                float t = 2.0f;
                if (r < t && g < t && b < t) {
                        // Don't try to AWB on very dark frames
                } else {
                        // Calculate the new R/B gains based on the average color of
                        // the frame
                        float new_r = g / clamp_float(r, 1.0f, 999.0f);
                        float new_b = g / clamp_float(b, 1.0f, 999.0f);

                        state_proc.red = clamp_float(new_r, 0.01f, 4.0f);
                        state_proc.blue = clamp_float(new_b, 0.01f, 4.0f);
                }
        }

        gles2_debayer_set_shading(gles2_debayer,
                                  state_proc.red,
                                  state_proc.blue,
                                  state_proc.blacklevel);
}

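// Preview path: grab a free output buffer (refcount == 0), upload the raw
// frame as a luminance texture, debayer it on the GPU into that buffer and
// hand it to the main thread. Every third frame the 3A pass runs on the
// result, and on the last frame of a capture burst a vertically flipped
// read-back of the preview is returned as the thumbnail texture.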
static GdkTexture *
process_image_for_preview(const uint8_t *image)
{
#ifdef PROFILE_DEBAYER
        clock_t t1 = clock();
#endif

        // Pick an available buffer
        MPProcessPipelineBuffer *output_buffer = NULL;
        for (size_t i = 0; i < NUM_BUFFERS; ++i) {
                if (output_buffers[i].refcount == 0) {
                        output_buffer = &output_buffers[i];
                }
        }

        if (output_buffer == NULL) {
                return NULL;
        }
        assert(output_buffer != NULL);

#ifdef RENDERDOC
        if (rdoc_api) {
                rdoc_api->StartFrameCapture(NULL, NULL);
        }
#endif

        // Copy image to a GL texture. TODO: This can be avoided
        GLuint input_texture;
        glGenTextures(1, &input_texture);
        glBindTexture(GL_TEXTURE_2D, input_texture);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        glTexImage2D(
                GL_TEXTURE_2D,
                0,
                GL_LUMINANCE,
                libmegapixels_mode_width_to_bytes(state_proc.mode->format,
                                                  state_proc.mode->width) +
                        libmegapixels_mode_width_to_padding(state_proc.mode->format,
                                                            state_proc.mode->width),
                state_proc.mode->height,
                0,
                GL_LUMINANCE,
                GL_UNSIGNED_BYTE,
                image);
        check_gl();

        gles2_debayer_process(
                gles2_debayer, output_buffer->texture_id, input_texture);
        check_gl();

        glFinish();

        glDeleteTextures(1, &input_texture);

#ifdef PROFILE_DEBAYER
        clock_t t2 = clock();
        printf("process_image_for_preview %fms\n",
               (float)(t2 - t1) / CLOCKS_PER_SEC * 1000);
#endif

#ifdef RENDERDOC
        if (rdoc_api) {
                rdoc_api->EndFrameCapture(NULL, NULL);
        }
#endif

        mp_process_pipeline_buffer_ref(output_buffer);
        mp_main_set_preview(output_buffer);

        if (framecounter++ == 2) {
                framecounter = 0;
                process_aaa();
        }

        // Create a thumbnail from the preview for the last capture
        GdkTexture *thumb = NULL;
        if (state_proc.captures_remaining == 1) {
                printf("Making thumbnail\n");

                size_t size = output_buffer_width * output_buffer_height *
                              sizeof(uint32_t);

                uint32_t *data = g_malloc_n(size, 1);

                glReadPixels(0,
                             0,
                             output_buffer_width,
                             output_buffer_height,
                             GL_RGBA,
                             GL_UNSIGNED_BYTE,
                             data);
                check_gl();

                // Flip vertically
                for (size_t y = 0; y < output_buffer_height / 2; ++y) {
                        for (size_t x = 0; x < output_buffer_width; ++x) {
                                uint32_t tmp = data[(output_buffer_height - y - 1) *
                                                            output_buffer_width +
                                                    x];
                                data[(output_buffer_height - y - 1) *
                                             output_buffer_width +
                                     x] = data[y * output_buffer_width + x];
                                data[y * output_buffer_width + x] = tmp;
                        }
                }

                thumb = gdk_memory_texture_new(output_buffer_width,
                                               output_buffer_height,
                                               GDK_MEMORY_R8G8B8A8,
                                               g_bytes_new_take(data, size),
                                               output_buffer_width *
                                                       sizeof(uint32_t));
        }

        return thumb;
}

static void
format_timestamp(char *timestamp)
{
        static char capture_fname[255];
        time_t rawtime;
        time(&rawtime);
        struct tm tim = *(localtime(&rawtime));

        strftime(timestamp, 30, "%Y%m%d%H%M%S", &tim);
}

static void
format_movie_name(char *capture_fname)
{
        char timestamp[30];
        format_timestamp(timestamp);

        if (g_get_user_special_dir(G_USER_DIRECTORY_VIDEOS) != NULL) {
                sprintf(capture_fname,
                        "%s/VID%s.mp4",
                        g_get_user_special_dir(G_USER_DIRECTORY_VIDEOS),
                        timestamp);
        } else if (getenv("XDG_VIDEOS_DIR") != NULL) {
                sprintf(capture_fname,
                        "%s/VID%s.mp4",
                        getenv("XDG_VIDEOS_DIR"),
                        timestamp);
        } else {
                sprintf(capture_fname,
                        "%s/Videos/VID%s.mp4",
                        getenv("HOME"),
                        timestamp);
        }
}

int movie_recording;
static char movie_fname[255];
static char stdout_buf[1024];

static void on_read_complete(GObject *source_object, GAsyncResult *res, gpointer user_data) {
        GInputStream *stream = G_INPUT_STREAM(source_object);
        GError *error = NULL;
        gssize bytes_read;

        // Read the output from the stream
        bytes_read = g_input_stream_read_finish(stream, res, &error);

        if (bytes_read == 0) {
                // End of file reached, close the stream
                g_input_stream_close(stream, NULL, NULL);
                g_object_unref(stream);
                notify_movie_progress();
                return;
        }
        if (bytes_read < 0) {
                // Error occurred
                g_print("Error reading subprocess output: %s\n", error->message);
                g_error_free(error);
                g_object_unref(stream);
                return;
        }

        //g_print("Got buffer: %.*s", (int)bytes_read, stdout_buf);
        stdout_buf[bytes_read] = 0;

        {
                char msg[] = "Message: ";
                int l = sizeof(msg);
                if (!strncmp(stdout_buf, msg, l - 1)) {
                        char *c = strchr(stdout_buf, '\n');
                        if (!c)
                                return;
                        *c = 0;
                        notify_movie_message(strdup(stdout_buf + l - 1));
                }
        }

        // Continue reading asynchronously, leaving room for the NUL terminator
        g_input_stream_read_async(stream,
                                  stdout_buf,
                                  sizeof(stdout_buf) - 1,
                                  G_PRIORITY_DEFAULT,
                                  NULL,
                                  on_read_complete,
                                  NULL);
}

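// Movie recording is handed off to the movie.sh helper located by
// mp_process_find_processor(). It is spawned as:
//   movie.sh <start|stop> <burst_dir> <output file> 305 <grw|dng>
// and its stdout is read back asynchronously; lines starting with
// "Message: " are passed on through notify_movie_message().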
static void
spawn_movie(char *cmd)
{
        g_autoptr(GError) error = NULL;
        char *mode;

        switch (state_proc.mode->v4l_pixfmt) {
        case V4L2_PIX_FMT_UYVY:
        case V4L2_PIX_FMT_YUYV:
        case V4L2_PIX_FMT_YVYU:
        case V4L2_PIX_FMT_VYUY:
                mode = "grw";
                break;
        default:
                mode = "dng";
                break;
        }

        GSubprocess *proc = g_subprocess_new(G_SUBPROCESS_FLAGS_STDOUT_PIPE,
                                             &error,
                                             movie_script,
                                             cmd,
                                             burst_dir,
                                             movie_fname,
                                             "305",
                                             mode,
                                             NULL);

        if (!proc) {
                g_printerr("Failed to spawn postprocess process: %s\n",
                           error->message);
                return;
        }

        GInputStream *stdout_stream;
        // Get the stdout stream of the subprocess
        stdout_stream = g_subprocess_get_stdout_pipe(proc);

        // Read the output of the subprocess asynchronously, leaving room for
        // the NUL terminator added in on_read_complete()
        g_input_stream_read_async(stdout_stream,
                                  stdout_buf,
                                  sizeof(stdout_buf) - 1,
                                  G_PRIORITY_DEFAULT,
                                  NULL,
                                  on_read_complete,
                                  NULL);
}

void
on_movie_start(void)
{
        format_movie_name(movie_fname);

        movie_recording = 1;
        printf("movie recording on\n");
        spawn_movie("start");
}

void
on_movie_stop(void)
{
        movie_recording = 0;
        printf("movie recording off\n");
        spawn_movie("stop");
}

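// Write a frame in the simple "GRW" container consumed during movie
// recording: the raw frame bytes are followed by a NUL byte and a text
// trailer of the form
//   "Caps: video/x-raw,format=...,width=...,height=...\nSize: <n>\nGRW".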
static void
save_grw(const uint8_t *image, char *fname)
{
        FILE *outfile;
        if ((outfile = fopen(fname, "wb")) == NULL) {
                g_printerr("grw open %s: error %d, %s\n",
                           fname,
                           errno,
                           strerror(errno));
                return;
        }
        int width = state_proc.mode->width;
        int height = state_proc.mode->height;
        int size = width * height * 2;
        char *format = "YUY2"; // default so the caps string is never uninitialized
        switch (state_proc.mode->v4l_pixfmt) {
        case V4L2_PIX_FMT_YUYV:
                format = "YUY2";
                break;
        default:
                printf("Please fill appropriate translation for YUV.\n");
        }
        fwrite(image, size, 1, outfile);
        char buf[1024];
        buf[0] = 0;
        int header = sprintf(buf + 1,
                             "Caps: video/x-raw,format=%s,width=%d,height=%d\nSize: %d\nGRW",
                             format, width, height, size);
        fwrite(buf, header + 1, 1, outfile);
        fclose(outfile);
}

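// Encode a packed 4:2:2 YUV frame as JPEG (quality 92) by expanding each
// four-byte group into two full YCbCr pixels per scanline; the y1/u/y2/v
// offsets below select the component order for the current FourCC.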
static void
save_jpeg(const uint8_t *image, char *fname)
{
        FILE *outfile;
        if ((outfile = fopen(fname, "wb")) == NULL) {
                g_printerr("jpeg open %s: error %d, %s\n",
                           fname,
                           errno,
                           strerror(errno));
                return;
        }
        int width = state_proc.mode->width;

        struct jpeg_compress_struct cinfo;
        struct jpeg_error_mgr jerr;

        cinfo.err = jpeg_std_error(&jerr);
        jpeg_create_compress(&cinfo);
        jpeg_stdio_dest(&cinfo, outfile);

        cinfo.image_width = state_proc.mode->width & -1;
        cinfo.image_height = state_proc.mode->height & -1;
        cinfo.input_components = 3;
        cinfo.in_color_space = JCS_YCbCr;
        jpeg_set_defaults(&cinfo);
        jpeg_set_quality(&cinfo, 92, TRUE);
        jpeg_start_compress(&cinfo, TRUE);
        uint8_t *row = malloc(width * 3);
        JSAMPROW row_pointer[1];
        row_pointer[0] = row;

        unsigned int y1 = 0;
        unsigned int u = 1;
        unsigned int y2 = 2;
        unsigned int v = 3;

        switch (state_proc.mode->v4l_pixfmt) {
        case V4L2_PIX_FMT_UYVY:
                u = 0;
                y1 = 1;
                v = 2;
                y2 = 3;
                break;
        case V4L2_PIX_FMT_YUYV:
                y1 = 0;
                u = 1;
                y2 = 2;
                v = 3;
                break;
        case V4L2_PIX_FMT_YVYU:
                y1 = 0;
                v = 1;
                y2 = 2;
                u = 3;
                break;
        case V4L2_PIX_FMT_VYUY:
                v = 0;
                y1 = 1;
                u = 2;
                y2 = 3;
                break;
        }

        while (cinfo.next_scanline < cinfo.image_height) {
                unsigned int i, j = 0;
                unsigned int offset = cinfo.next_scanline * cinfo.image_width * 2;
                for (i = 0; i < cinfo.image_width * 2; i += 4) {
                        row[j + 0] = image[offset + i + y1];
                        row[j + 1] = image[offset + i + u];
                        row[j + 2] = image[offset + i + v];
                        row[j + 3] = image[offset + i + y2];
                        row[j + 4] = image[offset + i + u];
                        row[j + 5] = image[offset + i + v];
                        j += 6;
                }
                jpeg_write_scanlines(&cinfo, row_pointer, 1);
        }

        jpeg_finish_compress(&cinfo);
        fclose(outfile);
        jpeg_destroy_compress(&cinfo);
}

static void
process_image_for_capture_yuv(const uint8_t *image, int count)
{
        char fname[255];
        sprintf(fname, "%s/%d.jpeg", burst_dir, count);

        save_jpeg(image, fname);
}

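// Write one raw frame as DNG through libdng: the orientation tag is derived
// from the device rotation combined with sensor mirroring, and the file gets
// make/model, neutral white point and analog balance metadata from the
// current processing state.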
static void
save_dng(const uint8_t *image, char *fname, int count)
{
        uint16_t orientation;
        if (state_proc.device_rotation == 0) {
                orientation = state_proc.mode->mirrored ?
                        LIBDNG_ORIENTATION_TOPRIGHT :
                        LIBDNG_ORIENTATION_TOPLEFT;
        } else if (state_proc.device_rotation == 90) {
                orientation = state_proc.mode->mirrored ?
                        LIBDNG_ORIENTATION_RIGHTBOT :
                        LIBDNG_ORIENTATION_LEFTBOT;
        } else if (state_proc.device_rotation == 180) {
                orientation = state_proc.mode->mirrored ?
                        LIBDNG_ORIENTATION_BOTLEFT :
                        LIBDNG_ORIENTATION_BOTRIGHT;
        } else {
                orientation = state_proc.mode->mirrored ?
                        LIBDNG_ORIENTATION_LEFTTOP :
                        LIBDNG_ORIENTATION_RIGHTTOP;
        }

        libdng_info dng = { 0 };
        libdng_new(&dng);

        libdng_set_datetime_now(&dng);
        libdng_set_mode_from_pixfmt(&dng, state_proc.mode->v4l_pixfmt);
        if (state_proc.configuration->make != NULL &&
            state_proc.configuration->model != NULL) {
                libdng_set_make_model(&dng,
                                      state_proc.configuration->make,
                                      state_proc.configuration->model);
        }
        libdng_set_orientation(&dng, orientation);
        libdng_set_software(&dng, "Megapixels");
        libdng_set_neutral(&dng, state_proc.red, 1.0f, state_proc.blue);
        libdng_set_analog_balance(&dng,
                                  state_proc.balance[0],
                                  state_proc.balance[1],
                                  state_proc.balance[2]);

        if (!state_proc.exposure.manual) {
                libdng_set_exposure_program(&dng, LIBDNG_EXPOSUREPROGRAM_NORMAL);
        } else {
                libdng_set_exposure_program(&dng, LIBDNG_EXPOSUREPROGRAM_MANUAL);
        }

        //printf("Writing frame to %s, %d x %d\n", fname, state_proc.mode->width, state_proc.mode->height);
        libdng_write(&dng,
                     fname,
                     state_proc.mode->width,
                     state_proc.mode->height,
                     image,
                     count);
        libdng_free(&dng);

        /*
        TIFFSetField(tif,
                     EXIFTAG_EXPOSURETIME,
                     (mode.frame_interval.numerator /
                      (float)mode.frame_interval.denominator) /
                             ((float)mode.height / (float)exposure));

        if (pr_camera->iso_min && pr_camera->iso_max) {
                uint16_t isospeed = remap(
                        gain - 1, 0, gain_max, pr_camera->iso_min,
                        pr_camera->iso_max);
                TIFFSetField(tif, EXIFTAG_ISOSPEEDRATINGS, 1, &isospeed);
        }
        if (!pr_camera->has_flash) {
                // No flash function
                TIFFSetField(tif, EXIFTAG_FLASH, 0x20);
        } else if (flash_enabled) {
                // Flash present and fired
                TIFFSetField(tif, EXIFTAG_FLASH, 0x1);
        } else {
                // Flash present but not fired
                TIFFSetField(tif, EXIFTAG_FLASH, 0x0);
        }
        */

        /*
        if (pr_camera->fnumber) {
                TIFFSetField(tif, EXIFTAG_FNUMBER, pr_camera->fnumber);
        }
        if (pr_camera->focallength) {
                TIFFSetField(tif, EXIFTAG_FOCALLENGTH, pr_camera->focallength);
        }
        if (pr_camera->focallength && pr_camera->cropfactor) {
                TIFFSetField(tif,
                             EXIFTAG_FOCALLENGTHIN35MMFILM,
                             (short)(pr_camera->focallength *
                                     pr_camera->cropfactor));
        }
        */
}

static void
process_image_for_capture_bayer(const uint8_t *image, int count)
{
        char fname[255];
        sprintf(fname, "%s/%d.dng", burst_dir, count);

        save_dng(image, fname, count);
}

static void
process_image_for_capture(const uint8_t *image, int count)
{
        switch (state_proc.mode->v4l_pixfmt) {
        case V4L2_PIX_FMT_UYVY:
        case V4L2_PIX_FMT_YUYV:
        case V4L2_PIX_FMT_YVYU:
        case V4L2_PIX_FMT_VYUY:
                process_image_for_capture_yuv(image, count);
                break;
        default:
                process_image_for_capture_bayer(image, count);
                break;
        }
}

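// Called when the postprocessor exits: its collected stdout is scanned
// backwards for the last line, which is expected to be the path of the
// finished image, and that path is reported together with the preview
// thumbnail.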
static void
post_process_finished(GSubprocess *proc, GAsyncResult *res, GdkTexture *thumb)
{
        char *stdout;
        g_subprocess_communicate_utf8_finish(proc, res, &stdout, NULL, NULL);

        // The last line contains the file name
        int end = strlen(stdout);
        // Skip the newline at the end
        stdout[--end] = '\0';

        char *path = stdout + end - 1;
        do {
                if (*path == '\n') {
                        path++;
                        break;
                }
                --path;
        } while (path > stdout);

        mp_main_capture_completed(thumb, path);
}

static void
process_capture_burst(GdkTexture *thumb)
{
        static char capture_fname[255];

        char timestamp[30];
        format_timestamp(timestamp);

        if (g_get_user_special_dir(G_USER_DIRECTORY_PICTURES) != NULL) {
                sprintf(capture_fname,
                        "%s/IMG%s",
                        g_get_user_special_dir(G_USER_DIRECTORY_PICTURES),
                        timestamp);
        } else if (getenv("XDG_PICTURES_DIR") != NULL) {
                sprintf(capture_fname,
                        "%s/IMG%s",
                        getenv("XDG_PICTURES_DIR"),
                        timestamp);
        } else {
                sprintf(capture_fname,
                        "%s/Pictures/IMG%s",
                        getenv("HOME"),
                        timestamp);
        }

        bool save_dng = g_settings_get_boolean(settings, "save-raw");
        char *postprocessor = g_settings_get_string(settings, "postprocessor");

        if (postprocessor == NULL) {
                g_printerr("Postprocessor setting is null\n");
                return;
        }

        char save_dng_s[2] = "0";
        if (save_dng) {
                save_dng_s[0] = '1';
        }

        // Start post-processing the captured burst
        g_print("Post process %s to %s.ext (save-dng %s)\n",
                burst_dir,
                capture_fname,
                save_dng_s);
        g_autoptr(GError) error = NULL;
        GSubprocess *proc = g_subprocess_new(G_SUBPROCESS_FLAGS_STDOUT_PIPE,
                                             &error,
                                             postprocessor,
                                             burst_dir,
                                             capture_fname,
                                             save_dng_s,
                                             NULL);

        if (!proc) {
                g_printerr("Failed to spawn postprocess process: %s\n",
                           error->message);
                return;
        }

        g_subprocess_communicate_utf8_async(
                proc, NULL, NULL, (GAsyncReadyCallback)post_process_finished, thumb);
}

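// Per-frame entry point on the processing thread: copy the frame out of the
// V4L2 buffer and release the buffer back to the IO pipeline, optionally
// append it to an in-progress movie, hand a reference to the zbar pipeline
// for code scanning, render the preview, and save/finish a capture burst
// when one is active.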
static void
process_image(MPPipeline *pipeline, const MPBuffer *buffer)
{
#ifdef PROFILE_PROCESS
        clock_t t1 = clock();
#endif

        size_t size = (libmegapixels_mode_width_to_bytes(state_proc.mode->format,
                                                         state_proc.mode->width) +
                       libmegapixels_mode_width_to_padding(state_proc.mode->format,
                                                           state_proc.mode->width)) *
                      state_proc.mode->height;
        uint8_t *image = malloc(size);
        memcpy(image, buffer->data, size);
        mp_io_pipeline_release_buffer(buffer->index);

        if (movie_recording) {
                char name[1024];

                switch (state_proc.mode->v4l_pixfmt) {
                case V4L2_PIX_FMT_UYVY:
                case V4L2_PIX_FMT_YUYV:
                case V4L2_PIX_FMT_YVYU:
                case V4L2_PIX_FMT_VYUY:
                        get_name(name, burst_dir, "grw");
                        save_grw(image, name);
                        break;
                default:
                        get_name(name, burst_dir, "dng");
                        save_dng(image, name, 1);
                        break;
                }
        }

        MPZBarImage *zbar_image = mp_zbar_image_new(image,
                                                    state_proc.mode->format,
                                                    state_proc.mode->width,
                                                    state_proc.mode->height,
                                                    state_proc.device_rotation,
                                                    state_proc.mode->mirrored);
        mp_zbar_pipeline_process_image(mp_zbar_image_ref(zbar_image));

#ifdef PROFILE_PROCESS
        clock_t t2 = clock();
#endif

        GdkTexture *thumb = process_image_for_preview(image);

        if (state_proc.captures_remaining > 0) {
                --state_proc.captures_remaining;

                process_image_for_capture(image, state_proc.counter++);

                if (state_proc.captures_remaining == 0) {
                        assert(thumb);
                        process_capture_burst(thumb);
                } else {
                        assert(!thumb);
                }
        } else {
                assert(!thumb);
        }

        mp_zbar_image_unref(zbar_image);

        ++frames_processed;
        if (state_proc.captures_remaining == 0) {
                is_capturing = false;
        }

#ifdef PROFILE_PROCESS
        clock_t t3 = clock();
        printf("process_image %fms, step 1:%fms, step 2:%fms\n",
               (float)(t3 - t1) / CLOCKS_PER_SEC * 1000,
               (float)(t2 - t1) / CLOCKS_PER_SEC * 1000,
               (float)(t3 - t2) / CLOCKS_PER_SEC * 1000);
#endif
}

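// Frames arriving from the IO pipeline are dropped while the previous frame
// is still being processed, except during a capture so that bursts do not
// lose frames.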
void
mp_process_pipeline_process_image(MPBuffer buffer)
{
#ifdef DEBUG_FPS
        static clock_t last, now;
        static int last_n, now_n;
        now_n++;
        now = clock();
        if (now - last > CLOCKS_PER_SEC * 10) {
                printf("period %fms -- %d -- %f fps\n",
                       (float)(now - last) / CLOCKS_PER_SEC * 1000,
                       now_n - last_n,
                       ((float)now_n - last_n) / ((now - last) / CLOCKS_PER_SEC));
                last = now;
                last_n = now_n;
        }
#endif

        // If we haven't processed the previous frame yet, drop this one
        if (frames_received != frames_processed && !is_capturing) {
                printf("Dropping frame\n");
                mp_io_pipeline_release_buffer(buffer.index);
                return;
        }

        ++frames_received;

        mp_pipeline_invoke(pipeline,
                           (MPPipelineCallback)process_image,
                           &buffer,
                           sizeof(MPBuffer));
}

static void
capture()
{
        setup_capture();

        state_proc.captures_remaining = state_proc.burst_length;
        state_proc.counter = 0;
}

void
mp_process_pipeline_capture()
{
        is_capturing = true;

        mp_pipeline_invoke(pipeline, capture, NULL, 0);
}

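// The preview buffers are half the sensor mode in each dimension (the GPU
// debayer presumably emits one RGBA pixel per 2x2 Bayer block), swapped when
// the sensor is mounted at 90/270 degrees. On a format change the GLES2
// debayer program is rebuilt as well.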
static void
on_output_changed(bool format_changed)
{
        output_buffer_width = state_proc.mode->width / 2;
        output_buffer_height = state_proc.mode->height / 2;

        if (state_proc.mode->rotation != 0 && state_proc.mode->rotation != 180) {
                int tmp = output_buffer_width;
                output_buffer_width = output_buffer_height;
                output_buffer_height = tmp;
        }

        for (size_t i = 0; i < NUM_BUFFERS; ++i) {
                glBindTexture(GL_TEXTURE_2D, output_buffers[i].texture_id);
                glTexImage2D(GL_TEXTURE_2D,
                             0,
                             GL_RGBA,
                             output_buffer_width,
                             output_buffer_height,
                             0,
                             GL_RGBA,
                             GL_UNSIGNED_BYTE,
                             NULL);
        }

        glBindTexture(GL_TEXTURE_2D, 0);

        // Create new gles2_debayer on format change
        if (format_changed) {
                if (gles2_debayer)
                        gles2_debayer_free(gles2_debayer);

                gles2_debayer = gles2_debayer_new(state_proc.mode->format);
                check_gl();

                gles2_debayer_use(gles2_debayer);
        }

        state_proc.blacklevel = 0.0f;
        state_proc.red = 1.0f;
        state_proc.blue = 1.0f;
        gles2_debayer_configure(gles2_debayer,
                                output_buffer_width,
                                output_buffer_height,
                                state_proc.mode->width,
                                state_proc.mode->height,
                                state_proc.mode->rotation,
                                0,
                                state_proc.calibration);
}

static int
mod(int a, int b)
{
        int r = a % b;
        return r < 0 ? r + b : r;
}

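// Snapshot of the UI/IO state for the processing thread: copies the control
// state, detects camera/mode/pixel-format changes to reload calibration and
// reconfigure the debayer, and pushes a condensed mp_state_main back to the
// main thread.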
static void
update_state(MPPipeline *pipeline, const mp_state_proc *new_state)
{
        bool camera_changed = state_proc.camera != new_state->camera;
        state_proc.configuration = new_state->configuration;
        state_proc.camera = new_state->camera;

        state_proc.gain.control = new_state->gain.control;
        state_proc.gain.auto_control = new_state->gain.auto_control;
        state_proc.gain.value = new_state->gain.value;
        state_proc.gain.max = new_state->gain.max;
        state_proc.gain.manual = new_state->gain.manual;

        state_proc.exposure.control = new_state->exposure.control;
        state_proc.exposure.auto_control = new_state->exposure.auto_control;
        state_proc.exposure.value = new_state->exposure.value;
        state_proc.exposure.max = new_state->exposure.max;
        state_proc.exposure.manual = new_state->exposure.manual;

        state_proc.focus.control = new_state->focus.control;
        state_proc.focus.auto_control = new_state->focus.auto_control;
        state_proc.focus.value = new_state->focus.value;
        state_proc.focus.max = new_state->focus.max;
        state_proc.focus.manual = new_state->focus.manual;

        const bool output_changed =
                !libmegapixels_mode_equals(state_proc.mode,
                                           new_state->camera->current_mode) ||
                state_proc.preview_width != new_state->preview_width ||
                state_proc.preview_height != new_state->preview_height ||
                state_proc.device_rotation != new_state->device_rotation;

        bool format_changed = state_proc.mode == NULL;

        if (!format_changed && state_proc.mode->v4l_pixfmt !=
                                       new_state->camera->current_mode->v4l_pixfmt) {
                format_changed = true;
        }

        state_proc.mode = new_state->camera->current_mode;

        state_proc.preview_width = new_state->preview_width;
        state_proc.preview_height = new_state->preview_height;

        state_proc.device_rotation = new_state->device_rotation;
        state_proc.burst_length = new_state->burst_length;

        state_proc.balance[0] = new_state->balance[0];
        state_proc.balance[1] = new_state->balance[1];
        state_proc.balance[2] = new_state->balance[2];

        if (output_changed) {
                state_proc.camera_rotation = mod(
                        state_proc.mode->rotation - state_proc.device_rotation, 360);

                on_output_changed(format_changed);
        }

        if (camera_changed) {
                char cf[PATH_MAX];
                if (find_calibration(cf, state_proc.camera->name)) {
                        state_proc.calibration = parse_calibration_file(cf);
                } else {
                        printf("No calibration for %s\n", state_proc.camera->name);
                }
        }

        mp_state_main new_main = {
                .camera = pr_camera,
                .has_auto_focus_continuous = false,
                .has_auto_focus_start = false,
                .preview_buffer_width = output_buffer_width,
                .preview_buffer_height = output_buffer_height,
                .control_flash = false,

                .gain.control = state_proc.gain.control,
                .gain.auto_control = state_proc.gain.auto_control,
                .gain.value = state_proc.gain.value,
                .gain.value_req = state_proc.gain.value_req,
                .gain.max = state_proc.gain.max,
                .gain.manual = state_proc.gain.manual,

                .exposure.control = state_proc.exposure.control,
                .exposure.auto_control = state_proc.exposure.auto_control,
                .exposure.value = state_proc.exposure.value,
                .exposure.value_req = state_proc.exposure.value_req,
                .exposure.max = state_proc.exposure.max,
                .exposure.manual = state_proc.exposure.manual,

                .focus.control = state_proc.focus.control,
                .focus.auto_control = state_proc.focus.auto_control,
                .focus.value = state_proc.focus.value,
                .focus.max = state_proc.focus.max,
                .focus.manual = state_proc.focus.manual,

                .stats.exposure = state_proc.stats.exposure,
                .stats.temp = state_proc.stats.temp,
                .stats.tint = state_proc.stats.tint,
                .stats.focus = state_proc.stats.focus,
        };
        mp_main_update_state(&new_main);
}

void
mp_process_pipeline_update_state(const mp_state_proc *new_state)
{
        mp_pipeline_invoke(pipeline,
                           (MPPipelineCallback)update_state,
                           new_state,
                           sizeof(mp_state_proc));
}

// GTK4 seems to require this
void
pango_fc_font_get_languages()
{
}