#include "io_pipeline.h" #include "camera.h" #include "flash.h" #include "pipeline.h" #include "process_pipeline.h" #include "state.h" #include #include #include #include #include #include #include #include mp_state_io state_io; MPCamera *mpcamera = NULL; static MPPipeline *pipeline; static GSource *capture_source; static void setup(MPPipeline *pipeline, const void *data) { return; } void mp_io_pipeline_start() { mp_process_pipeline_start(); pipeline = mp_pipeline_new(); mp_pipeline_invoke(pipeline, setup, NULL, 0); } void mp_io_pipeline_stop() { if (capture_source) { g_source_destroy(capture_source); } mp_pipeline_free(pipeline); mp_process_pipeline_stop(); } /* * Update state from IO -> Process */ static void update_process_pipeline() { // Grab the latest control values if (!state_io.gain.manual && state_io.gain.control) { state_io.gain.value = mp_camera_control_get_int32( state_io.camera, state_io.gain.control); } if (!state_io.exposure.manual && state_io.exposure.control) { state_io.exposure.value = mp_camera_control_get_int32( state_io.camera, state_io.exposure.control); } float balance_red = 1.0f; float balance_blue = 1.0f; if (state_io.red.control && state_io.blue.control) { int red = mp_camera_control_get_int32(state_io.camera, state_io.red.control); int blue = mp_camera_control_get_int32(state_io.camera, state_io.blue.control); balance_red = (float)red / (float)state_io.red.max; balance_blue = (float)blue / (float)state_io.blue.max; } struct mp_process_pipeline_state pipeline_state = { .camera = state_io.camera, .burst_length = state_io.burst_length, .preview_width = state_io.preview_width, .preview_height = state_io.preview_height, .device_rotation = state_io.device_rotation, .gain_is_manual = state_io.gain.manual, .gain = state_io.gain.value, .gain_max = state_io.gain.max, .balance_red = balance_red, .balance_blue = balance_blue, .exposure_is_manual = state_io.exposure.manual, .exposure = state_io.exposure.value, .has_auto_focus_continuous = state_io.focus.control != 0, .has_auto_focus_start = state_io.can_af_trigger, .flash_enabled = state_io.flash_enabled, .control_gain = state_io.gain.control != 0, .control_exposure = state_io.exposure.control != 0, .control_focus = state_io.focus.control != 0, .control_flash = true, }; mp_process_pipeline_update_state(&pipeline_state); } static void focus(MPPipeline *pipeline, const void *data) { state_io.trigger_af = true; } void mp_io_pipeline_focus() { mp_pipeline_invoke(pipeline, focus, NULL, 0); } static void capture(MPPipeline *pipeline, const void *data) { float gain_norm; // Disable the autogain/exposure while taking the burst mp_camera_control_set_int32(state_io.camera, V4L2_CID_AUTOGAIN, 0); mp_camera_control_set_int32( state_io.camera, V4L2_CID_EXPOSURE_AUTO, V4L2_EXPOSURE_MANUAL); // Get current gain to calculate a burst length; // with low gain there's 3, with the max automatic gain of the ov5640 // the value seems to be 248 which creates a 5 frame burst // for manual gain you can go up to 11 frames state_io.gain.value = mp_camera_control_get_int32(state_io.camera, V4L2_CID_GAIN); gain_norm = (float)state_io.gain.value / (float)state_io.gain.max; state_io.burst_length = (int)fmax(sqrtf(gain_norm) * 10, 2) + 1; state_io.burst_length = MAX(1, state_io.burst_length); state_io.captures_remaining = state_io.burst_length; // Change camera mode for capturing mp_process_pipeline_sync(); mp_camera_stop_capture(mpcamera); struct v4l2_format format = { 0 }; libmegapixels_select_mode(state_io.camera, state_io.mode_capture, &format); 
        state_io.flush_pipeline = true;
        mp_camera_start_capture(mpcamera);

        // Enable flash
        /* TODO: implement
        if (info->flash && flash_enabled) {
                mp_flash_enable(info->flash);
        }
        */

        update_process_pipeline();

        mp_process_pipeline_capture();
}

void
mp_io_pipeline_capture()
{
        mp_pipeline_invoke(pipeline, capture, NULL, 0);
}

static void
release_buffer(MPPipeline *pipeline, const uint32_t *buffer_index)
{
        mp_camera_release_buffer(mpcamera, *buffer_index);
}

void
mp_io_pipeline_release_buffer(uint32_t buffer_index)
{
        mp_pipeline_invoke(pipeline,
                           (MPPipelineCallback)release_buffer,
                           &buffer_index,
                           sizeof(uint32_t));
}

static pid_t focus_continuous_task = 0;
static pid_t start_focus_task = 0;

static void
start_focus()
{
        // Only run one focus action at a time
        if (!mp_camera_check_task_complete(mpcamera, start_focus_task) ||
            !mp_camera_check_task_complete(mpcamera, focus_continuous_task))
                return;

        if (state_io.focus.control) {
                focus_continuous_task = mp_camera_control_set_bool_bg(
                        state_io.camera, state_io.focus.control, 1);
        } else if (state_io.can_af_trigger) {
                start_focus_task = mp_camera_control_set_bool_bg(
                        state_io.camera, V4L2_CID_AUTO_FOCUS_START, 1);
        }
}

static void
update_controls()
{
        // Don't update controls while capturing
        if (state_io.captures_remaining > 0) {
                return;
        }

        if (state_io.trigger_af) {
                state_io.trigger_af = false;
                start_focus();
        }

        if (state_io.gain.manual != state_io.gain.manual_req) {
                mp_camera_control_set_bool_bg(state_io.camera,
                                              V4L2_CID_AUTOGAIN,
                                              !state_io.gain.manual_req);
                state_io.gain.manual = state_io.gain.manual_req;
        }

        if (state_io.gain.manual &&
            state_io.gain.value != state_io.gain.value_req) {
                mp_camera_control_set_int32_bg(state_io.camera,
                                               state_io.gain.control,
                                               state_io.gain.value_req);
                state_io.gain.value = state_io.gain.value_req;
        }

        if (state_io.exposure.manual != state_io.exposure.manual_req) {
                mp_camera_control_set_bool_bg(state_io.camera,
                                              V4L2_CID_EXPOSURE_AUTO,
                                              state_io.exposure.manual_req ?
                                                      V4L2_EXPOSURE_MANUAL :
                                                      V4L2_EXPOSURE_AUTO);
                state_io.exposure.manual = state_io.exposure.manual_req;
        }

        if (state_io.exposure.manual &&
            state_io.exposure.value != state_io.exposure.value_req) {
                mp_camera_control_set_int32_bg(state_io.camera,
                                               state_io.exposure.control,
                                               state_io.exposure.value_req);
                state_io.exposure.value = state_io.exposure.value_req;
        }
}

static void
on_frame(MPBuffer buffer, void *_data)
{
        // Only update controls right after a frame was captured
        update_controls();

        // When the mode is switched while capturing we get a couple of blank
        // frames, presumably from buffers made ready during the switch.
        // Ignore these.
        if (state_io.flush_pipeline) {
                if (state_io.blank_frame_count < 20) {
                        // Only check a 10x10 area
                        size_t test_size =
                                MIN(10, state_io.camera->current_mode->width) *
                                MIN(10, state_io.camera->current_mode->height);

                        bool image_is_blank = true;
                        for (size_t i = 0; i < test_size; ++i) {
                                if (buffer.data[i] != 0) {
                                        image_is_blank = false;
                                        break;
                                }
                        }

                        if (image_is_blank) {
                                ++state_io.blank_frame_count;
                                return;
                        }
                } else {
                        printf("Blank image limit reached, resulting capture may be blank\n");
                }

                state_io.flush_pipeline = false;
                state_io.blank_frame_count = 0;
        }

        // Send the image off for processing
        mp_process_pipeline_process_image(buffer);

        if (state_io.captures_remaining > 0) {
                --state_io.captures_remaining;

                if (state_io.captures_remaining == 0) {
                        // Restore the auto exposure and gain if needed
                        if (!state_io.exposure.manual) {
                                mp_camera_control_set_int32_bg(
                                        state_io.camera,
                                        V4L2_CID_EXPOSURE_AUTO,
                                        V4L2_EXPOSURE_AUTO);
                        }

                        if (!state_io.gain.manual) {
                                mp_camera_control_set_bool_bg(
                                        state_io.camera, V4L2_CID_AUTOGAIN, true);
                        }

                        // Go back to preview mode
                        mp_process_pipeline_sync();
                        mp_camera_stop_capture(mpcamera);

                        struct v4l2_format format = { 0 };
                        libmegapixels_select_mode(
                                state_io.camera, state_io.mode_preview, &format);
                        state_io.flush_pipeline = true;
                        mp_camera_start_capture(mpcamera);

                        // Disable flash
                        /* TODO: implement
                        if (info->flash && flash_enabled) {
                                mp_flash_disable(info->flash);
                        }
                        */

                        update_process_pipeline();
                }
        }
}

static void
init_controls()
{
        if (mp_camera_query_control(
                    state_io.camera, V4L2_CID_FOCUS_ABSOLUTE, NULL)) {
                // TODO: Set focus state
                state_io.focus.control = V4L2_CID_FOCUS_ABSOLUTE;
        } else {
                state_io.focus.control = 0;
        }

        if (mp_camera_query_control(state_io.camera, V4L2_CID_FOCUS_AUTO, NULL)) {
                mp_camera_control_set_bool_bg(
                        state_io.camera, V4L2_CID_FOCUS_AUTO, true);
        }

        state_io.can_af_trigger = mp_camera_query_control(
                state_io.camera, V4L2_CID_AUTO_FOCUS_START, NULL);

        MPControl control;
        if (mp_camera_query_control(state_io.camera, V4L2_CID_GAIN, &control)) {
                state_io.gain.control = V4L2_CID_GAIN;
                state_io.gain.max = control.max;
        } else if (mp_camera_query_control(
                           state_io.camera, V4L2_CID_ANALOGUE_GAIN, &control)) {
                state_io.gain.control = V4L2_CID_ANALOGUE_GAIN;
                state_io.gain.max = control.max;
        } else {
                state_io.gain.max = 0;
                state_io.gain.control = 0;
        }

        if (state_io.gain.control) {
                state_io.gain.value = mp_camera_control_get_int32(
                        state_io.camera, state_io.gain.control);
        } else {
                state_io.gain.value = 0;
        }
        state_io.gain.manual =
                mp_camera_control_get_bool(state_io.camera, V4L2_CID_AUTOGAIN) == 0;

        state_io.exposure.value =
                mp_camera_control_get_int32(state_io.camera, V4L2_CID_EXPOSURE);
        state_io.exposure.manual =
                mp_camera_control_get_int32(state_io.camera,
                                            V4L2_CID_EXPOSURE_AUTO) ==
                V4L2_EXPOSURE_MANUAL;

        if (mp_camera_query_control(
                    state_io.camera, V4L2_CID_RED_BALANCE, &control)) {
                state_io.red.control = V4L2_CID_RED_BALANCE;
                state_io.red.max = control.max;
        } else {
                state_io.red.control = 0;
        }

        if (mp_camera_query_control(
                    state_io.camera, V4L2_CID_BLUE_BALANCE, &control)) {
                state_io.blue.control = V4L2_CID_BLUE_BALANCE;
                state_io.blue.max = control.max;
        } else {
                state_io.blue.control = 0;
        }
}

/*
 * State transfer from Main -> IO
 */
static void
update_state(MPPipeline *pipeline, const struct mp_io_pipeline_state *state)
{
        if (state_io.camera != state->camera) {
                if (state_io.camera != NULL) {
                        mp_process_pipeline_sync();
                        mp_camera_stop_capture(mpcamera);
                        libmegapixels_close(state_io.camera);
                }

                if (capture_source) {
                        g_source_destroy(capture_source);
                        capture_source = NULL;
                }
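                // Switch to the newly selected camera: open it, choose preview
                // and capture modes, start streaming and re-read its controls.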
                state_io.camera = state->camera;

                if (state_io.camera) {
                        libmegapixels_open(state_io.camera);
                        mpcamera = mp_camera_new(state_io.camera);

                        // Pick a preview mode: prefer modes that can run at
                        // 30fps or faster and whose resolution is closest to
                        // the requested preview size.
                        state_io.mode_preview = NULL;
                        state_io.mode_capture = NULL;
                        float score = 0;
                        int area_preview =
                                state_io.preview_width * state_io.preview_height;
                        for (int m = 0; m < state_io.camera->num_modes; m++) {
                                float mscore = 0;
                                if (state_io.camera->modes[m]->rate > 29) {
                                        mscore += 1;
                                }
                                int mode_area = state_io.camera->modes[m]->width *
                                                state_io.camera->modes[m]->height;
                                // Cast before dividing so the size difference
                                // contributes fractionally instead of being
                                // truncated by integer division.
                                mscore += 1.0f -
                                          (float)ABS(mode_area - area_preview) /
                                                  (float)area_preview;
                                if (mscore > score) {
                                        state_io.mode_preview =
                                                state_io.camera->modes[m];
                                        score = mscore;
                                }
                        }

                        // The capture mode is the mode with the most pixels
                        long area = 0;
                        for (int m = 0; m < state_io.camera->num_modes; m++) {
                                long this_pixels =
                                        state_io.camera->modes[m]->width *
                                        state_io.camera->modes[m]->height;
                                if (this_pixels > area) {
                                        area = this_pixels;
                                        state_io.mode_capture =
                                                state_io.camera->modes[m];
                                }
                        }

                        if (state_io.mode_preview == NULL &&
                            state_io.mode_capture != NULL) {
                                // If no fast preview mode is available, make do
                                // with slow modes.
                                state_io.mode_preview = state_io.mode_capture;
                        }

                        if (state_io.mode_preview != NULL) {
                                if (state_io.camera->video_fd == 0) {
                                        libmegapixels_open(state_io.camera);
                                }
                                struct v4l2_format format = { 0 };
                                libmegapixels_select_mode(state_io.camera,
                                                          state_io.mode_preview,
                                                          &format);
                        }

                        mp_camera_start_capture(mpcamera);
                        capture_source = mp_pipeline_add_capture_source(
                                pipeline, mpcamera, on_frame, NULL);

                        init_controls();
                }
        }

        state_io.burst_length = state->burst_length;
        state_io.preview_width = state->preview_width;
        state_io.preview_height = state->preview_height;
        state_io.device_rotation = state->device_rotation;

        if (state_io.camera) {
                state_io.gain.manual_req = state->gain_is_manual;
                state_io.gain.value_req = state->gain;
                state_io.exposure.manual_req = state->exposure_is_manual;
                state_io.exposure.value_req = state->exposure;
                state_io.flash_enabled = state->flash_enabled;
        }

        update_process_pipeline();
}

void
mp_io_pipeline_update_state(const struct mp_io_pipeline_state *state)
{
        mp_pipeline_invoke(pipeline,
                           (MPPipelineCallback)update_state,
                           state,
                           sizeof(struct mp_io_pipeline_state));
}
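
/*
 * Illustrative sketch of how a frontend thread is expected to drive this IO
 * pipeline. The mp_io_pipeline_state fields are the ones read by
 * update_state() above; "find_camera" is a hypothetical helper that returns a
 * libmegapixels camera, and the numeric values are placeholders.
 *
 *      mp_io_pipeline_start();
 *
 *      struct mp_io_pipeline_state state = {
 *              .camera = find_camera(),        // hypothetical helper
 *              .burst_length = 3,
 *              .preview_width = 720,
 *              .preview_height = 1280,
 *              .device_rotation = 0,
 *              .gain_is_manual = false,
 *              .exposure_is_manual = false,
 *              .flash_enabled = false,
 *      };
 *      mp_io_pipeline_update_state(&state);    // runs update_state() on the IO thread
 *
 *      mp_io_pipeline_focus();                 // request an autofocus run
 *      mp_io_pipeline_capture();               // take a burst
 *
 *      mp_io_pipeline_stop();
 */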