When the program starts, obs_init_video() is called; after opening the video output it creates the obs_video_thread thread:

static int obs_init_video(struct obs_video_info *ovi)
{
	struct obs_core_video *video = &obs->video;
	struct video_output_info vi;
	int errorcode;

	make_video_info(&vi, ovi);
	video->base_width     = ovi->base_width;
	video->base_height    = ovi->base_height;
	video->output_width   = ovi->output_width;
	video->output_height  = ovi->output_height;
	video->gpu_conversion = ovi->gpu_conversion;
	video->scale_type     = ovi->scale_type;

	set_video_matrix(video, ovi);

	errorcode = video_output_open(&video->video, &vi);
	if (errorcode != VIDEO_OUTPUT_SUCCESS) {
		if (errorcode == VIDEO_OUTPUT_INVALIDPARAM) {
			blog(LOG_ERROR, "Invalid video parameters specified");
			return OBS_VIDEO_INVALID_PARAM;
		} else {
			blog(LOG_ERROR, "Could not open video output");
		}
		return OBS_VIDEO_FAIL;
	}

	gs_enter_context(video->graphics);

	if (ovi->gpu_conversion && !obs_init_gpu_conversion(ovi))
		return OBS_VIDEO_FAIL;
	if (!obs_init_textures(ovi))
		return OBS_VIDEO_FAIL;

	gs_leave_context();

	errorcode = pthread_create(&video->video_thread, NULL,
			obs_video_thread, obs);
	if (errorcode != 0)
		return OBS_VIDEO_FAIL;

	video->thread_initialized = true;
	return OBS_VIDEO_SUCCESS;
}
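Applications do not call obs_init_video() directly; it runs inside obs_reset_video(). As a hypothetical illustration (the resolutions, frame rate and format below are arbitrary, and obs_startup() is assumed to have already succeeded), a caller fills an obs_video_info and passes it in:

#include <obs.h>

/* Example settings only; any valid combination works. */
struct obs_video_info ovi = {0};
ovi.graphics_module = "libobs-opengl";   /* or "libobs-d3d11" on Windows */
ovi.fps_num         = 30;                /* 30/1 = 30 fps */
ovi.fps_den         = 1;
ovi.base_width      = 1920;              /* canvas (render) size */
ovi.base_height     = 1080;
ovi.output_width    = 1280;              /* scaled output size */
ovi.output_height   = 720;
ovi.output_format   = VIDEO_FORMAT_NV12;
ovi.colorspace      = VIDEO_CS_601;
ovi.range           = VIDEO_RANGE_PARTIAL;
ovi.gpu_conversion  = true;
ovi.scale_type      = OBS_SCALE_BICUBIC;
ovi.adapter         = 0;

if (obs_reset_video(&ovi) != OBS_VIDEO_SUCCESS)
	blog(LOG_ERROR, "obs_reset_video failed");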
The obs_video_thread thread does the per-frame work: it ticks the sources (data capture), renders, and stores the resulting frames into a buffer:

void *obs_video_thread(void *param)
{
	uint64_t last_time = 0;
	uint64_t interval = video_output_get_frame_time(obs->video.video);
	uint64_t fps_total_ns = 0;
	uint32_t fps_total_frames = 0;

	obs->video.video_time = os_gettime_ns();

	os_set_thread_name("libobs: graphics thread");

	const char *video_thread_name =
		profile_store_name(obs_get_profiler_name_store(),
			"obs_video_thread(%g"NBSP"ms)", interval / 1000000.);
	profile_register_root(video_thread_name, interval);

	while (!video_output_stopped(obs->video.video)) {
		profile_start(video_thread_name);

		profile_start(tick_sources_name);
		last_time = tick_sources(obs->video.video_time, last_time);
		profile_end(tick_sources_name);

		profile_start(render_displays_name);
		render_displays();
		profile_end(render_displays_name);

		profile_start(output_frame_name);
		output_frame();
		profile_end(output_frame_name);

		profile_end(video_thread_name);

		profile_reenable_thread();

		video_sleep(&obs->video, &obs->video.video_time, interval);

		fps_total_ns += (obs->video.video_time - last_time);
		fps_total_frames++;

		if (fps_total_ns >= 1000000000ULL) {
			obs->video.video_fps = (double)fps_total_frames /
				((double)fps_total_ns / 1000000000.0);
			fps_total_ns = 0;
			fps_total_frames = 0;
		}
	}

	UNUSED_PARAMETER(param);
	return NULL;
}
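The interval used by the loop comes from video_output_get_frame_time() and is derived from the configured frame rate (fps_num/fps_den). Roughly, and not verbatim libobs code:

/* Sketch of how the per-frame interval (in nanoseconds) follows from the
 * video_output_info frame rate. At 30/1 fps this is 33,333,333 ns (~33.3 ms),
 * at 60000/1001 fps about 16,683,333 ns. */
static uint64_t frame_time_ns(uint32_t fps_num, uint32_t fps_den)
{
	return (uint64_t)(1000000000.0 * (double)fps_den / (double)fps_num);
}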
tick_sources iterates over every source that has been added and calls obs_source_video_tick on each of them:

static uint64_t tick_sources(uint64_t cur_time, uint64_t last_time)
{
	struct obs_core_data *data = &obs->data;
	struct obs_source *source;
	uint64_t delta_time;
	float seconds;

	if (!last_time)
		last_time = cur_time -
			video_output_get_frame_time(obs->video.video);

	delta_time = cur_time - last_time;
	seconds = (float)((double)delta_time / 1000000000.0);

	pthread_mutex_lock(&data->sources_mutex);

	/* call the tick function of each source */
	source = data->first_source;
	while (source) {
		obs_source_video_tick(source, seconds);
		source = (struct obs_source*)source->context.next;
	}

	pthread_mutex_unlock(&data->sources_mutex);

	return cur_time;
}
obs_source_video_tick eventually calls the source's video_tick function pointer:

void obs_source_video_tick(obs_source_t *source, float seconds)
{
	bool now_showing, now_active;

	if (!obs_source_valid(source, "obs_source_video_tick"))
		return;

	if (source->info.type == OBS_SOURCE_TYPE_TRANSITION)
		obs_transition_tick(source);

	if ((source->info.output_flags & OBS_SOURCE_ASYNC) != 0) {
		uint64_t sys_time = obs->video.video_time;

		pthread_mutex_lock(&source->async_mutex);
		if (deinterlacing_enabled(source)) {
			deinterlace_process_last_frame(source, sys_time);
		} else {
			if (source->cur_async_frame) {
				remove_async_frame(source,
						source->cur_async_frame);
				source->cur_async_frame = NULL;
			}

			source->cur_async_frame = get_closest_frame(source,
					sys_time);
		}

		source->last_sys_timestamp = sys_time;
		pthread_mutex_unlock(&source->async_mutex);
	}

	if (source->defer_update)
		obs_source_deferred_update(source);

	/* reset the filter render texture information once every frame */
	if (source->filter_texrender)
		gs_texrender_reset(source->filter_texrender);

	/* call show/hide if the reference changed */
	now_showing = !!source->show_refs;
	if (now_showing != source->showing) {
		if (now_showing) {
			show_source(source);
		} else {
			hide_source(source);
		}

		source->showing = now_showing;
	}

	/* call activate/deactivate if the reference changed */
	now_active = !!source->activate_refs;
	if (now_active != source->active) {
		if (now_active) {
			activate_source(source);
		} else {
			deactivate_source(source);
		}

		source->active = now_active;
	}

	if (source->context.data && source->info.video_tick)
		source->info.video_tick(source->context.data, seconds);

	source->async_rendered = false;
	source->deinterlace_rendered = false;
}
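Window capture is a synchronous source, but the OBS_SOURCE_ASYNC branch above deserves a note: asynchronous sources (cameras, media playback) push frames from their own threads with obs_source_output_video(), and the tick then picks the queued frame closest to the compositor clock via get_closest_frame(). A hypothetical push might look like this (the NV12 layout, 1280x720 size and plane pointers are placeholders for illustration only):

/* Hypothetical async-source capture thread pushing one frame. */
struct obs_source_frame out = {0};
out.format      = VIDEO_FORMAT_NV12;     /* assumed format */
out.width       = 1280;
out.height      = 720;
out.timestamp   = os_gettime_ns();
out.data[0]     = y_plane;               /* hypothetical Y plane pointer */
out.data[1]     = uv_plane;              /* hypothetical interleaved UV plane */
out.linesize[0] = 1280;
out.linesize[1] = 1280;

obs_source_output_video(source, &out);   /* queued; consumed by the tick above */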
video_tick is initialized in each plugin's obs_source_info struct. For example, window-capture (window capture):

struct obs_source_info window_capture_info = {
	.id             = "window_capture",
	.type           = OBS_SOURCE_TYPE_INPUT,
	.output_flags   = OBS_SOURCE_VIDEO | OBS_SOURCE_CUSTOM_DRAW,
	.get_name       = wc_getname,
	.create         = wc_create,
	.destroy        = wc_destroy,
	.update         = wc_update,
	.video_render   = wc_render,
	.video_tick     = wc_tick,
	.get_width      = wc_width,
	.get_height     = wc_height,
	.get_defaults   = wc_defaults,
	.get_properties = wc_properties
};
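For reference, a plugin makes this struct known to libobs from its obs_module_load entry point; a sketch of the usual pattern (the real win-capture module registers several sources):

OBS_DECLARE_MODULE()

bool obs_module_load(void)
{
	obs_register_source(&window_capture_info);
	return true;
}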
wc_tick performs the actual capture and stores the result in the source's private data (source->context.data); a rough sketch of it follows. Once data has been captured, it gets rendered.

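The upstream wc_tick is longer (it periodically re-acquires the target window, handles the cursor, etc.), but its core is roughly the following sketch; dc_capture_capture() does the actual GDI capture into a texture:

/* Simplified sketch of wc_tick; window re-acquisition, cursor handling and
 * error paths are omitted (see plugins/win-capture/window-capture.c for the
 * real code). wc->window is the target HWND. */
static void wc_tick(void *data, float seconds)
{
	struct window_capture *wc = data;

	if (!wc->window || !IsWindow(wc->window)) {
		/* the real code searches for the target window again here */
		return;
	}

	obs_enter_graphics();
	dc_capture_capture(&wc->capture, wc->window); /* capture into texture */
	obs_leave_graphics();

	UNUSED_PARAMETER(seconds);
}

After the sources have been ticked, the graphics thread renders every display: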
static inline void render_displays(void)
{
	struct obs_display *display;

	if (!obs->data.valid)
		return;

	gs_enter_context(obs->video.graphics);

	/* render extra displays/swaps */
	pthread_mutex_lock(&obs->data.displays_mutex);

	display = obs->data.first_display;
	while (display) {
		render_display(display);
		display = display->next;
	}

	pthread_mutex_unlock(&obs->data.displays_mutex);

	gs_leave_context();
}

void render_display(struct obs_display *display)
{
	if (!display || !display->enabled) return;

	render_display_begin(display);

	pthread_mutex_lock(&display->draw_callbacks_mutex);

	for (size_t i = 0; i < display->draw_callbacks.num; i++) {
		struct draw_callback *callback;
		callback = display->draw_callbacks.array + i;

		callback->draw(callback->param, display->cx, display->cy);
	}

	pthread_mutex_unlock(&display->draw_callbacks_mutex);

	render_display_end();
}
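The draw callbacks iterated in render_display are registered with obs_display_add_draw_callback(). A hypothetical registration looks like this (sketch; obs_render_main_view draws the channels set with obs_set_output_source, and `display` is assumed to have been created earlier with obs_display_create):

/* Hypothetical draw callback that renders the main view into a display. */
static void draw_main_preview(void *param, uint32_t cx, uint32_t cy)
{
	obs_render_main_view();

	UNUSED_PARAMETER(param);
	UNUSED_PARAMETER(cx);
	UNUSED_PARAMETER(cy);
}

obs_display_add_draw_callback(display, draw_main_preview, NULL);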
In the OBS Studio UI, callback->draw corresponds to RenderMain, which ultimately renders the main view through obs_view_render:

void obs_view_render(obs_view_t *view)
{
	if (!view) return;

	pthread_mutex_lock(&view->channels_mutex);

	for (size_t i = 0; i < MAX_CHANNELS; i++) {
		struct obs_source *source;

		source = view->channels[i];

		if (source) {
			if (source->removed) {
				obs_source_release(source);
				view->channels[i] = NULL;
			} else {
				obs_source_video_render(source);
			}
		}
	}

	pthread_mutex_unlock(&view->channels_mutex);
}
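obs_source_video_render itself is only a thin wrapper around an internal dispatcher; roughly (a sketch based on an older libobs revision, details may differ between versions):

void obs_source_video_render(obs_source_t *source)
{
	if (!obs_source_valid(source, "obs_source_video_render"))
		return;

	obs_source_addref(source);
	render_video(source); /* dispatches: filters / async video / main render */
	obs_source_release(source);
}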
For a source with no filters attached, that dispatch ends up in:

static inline void obs_source_main_render(obs_source_t *source)
{
	uint32_t flags      = source->info.output_flags;
	bool custom_draw    = (flags & OBS_SOURCE_CUSTOM_DRAW) != 0;
	bool default_effect = !source->filter_parent &&
	                      source->filters.num == 0 &&
	                      !custom_draw;

	if (default_effect)
		obs_source_default_render(source);
	else if (source->context.data)
		source->info.video_render(source->context.data,
				custom_draw ? NULL : gs_get_effect());
}
video_render is likewise initialized in the plugin's obs_source_info struct, so for window capture this ends up in wc_render; from there the captured data is handed to OpenGL or D3D for processing and display:

static void wc_render(void *data, gs_effect_t *effect)
{
	struct window_capture *wc = data;
	dc_capture_render(&wc->capture, obs_get_base_effect(OBS_EFFECT_OPAQUE));

	UNUSED_PARAMETER(effect);
}
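dc_capture_render ultimately draws the captured texture with the graphics subsystem. The typical pattern for drawing a texture with an effect looks roughly like this (a generic sketch, not the exact dc-capture code; `tex` is assumed to be the captured gs_texture_t and a graphics context must be current):

gs_effect_t *opaque = obs_get_base_effect(OBS_EFFECT_OPAQUE);
gs_eparam_t *image  = gs_effect_get_param_by_name(opaque, "image");

gs_effect_set_texture(image, tex);

/* run the "Draw" technique and blit the texture as a sprite */
while (gs_effect_loop(opaque, "Draw"))
	gs_draw_sprite(tex, 0, gs_texture_get_width(tex),
			gs_texture_get_height(tex));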
The obs_video_thread loop then calls output_frame() to read back the frame that OpenGL/D3D has rendered and hand it off to the video output buffer:

static inline void output_frame(void)
{
	struct obs_core_video *video = &obs->video;
	int cur_texture  = video->cur_texture;
	int prev_texture = cur_texture == 0 ? NUM_TEXTURES-1 : cur_texture-1;
	struct video_data frame;
	bool frame_ready;

	memset(&frame, 0, sizeof(struct video_data));

	profile_start(output_frame_gs_context_name);
	gs_enter_context(video->graphics);

	profile_start(output_frame_render_video_name);
	render_video(video, cur_texture, prev_texture);
	profile_end(output_frame_render_video_name);

	profile_start(output_frame_download_frame_name);
	/* obtain the pointer to the downloaded frame's data buffer */
	frame_ready = download_frame(video, prev_texture, &frame);
	profile_end(output_frame_download_frame_name);

	profile_start(output_frame_gs_flush_name);
	gs_flush();
	profile_end(output_frame_gs_flush_name);

	gs_leave_context();
	profile_end(output_frame_gs_context_name);

	if (frame_ready) {
		struct obs_vframe_info vframe_info;
		circlebuf_pop_front(&video->vframe_info_buffer, &vframe_info,
				sizeof(vframe_info));

		frame.timestamp = vframe_info.timestamp;
		profile_start(output_frame_output_video_data_name);
		output_video_data(video, &frame, vframe_info.count);
		profile_end(output_frame_output_video_data_name);
	}

	if (++video->cur_texture == NUM_TEXTURES)
		video->cur_texture = 0;
}
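output_video_data(), called above when a frame is ready, copies the downloaded frame into the video output's cache. Roughly (a sketch following the libobs implementation; helper names and details vary by version):

static inline void output_video_data(struct obs_core_video *video,
		struct video_data *input_frame, int count)
{
	const struct video_output_info *info;
	struct video_frame output_frame;
	bool locked;

	info = video_output_get_info(video->video);

	/* reserve a slot in the cache (video_output_lock_frame, shown below) */
	locked = video_output_lock_frame(video->video, &output_frame, count,
			input_frame->timestamp);
	if (locked) {
		if (video->gpu_conversion)
			set_gpu_converted_data(video, &output_frame,
					input_frame, info);
		else
			copy_rgbx_frame(&output_frame, input_frame, info);

		/* posts update_semaphore, waking the encoding-side thread */
		video_output_unlock_frame(video->video);
	}
}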
The data finally ends up in the video output's frame cache:

bool video_output_lock_frame(video_t *video, struct video_frame *frame,
		int count, uint64_t timestamp)
{
	struct cached_frame_info *cfi;
	bool locked;

	if (!video) return false;

	pthread_mutex_lock(&video->data_mutex);

	if (video->available_frames == 0) {
		video->skipped_frames += count;
		video->cache[video->last_added].count += count;
		locked = false;
	} else {
		if (video->available_frames != video->info.cache_size) {
			if (++video->last_added == video->info.cache_size)
				video->last_added = 0;
		}

		cfi = &video->cache[video->last_added];
		cfi->frame.timestamp = timestamp;
		cfi->count = count;

		memcpy(frame, &cfi->frame, sizeof(*frame));

		locked = true;
	}

	pthread_mutex_unlock(&video->data_mutex);

	return locked;
}
static inline void video_sleep(struct obs_core_video *video,
		uint64_t *p_time, uint64_t interval_ns)
{
	struct obs_vframe_info vframe_info;
	uint64_t cur_time = *p_time;
	uint64_t t = cur_time + interval_ns;
	int count;

	if (os_sleepto_ns(t)) {
		*p_time = t;
		count = 1;
	} else {
		count = (int)((os_gettime_ns() - cur_time) / interval_ns);
		*p_time = cur_time + interval_ns * count;
	}

	video->total_frames += count;
	video->lagged_frames += count - 1;

	vframe_info.timestamp = cur_time;
	vframe_info.count = count;
	circlebuf_push_back(&video->vframe_info_buffer, &vframe_info,
			sizeof(vframe_info));
}
On the encoding side, the video_thread thread pulls frames from that cache, delivers them for encoding, and the resulting packets are placed into a buffer:

static void *video_thread(void *param)
{
	struct video_output *video = param;

	os_set_thread_name("video-io: video thread");

	const char *video_thread_name =
		profile_store_name(obs_get_profiler_name_store(),
			"video_thread(%s)", video->info.name);

	while (os_sem_wait(video->update_semaphore) == 0) {
		if (video->stop)
			break;

		profile_start(video_thread_name);
		while (!video->stop && !video_output_cur_frame(video)) {
			video->total_frames++;
		}

		video->total_frames++;
		profile_end(video_thread_name);

		profile_reenable_thread();
	}

	return NULL;
}
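The update_semaphore this thread waits on is posted each time the graphics thread unlocks a cached frame; video_output_unlock_frame is roughly (sketch, details vary by version):

void video_output_unlock_frame(video_t *video)
{
	if (!video) return;

	pthread_mutex_lock(&video->data_mutex);

	video->available_frames--;
	os_sem_post(video->update_semaphore); /* wake the video-io thread */

	pthread_mutex_unlock(&video->data_mutex);
}

Once woken, the thread pops the oldest cached frame and hands it to every registered input: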
static inline bool video_output_cur_frame(struct video_output *video)
{
	struct cached_frame_info *frame_info;
	bool complete;

	/* -------------------------------- */

	pthread_mutex_lock(&video->data_mutex);

	frame_info = &video->cache[video->first_added];

	pthread_mutex_unlock(&video->data_mutex);

	/* -------------------------------- */

	pthread_mutex_lock(&video->input_mutex);

	for (size_t i = 0; i < video->inputs.num; i++) {
		struct video_input *input = video->inputs.array + i;
		struct video_data frame = frame_info->frame;

		if (scale_video_output(input, &frame))
			input->callback(input->param, &frame);
	}

	pthread_mutex_unlock(&video->input_mutex);

	/* -------------------------------- */

	pthread_mutex_lock(&video->data_mutex);

	frame_info->frame.timestamp += video->frame_time;
	complete = --frame_info->count == 0;

	if (complete) {
		if (++video->first_added == video->info.cache_size)
			video->first_added = 0;

		if (++video->available_frames == video->info.cache_size)
			video->last_added = video->first_added;
	}

	pthread_mutex_unlock(&video->data_mutex);

	/* -------------------------------- */

	return complete;
}
input->callback corresponds to receive_video.

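That pairing is established when the encoder starts consuming raw video: the encoder registers itself as an input on the video output via video_output_connect. A sketch of the registration (in libobs this happens in obs-encoder.c when the encoder is started; `conversion` is assumed to be a filled struct video_scale_info, or NULL for the output's native format):

video_output_connect(encoder->media, /* the video_t the encoder reads from   */
		&conversion,          /* desired format/size for scaled frames */
		receive_video,        /* called once per frame by video_output_cur_frame */
		encoder);             /* becomes input->param                  */

receive_video then wraps each raw frame and passes it to do_encode: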
static void receive_video(void *param, struct video_data *frame)
{
	profile_start(receive_video_name);

	struct obs_encoder *encoder = param;
	struct obs_encoder *pair    = encoder->paired_encoder;
	struct encoder_frame enc_frame;

	if (!encoder->first_received && pair) {
		if (!pair->first_received ||
		    pair->first_raw_ts > frame->timestamp) {
			goto wait_for_audio;
		}
	}

	memset(&enc_frame, 0, sizeof(struct encoder_frame));

	for (size_t i = 0; i < MAX_AV_PLANES; i++) {
		enc_frame.data[i]     = frame->data[i];
		enc_frame.linesize[i] = frame->linesize[i];
	}

	if (!encoder->start_ts)
		encoder->start_ts = frame->timestamp;

	enc_frame.frames = 1;
	enc_frame.pts    = encoder->cur_pts;

	do_encode(encoder, &enc_frame);

	encoder->cur_pts += encoder->timebase_num;

wait_for_audio:
	profile_end(receive_video_name);
}
The encoded packets finally land in DARRAY(struct encoder_packet) interleaved_packets, kept sorted by dts_usec:

static inline void insert_interleaved_packet(struct obs_output *output,
		struct encoder_packet *out)
{
	size_t idx;
	for (idx = 0; idx < output->interleaved_packets.num; idx++) {
		struct encoder_packet *cur_packet;
		cur_packet = output->interleaved_packets.array + idx;

		if (out->dts_usec < cur_packet->dts_usec)
			break;
	}

	da_insert(output->interleaved_packets, idx, out);
}
Finally, the send thread picks up the buffered packets and sends them out. I have not fully traced this part yet and will come back to it later.
