| 1 | // SPDX-License-Identifier: MIT |
| 2 | /* |
| 3 | * Copyright © 2024 Intel Corporation |
| 4 | */ |
| 5 | |
| 6 | #include <linux/workqueue.h> |
| 7 | |
| 8 | #include "intel_display_core.h" |
| 9 | #include "intel_display_types.h" |
| 10 | #include "intel_encoder.h" |
| 11 | #include "intel_hotplug.h" |
| 12 | |
| 13 | static void intel_encoder_link_check_work_fn(struct work_struct *work) |
| 14 | { |
| 15 | struct intel_encoder *encoder = |
| 16 | container_of(work, typeof(*encoder), link_check_work.work); |
| 17 | |
| 18 | encoder->link_check(encoder); |
| 19 | } |
| 20 | |
| 21 | void intel_encoder_link_check_init(struct intel_encoder *encoder, |
| 22 | void (*callback)(struct intel_encoder *encoder)) |
| 23 | { |
| 24 | INIT_DELAYED_WORK(&encoder->link_check_work, intel_encoder_link_check_work_fn); |
| 25 | encoder->link_check = callback; |
| 26 | } |
| 27 | |
| 28 | void intel_encoder_link_check_flush_work(struct intel_encoder *encoder) |
| 29 | { |
| 30 | cancel_delayed_work_sync(dwork: &encoder->link_check_work); |
| 31 | } |
| 32 | |
| 33 | void intel_encoder_link_check_queue_work(struct intel_encoder *encoder, int delay_ms) |
| 34 | { |
| 35 | struct intel_display *display = to_intel_display(encoder); |
| 36 | |
| 37 | mod_delayed_work(wq: display->wq.unordered, |
| 38 | dwork: &encoder->link_check_work, delay: msecs_to_jiffies(m: delay_ms)); |
| 39 | } |
| 40 | |
/*
 * Re-enable hotplug detection on every encoder of @display; counterpart to
 * intel_encoder_block_all_hpds(). No-op when the device has no display.
 */
void intel_encoder_unblock_all_hpds(struct intel_display *display)
{
	struct intel_encoder *encoder;

	if (!HAS_DISPLAY(display))
		return;

	for_each_intel_encoder(display->drm, encoder)
		intel_hpd_unblock(encoder);
}
| 51 | |
/*
 * Block hotplug detection on every encoder of @display; undone by
 * intel_encoder_unblock_all_hpds(). No-op when the device has no display.
 */
void intel_encoder_block_all_hpds(struct intel_display *display)
{
	struct intel_encoder *encoder;

	if (!HAS_DISPLAY(display))
		return;

	for_each_intel_encoder(display->drm, encoder)
		intel_hpd_block(encoder);
}
| 62 | |
| 63 | void intel_encoder_suspend_all(struct intel_display *display) |
| 64 | { |
| 65 | struct intel_encoder *encoder; |
| 66 | |
| 67 | if (!HAS_DISPLAY(display)) |
| 68 | return; |
| 69 | |
| 70 | /* |
| 71 | * TODO: check and remove holding the modeset locks if none of |
| 72 | * the encoders depends on this. |
| 73 | */ |
| 74 | drm_modeset_lock_all(dev: display->drm); |
| 75 | for_each_intel_encoder(display->drm, encoder) |
| 76 | if (encoder->suspend) |
| 77 | encoder->suspend(encoder); |
| 78 | drm_modeset_unlock_all(dev: display->drm); |
| 79 | |
| 80 | for_each_intel_encoder(display->drm, encoder) |
| 81 | if (encoder->suspend_complete) |
| 82 | encoder->suspend_complete(encoder); |
| 83 | } |
| 84 | |
| 85 | void intel_encoder_shutdown_all(struct intel_display *display) |
| 86 | { |
| 87 | struct intel_encoder *encoder; |
| 88 | |
| 89 | if (!HAS_DISPLAY(display)) |
| 90 | return; |
| 91 | |
| 92 | /* |
| 93 | * TODO: check and remove holding the modeset locks if none of |
| 94 | * the encoders depends on this. |
| 95 | */ |
| 96 | drm_modeset_lock_all(dev: display->drm); |
| 97 | for_each_intel_encoder(display->drm, encoder) |
| 98 | if (encoder->shutdown) |
| 99 | encoder->shutdown(encoder); |
| 100 | drm_modeset_unlock_all(dev: display->drm); |
| 101 | |
| 102 | for_each_intel_encoder(display->drm, encoder) |
| 103 | if (encoder->shutdown_complete) |
| 104 | encoder->shutdown_complete(encoder); |
| 105 | } |
| 106 | |
| 107 | struct intel_digital_port *intel_dig_port_alloc(void) |
| 108 | { |
| 109 | struct intel_digital_port *dig_port; |
| 110 | |
| 111 | dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL); |
| 112 | if (!dig_port) |
| 113 | return NULL; |
| 114 | |
| 115 | dig_port->hdmi.hdmi_reg = INVALID_MMIO_REG; |
| 116 | dig_port->dp.output_reg = INVALID_MMIO_REG; |
| 117 | dig_port->aux_ch = AUX_CH_NONE; |
| 118 | dig_port->max_lanes = 4; |
| 119 | |
| 120 | mutex_init(&dig_port->hdcp.mutex); |
| 121 | |
| 122 | return dig_port; |
| 123 | } |
| 124 | |