forked from luck/tmp_suning_uos_patched
OMAPDSS: DSI: improve wait_for_bit_change
We sometimes get a timeout when disabling the DSI interface with video mode. It looks like the disable will stall until the current frame has been finished, and this can take multiple milliseconds. wait_for_bit_change() currently uses a busyloop to wait for a bit to change. This is used in multiple places. The problem is, we don't have a clear understanding of how long particular operations can take, so the function needs to support longer waits. Improve wait_for_bit_change() to first busy-loop for 100 iterations to see if the bit changes almost right away. If the bit hasn't changed, move to a loop with a sleep of 1 ms, and poll for up to 500 ms. Signed-off-by: Tomi Valkeinen <tomi.valkeinen@ti.com>
This commit is contained in:
parent
522a0c2fe5
commit
3b98409eb7
@ -418,14 +418,29 @@ static void dsi_completion_handler(void *data, u32 mask)
|
||||
static inline int wait_for_bit_change(struct platform_device *dsidev,
|
||||
const struct dsi_reg idx, int bitnum, int value)
|
||||
{
|
||||
int t = 100000;
|
||||
unsigned long timeout;
|
||||
ktime_t wait;
|
||||
int t;
|
||||
|
||||
while (REG_GET(dsidev, idx, bitnum, bitnum) != value) {
|
||||
if (--t == 0)
|
||||
return !value;
|
||||
/* first busyloop to see if the bit changes right away */
|
||||
t = 100;
|
||||
while (t-- > 0) {
|
||||
if (REG_GET(dsidev, idx, bitnum, bitnum) == value)
|
||||
return value;
|
||||
}
|
||||
|
||||
/* then loop for 500ms, sleeping for 1ms in between */
|
||||
timeout = jiffies + msecs_to_jiffies(500);
|
||||
while (time_before(jiffies, timeout)) {
|
||||
if (REG_GET(dsidev, idx, bitnum, bitnum) == value)
|
||||
return value;
|
||||
|
||||
wait = ns_to_ktime(1000 * 1000);
|
||||
set_current_state(TASK_UNINTERRUPTIBLE);
|
||||
schedule_hrtimeout(&wait, HRTIMER_MODE_REL);
|
||||
}
|
||||
|
||||
return !value;
|
||||
}
|
||||
|
||||
u8 dsi_get_pixel_size(enum omap_dss_dsi_pixel_format fmt)
|
||||
|
Loading…
Reference in New Issue
Block a user