@@ -244,18 +244,18 @@ struct lpuart_port {
 	struct dma_chan		*dma_rx_chan;
 	struct dma_async_tx_descriptor	*dma_tx_desc;
 	struct dma_async_tx_descriptor	*dma_rx_desc;
-	dma_addr_t		dma_tx_buf_bus;
 	dma_cookie_t		dma_tx_cookie;
 	dma_cookie_t		dma_rx_cookie;
-	unsigned char		*dma_tx_buf_virt;
 	unsigned int		dma_tx_bytes;
 	unsigned int		dma_rx_bytes;
-	int			dma_tx_in_progress;
+	bool			dma_tx_in_progress;
 	unsigned int		dma_rx_timeout;
 	struct timer_list	lpuart_timer;
-	struct scatterlist	rx_sgl;
+	struct scatterlist	rx_sgl, tx_sgl[2];
 	struct circ_buf		rx_ring;
 	int			rx_dma_rng_buf_len;
+	unsigned int		dma_tx_nents;
+	wait_queue_head_t	dma_wait;
 };

 static const struct of_device_id lpuart_dt_ids[] = {
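Note on the new fields above: tx_sgl[2] and dma_tx_nents exist because the TX ring (a struct circ_buf of UART_XMIT_SIZE bytes, a power of two) can wrap, so the pending data may occupy one or two contiguous chunks. The sketch below is a minimal userspace analogue of the split that lpuart_dma_tx() performs further down with sg_init_one()/sg_set_buf(); the buffer size and names are illustrative only, not driver code.

#include <stdio.h>

#define XMIT_SIZE 16	/* stand-in for UART_XMIT_SIZE; must be a power of two */

struct seg { unsigned int off, len; };

/* Split the pending bytes of a head/tail ring into at most two segments,
 * mirroring the xmit->tail < xmit->head test in lpuart_dma_tx(). */
static int ring_to_segs(unsigned int head, unsigned int tail, struct seg seg[2])
{
	unsigned int pending = (head - tail) & (XMIT_SIZE - 1);

	if (!pending)
		return 0;
	if (tail < head) {		/* contiguous: one segment */
		seg[0].off = tail;
		seg[0].len = pending;
		return 1;
	}
	seg[0].off = tail;		/* wrapped: tail..end, then 0..head */
	seg[0].len = XMIT_SIZE - tail;
	seg[1].off = 0;
	seg[1].len = head;
	return 2;
}

int main(void)
{
	struct seg s[2];
	int n = ring_to_segs(3, 12, s);	/* head has wrapped past the end */

	for (int i = 0; i < n; i++)
		printf("segment %d: offset %u, length %u\n", i, s[i].off, s[i].len);
	return 0;
}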
@@ -316,103 +316,118 @@ static void lpuart32_stop_rx(struct uart_port *port)
 	lpuart32_write(temp & ~UARTCTRL_RE, port->membase + UARTCTRL);
 }

-static void lpuart_pio_tx(struct lpuart_port *sport)
+static void lpuart_dma_tx(struct lpuart_port *sport)
 {
 	struct circ_buf *xmit = &sport->port.state->xmit;
-	unsigned long flags;
-
-	spin_lock_irqsave(&sport->port.lock, flags);
+	struct scatterlist *sgl = sport->tx_sgl;
+	struct device *dev = sport->port.dev;
+	int ret;

-	while (!uart_circ_empty(xmit) &&
-		readb(sport->port.membase + UARTTCFIFO) < sport->txfifo_size) {
-		writeb(xmit->buf[xmit->tail], sport->port.membase + UARTDR);
-		xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1);
-		sport->port.icount.tx++;
-	}
+	if (sport->dma_tx_in_progress)
+		return;

-	if (uart_circ_chars_pending(xmit) < WAKEUP_CHARS)
-		uart_write_wakeup(&sport->port);
+	sport->dma_tx_bytes = uart_circ_chars_pending(xmit);

-	if (uart_circ_empty(xmit))
-		writeb(readb(sport->port.membase + UARTCR5) | UARTCR5_TDMAS,
-			sport->port.membase + UARTCR5);
+	if (xmit->tail < xmit->head) {
+		sport->dma_tx_nents = 1;
+		sg_init_one(sgl, xmit->buf + xmit->tail, sport->dma_tx_bytes);
+	} else {
+		sport->dma_tx_nents = 2;
+		sg_init_table(sgl, 2);
+		sg_set_buf(sgl, xmit->buf + xmit->tail,
+				UART_XMIT_SIZE - xmit->tail);
+		sg_set_buf(sgl + 1, xmit->buf, xmit->head);
+	}

-	spin_unlock_irqrestore(&sport->port.lock, flags);
-}
+	ret = dma_map_sg(dev, sgl, sport->dma_tx_nents, DMA_TO_DEVICE);
+	if (!ret) {
+		dev_err(dev, "DMA mapping error for TX.\n");
+		return;
+	}

-static int lpuart_dma_tx(struct lpuart_port *sport, unsigned long count)
-{
-	struct circ_buf *xmit = &sport->port.state->xmit;
-	dma_addr_t tx_bus_addr;
-
-	dma_sync_single_for_device(sport->port.dev, sport->dma_tx_buf_bus,
-				UART_XMIT_SIZE, DMA_TO_DEVICE);
-	sport->dma_tx_bytes = count & ~(sport->txfifo_size - 1);
-	tx_bus_addr = sport->dma_tx_buf_bus + xmit->tail;
-	sport->dma_tx_desc = dmaengine_prep_slave_single(sport->dma_tx_chan,
-					tx_bus_addr, sport->dma_tx_bytes,
+	sport->dma_tx_desc = dmaengine_prep_slave_sg(sport->dma_tx_chan, sgl,
+					sport->dma_tx_nents,
 					DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
-
 	if (!sport->dma_tx_desc) {
-		dev_err(sport->port.dev, "Not able to get desc for tx\n");
-		return -EIO;
+		dma_unmap_sg(dev, sgl, sport->dma_tx_nents, DMA_TO_DEVICE);
+		dev_err(dev, "Cannot prepare TX slave DMA!\n");
+		return;
 	}

 	sport->dma_tx_desc->callback = lpuart_dma_tx_complete;
 	sport->dma_tx_desc->callback_param = sport;
-	sport->dma_tx_in_progress = 1;
+	sport->dma_tx_in_progress = true;
 	sport->dma_tx_cookie = dmaengine_submit(sport->dma_tx_desc);
 	dma_async_issue_pending(sport->dma_tx_chan);

-	return 0;
-}
-
-static void lpuart_prepare_tx(struct lpuart_port *sport)
-{
-	struct circ_buf *xmit = &sport->port.state->xmit;
-	unsigned long count = CIRC_CNT_TO_END(xmit->head,
-					xmit->tail, UART_XMIT_SIZE);
-
-	if (!count)
-		return;
-
-	if (count < sport->txfifo_size)
-		writeb(readb(sport->port.membase + UARTCR5) & ~UARTCR5_TDMAS,
-			sport->port.membase + UARTCR5);
-	else {
-		writeb(readb(sport->port.membase + UARTCR5) | UARTCR5_TDMAS,
-			sport->port.membase + UARTCR5);
-		lpuart_dma_tx(sport, count);
-	}
 }

 static void lpuart_dma_tx_complete(void *arg)
 {
 	struct lpuart_port *sport = arg;
+	struct scatterlist *sgl = &sport->tx_sgl[0];
 	struct circ_buf *xmit = &sport->port.state->xmit;
 	unsigned long flags;

-	async_tx_ack(sport->dma_tx_desc);
-
 	spin_lock_irqsave(&sport->port.lock, flags);

+	dma_unmap_sg(sport->port.dev, sgl, sport->dma_tx_nents, DMA_TO_DEVICE);
+
 	xmit->tail = (xmit->tail + sport->dma_tx_bytes) & (UART_XMIT_SIZE - 1);
-	sport->dma_tx_in_progress = 0;
+
+	sport->port.icount.tx += sport->dma_tx_bytes;
+	sport->dma_tx_in_progress = false;
+	spin_unlock_irqrestore(&sport->port.lock, flags);

 	if (uart_circ_chars_pending(xmit) < WAKEUP_CHARS)
 		uart_write_wakeup(&sport->port);

-	lpuart_prepare_tx(sport);
+	if (waitqueue_active(&sport->dma_wait)) {
+		wake_up(&sport->dma_wait);
+		return;
+	}
+
+	spin_lock_irqsave(&sport->port.lock, flags);
+
+	if (!uart_circ_empty(xmit) && !uart_tx_stopped(&sport->port))
+		lpuart_dma_tx(sport);

 	spin_unlock_irqrestore(&sport->port.lock, flags);
 }

+static int lpuart_dma_tx_request(struct uart_port *port)
+{
+	struct lpuart_port *sport = container_of(port,
+				struct lpuart_port, port);
+	struct dma_slave_config dma_tx_sconfig = {};
+	int ret;
+
+	dma_tx_sconfig.dst_addr = sport->port.mapbase + UARTDR;
+	dma_tx_sconfig.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
+	dma_tx_sconfig.dst_maxburst = 1;
+	dma_tx_sconfig.direction = DMA_MEM_TO_DEV;
+	ret = dmaengine_slave_config(sport->dma_tx_chan, &dma_tx_sconfig);
+
+	if (ret) {
+		dev_err(sport->port.dev,
+			"DMA slave config failed, err = %d\n", ret);
+		return ret;
+	}
+
+	return 0;
+}
+
 static void lpuart_flush_buffer(struct uart_port *port)
 {
 	struct lpuart_port *sport = container_of(port, struct lpuart_port, port);
+
 	if (sport->lpuart_dma_tx_use) {
+		if (sport->dma_tx_in_progress) {
+			dma_unmap_sg(sport->port.dev, &sport->tx_sgl[0],
+				sport->dma_tx_nents, DMA_TO_DEVICE);
+			sport->dma_tx_in_progress = false;
+		}
 		dmaengine_terminate_all(sport->dma_tx_chan);
-		sport->dma_tx_in_progress = 0;
 	}
 }

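Note on the hunk above: lpuart_dma_tx_complete() unmaps the scatterlist, retires the sent bytes, clears dma_tx_in_progress and, if a waiter is sleeping on dma_wait (the shutdown path later in this diff), wakes it instead of chaining the next transfer. The snippet below is a rough userspace analogue of that handshake, with a pthread condition variable standing in for the wait queue; all names are illustrative, not kernel code. Build with -lpthread.

#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>
#include <unistd.h>

static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
static pthread_cond_t dma_wait = PTHREAD_COND_INITIALIZER;	/* stands in for wait_queue_head_t */
static bool dma_tx_in_progress = true;

/* Analogue of lpuart_dma_tx_complete(): mark the transfer done and wake waiters. */
static void *completion_cb(void *arg)
{
	sleep(1);				/* pretend the DMA transfer takes a while */
	pthread_mutex_lock(&lock);
	dma_tx_in_progress = false;
	pthread_cond_broadcast(&dma_wait);	/* like wake_up(&sport->dma_wait) */
	pthread_mutex_unlock(&lock);
	return NULL;
}

int main(void)
{
	pthread_t cb;

	pthread_create(&cb, NULL, completion_cb, NULL);

	/* Analogue of the shutdown path: sleep until the in-flight TX has finished,
	 * like wait_event_interruptible(sport->dma_wait, !sport->dma_tx_in_progress). */
	pthread_mutex_lock(&lock);
	while (dma_tx_in_progress)
		pthread_cond_wait(&dma_wait, &lock);
	pthread_mutex_unlock(&lock);

	printf("TX DMA drained, safe to shut down\n");
	pthread_join(cb, NULL);
	return 0;
}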
@@ -469,8 +484,8 @@ static void lpuart_start_tx(struct uart_port *port)
 	writeb(temp | UARTCR2_TIE, port->membase + UARTCR2);

 	if (sport->lpuart_dma_tx_use) {
-		if (!uart_circ_empty(xmit) && !sport->dma_tx_in_progress)
-			lpuart_prepare_tx(sport);
+		if (!uart_circ_empty(xmit) && !uart_tx_stopped(port))
+			lpuart_dma_tx(sport);
 	} else {
 		if (readb(port->membase + UARTSR1) & UARTSR1_TDRE)
 			lpuart_transmit_buffer(sport);
@@ -489,6 +504,29 @@ static void lpuart32_start_tx(struct uart_port *port)
 		lpuart32_transmit_buffer(sport);
 }

+/* return TIOCSER_TEMT when transmitter is not busy */
+static unsigned int lpuart_tx_empty(struct uart_port *port)
+{
+	struct lpuart_port *sport = container_of(port,
+			struct lpuart_port, port);
+	unsigned char sr1 = readb(port->membase + UARTSR1);
+	unsigned char sfifo = readb(port->membase + UARTSFIFO);
+
+	if (sport->dma_tx_in_progress)
+		return 0;
+
+	if (sr1 & UARTSR1_TC && sfifo & UARTSFIFO_TXEMPT)
+		return TIOCSER_TEMT;
+
+	return 0;
+}
+
+static unsigned int lpuart32_tx_empty(struct uart_port *port)
+{
+	return (lpuart32_read(port->membase + UARTSTAT) & UARTSTAT_TC) ?
+		TIOCSER_TEMT : 0;
+}
+
 static irqreturn_t lpuart_txint(int irq, void *dev_id)
 {
 	struct lpuart_port *sport = dev_id;
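Note on lpuart_tx_empty() above: reporting 0 while a TX DMA is still in flight matters to user space, because the serial core answers the TIOCSERGETLSR ioctl from the tx_empty() hook. A minimal way to observe this from an application is sketched below; the /dev/ttyLP0 node name is an assumption for illustration.

#include <fcntl.h>
#include <stdio.h>
#include <sys/ioctl.h>		/* pulls in TIOCSERGETLSR, TIOCSER_TEMT on Linux */
#include <unistd.h>

int main(void)
{
	unsigned int lsr = 0;
	/* assumed LPUART device node; adjust for the target board */
	int fd = open("/dev/ttyLP0", O_RDWR | O_NOCTTY | O_NONBLOCK);

	if (fd < 0) {
		perror("open");
		return 1;
	}

	/* the serial core fills this from port->ops->tx_empty() */
	if (ioctl(fd, TIOCSERGETLSR, &lsr) == 0)
		printf("transmitter %s\n",
		       (lsr & TIOCSER_TEMT) ? "empty" : "busy");
	else
		perror("TIOCSERGETLSR");

	close(fd);
	return 0;
}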
@@ -662,12 +700,8 @@ static irqreturn_t lpuart_int(int irq, void *dev_id)
 	if (sts & UARTSR1_RDRF)
 		lpuart_rxint(irq, dev_id);

-	if (sts & UARTSR1_TDRE) {
-		if (sport->lpuart_dma_tx_use)
-			lpuart_pio_tx(sport);
-		else
-			lpuart_txint(irq, dev_id);
-	}
+	if (sts & UARTSR1_TDRE)
+		lpuart_txint(irq, dev_id);

 	return IRQ_HANDLED;
 }
@@ -692,29 +726,6 @@ static irqreturn_t lpuart32_int(int irq, void *dev_id)
 	return IRQ_HANDLED;
 }

-/* return TIOCSER_TEMT when transmitter is not busy */
-static unsigned int lpuart_tx_empty(struct uart_port *port)
-{
-	struct lpuart_port *sport = container_of(port,
-			struct lpuart_port, port);
-	unsigned char sr1 = readb(port->membase + UARTSR1);
-	unsigned char sfifo = readb(port->membase + UARTSFIFO);
-
-	if (sport->dma_tx_in_progress)
-		return 0;
-
-	if (sr1 & UARTSR1_TC && sfifo & UARTSFIFO_TXEMPT)
-		return TIOCSER_TEMT;
-
-	return 0;
-}
-
-static unsigned int lpuart32_tx_empty(struct uart_port *port)
-{
-	return (lpuart32_read(port->membase + UARTSTAT) & UARTSTAT_TC) ?
-		TIOCSER_TEMT : 0;
-}
-
 static void lpuart_copy_rx_to_tty(struct lpuart_port *sport)
 {
 	struct tty_port *port = &sport->port.state->port;
@@ -890,18 +901,6 @@ static inline int lpuart_start_rx_dma(struct lpuart_port *sport)
 	return 0;
 }

-static void lpuart_dma_tx_free(struct uart_port *port)
-{
-	struct lpuart_port *sport = container_of(port,
-				struct lpuart_port, port);
-
-	dma_unmap_single(sport->port.dev, sport->dma_tx_buf_bus,
-			UART_XMIT_SIZE, DMA_TO_DEVICE);
-
-	sport->dma_tx_buf_bus = 0;
-	sport->dma_tx_buf_virt = NULL;
-}
-
 static void lpuart_dma_rx_free(struct uart_port *port)
 {
 	struct lpuart_port *sport = container_of(port,
@@ -1061,44 +1060,6 @@ static void lpuart32_setup_watermark(struct lpuart_port *sport)
 	lpuart32_write(ctrl_saved, sport->port.membase + UARTCTRL);
 }

-static int lpuart_dma_tx_request(struct uart_port *port)
-{
-	struct lpuart_port *sport = container_of(port,
-				struct lpuart_port, port);
-	struct dma_slave_config dma_tx_sconfig;
-	dma_addr_t dma_bus;
-	unsigned char *dma_buf;
-	int ret;
-
-	dma_bus = dma_map_single(sport->dma_tx_chan->device->dev,
-				sport->port.state->xmit.buf,
-				UART_XMIT_SIZE, DMA_TO_DEVICE);
-
-	if (dma_mapping_error(sport->dma_tx_chan->device->dev, dma_bus)) {
-		dev_err(sport->port.dev, "dma_map_single tx failed\n");
-		return -ENOMEM;
-	}
-
-	dma_buf = sport->port.state->xmit.buf;
-	dma_tx_sconfig.dst_addr = sport->port.mapbase + UARTDR;
-	dma_tx_sconfig.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
-	dma_tx_sconfig.dst_maxburst = sport->txfifo_size;
-	dma_tx_sconfig.direction = DMA_MEM_TO_DEV;
-	ret = dmaengine_slave_config(sport->dma_tx_chan, &dma_tx_sconfig);
-
-	if (ret < 0) {
-		dev_err(sport->port.dev,
-			"Dma slave config failed, err = %d\n", ret);
-		return ret;
-	}
-
-	sport->dma_tx_buf_virt = dma_buf;
-	sport->dma_tx_buf_bus = dma_bus;
-	sport->dma_tx_in_progress = 0;
-
-	return 0;
-}
-
 static void rx_dma_timer_init(struct lpuart_port *sport)
 {
 	setup_timer(&sport->lpuart_timer, lpuart_timer_func,
@@ -1151,6 +1112,7 @@ static int lpuart_startup(struct uart_port *port)
 	}

 	if (sport->dma_tx_chan && !lpuart_dma_tx_request(port)) {
+		init_waitqueue_head(&sport->dma_wait);
 		sport->lpuart_dma_tx_use = true;
 		temp = readb(port->membase + UARTCR5);
 		writeb(temp | UARTCR5_TDMAS, port->membase + UARTCR5);
@@ -1220,8 +1182,15 @@ static void lpuart_shutdown(struct uart_port *port)
 		lpuart_dma_rx_free(&sport->port);
 	}

-	if (sport->lpuart_dma_tx_use)
-		lpuart_dma_tx_free(&sport->port);
+	if (sport->lpuart_dma_tx_use) {
+		if (wait_event_interruptible(sport->dma_wait,
+			!sport->dma_tx_in_progress) != false) {
+			sport->dma_tx_in_progress = false;
+			dmaengine_terminate_all(sport->dma_tx_chan);
+		}
+
+		lpuart_stop_tx(port);
+	}
 }

 static void lpuart32_shutdown(struct uart_port *port)