author	Lendacky, Thomas <Thomas.Lendacky@amd.com>	2016-11-11 02:10:17 +0300
committer	David S. Miller <davem@davemloft.net>	2016-11-13 08:56:26 +0300
commit	4c70dd8ac9ef88a1902b4d63dda987746a34ebc4 (patch)
tree	2a65dcd6bdaff64bddfd649161510ceeb704cff1 /drivers/net/ethernet/amd/xgbe/xgbe-dev.c
parent	8d6b2e92bdadc925c9ea4d8d0c4554918357ee35 (diff)
amd-xgbe: Add support for new DMA interrupt mode
The current per channel DMA interrupt support is based on an edge triggered interrupt that is not maskable. This results in having to call the disable_irq/enable_irq functions in order to prevent interrupts during napi processing.

The hardware now provides a way to configure the per channel DMA interrupt so that it can be masked, which removes the need to call disable_irq/enable_irq. This patch makes use of this support.

Signed-off-by: Tom Lendacky <thomas.lendacky@amd.com>
Signed-off-by: David S. Miller <davem@davemloft.net>
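For context, a rough, hypothetical sketch of how a per channel ISR can avoid disable_irq()/enable_irq() once the interrupt is maskable at the DMA channel is shown below. The helper xgbe_mask_channel_dma_int() is an assumption made for illustration; the actual per channel handler lives in xgbe-drv.c and may differ in detail.

#include <linux/interrupt.h>
#include <linux/netdevice.h>

#include "xgbe.h"

/* Hypothetical helper: mask the channel's DMA interrupt by clearing
 * TIE/RIE in DMA_CH_IER instead of disabling the IRQ line.
 */
static void xgbe_mask_channel_dma_int(struct xgbe_channel *channel);

static irqreturn_t xgbe_chan_isr_sketch(int irq, void *data)
{
	struct xgbe_channel *channel = data;
	struct xgbe_prv_data *pdata = channel->pdata;

	if (napi_schedule_prep(&channel->napi)) {
		if (pdata->channel_irq_mode)
			/* New mode: the interrupt can be masked in the
			 * DMA channel itself
			 */
			xgbe_mask_channel_dma_int(channel);
		else
			/* Edge triggered mode: not maskable in hardware,
			 * so mask it at the interrupt controller
			 */
			disable_irq_nosync(irq);

		__napi_schedule(&channel->napi);
	}

	return IRQ_HANDLED;
}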
Diffstat (limited to 'drivers/net/ethernet/amd/xgbe/xgbe-dev.c')
-rw-r--r--	drivers/net/ethernet/amd/xgbe/xgbe-dev.c	15
1 file changed, 11 insertions, 4 deletions
diff --git a/drivers/net/ethernet/amd/xgbe/xgbe-dev.c b/drivers/net/ethernet/amd/xgbe/xgbe-dev.c
index 81d47807c3cc..ff7f5ab4d5fb 100644
--- a/drivers/net/ethernet/amd/xgbe/xgbe-dev.c
+++ b/drivers/net/ethernet/amd/xgbe/xgbe-dev.c
@@ -646,6 +646,11 @@ static void xgbe_enable_dma_interrupts(struct xgbe_prv_data *pdata)
 	unsigned int dma_ch_isr, dma_ch_ier;
 	unsigned int i;

+	/* Set the interrupt mode if supported */
+	if (pdata->channel_irq_mode)
+		XGMAC_IOWRITE_BITS(pdata, DMA_MR, INTM,
+				   pdata->channel_irq_mode);
+
 	channel = pdata->channel;
 	for (i = 0; i < pdata->channel_count; i++, channel++) {
 		/* Clear all the interrupts which are set */
@@ -667,19 +672,21 @@ static void xgbe_enable_dma_interrupts(struct xgbe_prv_data *pdata)
 		if (channel->tx_ring) {
 			/* Enable the following Tx interrupts
 			 *   TIE  - Transmit Interrupt Enable (unless using
-			 *          per channel interrupts)
+			 *          per channel interrupts in edge triggered
+			 *          mode)
 			 */
-			if (!pdata->per_channel_irq)
+			if (!pdata->per_channel_irq || pdata->channel_irq_mode)
 				XGMAC_SET_BITS(dma_ch_ier, DMA_CH_IER, TIE, 1);
 		}
 		if (channel->rx_ring) {
 			/* Enable following Rx interrupts
 			 *   RBUE - Receive Buffer Unavailable Enable
 			 *   RIE  - Receive Interrupt Enable (unless using
-			 *          per channel interrupts)
+			 *          per channel interrupts in edge triggered
+			 *          mode)
 			 */
 			XGMAC_SET_BITS(dma_ch_ier, DMA_CH_IER, RBUE, 1);
-			if (!pdata->per_channel_irq)
+			if (!pdata->per_channel_irq || pdata->channel_irq_mode)
 				XGMAC_SET_BITS(dma_ch_ier, DMA_CH_IER, RIE, 1);
 		}
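With the new mode, the hunks above leave TIE/RIE enabled at initialization even when per channel interrupts are used, because masking and unmasking is now done around NAPI processing rather than via disable_irq()/enable_irq(). Continuing the hypothetical sketch above (helper name and ring processing are assumptions; they are not the driver's actual code), the matching poll completion path might look like:

/* Hypothetical helper: unmask the channel's DMA interrupt by setting
 * TIE/RIE in DMA_CH_IER again once NAPI processing is finished.
 */
static void xgbe_unmask_channel_dma_int(struct xgbe_channel *channel);

static int xgbe_chan_poll_sketch(struct napi_struct *napi, int budget)
{
	struct xgbe_channel *channel =
		container_of(napi, struct xgbe_channel, napi);
	struct xgbe_prv_data *pdata = channel->pdata;
	int processed = 0;

	/* ... Tx/Rx ring processing would go here ... */

	if (processed < budget) {
		napi_complete_done(napi, processed);

		if (pdata->channel_irq_mode)
			/* New mode: unmask at the DMA channel */
			xgbe_unmask_channel_dma_int(channel);
		else
			/* Edge triggered mode: re-enable the IRQ line */
			enable_irq(channel->dma_irq);
	}

	return processed;
}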