Lines Matching refs:ring

255 bge_reinit_buff_ring(buff_ring_t *brp, uint32_t ring)  in bge_reinit_buff_ring()  argument
288 hw_rbd_p->flags |= ring_type_flag[ring]; in bge_reinit_buff_ring()
305 uint32_t ring; in bge_reinit_rings() local
312 for (ring = 0; ring < bgep->chipid.tx_rings; ++ring) in bge_reinit_rings()
313 bge_reinit_send_ring(&bgep->send[ring]); in bge_reinit_rings()
318 for (ring = 0; ring < bgep->chipid.rx_rings; ++ring) in bge_reinit_rings()
319 bge_reinit_recv_ring(&bgep->recv[ring]); in bge_reinit_rings()
324 for (ring = 0; ring < BGE_BUFF_RINGS_USED; ++ring) in bge_reinit_rings()
325 bge_reinit_buff_ring(&bgep->buff[ring], ring); in bge_reinit_rings()
352 uint32_t ring; in bge_reset() local
363 for (ring = 0; ring < BGE_RECV_RINGS_MAX; ++ring) in bge_reset()
364 mutex_enter(bgep->recv[ring].rx_lock); in bge_reset()
365 for (ring = 0; ring < BGE_BUFF_RINGS_MAX; ++ring) in bge_reset()
366 mutex_enter(bgep->buff[ring].rf_lock); in bge_reset()
368 for (ring = 0; ring < BGE_SEND_RINGS_MAX; ++ring) in bge_reset()
369 mutex_enter(bgep->send[ring].tx_lock); in bge_reset()
370 for (ring = 0; ring < BGE_SEND_RINGS_MAX; ++ring) in bge_reset()
371 mutex_enter(bgep->send[ring].tc_lock); in bge_reset()
383 for (ring = BGE_SEND_RINGS_MAX; ring-- > 0; ) in bge_reset()
384 mutex_exit(bgep->send[ring].tc_lock); in bge_reset()
385 for (ring = 0; ring < BGE_SEND_RINGS_MAX; ++ring) in bge_reset()
386 mutex_exit(bgep->send[ring].tx_lock); in bge_reset()
388 for (ring = BGE_BUFF_RINGS_MAX; ring-- > 0; ) in bge_reset()
389 mutex_exit(bgep->buff[ring].rf_lock); in bge_reset()
390 for (ring = BGE_RECV_RINGS_MAX; ring-- > 0; ) in bge_reset()
391 mutex_exit(bgep->recv[ring].rx_lock); in bge_reset()
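In bge_reset() (source lines 363-371 and 383-391) every per-ring mutex is taken in one fixed order, receive ring rx_lock, then buffer ring rf_lock, then send ring tx_lock and tc_lock, and released again once the reset work is done (largely in the reverse order; the consistent acquisition order, not the release order, is what avoids lock-order deadlocks). A sketch of that acquire-forward / release-backward pattern, with POSIX pthread mutexes standing in for the kernel's kmutex_t and mutex_enter()/mutex_exit(); the array sizes here are illustrative.

#include <stdint.h>
#include <pthread.h>

#define RECV_RINGS_MAX 16
#define BUFF_RINGS_MAX 3
#define SEND_RINGS_MAX 16

/* Stand-ins for the locks embedded in the recv/buff/send ring structures. */
static pthread_mutex_t rx_lock[RECV_RINGS_MAX];
static pthread_mutex_t rf_lock[BUFF_RINGS_MAX];
static pthread_mutex_t tx_lock[SEND_RINGS_MAX];
static pthread_mutex_t tc_lock[SEND_RINGS_MAX];

void init_ring_locks(void)
{
    uint32_t ring;

    for (ring = 0; ring < RECV_RINGS_MAX; ++ring)
        pthread_mutex_init(&rx_lock[ring], NULL);
    for (ring = 0; ring < BUFF_RINGS_MAX; ++ring)
        pthread_mutex_init(&rf_lock[ring], NULL);
    for (ring = 0; ring < SEND_RINGS_MAX; ++ring) {
        pthread_mutex_init(&tx_lock[ring], NULL);
        pthread_mutex_init(&tc_lock[ring], NULL);
    }
}

/* Acquire every ring lock in one fixed global order: rx, rf, tx, tc. */
void lock_all_rings(void)
{
    uint32_t ring;

    for (ring = 0; ring < RECV_RINGS_MAX; ++ring)
        pthread_mutex_lock(&rx_lock[ring]);
    for (ring = 0; ring < BUFF_RINGS_MAX; ++ring)
        pthread_mutex_lock(&rf_lock[ring]);
    for (ring = 0; ring < SEND_RINGS_MAX; ++ring)
        pthread_mutex_lock(&tx_lock[ring]);
    for (ring = 0; ring < SEND_RINGS_MAX; ++ring)
        pthread_mutex_lock(&tc_lock[ring]);
}

/* Release them all again; reverse order keeps the sketch symmetric,
 * though unlock order is not what prevents deadlock. */
void unlock_all_rings(void)
{
    uint32_t ring;

    for (ring = SEND_RINGS_MAX; ring-- > 0; )
        pthread_mutex_unlock(&tc_lock[ring]);
    for (ring = SEND_RINGS_MAX; ring-- > 0; )
        pthread_mutex_unlock(&tx_lock[ring]);
    for (ring = BUFF_RINGS_MAX; ring-- > 0; )
        pthread_mutex_unlock(&rf_lock[ring]);
    for (ring = RECV_RINGS_MAX; ring-- > 0; )
        pthread_mutex_unlock(&rx_lock[ring]);
}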
488 uint32_t ring; in bge_m_stop() local
513 for (ring = 0; ring < bgep->chipid.tx_rings; ++ring) { in bge_m_stop()
514 srp = &bgep->send[ring]; in bge_m_stop()
1500 uint8_t ring = (uint8_t)(rrp - bgep->recv) + 1; local
1573 rulep[i].control = RULE_DEST_MAC_1(ring) | RECV_RULE_CTL_AND;
1579 rulep[i+1].control = RULE_DEST_MAC_2(ring);
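Source line 1500 recovers the receive ring's number from the ring structure's position in the bgep->recv array by pointer subtraction, adding one because the receive-rule macros RULE_DEST_MAC_1()/RULE_DEST_MAC_2() used at lines 1573-1579 appear to expect 1-based numbering. A tiny standalone illustration of that pointer-arithmetic idiom; recv_ring_t and ring_number() are made up for the example.

#include <stdint.h>
#include <stdio.h>

#define RECV_RINGS 16

typedef struct { int unused; } recv_ring_t;

static recv_ring_t recv_rings[RECV_RINGS];

/* 1-based index of a ring, recovered from its address within the array. */
static uint8_t ring_number(const recv_ring_t *rrp)
{
    return (uint8_t)(rrp - recv_rings) + 1;
}

int main(void)
{
    printf("ring %u\n", ring_number(&recv_rings[3]));   /* prints "ring 4" */
    return 0;
}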
2192 bge_init_buff_ring(bge_t *bgep, uint64_t ring) argument
2220 (void *)bgep, ring));
2222 brp = &bgep->buff[ring];
2237 brp->hw_rcb.nic_ring_addr = nic_ring_addrs[ring];
2244 brp->cons_index_p = &bsp->buff_cons_index[buff_cons_xref[ring]];
2245 brp->chip_mbx_reg = mailbox_regs[ring];
2270 bge_fini_buff_ring(bge_t *bgep, uint64_t ring) argument
2276 (void *)bgep, ring));
2278 brp = &bgep->buff[ring];
2292 bge_init_recv_ring(bge_t *bgep, uint64_t ring) argument
2299 (void *)bgep, ring));
2305 rrp = &bgep->recv[ring];
2323 rrp->prod_index_p = RECV_INDEX_P(bsp, ring);
2324 rrp->chip_mbx_reg = RECV_RING_CONS_INDEX_REG(ring);
2334 bge_fini_recv_ring(bge_t *bgep, uint64_t ring) argument
2339 (void *)bgep, ring));
2341 rrp = &bgep->recv[ring];
2353 bge_init_send_ring(bge_t *bgep, uint64_t ring) argument
2366 (void *)bgep, ring));
2372 srp = &bgep->send[ring];
2382 srp->hw_rcb.nic_ring_addr = NIC_MEM_SHADOW_SEND_RING(ring, nslots);
2389 srp->cons_index_p = SEND_INDEX_P(bsp, ring);
2390 srp->chip_mbx_reg = SEND_RING_HOST_INDEX_REG(ring);
2448 bge_fini_send_ring(bge_t *bgep, uint64_t ring) argument
2456 (void *)bgep, ring));
2458 srp = &bgep->send[ring];
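Each init function above finishes with the same three pieces of per-ring bookkeeping: the ring control block's address in NIC-internal memory (hw_rcb.nic_ring_addr), a pointer into the status block where the chip publishes its consumer index (cons_index_p), and the mailbox register through which the host posts its own index (chip_mbx_reg); see source lines 2237-2245, 2323-2324 and 2382-2390. A schematic of that per-ring state with placeholder types; only those three field names come from the listing, everything else is invented for the sketch.

#include <stdint.h>

/* Placeholder ring control block handed to the chip. */
typedef struct {
    uint32_t nic_ring_addr;              /* where this ring lives in NIC memory */
} mini_rcb_t;

/* Placeholder status block: one consumer-index slot per ring. */
typedef struct {
    volatile uint16_t cons_index[16];
} mini_status_t;

/* Per-ring software state mirroring the fields set up by the driver. */
typedef struct {
    mini_rcb_t         hw_rcb;           /* control block for the chip        */
    volatile uint16_t *cons_index_p;     /* chip-updated consumer index       */
    uint32_t           chip_mbx_reg;     /* mailbox register for host index   */
} mini_ring_t;

void setup_ring(mini_ring_t *rp, mini_status_t *bsp,
    uint32_t ring, uint32_t nic_addr, uint32_t mbx_reg)
{
    rp->hw_rcb.nic_ring_addr = nic_addr;
    rp->cons_index_p = &bsp->cons_index[ring];
    rp->chip_mbx_reg = mbx_reg;
}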
2486 uint32_t ring; local
2493 for (ring = 0; ring < BGE_SEND_RINGS_MAX; ++ring)
2494 bge_init_send_ring(bgep, ring);
2495 for (ring = 0; ring < BGE_RECV_RINGS_MAX; ++ring)
2496 bge_init_recv_ring(bgep, ring);
2497 for (ring = 0; ring < BGE_BUFF_RINGS_MAX; ++ring)
2498 bge_init_buff_ring(bgep, ring);
2507 uint32_t ring; local
2511 for (ring = 0; ring < BGE_BUFF_RINGS_MAX; ++ring)
2512 bge_fini_buff_ring(bgep, ring);
2513 for (ring = 0; ring < BGE_RECV_RINGS_MAX; ++ring)
2514 bge_fini_recv_ring(bgep, ring);
2515 for (ring = 0; ring < BGE_SEND_RINGS_MAX; ++ring)
2516 bge_fini_send_ring(bgep, ring);
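bge_init_rings() (lines 2493-2498) brings the classes up as send, receive, buffer, while bge_fini_rings() (lines 2511-2516) tears them down as buffer, receive, send, i.e. the class order is reversed on the way down. A compact sketch of just the teardown side, assuming the fini_* helpers below correspond one-for-one to matching init_* helpers (all names are placeholders).

#include <stdint.h>

#define SEND_RINGS_MAX 16
#define RECV_RINGS_MAX 16
#define BUFF_RINGS_MAX 3

static void fini_buff_ring(uint64_t ring) { (void)ring; }
static void fini_recv_ring(uint64_t ring) { (void)ring; }
static void fini_send_ring(uint64_t ring) { (void)ring; }

/* Teardown walks the same ring classes as setup, in reverse class order:
 * buffer rings first, then receive rings, then send rings. */
void fini_rings(void)
{
    uint32_t ring;

    for (ring = 0; ring < BUFF_RINGS_MAX; ++ring)
        fini_buff_ring(ring);
    for (ring = 0; ring < RECV_RINGS_MAX; ++ring)
        fini_recv_ring(ring);
    for (ring = 0; ring < SEND_RINGS_MAX; ++ring)
        fini_send_ring(ring);
}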
2646 uint32_t ring; local
2846 for (ring = 0; ring < tx_rings; ++ring) {
2847 bge_slice_chunk(&bgep->send[ring].buf[0][split],
2852 split, ring,
2853 bgep->send[ring].buf[0][split].mem_va,
2854 bgep->send[ring].buf[0][split].alength,
2855 bgep->send[ring].buf[0][split].offset,
2856 bgep->send[ring].buf[0][split].cookie.dmac_laddress,
2857 bgep->send[ring].buf[0][split].cookie.dmac_size,
2862 for (; ring < BGE_SEND_RINGS_MAX; ++ring) {
2863 bge_slice_chunk(&bgep->send[ring].buf[0][split],
2876 for (ring = 0; ring < rx_rings; ++ring) {
2877 bge_slice_chunk(&bgep->recv[ring].desc, &bgep->rx_desc[ring],
2881 ring,
2882 bgep->recv[ring].desc.mem_va,
2883 bgep->recv[ring].desc.alength,
2884 bgep->recv[ring].desc.offset,
2885 bgep->recv[ring].desc.cookie.dmac_laddress,
2886 bgep->recv[ring].desc.cookie.dmac_size,
2894 for (; ring < BGE_RECV_RINGS_MAX; ++ring) /* skip unused rings */
2895 bge_slice_chunk(&bgep->recv[ring].desc, &area,
2956 for (ring = 0; ring < tx_rings; ++ring) {
2957 bge_slice_chunk(&bgep->send[ring].desc, &area,
2961 ring,
2962 bgep->send[ring].desc.mem_va,
2963 bgep->send[ring].desc.alength,
2964 bgep->send[ring].desc.offset,
2965 bgep->send[ring].desc.cookie.dmac_laddress,
2966 bgep->send[ring].desc.cookie.dmac_size,
2971 for (; ring < BGE_SEND_RINGS_MAX; ++ring) /* skip unused rings */
2972 bge_slice_chunk(&bgep->send[ring].desc, &area,
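The allocation code at source lines 2846-2972 carves large pre-allocated DMA areas into per-ring pieces with bge_slice_chunk(), and the trailing for (; ring < ...; ++ring) loops deliberately continue from wherever the previous loop stopped so the unused ring slots still receive their (empty) slices, as the "skip unused rings" comments suggest. A simplified, self-contained sketch of that carve-one-area-into-slices pattern; dma_area_t here is a stand-in with only mem_va/alength/offset, not the driver's real structure, and slice_chunk() models only the bookkeeping.

#include <stdint.h>
#include <stddef.h>
#include <stdlib.h>

/* Stand-in for the driver's DMA area descriptor. */
typedef struct {
    void  *mem_va;     /* virtual address of this slice                 */
    size_t alength;    /* length of this slice in bytes                 */
    size_t offset;     /* offset of this slice within the parent area   */
} dma_area_t;

/*
 * Hand out the next nslots * slotlen bytes of 'area' as 'slice', then
 * advance 'area' past it -- the same bookkeeping style as
 * bge_slice_chunk(&bgep->send[ring].desc, &area, nslots, slotlen).
 */
static void slice_chunk(dma_area_t *slice, dma_area_t *area,
    uint32_t nslots, size_t slotlen)
{
    size_t totlen = (size_t)nslots * slotlen;

    slice->mem_va  = area->mem_va;
    slice->offset  = area->offset;
    slice->alength = totlen;

    area->mem_va   = (char *)area->mem_va + totlen;
    area->offset  += totlen;
    area->alength -= totlen;
}

int main(void)
{
    enum { TX_RINGS = 1, SEND_RINGS_MAX = 4, SLOTS = 8, SLOTLEN = 16 };
    dma_area_t area = { malloc(SEND_RINGS_MAX * SLOTS * SLOTLEN),
                        SEND_RINGS_MAX * SLOTS * SLOTLEN, 0 };
    dma_area_t desc[SEND_RINGS_MAX];
    uint32_t ring;

    /* Populate the rings that are actually configured ... */
    for (ring = 0; ring < TX_RINGS; ++ring)
        slice_chunk(&desc[ring], &area, SLOTS, SLOTLEN);
    /* ... then keep going with the same counter, giving the unused
     * ring slots zero-slot slices. */
    for (; ring < SEND_RINGS_MAX; ++ring)
        slice_chunk(&desc[ring], &area, 0, SLOTLEN);

    free(area.mem_va);
    return 0;
}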