arch/um/drivers/vector_kern.c
1418
struct ethtool_ringparam *ring,
arch/um/drivers/vector_kern.c
1424
ring->rx_max_pending = vp->rx_queue->max_depth;
arch/um/drivers/vector_kern.c
1425
ring->tx_max_pending = vp->tx_queue->max_depth;
arch/um/drivers/vector_kern.c
1426
ring->rx_pending = vp->rx_queue->max_depth;
arch/um/drivers/vector_kern.c
1427
ring->tx_pending = vp->tx_queue->max_depth;
drivers/ata/libata-eh.c
380
ent = &ering->ring[ering->cursor];
drivers/ata/libata-eh.c
388
struct ata_ering_entry *ent = &ering->ring[ering->cursor];
drivers/ata/libata-eh.c
404
ent = &ering->ring[idx];
drivers/block/xen-blkback/blkback.c
1004
make_response(ring, req->u.discard.id, req->operation, status);
drivers/block/xen-blkback/blkback.c
1009
static int dispatch_other_io(struct xen_blkif_ring *ring,
drivers/block/xen-blkback/blkback.c
1013
free_req(ring, pending_req);
drivers/block/xen-blkback/blkback.c
1014
make_response(ring, req->u.other.id, req->operation,
drivers/block/xen-blkback/blkback.c
1019
static void xen_blk_drain_io(struct xen_blkif_ring *ring)
drivers/block/xen-blkback/blkback.c
1021
struct xen_blkif *blkif = ring->blkif;
drivers/block/xen-blkback/blkback.c
1025
if (atomic_read(&ring->inflight) == 0)
drivers/block/xen-blkback/blkback.c
1043
xen_blkbk_flush_diskcache(XBT_NIL, pending_req->ring->blkif->be, 0);
drivers/block/xen-blkback/blkback.c
1048
xen_blkbk_barrier(XBT_NIL, pending_req->ring->blkif->be, 0);
drivers/block/xen-blkback/blkback.c
1186
__do_block_io_op(struct xen_blkif_ring *ring, unsigned int *eoi_flags)
drivers/block/xen-blkback/blkback.c
1188
union blkif_back_rings *blk_rings = &ring->blk_rings;
drivers/block/xen-blkback/blkback.c
1201
rp, rc, rp - rc, ring->blkif->vbd.pdevice);
drivers/block/xen-blkback/blkback.c
1217
pending_req = alloc_req(ring);
drivers/block/xen-blkback/blkback.c
1219
ring->st_oo_req++;
drivers/block/xen-blkback/blkback.c
1224
switch (ring->blkif->blk_protocol) {
drivers/block/xen-blkback/blkback.c
1248
if (dispatch_rw_block_io(ring, &req, pending_req))
drivers/block/xen-blkback/blkback.c
1252
free_req(ring, pending_req);
drivers/block/xen-blkback/blkback.c
1253
if (dispatch_discard_io(ring, &req))
drivers/block/xen-blkback/blkback.c
1257
if (dispatch_other_io(ring, &req, pending_req))
drivers/block/xen-blkback/blkback.c
1270
do_block_io_op(struct xen_blkif_ring *ring, unsigned int *eoi_flags)
drivers/block/xen-blkback/blkback.c
1272
union blkif_back_rings *blk_rings = &ring->blk_rings;
drivers/block/xen-blkback/blkback.c
1276
more_to_do = __do_block_io_op(ring, eoi_flags);
drivers/block/xen-blkback/blkback.c
1289
static int dispatch_rw_block_io(struct xen_blkif_ring *ring,
drivers/block/xen-blkback/blkback.c
1318
ring->st_rd_req++;
drivers/block/xen-blkback/blkback.c
1322
ring->st_wr_req++;
drivers/block/xen-blkback/blkback.c
1330
ring->st_f_req++;
drivers/block/xen-blkback/blkback.c
1356
pending_req->ring = ring;
drivers/block/xen-blkback/blkback.c
1383
if (xen_vbd_translate(&preq, ring->blkif, operation) != 0) {
drivers/block/xen-blkback/blkback.c
1388
ring->blkif->vbd.pdevice);
drivers/block/xen-blkback/blkback.c
1400
ring->blkif->domid);
drivers/block/xen-blkback/blkback.c
1409
xen_blk_drain_io(pending_req->ring);
drivers/block/xen-blkback/blkback.c
1424
xen_blkif_get(ring->blkif);
drivers/block/xen-blkback/blkback.c
1425
atomic_inc(&ring->inflight);
drivers/block/xen-blkback/blkback.c
143
static int do_block_io_op(struct xen_blkif_ring *ring, unsigned int *eoi_flags);
drivers/block/xen-blkback/blkback.c
144
static int dispatch_rw_block_io(struct xen_blkif_ring *ring,
drivers/block/xen-blkback/blkback.c
1466
ring->st_rd_sect += preq.nr_sects;
drivers/block/xen-blkback/blkback.c
1468
ring->st_wr_sect += preq.nr_sects;
drivers/block/xen-blkback/blkback.c
147
static void make_response(struct xen_blkif_ring *ring, u64 id,
drivers/block/xen-blkback/blkback.c
1473
xen_blkbk_unmap(ring, pending_req->segments,
drivers/block/xen-blkback/blkback.c
1477
make_response(ring, req->u.rw.id, req_operation, BLKIF_RSP_ERROR);
drivers/block/xen-blkback/blkback.c
1478
free_req(ring, pending_req);
drivers/block/xen-blkback/blkback.c
1488
static void make_response(struct xen_blkif_ring *ring, u64 id,
drivers/block/xen-blkback/blkback.c
1496
spin_lock_irqsave(&ring->blk_ring_lock, flags);
drivers/block/xen-blkback/blkback.c
1497
blk_rings = &ring->blk_rings;
drivers/block/xen-blkback/blkback.c
1499
switch (ring->blkif->blk_protocol) {
drivers/block/xen-blkback/blkback.c
1522
spin_unlock_irqrestore(&ring->blk_ring_lock, flags);
drivers/block/xen-blkback/blkback.c
1524
notify_remote_via_irq(ring->irq);
drivers/block/xen-blkback/blkback.c
168
static int add_persistent_gnt(struct xen_blkif_ring *ring,
drivers/block/xen-blkback/blkback.c
173
struct xen_blkif *blkif = ring->blkif;
drivers/block/xen-blkback/blkback.c
175
if (ring->persistent_gnt_c >= max_pgrants) {
drivers/block/xen-blkback/blkback.c
181
new = &ring->persistent_gnts.rb_node;
drivers/block/xen-blkback/blkback.c
199
rb_insert_color(&(persistent_gnt->node), &ring->persistent_gnts);
drivers/block/xen-blkback/blkback.c
200
ring->persistent_gnt_c++;
drivers/block/xen-blkback/blkback.c
201
atomic_inc(&ring->persistent_gnt_in_use);
drivers/block/xen-blkback/blkback.c
205
static struct persistent_gnt *get_persistent_gnt(struct xen_blkif_ring *ring,
drivers/block/xen-blkback/blkback.c
211
node = ring->persistent_gnts.rb_node;
drivers/block/xen-blkback/blkback.c
225
atomic_inc(&ring->persistent_gnt_in_use);
drivers/block/xen-blkback/blkback.c
232
static void put_persistent_gnt(struct xen_blkif_ring *ring,
drivers/block/xen-blkback/blkback.c
239
atomic_dec(&ring->persistent_gnt_in_use);
drivers/block/xen-blkback/blkback.c
242
static void free_persistent_gnts(struct xen_blkif_ring *ring)
drivers/block/xen-blkback/blkback.c
244
struct rb_root *root = &ring->persistent_gnts;
drivers/block/xen-blkback/blkback.c
276
gnttab_page_cache_put(&ring->free_pages, pages,
drivers/block/xen-blkback/blkback.c
283
ring->persistent_gnt_c--;
drivers/block/xen-blkback/blkback.c
286
BUG_ON(!RB_EMPTY_ROOT(&ring->persistent_gnts));
drivers/block/xen-blkback/blkback.c
287
BUG_ON(ring->persistent_gnt_c != 0);
drivers/block/xen-blkback/blkback.c
296
struct xen_blkif_ring *ring = container_of(work, typeof(*ring), persistent_purge_work);
drivers/block/xen-blkback/blkback.c
303
while(!list_empty(&ring->persistent_purge_list)) {
drivers/block/xen-blkback/blkback.c
304
persistent_gnt = list_first_entry(&ring->persistent_purge_list,
drivers/block/xen-blkback/blkback.c
319
gnttab_page_cache_put(&ring->free_pages, pages,
drivers/block/xen-blkback/blkback.c
328
gnttab_page_cache_put(&ring->free_pages, pages, segs_to_unmap);
drivers/block/xen-blkback/blkback.c
332
static void purge_persistent_gnt(struct xen_blkif_ring *ring)
drivers/block/xen-blkback/blkback.c
340
if (work_busy(&ring->persistent_purge_work)) {
drivers/block/xen-blkback/blkback.c
345
if (ring->persistent_gnt_c < max_pgrants ||
drivers/block/xen-blkback/blkback.c
346
(ring->persistent_gnt_c == max_pgrants &&
drivers/block/xen-blkback/blkback.c
347
!ring->blkif->vbd.overflow_max_grants)) {
drivers/block/xen-blkback/blkback.c
351
num_clean = ring->persistent_gnt_c - max_pgrants + num_clean;
drivers/block/xen-blkback/blkback.c
352
num_clean = min(ring->persistent_gnt_c, num_clean);
drivers/block/xen-blkback/blkback.c
368
BUG_ON(!list_empty(&ring->persistent_purge_list));
drivers/block/xen-blkback/blkback.c
369
root = &ring->persistent_gnts;
drivers/block/xen-blkback/blkback.c
384
&ring->persistent_purge_list);
drivers/block/xen-blkback/blkback.c
399
ring->persistent_gnt_c -= total;
drivers/block/xen-blkback/blkback.c
400
ring->blkif->vbd.overflow_max_grants = 0;
drivers/block/xen-blkback/blkback.c
403
schedule_work(&ring->persistent_purge_work);
drivers/block/xen-blkback/blkback.c
414
static struct pending_req *alloc_req(struct xen_blkif_ring *ring)
drivers/block/xen-blkback/blkback.c
419
spin_lock_irqsave(&ring->pending_free_lock, flags);
drivers/block/xen-blkback/blkback.c
420
if (!list_empty(&ring->pending_free)) {
drivers/block/xen-blkback/blkback.c
421
req = list_entry(ring->pending_free.next, struct pending_req,
drivers/block/xen-blkback/blkback.c
425
spin_unlock_irqrestore(&ring->pending_free_lock, flags);
drivers/block/xen-blkback/blkback.c
433
static void free_req(struct xen_blkif_ring *ring, struct pending_req *req)
drivers/block/xen-blkback/blkback.c
438
spin_lock_irqsave(&ring->pending_free_lock, flags);
drivers/block/xen-blkback/blkback.c
439
was_empty = list_empty(&ring->pending_free);
drivers/block/xen-blkback/blkback.c
440
list_add(&req->free_list, &ring->pending_free);
drivers/block/xen-blkback/blkback.c
441
spin_unlock_irqrestore(&ring->pending_free_lock, flags);
drivers/block/xen-blkback/blkback.c
443
wake_up(&ring->pending_free_wq);
drivers/block/xen-blkback/blkback.c
523
static void blkif_notify_work(struct xen_blkif_ring *ring)
drivers/block/xen-blkback/blkback.c
525
ring->waiting_reqs = 1;
drivers/block/xen-blkback/blkback.c
526
wake_up(&ring->wq);
drivers/block/xen-blkback/blkback.c
539
static void print_stats(struct xen_blkif_ring *ring)
drivers/block/xen-blkback/blkback.c
543
current->comm, ring->st_oo_req,
drivers/block/xen-blkback/blkback.c
544
ring->st_rd_req, ring->st_wr_req,
drivers/block/xen-blkback/blkback.c
545
ring->st_f_req, ring->st_ds_req,
drivers/block/xen-blkback/blkback.c
546
ring->persistent_gnt_c, max_pgrants);
drivers/block/xen-blkback/blkback.c
547
ring->st_print = jiffies + secs_to_jiffies(10);
drivers/block/xen-blkback/blkback.c
548
ring->st_rd_req = 0;
drivers/block/xen-blkback/blkback.c
549
ring->st_wr_req = 0;
drivers/block/xen-blkback/blkback.c
550
ring->st_oo_req = 0;
drivers/block/xen-blkback/blkback.c
551
ring->st_ds_req = 0;
drivers/block/xen-blkback/blkback.c
556
struct xen_blkif_ring *ring = arg;
drivers/block/xen-blkback/blkback.c
557
struct xen_blkif *blkif = ring->blkif;
drivers/block/xen-blkback/blkback.c
574
ring->wq,
drivers/block/xen-blkback/blkback.c
575
ring->waiting_reqs || kthread_should_stop(),
drivers/block/xen-blkback/blkback.c
580
ring->pending_free_wq,
drivers/block/xen-blkback/blkback.c
581
!list_empty(&ring->pending_free) ||
drivers/block/xen-blkback/blkback.c
587
do_eoi = ring->waiting_reqs;
drivers/block/xen-blkback/blkback.c
589
ring->waiting_reqs = 0;
drivers/block/xen-blkback/blkback.c
592
ret = do_block_io_op(ring, &eoi_flags);
drivers/block/xen-blkback/blkback.c
594
ring->waiting_reqs = 1;
drivers/block/xen-blkback/blkback.c
596
wait_event_interruptible(ring->shutdown_wq,
drivers/block/xen-blkback/blkback.c
599
if (do_eoi && !ring->waiting_reqs) {
drivers/block/xen-blkback/blkback.c
600
xen_irq_lateeoi(ring->irq, eoi_flags);
drivers/block/xen-blkback/blkback.c
606
time_after(jiffies, ring->next_lru)) {
drivers/block/xen-blkback/blkback.c
607
purge_persistent_gnt(ring);
drivers/block/xen-blkback/blkback.c
608
ring->next_lru = jiffies + msecs_to_jiffies(LRU_INTERVAL);
drivers/block/xen-blkback/blkback.c
613
gnttab_page_cache_shrink(&ring->free_pages, 0);
drivers/block/xen-blkback/blkback.c
615
gnttab_page_cache_shrink(&ring->free_pages,
drivers/block/xen-blkback/blkback.c
618
if (log_stats && time_after(jiffies, ring->st_print))
drivers/block/xen-blkback/blkback.c
619
print_stats(ring);
drivers/block/xen-blkback/blkback.c
623
flush_work(&ring->persistent_purge_work);
drivers/block/xen-blkback/blkback.c
626
print_stats(ring);
drivers/block/xen-blkback/blkback.c
628
ring->xenblkd = NULL;
drivers/block/xen-blkback/blkback.c
636
void xen_blkbk_free_caches(struct xen_blkif_ring *ring)
drivers/block/xen-blkback/blkback.c
639
free_persistent_gnts(ring);
drivers/block/xen-blkback/blkback.c
642
gnttab_page_cache_shrink(&ring->free_pages, 0 /* All */);
drivers/block/xen-blkback/blkback.c
646
struct xen_blkif_ring *ring,
drivers/block/xen-blkback/blkback.c
656
put_persistent_gnt(ring, pages[i]->persistent_gnt);
drivers/block/xen-blkback/blkback.c
674
struct xen_blkif_ring *ring = pending_req->ring;
drivers/block/xen-blkback/blkback.c
675
struct xen_blkif *blkif = ring->blkif;
drivers/block/xen-blkback/blkback.c
681
gnttab_page_cache_put(&ring->free_pages, data->pages, data->count);
drivers/block/xen-blkback/blkback.c
682
make_response(ring, pending_req->id,
drivers/block/xen-blkback/blkback.c
684
free_req(ring, pending_req);
drivers/block/xen-blkback/blkback.c
697
if (atomic_dec_and_test(&ring->inflight) && atomic_read(&blkif->drain)) {
drivers/block/xen-blkback/blkback.c
706
struct xen_blkif_ring *ring = req->ring;
drivers/block/xen-blkback/blkback.c
710
invcount = xen_blkbk_unmap_prepare(ring, pages, req->nr_segs,
drivers/block/xen-blkback/blkback.c
731
static void xen_blkbk_unmap(struct xen_blkif_ring *ring,
drivers/block/xen-blkback/blkback.c
743
invcount = xen_blkbk_unmap_prepare(ring, pages, batch,
drivers/block/xen-blkback/blkback.c
748
gnttab_page_cache_put(&ring->free_pages, unmap_pages,
drivers/block/xen-blkback/blkback.c
756
static int xen_blkbk_map(struct xen_blkif_ring *ring,
drivers/block/xen-blkback/blkback.c
769
struct xen_blkif *blkif = ring->blkif;
drivers/block/xen-blkback/blkback.c
784
ring,
drivers/block/xen-blkback/blkback.c
796
if (gnttab_page_cache_get(&ring->free_pages,
drivers/block/xen-blkback/blkback.c
798
gnttab_page_cache_put(&ring->free_pages,
drivers/block/xen-blkback/blkback.c
833
gnttab_page_cache_put(&ring->free_pages,
drivers/block/xen-blkback/blkback.c
844
ring->persistent_gnt_c < max_pgrants) {
drivers/block/xen-blkback/blkback.c
861
if (add_persistent_gnt(ring,
drivers/block/xen-blkback/blkback.c
869
persistent_gnt->gnt, ring->persistent_gnt_c,
drivers/block/xen-blkback/blkback.c
905
rc = xen_blkbk_map(pending_req->ring, pending_req->segments,
drivers/block/xen-blkback/blkback.c
918
struct xen_blkif_ring *ring = pending_req->ring;
drivers/block/xen-blkback/blkback.c
929
rc = xen_blkbk_map(ring, pages, indirect_grefs, true);
drivers/block/xen-blkback/blkback.c
961
xen_blkbk_unmap(ring, pages, indirect_grefs);
drivers/block/xen-blkback/blkback.c
965
static int dispatch_discard_io(struct xen_blkif_ring *ring,
drivers/block/xen-blkback/blkback.c
970
struct xen_blkif *blkif = ring->blkif;
drivers/block/xen-blkback/blkback.c
986
ring->st_ds_req++;
drivers/block/xen-blkback/common.h
345
struct xen_blkif_ring *ring;
drivers/block/xen-blkback/common.h
387
void xen_blkbk_free_caches(struct xen_blkif_ring *ring);
drivers/block/xen-blkback/xenbus.c
1014
list_add_tail(&req->free_list, &ring->pending_free);
drivers/block/xen-blkback/xenbus.c
1028
err = xen_blkif_map(ring, ring_ref, nr_grefs, evtchn);
drivers/block/xen-blkback/xenbus.c
1037
list_for_each_entry_safe(req, n, &ring->pending_free, free_list) {
drivers/block/xen-blkback/xenbus.c
110
ring = &blkif->rings[i];
drivers/block/xen-blkback/xenbus.c
111
ring->xenblkd = kthread_run(xen_blkif_schedule, ring, "%s-%d", name, i);
drivers/block/xen-blkback/xenbus.c
112
if (IS_ERR(ring->xenblkd)) {
drivers/block/xen-blkback/xenbus.c
113
err = PTR_ERR(ring->xenblkd);
drivers/block/xen-blkback/xenbus.c
114
ring->xenblkd = NULL;
drivers/block/xen-blkback/xenbus.c
124
ring = &blkif->rings[i];
drivers/block/xen-blkback/xenbus.c
125
kthread_stop(ring->xenblkd);
drivers/block/xen-blkback/xenbus.c
139
struct xen_blkif_ring *ring = &blkif->rings[r];
drivers/block/xen-blkback/xenbus.c
141
spin_lock_init(&ring->blk_ring_lock);
drivers/block/xen-blkback/xenbus.c
142
init_waitqueue_head(&ring->wq);
drivers/block/xen-blkback/xenbus.c
143
INIT_LIST_HEAD(&ring->pending_free);
drivers/block/xen-blkback/xenbus.c
144
INIT_LIST_HEAD(&ring->persistent_purge_list);
drivers/block/xen-blkback/xenbus.c
145
INIT_WORK(&ring->persistent_purge_work, xen_blkbk_unmap_purged_grants);
drivers/block/xen-blkback/xenbus.c
146
gnttab_page_cache_init(&ring->free_pages);
drivers/block/xen-blkback/xenbus.c
148
spin_lock_init(&ring->pending_free_lock);
drivers/block/xen-blkback/xenbus.c
149
init_waitqueue_head(&ring->pending_free_wq);
drivers/block/xen-blkback/xenbus.c
150
init_waitqueue_head(&ring->shutdown_wq);
drivers/block/xen-blkback/xenbus.c
151
ring->blkif = blkif;
drivers/block/xen-blkback/xenbus.c
152
ring->st_print = jiffies;
drivers/block/xen-blkback/xenbus.c
153
ring->active = true;
drivers/block/xen-blkback/xenbus.c
191
static int xen_blkif_map(struct xen_blkif_ring *ring, grant_ref_t *gref,
drivers/block/xen-blkback/xenbus.c
195
struct xen_blkif *blkif = ring->blkif;
drivers/block/xen-blkback/xenbus.c
201
if (ring->irq)
drivers/block/xen-blkback/xenbus.c
205
&ring->blk_ring);
drivers/block/xen-blkback/xenbus.c
209
sring_common = (struct blkif_common_sring *)ring->blk_ring;
drivers/block/xen-blkback/xenbus.c
217
(struct blkif_sring *)ring->blk_ring;
drivers/block/xen-blkback/xenbus.c
219
BACK_RING_ATTACH(&ring->blk_rings.native, sring_native,
drivers/block/xen-blkback/xenbus.c
227
(struct blkif_x86_32_sring *)ring->blk_ring;
drivers/block/xen-blkback/xenbus.c
229
BACK_RING_ATTACH(&ring->blk_rings.x86_32, sring_x86_32,
drivers/block/xen-blkback/xenbus.c
237
(struct blkif_x86_64_sring *)ring->blk_ring;
drivers/block/xen-blkback/xenbus.c
239
BACK_RING_ATTACH(&ring->blk_rings.x86_64, sring_x86_64,
drivers/block/xen-blkback/xenbus.c
253
evtchn, xen_blkif_be_int, 0, "blkif-backend", ring);
drivers/block/xen-blkback/xenbus.c
256
ring->irq = err;
drivers/block/xen-blkback/xenbus.c
261
xenbus_unmap_ring_vfree(blkif->be->dev, ring->blk_ring);
drivers/block/xen-blkback/xenbus.c
262
ring->blk_rings.common.sring = NULL;
drivers/block/xen-blkback/xenbus.c
273
struct xen_blkif_ring *ring = &blkif->rings[r];
drivers/block/xen-blkback/xenbus.c
276
if (!ring->active)
drivers/block/xen-blkback/xenbus.c
279
if (ring->xenblkd) {
drivers/block/xen-blkback/xenbus.c
280
kthread_stop(ring->xenblkd);
drivers/block/xen-blkback/xenbus.c
281
ring->xenblkd = NULL;
drivers/block/xen-blkback/xenbus.c
282
wake_up(&ring->shutdown_wq);
drivers/block/xen-blkback/xenbus.c
289
if (atomic_read(&ring->inflight) > 0) {
drivers/block/xen-blkback/xenbus.c
294
if (ring->irq) {
drivers/block/xen-blkback/xenbus.c
295
unbind_from_irqhandler(ring->irq, ring);
drivers/block/xen-blkback/xenbus.c
296
ring->irq = 0;
drivers/block/xen-blkback/xenbus.c
299
if (ring->blk_rings.common.sring) {
drivers/block/xen-blkback/xenbus.c
300
xenbus_unmap_ring_vfree(blkif->be->dev, ring->blk_ring);
drivers/block/xen-blkback/xenbus.c
301
ring->blk_rings.common.sring = NULL;
drivers/block/xen-blkback/xenbus.c
305
xen_blkbk_free_caches(ring);
drivers/block/xen-blkback/xenbus.c
308
list_for_each_entry_safe(req, n, &ring->pending_free, free_list) {
drivers/block/xen-blkback/xenbus.c
321
BUG_ON(atomic_read(&ring->persistent_gnt_in_use) != 0);
drivers/block/xen-blkback/xenbus.c
322
BUG_ON(!list_empty(&ring->persistent_purge_list));
drivers/block/xen-blkback/xenbus.c
323
BUG_ON(!RB_EMPTY_ROOT(&ring->persistent_gnts));
drivers/block/xen-blkback/xenbus.c
324
BUG_ON(ring->free_pages.num_pages != 0);
drivers/block/xen-blkback/xenbus.c
325
BUG_ON(ring->persistent_gnt_c != 0);
drivers/block/xen-blkback/xenbus.c
327
ring->active = false;
drivers/block/xen-blkback/xenbus.c
392
struct xen_blkif_ring *ring = &blkif->rings[i]; \
drivers/block/xen-blkback/xenbus.c
394
result += ring->st_##name; \
drivers/block/xen-blkback/xenbus.c
80
struct xen_blkif_ring *ring;
drivers/block/xen-blkback/xenbus.c
964
static int read_per_ring_refs(struct xen_blkif_ring *ring, const char *dir)
drivers/block/xen-blkback/xenbus.c
969
struct xen_blkif *blkif = ring->blkif;
drivers/block/xen-blkfront.c
1186
if (!RING_FULL(&rinfo->ring))
drivers/block/xen-blkfront.c
1293
xenbus_teardown_ring((void **)&rinfo->ring.sring, info->nr_ring_pages,
drivers/block/xen-blkfront.c
1530
rp = READ_ONCE(rinfo->ring.sring->rsp_prod);
drivers/block/xen-blkfront.c
1532
if (RING_RESPONSE_PROD_OVERFLOW(&rinfo->ring, rp)) {
drivers/block/xen-blkfront.c
1534
info->gd->disk_name, rp - rinfo->ring.rsp_cons);
drivers/block/xen-blkfront.c
1538
for (i = rinfo->ring.rsp_cons; i != rp; i++) {
drivers/block/xen-blkfront.c
1544
RING_COPY_RESPONSE(&rinfo->ring, i, &bret);
drivers/block/xen-blkfront.c
1650
rinfo->ring.rsp_cons = i;
drivers/block/xen-blkfront.c
1652
if (i != rinfo->ring.req_prod_pvt) {
drivers/block/xen-blkfront.c
1654
RING_FINAL_CHECK_FOR_RESPONSES(&rinfo->ring, more_to_do);
drivers/block/xen-blkfront.c
1658
rinfo->ring.sring->rsp_event = i + 1;
drivers/block/xen-blkfront.c
1693
XEN_FRONT_RING_INIT(&rinfo->ring, sring, ring_size);
drivers/block/xen-blkfront.c
180
struct blkif_front_ring ring;
drivers/block/xen-blkfront.c
539
*ring_req = RING_GET_REQUEST(&rinfo->ring, rinfo->ring.req_prod_pvt);
drivers/block/xen-blkfront.c
540
rinfo->ring.req_prod_pvt++;
drivers/block/xen-blkfront.c
887
RING_PUSH_REQUESTS_AND_CHECK_NOTIFY(&rinfo->ring, notify);
drivers/block/xen-blkfront.c
918
if (RING_FULL(&rinfo->ring))
drivers/bluetooth/hci_bcm4377.c
1000
msg.n_elements = cpu_to_le16(ring->n_entries);
drivers/bluetooth/hci_bcm4377.c
1003
msg.intmod_delay = cpu_to_le16(ring->delay);
drivers/bluetooth/hci_bcm4377.c
1004
msg.footer_size = ring->payload_size / 4;
drivers/bluetooth/hci_bcm4377.c
1009
ring->enabled = true;
drivers/bluetooth/hci_bcm4377.c
1015
struct bcm4377_completion_ring *ring)
drivers/bluetooth/hci_bcm4377.c
1022
msg.ring_id = cpu_to_le16(ring->ring_id);
drivers/bluetooth/hci_bcm4377.c
1029
ring->ring_id);
drivers/bluetooth/hci_bcm4377.c
1031
ring->enabled = false;
drivers/bluetooth/hci_bcm4377.c
1036
struct bcm4377_transfer_ring *ring)
drivers/bluetooth/hci_bcm4377.c
1043
if (ring->virtual)
drivers/bluetooth/hci_bcm4377.c
1045
if (ring->sync)
drivers/bluetooth/hci_bcm4377.c
1048
spin_lock_irqsave(&ring->lock, spinlock_flags);
drivers/bluetooth/hci_bcm4377.c
1051
msg.ring_id = cpu_to_le16(ring->ring_id);
drivers/bluetooth/hci_bcm4377.c
1052
msg.ring_id_again = cpu_to_le16(ring->ring_id);
drivers/bluetooth/hci_bcm4377.c
1053
msg.ring_iova = cpu_to_le64(ring->ring_dma);
drivers/bluetooth/hci_bcm4377.c
1054
msg.n_elements = cpu_to_le16(ring->n_entries);
drivers/bluetooth/hci_bcm4377.c
1055
msg.completion_ring_id = cpu_to_le16(ring->completion_ring);
drivers/bluetooth/hci_bcm4377.c
1056
msg.doorbell = cpu_to_le16(ring->doorbell);
drivers/bluetooth/hci_bcm4377.c
1058
msg.footer_size = ring->payload_size / 4;
drivers/bluetooth/hci_bcm4377.c
1060
bcm4377->ring_state->xfer_ring_head[ring->ring_id] = 0;
drivers/bluetooth/hci_bcm4377.c
1061
bcm4377->ring_state->xfer_ring_tail[ring->ring_id] = 0;
drivers/bluetooth/hci_bcm4377.c
1062
ring->generation++;
drivers/bluetooth/hci_bcm4377.c
1063
spin_unlock_irqrestore(&ring->lock, spinlock_flags);
drivers/bluetooth/hci_bcm4377.c
1068
spin_lock_irqsave(&ring->lock, spinlock_flags);
drivers/bluetooth/hci_bcm4377.c
1070
if (ring->d2h_buffers_only) {
drivers/bluetooth/hci_bcm4377.c
1071
for (i = 0; i < ring->n_entries; ++i) {
drivers/bluetooth/hci_bcm4377.c
1073
ring->ring + i * sizeof(*entry);
drivers/bluetooth/hci_bcm4377.c
1075
ring->generation);
drivers/bluetooth/hci_bcm4377.c
1080
entry->len = cpu_to_le16(ring->mapped_payload_size);
drivers/bluetooth/hci_bcm4377.c
1083
cpu_to_le64(ring->payloads_dma +
drivers/bluetooth/hci_bcm4377.c
1084
i * ring->mapped_payload_size);
drivers/bluetooth/hci_bcm4377.c
1092
if (ring->virtual || ring->d2h_buffers_only) {
drivers/bluetooth/hci_bcm4377.c
1093
bcm4377->ring_state->xfer_ring_head[ring->ring_id] =
drivers/bluetooth/hci_bcm4377.c
1095
bcm4377_ring_doorbell(bcm4377, ring->doorbell, 0xf);
drivers/bluetooth/hci_bcm4377.c
1098
ring->enabled = true;
drivers/bluetooth/hci_bcm4377.c
1099
spin_unlock_irqrestore(&ring->lock, spinlock_flags);
drivers/bluetooth/hci_bcm4377.c
1105
struct bcm4377_transfer_ring *ring)
drivers/bluetooth/hci_bcm4377.c
1112
msg.ring_id = cpu_to_le16(ring->ring_id);
drivers/bluetooth/hci_bcm4377.c
1118
"failed to destroy transfer ring %d\n", ring->ring_id);
drivers/bluetooth/hci_bcm4377.c
1120
ring->enabled = false;
drivers/bluetooth/hci_bcm4377.c
1473
struct bcm4377_transfer_ring *ring;
drivers/bluetooth/hci_bcm4377.c
1479
ring = &bcm4377->hci_h2d_ring;
drivers/bluetooth/hci_bcm4377.c
1484
ring = &bcm4377->acl_h2d_ring;
drivers/bluetooth/hci_bcm4377.c
1489
ring = &bcm4377->sco_h2d_ring;
drivers/bluetooth/hci_bcm4377.c
1496
ret = bcm4377_enqueue(bcm4377, ring, skb->data, skb->len, false);
drivers/bluetooth/hci_bcm4377.c
1526
struct bcm4377_transfer_ring *ring)
drivers/bluetooth/hci_bcm4377.c
1530
spin_lock_init(&ring->lock);
drivers/bluetooth/hci_bcm4377.c
1531
ring->payload_size = ALIGN(ring->payload_size, 4);
drivers/bluetooth/hci_bcm4377.c
1532
ring->mapped_payload_size = ALIGN(ring->mapped_payload_size, 4);
drivers/bluetooth/hci_bcm4377.c
1534
if (ring->payload_size > BCM4377_XFER_RING_MAX_INPLACE_PAYLOAD_SIZE)
drivers/bluetooth/hci_bcm4377.c
1536
if (ring->n_entries > BCM4377_MAX_RING_SIZE)
drivers/bluetooth/hci_bcm4377.c
1538
if (ring->virtual && ring->allow_wait)
drivers/bluetooth/hci_bcm4377.c
1541
if (ring->d2h_buffers_only) {
drivers/bluetooth/hci_bcm4377.c
1542
if (ring->virtual)
drivers/bluetooth/hci_bcm4377.c
1544
if (ring->payload_size)
drivers/bluetooth/hci_bcm4377.c
1546
if (!ring->mapped_payload_size)
drivers/bluetooth/hci_bcm4377.c
1549
if (ring->virtual)
drivers/bluetooth/hci_bcm4377.c
1553
ring->payload_size + sizeof(struct bcm4377_xfer_ring_entry);
drivers/bluetooth/hci_bcm4377.c
1554
ring->ring = dmam_alloc_coherent(&bcm4377->pdev->dev,
drivers/bluetooth/hci_bcm4377.c
1555
ring->n_entries * entry_size,
drivers/bluetooth/hci_bcm4377.c
1556
&ring->ring_dma, GFP_KERNEL);
drivers/bluetooth/hci_bcm4377.c
1557
if (!ring->ring)
drivers/bluetooth/hci_bcm4377.c
1560
if (ring->allow_wait) {
drivers/bluetooth/hci_bcm4377.c
1561
ring->events = devm_kcalloc(&bcm4377->pdev->dev,
drivers/bluetooth/hci_bcm4377.c
1562
ring->n_entries,
drivers/bluetooth/hci_bcm4377.c
1563
sizeof(*ring->events), GFP_KERNEL);
drivers/bluetooth/hci_bcm4377.c
1564
if (!ring->events)
drivers/bluetooth/hci_bcm4377.c
1568
if (ring->mapped_payload_size) {
drivers/bluetooth/hci_bcm4377.c
1569
ring->payloads = dmam_alloc_coherent(
drivers/bluetooth/hci_bcm4377.c
1571
ring->n_entries * ring->mapped_payload_size,
drivers/bluetooth/hci_bcm4377.c
1572
&ring->payloads_dma, GFP_KERNEL);
drivers/bluetooth/hci_bcm4377.c
1573
if (!ring->payloads)
drivers/bluetooth/hci_bcm4377.c
1581
struct bcm4377_completion_ring *ring)
drivers/bluetooth/hci_bcm4377.c
1585
ring->payload_size = ALIGN(ring->payload_size, 4);
drivers/bluetooth/hci_bcm4377.c
1586
if (ring->payload_size > BCM4377_XFER_RING_MAX_INPLACE_PAYLOAD_SIZE)
drivers/bluetooth/hci_bcm4377.c
1588
if (ring->n_entries > BCM4377_MAX_RING_SIZE)
drivers/bluetooth/hci_bcm4377.c
1591
entry_size = ring->payload_size +
drivers/bluetooth/hci_bcm4377.c
1594
ring->ring = dmam_alloc_coherent(&bcm4377->pdev->dev,
drivers/bluetooth/hci_bcm4377.c
1595
ring->n_entries * entry_size,
drivers/bluetooth/hci_bcm4377.c
1596
&ring->ring_dma, GFP_KERNEL);
drivers/bluetooth/hci_bcm4377.c
1597
if (!ring->ring)
drivers/bluetooth/hci_bcm4377.c
440
void *ring;
drivers/bluetooth/hci_bcm4377.c
474
void *ring;
drivers/bluetooth/hci_bcm4377.c
628
struct bcm4377_transfer_ring *ring,
drivers/bluetooth/hci_bcm4377.c
634
if (generation != ring->generation) {
drivers/bluetooth/hci_bcm4377.c
638
generation, ring->generation, ring->ring_id);
drivers/bluetooth/hci_bcm4377.c
642
if (*msgid >= ring->n_entries) {
drivers/bluetooth/hci_bcm4377.c
645
ring->ring_id, *msgid, ring->n_entries);
drivers/bluetooth/hci_bcm4377.c
653
struct bcm4377_transfer_ring *ring,
drivers/bluetooth/hci_bcm4377.c
662
spin_lock_irqsave(&ring->lock, flags);
drivers/bluetooth/hci_bcm4377.c
663
if (!ring->enabled) {
drivers/bluetooth/hci_bcm4377.c
666
ring->ring_id);
drivers/bluetooth/hci_bcm4377.c
670
if (ring->d2h_buffers_only &&
drivers/bluetooth/hci_bcm4377.c
672
if (bcm4377_extract_msgid(bcm4377, ring, raw_msgid, &msgid))
drivers/bluetooth/hci_bcm4377.c
675
if (len > ring->mapped_payload_size) {
drivers/bluetooth/hci_bcm4377.c
679
ring->ring_id, len, ring->mapped_payload_size);
drivers/bluetooth/hci_bcm4377.c
683
payload = ring->payloads + msgid * ring->mapped_payload_size;
drivers/bluetooth/hci_bcm4377.c
695
head = le16_to_cpu(bcm4377->ring_state->xfer_ring_head[ring->ring_id]);
drivers/bluetooth/hci_bcm4377.c
696
head = (head + 1) % ring->n_entries;
drivers/bluetooth/hci_bcm4377.c
697
bcm4377->ring_state->xfer_ring_head[ring->ring_id] = cpu_to_le16(head);
drivers/bluetooth/hci_bcm4377.c
699
bcm4377_ring_doorbell(bcm4377, ring->doorbell, head);
drivers/bluetooth/hci_bcm4377.c
701
spin_unlock_irqrestore(&ring->lock, flags);
drivers/bluetooth/hci_bcm4377.c
705
struct bcm4377_transfer_ring *ring,
drivers/bluetooth/hci_bcm4377.c
711
spin_lock_irqsave(&ring->lock, flags);
drivers/bluetooth/hci_bcm4377.c
713
if (bcm4377_extract_msgid(bcm4377, ring, raw_msgid, &msgid))
drivers/bluetooth/hci_bcm4377.c
716
if (!test_bit(msgid, ring->msgids)) {
drivers/bluetooth/hci_bcm4377.c
720
ring->ring_id, msgid);
drivers/bluetooth/hci_bcm4377.c
724
if (ring->allow_wait && ring->events[msgid]) {
drivers/bluetooth/hci_bcm4377.c
725
complete(ring->events[msgid]);
drivers/bluetooth/hci_bcm4377.c
726
ring->events[msgid] = NULL;
drivers/bluetooth/hci_bcm4377.c
729
bitmap_release_region(ring->msgids, msgid, 0);
drivers/bluetooth/hci_bcm4377.c
732
spin_unlock_irqrestore(&ring->lock, flags);
drivers/bluetooth/hci_bcm4377.c
736
struct bcm4377_completion_ring *ring,
drivers/bluetooth/hci_bcm4377.c
744
if (pos >= ring->n_entries) {
drivers/bluetooth/hci_bcm4377.c
747
ring->ring_id);
drivers/bluetooth/hci_bcm4377.c
751
entry_size = sizeof(*entry) + ring->payload_size;
drivers/bluetooth/hci_bcm4377.c
752
entry = ring->ring + pos * entry_size;
drivers/bluetooth/hci_bcm4377.c
753
data = ring->ring + pos * entry_size + sizeof(*entry);
drivers/bluetooth/hci_bcm4377.c
758
if ((ring->transfer_rings & BIT(transfer_ring)) == 0) {
drivers/bluetooth/hci_bcm4377.c
762
pos, transfer_ring, ring->ring_id);
drivers/bluetooth/hci_bcm4377.c
768
ring->ring_id, transfer_ring, msg_id);
drivers/bluetooth/hci_bcm4377.c
804
ring->ring_id, transfer_ring, msg_id);
drivers/bluetooth/hci_bcm4377.c
809
struct bcm4377_completion_ring *ring)
drivers/bluetooth/hci_bcm4377.c
815
if (!ring->enabled)
drivers/bluetooth/hci_bcm4377.c
818
tail = le16_to_cpu(tails[ring->ring_id]);
drivers/bluetooth/hci_bcm4377.c
820
"completion ring #%d: head: %d, tail: %d\n", ring->ring_id,
drivers/bluetooth/hci_bcm4377.c
821
le16_to_cpu(heads[ring->ring_id]), tail);
drivers/bluetooth/hci_bcm4377.c
823
while (tail != le16_to_cpu(READ_ONCE(heads[ring->ring_id]))) {
drivers/bluetooth/hci_bcm4377.c
831
bcm4377_handle_completion(bcm4377, ring, tail);
drivers/bluetooth/hci_bcm4377.c
833
tail = (tail + 1) % ring->n_entries;
drivers/bluetooth/hci_bcm4377.c
834
tails[ring->ring_id] = cpu_to_le16(tail);
drivers/bluetooth/hci_bcm4377.c
870
struct bcm4377_transfer_ring *ring, void *data,
drivers/bluetooth/hci_bcm4377.c
882
if (len > ring->payload_size && len > ring->mapped_payload_size) {
drivers/bluetooth/hci_bcm4377.c
886
len, ring->ring_id, ring->payload_size,
drivers/bluetooth/hci_bcm4377.c
887
ring->mapped_payload_size);
drivers/bluetooth/hci_bcm4377.c
890
if (wait && !ring->allow_wait)
drivers/bluetooth/hci_bcm4377.c
892
if (ring->virtual)
drivers/bluetooth/hci_bcm4377.c
895
spin_lock_irqsave(&ring->lock, flags);
drivers/bluetooth/hci_bcm4377.c
897
head = le16_to_cpu(bcm4377->ring_state->xfer_ring_head[ring->ring_id]);
drivers/bluetooth/hci_bcm4377.c
898
tail = le16_to_cpu(bcm4377->ring_state->xfer_ring_tail[ring->ring_id]);
drivers/bluetooth/hci_bcm4377.c
900
new_head = (head + 1) % ring->n_entries;
drivers/bluetooth/hci_bcm4377.c
905
ring->ring_id);
drivers/bluetooth/hci_bcm4377.c
910
msgid = bitmap_find_free_region(ring->msgids, ring->n_entries, 0);
drivers/bluetooth/hci_bcm4377.c
913
"can't find message id for ring %d\n", ring->ring_id);
drivers/bluetooth/hci_bcm4377.c
918
raw_msgid = FIELD_PREP(BCM4377_MSGID_GENERATION, ring->generation);
drivers/bluetooth/hci_bcm4377.c
921
offset = head * (sizeof(*entry) + ring->payload_size);
drivers/bluetooth/hci_bcm4377.c
922
entry = ring->ring + offset;
drivers/bluetooth/hci_bcm4377.c
928
if (len <= ring->payload_size) {
drivers/bluetooth/hci_bcm4377.c
930
payload = ring->ring + offset + sizeof(*entry);
drivers/bluetooth/hci_bcm4377.c
933
entry->payload = cpu_to_le64(ring->payloads_dma +
drivers/bluetooth/hci_bcm4377.c
934
msgid * ring->mapped_payload_size);
drivers/bluetooth/hci_bcm4377.c
935
payload = ring->payloads + msgid * ring->mapped_payload_size;
drivers/bluetooth/hci_bcm4377.c
941
ring->events[msgid] = &event;
drivers/bluetooth/hci_bcm4377.c
952
"updating head for transfer queue #%d to %d\n", ring->ring_id,
drivers/bluetooth/hci_bcm4377.c
954
bcm4377->ring_state->xfer_ring_head[ring->ring_id] =
drivers/bluetooth/hci_bcm4377.c
957
if (!ring->sync)
drivers/bluetooth/hci_bcm4377.c
958
bcm4377_ring_doorbell(bcm4377, ring->doorbell, new_head);
drivers/bluetooth/hci_bcm4377.c
962
spin_unlock_irqrestore(&ring->lock, flags);
drivers/bluetooth/hci_bcm4377.c
972
spin_lock_irqsave(&ring->lock, flags);
drivers/bluetooth/hci_bcm4377.c
973
ring->events[msgid] = NULL;
drivers/bluetooth/hci_bcm4377.c
974
spin_unlock_irqrestore(&ring->lock, flags);
drivers/bluetooth/hci_bcm4377.c
981
struct bcm4377_completion_ring *ring)
drivers/bluetooth/hci_bcm4377.c
986
if (ring->enabled) {
drivers/bluetooth/hci_bcm4377.c
988
"completion ring %d already enabled\n", ring->ring_id);
drivers/bluetooth/hci_bcm4377.c
992
memset(ring->ring, 0,
drivers/bluetooth/hci_bcm4377.c
993
ring->n_entries * (sizeof(struct bcm4377_completion_ring_entry) +
drivers/bluetooth/hci_bcm4377.c
994
ring->payload_size));
drivers/bluetooth/hci_bcm4377.c
997
msg.id = cpu_to_le16(ring->ring_id);
drivers/bluetooth/hci_bcm4377.c
998
msg.id_again = cpu_to_le16(ring->ring_id);
drivers/bluetooth/hci_bcm4377.c
999
msg.ring_iova = cpu_to_le64(ring->ring_dma);
drivers/bus/mhi/ep/internal.h
121
struct mhi_ep_ring *ring;
drivers/bus/mhi/ep/internal.h
145
struct mhi_ep_ring ring;
drivers/bus/mhi/ep/internal.h
149
struct mhi_ep_ring ring;
drivers/bus/mhi/ep/internal.h
160
struct mhi_ep_ring ring;
drivers/bus/mhi/ep/internal.h
174
void mhi_ep_ring_init(struct mhi_ep_ring *ring, enum mhi_ep_ring_type type, u32 id);
drivers/bus/mhi/ep/internal.h
175
void mhi_ep_ring_reset(struct mhi_ep_cntrl *mhi_cntrl, struct mhi_ep_ring *ring);
drivers/bus/mhi/ep/internal.h
176
int mhi_ep_ring_start(struct mhi_ep_cntrl *mhi_cntrl, struct mhi_ep_ring *ring,
drivers/bus/mhi/ep/internal.h
178
size_t mhi_ep_ring_addr2offset(struct mhi_ep_ring *ring, u64 ptr);
drivers/bus/mhi/ep/internal.h
179
int mhi_ep_ring_add_element(struct mhi_ep_ring *ring, struct mhi_ring_element *element);
drivers/bus/mhi/ep/internal.h
180
void mhi_ep_ring_inc_index(struct mhi_ep_ring *ring);
drivers/bus/mhi/ep/internal.h
181
int mhi_ep_update_wr_offset(struct mhi_ep_ring *ring);
drivers/bus/mhi/ep/internal.h
200
u64 mhi_ep_mmio_get_db(struct mhi_ep_ring *ring);
drivers/bus/mhi/ep/main.c
101
ret = mhi_ep_send_event(mhi_cntrl, ring->er_index, event, MHI_TRE_DATA_GET_BEI(tre));
drivers/bus/mhi/ep/main.c
1031
if (!mhi_chan->ring.started)
drivers/bus/mhi/ep/main.c
1054
if (!mhi_chan->ring.started)
drivers/bus/mhi/ep/main.c
1057
ch_ring = &mhi_cntrl->mhi_chan[i].ring;
drivers/bus/mhi/ep/main.c
1065
ev_ring = &mhi_cntrl->mhi_event[i].ring;
drivers/bus/mhi/ep/main.c
1075
mhi_ep_ring_reset(mhi_cntrl, &mhi_cntrl->mhi_cmd->ring);
drivers/bus/mhi/ep/main.c
1145
mhi_ep_ring_init(&mhi_cntrl->mhi_cmd->ring, RING_TYPE_CMD, 0);
drivers/bus/mhi/ep/main.c
1147
mhi_ep_ring_init(&mhi_cntrl->mhi_chan[i].ring, RING_TYPE_CH, i);
drivers/bus/mhi/ep/main.c
1149
mhi_ep_ring_init(&mhi_cntrl->mhi_event[i].ring, RING_TYPE_ER, i);
drivers/bus/mhi/ep/main.c
145
struct mhi_ep_ring *ring = &mhi_cntrl->mhi_cmd->ring;
drivers/bus/mhi/ep/main.c
153
event->ptr = cpu_to_le64(ring->rbase + ring->rd_offset * sizeof(struct mhi_ring_element));
drivers/bus/mhi/ep/main.c
163
static int mhi_ep_process_cmd_ring(struct mhi_ep_ring *ring, struct mhi_ring_element *el)
drivers/bus/mhi/ep/main.c
165
struct mhi_ep_cntrl *mhi_cntrl = ring->mhi_cntrl;
drivers/bus/mhi/ep/main.c
182
ch_ring = &mhi_cntrl->mhi_chan[ch_id].ring;
drivers/bus/mhi/ep/main.c
33
struct mhi_ep_ring *ring;
drivers/bus/mhi/ep/main.c
331
struct mhi_ep_ring *ring = &mhi_cntrl->mhi_chan[mhi_chan->chan].ring;
drivers/bus/mhi/ep/main.c
333
return !!(mhi_chan->rd_offset == ring->wr_offset);
drivers/bus/mhi/ep/main.c
342
struct mhi_ep_ring *ring = &mhi_cntrl->mhi_chan[mhi_chan->chan].ring;
drivers/bus/mhi/ep/main.c
343
struct mhi_ring_element *el = &ring->ring_cache[ring->rd_offset];
drivers/bus/mhi/ep/main.c
367
ret = mhi_ep_send_completion_event(mhi_cntrl, ring, el,
drivers/bus/mhi/ep/main.c
37
ring = &mhi_cntrl->mhi_event[ring_idx].ring;
drivers/bus/mhi/ep/main.c
383
ret = mhi_ep_send_completion_event(mhi_cntrl, ring, el,
drivers/bus/mhi/ep/main.c
39
if (!ring->started) {
drivers/bus/mhi/ep/main.c
395
mhi_ep_ring_inc_index(ring);
drivers/bus/mhi/ep/main.c
40
ret = mhi_ep_ring_start(mhi_cntrl, ring, ctx);
drivers/bus/mhi/ep/main.c
402
struct mhi_ep_ring *ring)
drivers/bus/mhi/ep/main.c
404
struct mhi_ep_chan *mhi_chan = &mhi_cntrl->mhi_chan[ring->ch_id];
drivers/bus/mhi/ep/main.c
420
el = &ring->ring_cache[mhi_chan->rd_offset];
drivers/bus/mhi/ep/main.c
450
dev_dbg(dev, "Reading %zd bytes from channel (%u)\n", tr_len, ring->ch_id);
drivers/bus/mhi/ep/main.c
460
mhi_chan->rd_offset = (mhi_chan->rd_offset + 1) % ring->ring_size;
drivers/bus/mhi/ep/main.c
472
static int mhi_ep_process_ch_ring(struct mhi_ep_ring *ring)
drivers/bus/mhi/ep/main.c
474
struct mhi_ep_cntrl *mhi_cntrl = ring->mhi_cntrl;
drivers/bus/mhi/ep/main.c
479
mhi_chan = &mhi_cntrl->mhi_chan[ring->ch_id];
drivers/bus/mhi/ep/main.c
48
ret = mhi_ep_ring_add_element(ring, el);
drivers/bus/mhi/ep/main.c
490
if (ring->ch_id % 2) {
drivers/bus/mhi/ep/main.c
496
ret = mhi_ep_read_channel(mhi_cntrl, ring);
drivers/bus/mhi/ep/main.c
511
struct mhi_ep_ring *ring = &mhi_cntrl->mhi_chan[mhi_chan->chan].ring;
drivers/bus/mhi/ep/main.c
512
struct mhi_ring_element *el = &ring->ring_cache[ring->rd_offset];
drivers/bus/mhi/ep/main.c
525
ret = mhi_ep_send_completion_event(mhi_cntrl, ring, el, buf_info->size,
drivers/bus/mhi/ep/main.c
532
mhi_ep_ring_inc_index(ring);
drivers/bus/mhi/ep/main.c
544
struct mhi_ep_ring *ring;
drivers/bus/mhi/ep/main.c
550
ring = &mhi_cntrl->mhi_chan[mhi_chan->chan].ring;
drivers/bus/mhi/ep/main.c
568
el = &ring->ring_cache[mhi_chan->rd_offset];
drivers/bus/mhi/ep/main.c
592
dev_dbg(dev, "Writing %zd bytes to channel (%u)\n", tr_len, ring->ch_id);
drivers/bus/mhi/ep/main.c
605
mhi_chan->rd_offset = (mhi_chan->rd_offset + 1) % ring->ring_size;
drivers/bus/mhi/ep/main.c
675
ret = mhi_ep_ring_start(mhi_cntrl, &mhi_cntrl->mhi_cmd->ring,
drivers/bus/mhi/ep/main.c
70
if (READ_ONCE(ring->irq_pending))
drivers/bus/mhi/ep/main.c
71
cancel_delayed_work(&ring->intmodt_work);
drivers/bus/mhi/ep/main.c
73
mhi_cntrl->raise_irq(mhi_cntrl, ring->irq_vector);
drivers/bus/mhi/ep/main.c
74
} else if (ring->intmodt && !READ_ONCE(ring->irq_pending)) {
drivers/bus/mhi/ep/main.c
75
WRITE_ONCE(ring->irq_pending, true);
drivers/bus/mhi/ep/main.c
76
schedule_delayed_work(&ring->intmodt_work, msecs_to_jiffies(ring->intmodt));
drivers/bus/mhi/ep/main.c
770
struct mhi_ep_ring *ring = &mhi_cntrl->mhi_cmd->ring;
drivers/bus/mhi/ep/main.c
776
ret = mhi_ep_update_wr_offset(ring);
drivers/bus/mhi/ep/main.c
783
if (ring->rd_offset == ring->wr_offset)
drivers/bus/mhi/ep/main.c
790
while (ring->rd_offset != ring->wr_offset) {
drivers/bus/mhi/ep/main.c
791
el = &ring->ring_cache[ring->rd_offset];
drivers/bus/mhi/ep/main.c
793
ret = mhi_ep_process_cmd_ring(ring, el);
drivers/bus/mhi/ep/main.c
795
dev_err(dev, "Error processing cmd ring element: %zu\n", ring->rd_offset);
drivers/bus/mhi/ep/main.c
797
mhi_ep_ring_inc_index(ring);
drivers/bus/mhi/ep/main.c
806
struct mhi_ep_ring *ring;
drivers/bus/mhi/ep/main.c
819
ring = itr->ring;
drivers/bus/mhi/ep/main.c
821
chan = &mhi_cntrl->mhi_chan[ring->ch_id];
drivers/bus/mhi/ep/main.c
828
if (!ring->started) {
drivers/bus/mhi/ep/main.c
835
ret = mhi_ep_update_wr_offset(ring);
drivers/bus/mhi/ep/main.c
844
if (chan->rd_offset == ring->wr_offset) {
drivers/bus/mhi/ep/main.c
850
dev_dbg(dev, "Processing the ring for channel (%u)\n", ring->ch_id);
drivers/bus/mhi/ep/main.c
851
ret = mhi_ep_process_ch_ring(ring);
drivers/bus/mhi/ep/main.c
854
ring->ch_id, ret);
drivers/bus/mhi/ep/main.c
87
static int mhi_ep_send_completion_event(struct mhi_ep_cntrl *mhi_cntrl, struct mhi_ep_ring *ring,
drivers/bus/mhi/ep/main.c
906
struct mhi_ep_ring *ring;
drivers/bus/mhi/ep/main.c
916
ring = &mhi_cntrl->mhi_chan[ch_id].ring;
drivers/bus/mhi/ep/main.c
921
item->ring = ring;
drivers/bus/mhi/ep/main.c
97
event->ptr = cpu_to_le64(ring->rbase + ring->rd_offset * sizeof(*tre));
drivers/bus/mhi/ep/main.c
99
event->dword[1] = MHI_TRE_EV_DWORD1(ring->ch_id, MHI_PKT_TYPE_TX_EVENT);
drivers/bus/mhi/ep/mmio.c
219
u64 mhi_ep_mmio_get_db(struct mhi_ep_ring *ring)
drivers/bus/mhi/ep/mmio.c
221
struct mhi_ep_cntrl *mhi_cntrl = ring->mhi_cntrl;
drivers/bus/mhi/ep/mmio.c
225
regval = mhi_ep_mmio_read(mhi_cntrl, ring->db_offset_h);
drivers/bus/mhi/ep/mmio.c
229
regval = mhi_ep_mmio_read(mhi_cntrl, ring->db_offset_l);
drivers/bus/mhi/ep/ring.c
10
size_t mhi_ep_ring_addr2offset(struct mhi_ep_ring *ring, u64 ptr)
drivers/bus/mhi/ep/ring.c
100
wr_ptr = mhi_ep_mmio_get_db(ring);
drivers/bus/mhi/ep/ring.c
102
return mhi_ep_cache_ring(ring, wr_ptr);
drivers/bus/mhi/ep/ring.c
106
int mhi_ep_ring_add_element(struct mhi_ep_ring *ring, struct mhi_ring_element *el)
drivers/bus/mhi/ep/ring.c
108
struct mhi_ep_cntrl *mhi_cntrl = ring->mhi_cntrl;
drivers/bus/mhi/ep/ring.c
116
ret = mhi_ep_update_wr_offset(ring);
drivers/bus/mhi/ep/ring.c
12
return (ptr - ring->rbase) / sizeof(struct mhi_ring_element);
drivers/bus/mhi/ep/ring.c
122
if (ring->rd_offset < ring->wr_offset)
drivers/bus/mhi/ep/ring.c
123
num_free_elem = (ring->wr_offset - ring->rd_offset) - 1;
drivers/bus/mhi/ep/ring.c
125
num_free_elem = ((ring->ring_size - ring->rd_offset) + ring->wr_offset) - 1;
drivers/bus/mhi/ep/ring.c
133
old_offset = ring->rd_offset;
drivers/bus/mhi/ep/ring.c
135
dev_dbg(dev, "Adding an element to ring at offset (%zu)\n", ring->rd_offset);
drivers/bus/mhi/ep/ring.c
136
buf_info.host_addr = ring->rbase + (old_offset * sizeof(*el));
drivers/bus/mhi/ep/ring.c
144
mhi_ep_ring_inc_index(ring);
drivers/bus/mhi/ep/ring.c
147
rp = cpu_to_le64(ring->rd_offset * sizeof(*el) + ring->rbase);
drivers/bus/mhi/ep/ring.c
148
memcpy_toio((void __iomem *) &ring->ring_ctx->generic.rp, &rp, sizeof(u64));
drivers/bus/mhi/ep/ring.c
15
static u32 mhi_ep_ring_num_elems(struct mhi_ep_ring *ring)
drivers/bus/mhi/ep/ring.c
153
void mhi_ep_ring_init(struct mhi_ep_ring *ring, enum mhi_ep_ring_type type, u32 id)
drivers/bus/mhi/ep/ring.c
155
ring->type = type;
drivers/bus/mhi/ep/ring.c
156
if (ring->type == RING_TYPE_CMD) {
drivers/bus/mhi/ep/ring.c
157
ring->db_offset_h = EP_CRDB_HIGHER;
drivers/bus/mhi/ep/ring.c
158
ring->db_offset_l = EP_CRDB_LOWER;
drivers/bus/mhi/ep/ring.c
159
} else if (ring->type == RING_TYPE_CH) {
drivers/bus/mhi/ep/ring.c
160
ring->db_offset_h = CHDB_HIGHER_n(id);
drivers/bus/mhi/ep/ring.c
161
ring->db_offset_l = CHDB_LOWER_n(id);
drivers/bus/mhi/ep/ring.c
162
ring->ch_id = id;
drivers/bus/mhi/ep/ring.c
164
ring->db_offset_h = ERDB_HIGHER_n(id);
drivers/bus/mhi/ep/ring.c
165
ring->db_offset_l = ERDB_LOWER_n(id);
drivers/bus/mhi/ep/ring.c
171
struct mhi_ep_ring *ring = container_of(work, struct mhi_ep_ring, intmodt_work.work);
drivers/bus/mhi/ep/ring.c
172
struct mhi_ep_cntrl *mhi_cntrl = ring->mhi_cntrl;
drivers/bus/mhi/ep/ring.c
174
mhi_cntrl->raise_irq(mhi_cntrl, ring->irq_vector);
drivers/bus/mhi/ep/ring.c
175
WRITE_ONCE(ring->irq_pending, false);
drivers/bus/mhi/ep/ring.c
178
int mhi_ep_ring_start(struct mhi_ep_cntrl *mhi_cntrl, struct mhi_ep_ring *ring,
drivers/bus/mhi/ep/ring.c
185
ring->mhi_cntrl = mhi_cntrl;
drivers/bus/mhi/ep/ring.c
186
ring->ring_ctx = ctx;
drivers/bus/mhi/ep/ring.c
187
ring->ring_size = mhi_ep_ring_num_elems(ring);
drivers/bus/mhi/ep/ring.c
188
memcpy_fromio(&val, (void __iomem *) &ring->ring_ctx->generic.rbase, sizeof(u64));
drivers/bus/mhi/ep/ring.c
189
ring->rbase = le64_to_cpu(val);
drivers/bus/mhi/ep/ring.c
19
memcpy_fromio(&rlen, (void __iomem *) &ring->ring_ctx->generic.rlen, sizeof(u64));
drivers/bus/mhi/ep/ring.c
191
if (ring->type == RING_TYPE_CH)
drivers/bus/mhi/ep/ring.c
192
ring->er_index = le32_to_cpu(ring->ring_ctx->ch.erindex);
drivers/bus/mhi/ep/ring.c
194
if (ring->type == RING_TYPE_ER) {
drivers/bus/mhi/ep/ring.c
195
ring->irq_vector = le32_to_cpu(ring->ring_ctx->ev.msivec);
drivers/bus/mhi/ep/ring.c
196
ring->intmodt = FIELD_GET(EV_CTX_INTMODT_MASK,
drivers/bus/mhi/ep/ring.c
197
le32_to_cpu(ring->ring_ctx->ev.intmod));
drivers/bus/mhi/ep/ring.c
199
INIT_DELAYED_WORK(&ring->intmodt_work, mhi_ep_raise_irq);
drivers/bus/mhi/ep/ring.c
203
memcpy_fromio(&val, (void __iomem *) &ring->ring_ctx->generic.rp, sizeof(u64));
drivers/bus/mhi/ep/ring.c
204
ring->rd_offset = mhi_ep_ring_addr2offset(ring, le64_to_cpu(val));
drivers/bus/mhi/ep/ring.c
205
ring->wr_offset = mhi_ep_ring_addr2offset(ring, le64_to_cpu(val));
drivers/bus/mhi/ep/ring.c
208
ring->ring_cache = kzalloc_objs(struct mhi_ring_element,
drivers/bus/mhi/ep/ring.c
209
ring->ring_size);
drivers/bus/mhi/ep/ring.c
210
if (!ring->ring_cache)
drivers/bus/mhi/ep/ring.c
213
memcpy_fromio(&val, (void __iomem *) &ring->ring_ctx->generic.wp, sizeof(u64));
drivers/bus/mhi/ep/ring.c
214
ret = mhi_ep_cache_ring(ring, le64_to_cpu(val));
drivers/bus/mhi/ep/ring.c
217
kfree(ring->ring_cache);
drivers/bus/mhi/ep/ring.c
221
ring->started = true;
drivers/bus/mhi/ep/ring.c
226
void mhi_ep_ring_reset(struct mhi_ep_cntrl *mhi_cntrl, struct mhi_ep_ring *ring)
drivers/bus/mhi/ep/ring.c
228
if (ring->type == RING_TYPE_ER)
drivers/bus/mhi/ep/ring.c
229
cancel_delayed_work_sync(&ring->intmodt_work);
drivers/bus/mhi/ep/ring.c
231
ring->started = false;
drivers/bus/mhi/ep/ring.c
232
kfree(ring->ring_cache);
drivers/bus/mhi/ep/ring.c
233
ring->ring_cache = NULL;
drivers/bus/mhi/ep/ring.c
24
void mhi_ep_ring_inc_index(struct mhi_ep_ring *ring)
drivers/bus/mhi/ep/ring.c
26
ring->rd_offset = (ring->rd_offset + 1) % ring->ring_size;
drivers/bus/mhi/ep/ring.c
29
static int __mhi_ep_cache_ring(struct mhi_ep_ring *ring, size_t end)
drivers/bus/mhi/ep/ring.c
31
struct mhi_ep_cntrl *mhi_cntrl = ring->mhi_cntrl;
drivers/bus/mhi/ep/ring.c
38
if (ring->type == RING_TYPE_ER)
drivers/bus/mhi/ep/ring.c
42
if (ring->wr_offset == end)
drivers/bus/mhi/ep/ring.c
45
start = ring->wr_offset;
drivers/bus/mhi/ep/ring.c
48
buf_info.host_addr = ring->rbase + (start * sizeof(struct mhi_ring_element));
drivers/bus/mhi/ep/ring.c
49
buf_info.dev_addr = &ring->ring_cache[start];
drivers/bus/mhi/ep/ring.c
55
buf_info.size = (ring->ring_size - start) * sizeof(struct mhi_ring_element);
drivers/bus/mhi/ep/ring.c
56
buf_info.host_addr = ring->rbase + (start * sizeof(struct mhi_ring_element));
drivers/bus/mhi/ep/ring.c
57
buf_info.dev_addr = &ring->ring_cache[start];
drivers/bus/mhi/ep/ring.c
64
buf_info.host_addr = ring->rbase;
drivers/bus/mhi/ep/ring.c
65
buf_info.dev_addr = &ring->ring_cache[0];
drivers/bus/mhi/ep/ring.c
79
static int mhi_ep_cache_ring(struct mhi_ep_ring *ring, u64 wr_ptr)
drivers/bus/mhi/ep/ring.c
84
wr_offset = mhi_ep_ring_addr2offset(ring, wr_ptr);
drivers/bus/mhi/ep/ring.c
87
ret = __mhi_ep_cache_ring(ring, wr_offset);
drivers/bus/mhi/ep/ring.c
91
ring->wr_offset = wr_offset;
drivers/bus/mhi/ep/ring.c
96
int mhi_ep_update_wr_offset(struct mhi_ep_ring *ring)
drivers/bus/mhi/host/debugfs.c
124
ring->rp, ring->wp,
drivers/bus/mhi/host/debugfs.c
55
struct mhi_ring *ring = &mhi_event->ring;
drivers/bus/mhi/host/debugfs.c
75
seq_printf(m, " local rp: 0x%pK db: 0x%pad\n", ring->rp,
drivers/bus/mhi/host/debugfs.c
97
struct mhi_ring *ring = &mhi_chan->tre_ring;
drivers/bus/mhi/host/init.c
164
struct mhi_ring *ring,
drivers/bus/mhi/host/init.c
167
ring->alloc_size = len + (len - 1);
drivers/bus/mhi/host/init.c
168
ring->pre_aligned = dma_alloc_coherent(mhi_cntrl->cntrl_dev, ring->alloc_size,
drivers/bus/mhi/host/init.c
169
&ring->dma_handle, GFP_KERNEL);
drivers/bus/mhi/host/init.c
170
if (!ring->pre_aligned)
drivers/bus/mhi/host/init.c
173
ring->iommu_base = (ring->dma_handle + (len - 1)) & ~(len - 1);
drivers/bus/mhi/host/init.c
174
ring->base = ring->pre_aligned + (ring->iommu_base - ring->dma_handle);
drivers/bus/mhi/host/init.c
262
struct mhi_ring *ring;
drivers/bus/mhi/host/init.c
266
ring = &mhi_cmd->ring;
drivers/bus/mhi/host/init.c
267
dma_free_coherent(mhi_cntrl->cntrl_dev, ring->alloc_size,
drivers/bus/mhi/host/init.c
268
ring->pre_aligned, ring->dma_handle);
drivers/bus/mhi/host/init.c
269
ring->base = NULL;
drivers/bus/mhi/host/init.c
270
ring->iommu_base = 0;
drivers/bus/mhi/host/init.c
282
ring = &mhi_event->ring;
drivers/bus/mhi/host/init.c
283
dma_free_coherent(mhi_cntrl->cntrl_dev, ring->alloc_size,
drivers/bus/mhi/host/init.c
284
ring->pre_aligned, ring->dma_handle);
drivers/bus/mhi/host/init.c
285
ring->base = NULL;
drivers/bus/mhi/host/init.c
286
ring->iommu_base = 0;
drivers/bus/mhi/host/init.c
365
struct mhi_ring *ring = &mhi_event->ring;
drivers/bus/mhi/host/init.c
381
ring->el_size = sizeof(struct mhi_ring_element);
drivers/bus/mhi/host/init.c
382
ring->len = ring->el_size * ring->elements;
drivers/bus/mhi/host/init.c
383
ret = mhi_alloc_aligned_ring(mhi_cntrl, ring, ring->len);
drivers/bus/mhi/host/init.c
391
ring->rp = ring->wp = ring->base;
drivers/bus/mhi/host/init.c
392
er_ctxt->rbase = cpu_to_le64(ring->iommu_base);
drivers/bus/mhi/host/init.c
394
er_ctxt->rlen = cpu_to_le64(ring->len);
drivers/bus/mhi/host/init.c
395
ring->ctxt_wp = &er_ctxt->wp;
drivers/bus/mhi/host/init.c
411
struct mhi_ring *ring = &mhi_cmd->ring;
drivers/bus/mhi/host/init.c
413
ring->el_size = sizeof(struct mhi_ring_element);
drivers/bus/mhi/host/init.c
414
ring->elements = CMD_EL_PER_RING;
drivers/bus/mhi/host/init.c
415
ring->len = ring->el_size * ring->elements;
drivers/bus/mhi/host/init.c
416
ret = mhi_alloc_aligned_ring(mhi_cntrl, ring, ring->len);
drivers/bus/mhi/host/init.c
420
ring->rp = ring->wp = ring->base;
drivers/bus/mhi/host/init.c
421
cmd_ctxt->rbase = cpu_to_le64(ring->iommu_base);
drivers/bus/mhi/host/init.c
423
cmd_ctxt->rlen = cpu_to_le64(ring->len);
drivers/bus/mhi/host/init.c
424
ring->ctxt_wp = &cmd_ctxt->wp;
drivers/bus/mhi/host/init.c
433
struct mhi_ring *ring = &mhi_cmd->ring;
drivers/bus/mhi/host/init.c
435
dma_free_coherent(mhi_cntrl->cntrl_dev, ring->alloc_size,
drivers/bus/mhi/host/init.c
436
ring->pre_aligned, ring->dma_handle);
drivers/bus/mhi/host/init.c
446
struct mhi_ring *ring = &mhi_event->ring;
drivers/bus/mhi/host/init.c
451
dma_free_coherent(mhi_cntrl->cntrl_dev, ring->alloc_size,
drivers/bus/mhi/host/init.c
452
ring->pre_aligned, ring->dma_handle);
drivers/bus/mhi/host/init.c
581
mhi_event->ring.db_addr = base + val;
drivers/bus/mhi/host/init.c
585
mhi_cntrl->mhi_cmd[PRIMARY_CMD_RING].ring.db_addr = base + CRDB_LOWER;
drivers/bus/mhi/host/init.c
712
mhi_event->ring.elements = event_cfg->num_elements;
drivers/bus/mhi/host/internal.h
225
struct mhi_ring ring;
drivers/bus/mhi/host/internal.h
250
struct mhi_ring ring;
drivers/bus/mhi/host/main.c
1104
struct mhi_ring *ring)
drivers/bus/mhi/host/main.c
1106
void *tmp = ring->wp + ring->el_size;
drivers/bus/mhi/host/main.c
1108
if (tmp >= (ring->base + ring->len))
drivers/bus/mhi/host/main.c
1109
tmp = ring->base;
drivers/bus/mhi/host/main.c
1111
return (tmp == ring->rp);
drivers/bus/mhi/host/main.c
118
struct mhi_ring *ring = &mhi_event->ring;
drivers/bus/mhi/host/main.c
121
ring->db_addr, le64_to_cpu(*ring->ctxt_wp));
drivers/bus/mhi/host/main.c
1262
struct mhi_ring *ring = &mhi_cmd->ring;
drivers/bus/mhi/host/main.c
127
struct mhi_ring *ring = &mhi_cmd->ring;
drivers/bus/mhi/host/main.c
1270
if (!get_nr_avail_ring_elements(mhi_cntrl, ring)) {
drivers/bus/mhi/host/main.c
1276
cmd_tre = ring->wp;
drivers/bus/mhi/host/main.c
129
db = ring->iommu_base + (ring->wp - ring->base);
drivers/bus/mhi/host/main.c
1299
mhi_add_ring_element(mhi_cntrl, ring);
drivers/bus/mhi/host/main.c
130
*ring->ctxt_wp = cpu_to_le64(db);
drivers/bus/mhi/host/main.c
131
mhi_write_db(mhi_cntrl, ring->db_addr, db);
drivers/bus/mhi/host/main.c
137
struct mhi_ring *ring = &mhi_chan->tre_ring;
drivers/bus/mhi/host/main.c
140
db = ring->iommu_base + (ring->wp - ring->base);
drivers/bus/mhi/host/main.c
147
*ring->ctxt_wp = cpu_to_le64(db);
drivers/bus/mhi/host/main.c
1481
ev_ring = &mhi_event->ring;
drivers/bus/mhi/host/main.c
150
ring->db_addr, db);
drivers/bus/mhi/host/main.c
231
struct mhi_ring *ring)
drivers/bus/mhi/host/main.c
235
if (ring->wp < ring->rp) {
drivers/bus/mhi/host/main.c
236
nr_el = ((ring->rp - ring->wp) / ring->el_size) - 1;
drivers/bus/mhi/host/main.c
238
nr_el = (ring->rp - ring->base) / ring->el_size;
drivers/bus/mhi/host/main.c
239
nr_el += ((ring->base + ring->len - ring->wp) /
drivers/bus/mhi/host/main.c
240
ring->el_size) - 1;
drivers/bus/mhi/host/main.c
246
static void *mhi_to_virtual(struct mhi_ring *ring, dma_addr_t addr)
drivers/bus/mhi/host/main.c
248
return (addr - ring->iommu_base) + ring->base;
drivers/bus/mhi/host/main.c
252
struct mhi_ring *ring)
drivers/bus/mhi/host/main.c
254
ring->wp += ring->el_size;
drivers/bus/mhi/host/main.c
255
if (ring->wp >= (ring->base + ring->len))
drivers/bus/mhi/host/main.c
256
ring->wp = ring->base;
drivers/bus/mhi/host/main.c
262
struct mhi_ring *ring)
drivers/bus/mhi/host/main.c
264
ring->rp += ring->el_size;
drivers/bus/mhi/host/main.c
265
if (ring->rp >= (ring->base + ring->len))
drivers/bus/mhi/host/main.c
266
ring->rp = ring->base;
drivers/bus/mhi/host/main.c
271
static bool is_valid_ring_ptr(struct mhi_ring *ring, dma_addr_t addr)
drivers/bus/mhi/host/main.c
273
return addr >= ring->iommu_base && addr < ring->iommu_base + ring->len &&
drivers/bus/mhi/host/main.c
437
struct mhi_ring *ev_ring = &mhi_event->ring;
drivers/bus/mhi/host/main.c
549
struct mhi_ring *ring)
drivers/bus/mhi/host/main.c
552
ring->wp += ring->el_size;
drivers/bus/mhi/host/main.c
554
if (ring->wp >= (ring->base + ring->len))
drivers/bus/mhi/host/main.c
555
ring->wp = ring->base;
drivers/bus/mhi/host/main.c
557
*ring->ctxt_wp = cpu_to_le64(ring->iommu_base + (ring->wp - ring->base));
drivers/bus/mhi/host/main.c
560
ring->rp += ring->el_size;
drivers/bus/mhi/host/main.c
561
if (ring->rp >= (ring->base + ring->len))
drivers/bus/mhi/host/main.c
562
ring->rp = ring->base;
drivers/bus/mhi/host/main.c
769
struct mhi_ring *mhi_ring = &cmd_ring->ring;
drivers/bus/mhi/host/main.c
804
struct mhi_ring *ev_ring = &mhi_event->ring;
drivers/bus/mhi/host/main.c
977
struct mhi_ring *ev_ring = &mhi_event->ring;
drivers/bus/mhi/host/pm.c
233
struct mhi_ring *ring = &mhi_event->ring;
drivers/bus/mhi/host/pm.c
239
ring->wp = ring->base + ring->len - ring->el_size;
drivers/bus/mhi/host/pm.c
240
*ring->ctxt_wp = cpu_to_le64(ring->iommu_base + ring->len - ring->el_size);
drivers/bus/mhi/host/pm.c
300
if (mhi_cmd->ring.rp != mhi_cmd->ring.wp)
drivers/bus/mhi/host/pm.c
438
struct mhi_ring *ring = &mhi_event->ring;
drivers/bus/mhi/host/pm.c
443
ring->wp = ring->base + ring->len - ring->el_size;
drivers/bus/mhi/host/pm.c
444
*ring->ctxt_wp = cpu_to_le64(ring->iommu_base + ring->len - ring->el_size);
drivers/bus/mhi/host/pm.c
556
struct mhi_ring *ring = &mhi_cmd->ring;
drivers/bus/mhi/host/pm.c
558
ring->rp = ring->base;
drivers/bus/mhi/host/pm.c
559
ring->wp = ring->base;
drivers/bus/mhi/host/pm.c
568
struct mhi_ring *ring = &mhi_event->ring;
drivers/bus/mhi/host/pm.c
574
ring->rp = ring->base;
drivers/bus/mhi/host/pm.c
575
ring->wp = ring->base;
drivers/bus/mhi/host/pm.c
713
struct mhi_ring *ring = &mhi_cmd->ring;
drivers/bus/mhi/host/pm.c
715
ring->rp = ring->base;
drivers/bus/mhi/host/pm.c
716
ring->wp = ring->base;
drivers/bus/mhi/host/pm.c
725
struct mhi_ring *ring = &mhi_event->ring;
drivers/bus/mhi/host/pm.c
731
ring->rp = ring->base;
drivers/bus/mhi/host/pm.c
732
ring->wp = ring->base;
drivers/comedi/drivers/mite.c
555
writel(mite_chan->ring->dma_addr,
drivers/comedi/drivers/mite.c
568
struct mite_ring *ring,
drivers/comedi/drivers/mite.c
583
if (!mite_chan->ring) {
drivers/comedi/drivers/mite.c
584
mite_chan->ring = ring;
drivers/comedi/drivers/mite.c
600
struct mite_ring *ring)
drivers/comedi/drivers/mite.c
602
return mite_request_channel_in_range(mite, ring, 0,
drivers/comedi/drivers/mite.c
618
if (mite_chan->ring) {
drivers/comedi/drivers/mite.c
630
mite_chan->ring = NULL;
drivers/comedi/drivers/mite.c
649
int mite_init_ring_descriptors(struct mite_ring *ring,
drivers/comedi/drivers/mite.c
662
if ((n_full_links + (remainder > 0 ? 1 : 0)) > ring->n_links) {
drivers/comedi/drivers/mite.c
670
desc = &ring->descs[i];
drivers/comedi/drivers/mite.c
673
desc->next = cpu_to_le32(ring->dma_addr +
drivers/comedi/drivers/mite.c
679
desc = &ring->descs[i];
drivers/comedi/drivers/mite.c
686
desc->next = cpu_to_le32(ring->dma_addr);
drivers/comedi/drivers/mite.c
697
static void mite_free_dma_descs(struct mite_ring *ring)
drivers/comedi/drivers/mite.c
699
struct mite_dma_desc *descs = ring->descs;
drivers/comedi/drivers/mite.c
702
dma_free_coherent(ring->hw_dev,
drivers/comedi/drivers/mite.c
703
ring->n_links * sizeof(*descs),
drivers/comedi/drivers/mite.c
704
descs, ring->dma_addr);
drivers/comedi/drivers/mite.c
705
ring->descs = NULL;
drivers/comedi/drivers/mite.c
706
ring->dma_addr = 0;
drivers/comedi/drivers/mite.c
707
ring->n_links = 0;
drivers/comedi/drivers/mite.c
716
int mite_buf_change(struct mite_ring *ring, struct comedi_subdevice *s)
drivers/comedi/drivers/mite.c
722
mite_free_dma_descs(ring);
drivers/comedi/drivers/mite.c
729
descs = dma_alloc_coherent(ring->hw_dev,
drivers/comedi/drivers/mite.c
731
&ring->dma_addr, GFP_KERNEL);
drivers/comedi/drivers/mite.c
737
ring->descs = descs;
drivers/comedi/drivers/mite.c
738
ring->n_links = n_links;
drivers/comedi/drivers/mite.c
740
return mite_init_ring_descriptors(ring, s, n_links << PAGE_SHIFT);
drivers/comedi/drivers/mite.c
750
struct mite_ring *ring;
drivers/comedi/drivers/mite.c
752
ring = kmalloc_obj(*ring);
drivers/comedi/drivers/mite.c
753
if (!ring)
drivers/comedi/drivers/mite.c
755
ring->hw_dev = get_device(&mite->pcidev->dev);
drivers/comedi/drivers/mite.c
756
if (!ring->hw_dev) {
drivers/comedi/drivers/mite.c
757
kfree(ring);
drivers/comedi/drivers/mite.c
760
ring->n_links = 0;
drivers/comedi/drivers/mite.c
761
ring->descs = NULL;
drivers/comedi/drivers/mite.c
762
ring->dma_addr = 0;
drivers/comedi/drivers/mite.c
763
return ring;
drivers/comedi/drivers/mite.c
771
void mite_free_ring(struct mite_ring *ring)
drivers/comedi/drivers/mite.c
773
if (ring) {
drivers/comedi/drivers/mite.c
774
mite_free_dma_descs(ring);
drivers/comedi/drivers/mite.c
775
put_device(ring->hw_dev);
drivers/comedi/drivers/mite.c
776
kfree(ring);
drivers/comedi/drivers/mite.h
41
struct mite_ring *ring;
drivers/comedi/drivers/mite.h
68
struct mite_ring *ring,
drivers/comedi/drivers/mite.h
72
struct mite_ring *ring);
drivers/comedi/drivers/mite.h
75
int mite_init_ring_descriptors(struct mite_ring *ring,
drivers/comedi/drivers/mite.h
77
int mite_buf_change(struct mite_ring *ring, struct comedi_subdevice *s);
drivers/comedi/drivers/mite.h
80
void mite_free_ring(struct mite_ring *ring);
drivers/comedi/drivers/ni_660x.c
259
struct mite_ring *ring[NI660X_MAX_CHIPS][NI660X_COUNTERS_PER_CHIP];
drivers/comedi/drivers/ni_660x.c
341
struct mite_ring *ring;
drivers/comedi/drivers/ni_660x.c
346
ring = devpriv->ring[counter->chip_index][counter->counter_index];
drivers/comedi/drivers/ni_660x.c
347
mite_chan = mite_request_channel(devpriv->mite, ring);
drivers/comedi/drivers/ni_660x.c
472
struct mite_ring *ring;
drivers/comedi/drivers/ni_660x.c
475
ring = devpriv->ring[counter->chip_index][counter->counter_index];
drivers/comedi/drivers/ni_660x.c
476
ret = mite_buf_change(ring, s);
drivers/comedi/drivers/ni_660x.c
509
devpriv->ring[i][j] = mite_alloc_ring(devpriv->mite);
drivers/comedi/drivers/ni_660x.c
510
if (!devpriv->ring[i][j])
drivers/comedi/drivers/ni_660x.c
526
mite_free_ring(devpriv->ring[i][j]);
drivers/comedi/drivers/ni_mio_common.c
1969
static void ni_cmd_set_mite_transfer(struct mite_ring *ring,
drivers/comedi/drivers/ni_mio_common.c
1995
mite_init_ring_descriptors(ring, sdev, nbytes);
drivers/crypto/caam/ctrl.c
861
int ret, ring;
drivers/crypto/caam/ctrl.c
933
ring = 0;
drivers/crypto/caam/ctrl.c
945
ctrlpriv->jr[ring] = (struct caam_job_ring __iomem __force *)
drivers/crypto/caam/ctrl.c
949
ring++;
drivers/crypto/caam/ctrl.c
957
perfmon = ring ? (struct caam_perfmon __iomem *)&ctrlpriv->jr[0]->perfmon :
drivers/crypto/cavium/nitrox/nitrox_dev.h
104
int ring;
drivers/crypto/cavium/nitrox/nitrox_dev.h
161
int ring;
drivers/crypto/cavium/nitrox/nitrox_hal.c
101
offset = NPS_PKT_IN_INSTR_CTLX(ring);
drivers/crypto/cavium/nitrox/nitrox_hal.c
309
static void reset_aqm_ring(struct nitrox_device *ndev, int ring)
drivers/crypto/cavium/nitrox/nitrox_hal.c
318
offset = AQMQ_ENX(ring);
drivers/crypto/cavium/nitrox/nitrox_hal.c
325
offset = AQMQ_ACTIVITY_STATX(ring);
drivers/crypto/cavium/nitrox/nitrox_hal.c
334
offset = AQMQ_CMP_CNTX(ring);
drivers/crypto/cavium/nitrox/nitrox_hal.c
340
void enable_aqm_ring(struct nitrox_device *ndev, int ring)
drivers/crypto/cavium/nitrox/nitrox_hal.c
345
offset = AQMQ_ENX(ring);
drivers/crypto/cavium/nitrox/nitrox_hal.c
354
int ring;
drivers/crypto/cavium/nitrox/nitrox_hal.c
356
for (ring = 0; ring < ndev->nr_queues; ring++) {
drivers/crypto/cavium/nitrox/nitrox_hal.c
357
struct nitrox_cmdq *cmdq = ndev->aqmq[ring];
drivers/crypto/cavium/nitrox/nitrox_hal.c
364
reset_aqm_ring(ndev, ring);
drivers/crypto/cavium/nitrox/nitrox_hal.c
367
offset = AQMQ_DRBLX(ring);
drivers/crypto/cavium/nitrox/nitrox_hal.c
375
offset = AQMQ_NXT_CMDX(ring);
drivers/crypto/cavium/nitrox/nitrox_hal.c
379
offset = AQMQ_BADRX(ring);
drivers/crypto/cavium/nitrox/nitrox_hal.c
383
offset = AQMQ_QSZX(ring);
drivers/crypto/cavium/nitrox/nitrox_hal.c
389
offset = AQMQ_CMP_THRX(ring);
drivers/crypto/cavium/nitrox/nitrox_hal.c
395
enable_aqm_ring(ndev, ring);
drivers/crypto/cavium/nitrox/nitrox_hal.c
65
static void reset_pkt_input_ring(struct nitrox_device *ndev, int ring)
drivers/crypto/cavium/nitrox/nitrox_hal.c
73
offset = NPS_PKT_IN_INSTR_CTLX(ring);
drivers/crypto/cavium/nitrox/nitrox_hal.c
88
offset = NPS_PKT_IN_DONE_CNTSX(ring);
drivers/crypto/cavium/nitrox/nitrox_hal.c
94
void enable_pkt_input_ring(struct nitrox_device *ndev, int ring)
drivers/crypto/cavium/nitrox/nitrox_hal.h
22
void enable_pkt_input_ring(struct nitrox_device *ndev, int ring);
drivers/crypto/cavium/nitrox/nitrox_isr.c
333
qvec->ring = i / NR_RING_VECTORS;
drivers/crypto/cavium/nitrox/nitrox_isr.c
334
if (qvec->ring >= ndev->nr_queues)
drivers/crypto/cavium/nitrox/nitrox_isr.c
337
qvec->cmdq = &ndev->pkt_inq[qvec->ring];
drivers/crypto/cavium/nitrox/nitrox_isr.c
338
snprintf(qvec->name, IRQ_NAMESZ, "nitrox-pkt%d", qvec->ring);
drivers/crypto/cavium/nitrox/nitrox_isr.c
344
qvec->ring);
drivers/crypto/cavium/nitrox/nitrox_isr.c
347
cpu = qvec->ring % num_online_cpus();
drivers/crypto/cavium/nitrox/nitrox_mbx.c
139
vfdev->ring = i;
drivers/crypto/cavium/nitrox/nitrox_mbx.c
141
vfdev->msg.value = pf2vf_read_mbox(ndev, vfdev->ring);
drivers/crypto/cavium/nitrox/nitrox_mbx.c
162
vfdev->ring = (i + 64);
drivers/crypto/cavium/nitrox/nitrox_mbx.c
164
vfdev->msg.value = pf2vf_read_mbox(ndev, vfdev->ring);
drivers/crypto/cavium/nitrox/nitrox_mbx.c
39
static inline u64 pf2vf_read_mbox(struct nitrox_device *ndev, int ring)
drivers/crypto/cavium/nitrox/nitrox_mbx.c
43
reg_addr = NPS_PKT_MBOX_VF_PF_PFDATAX(ring);
drivers/crypto/cavium/nitrox/nitrox_mbx.c
48
int ring)
drivers/crypto/cavium/nitrox/nitrox_mbx.c
52
reg_addr = NPS_PKT_MBOX_PF_VF_PFDATAX(ring);
drivers/crypto/cavium/nitrox/nitrox_mbx.c
96
pf2vf_write_mbox(ndev, msg.value, vfdev->ring);
drivers/crypto/inside-secure/eip93/eip93-common.c
107
rdesc = eip93_ring_next_wptr(eip93, &eip93->ring->rdr);
drivers/crypto/inside-secure/eip93/eip93-common.c
111
cdesc = eip93_ring_next_wptr(eip93, &eip93->ring->cdr);
drivers/crypto/inside-secure/eip93/eip93-common.c
127
cdesc = eip93_ring_next_rptr(eip93, &eip93->ring->cdr);
drivers/crypto/inside-secure/eip93/eip93-common.c
133
ptr = eip93_ring_next_rptr(eip93, &eip93->ring->rdr);
drivers/crypto/inside-secure/eip93/eip93-common.c
497
scoped_guard(spinlock_irqsave, &eip93->ring->write_lock)
drivers/crypto/inside-secure/eip93/eip93-common.c
596
scoped_guard(spinlock_bh, &eip93->ring->idr_lock)
drivers/crypto/inside-secure/eip93/eip93-common.c
597
crypto_async_idr = idr_alloc(&eip93->ring->crypto_async_idr, async, 0,
drivers/crypto/inside-secure/eip93/eip93-common.c
69
struct eip93_desc_ring *ring)
drivers/crypto/inside-secure/eip93/eip93-common.c
71
void *ptr = ring->write;
drivers/crypto/inside-secure/eip93/eip93-common.c
73
if ((ring->write == ring->read - ring->offset) ||
drivers/crypto/inside-secure/eip93/eip93-common.c
74
(ring->read == ring->base && ring->write == ring->base_end))
drivers/crypto/inside-secure/eip93/eip93-common.c
77
if (ring->write == ring->base_end)
drivers/crypto/inside-secure/eip93/eip93-common.c
78
ring->write = ring->base;
drivers/crypto/inside-secure/eip93/eip93-common.c
80
ring->write += ring->offset;
drivers/crypto/inside-secure/eip93/eip93-common.c
86
struct eip93_desc_ring *ring)
drivers/crypto/inside-secure/eip93/eip93-common.c
88
void *ptr = ring->read;
drivers/crypto/inside-secure/eip93/eip93-common.c
90
if (ring->write == ring->read)
drivers/crypto/inside-secure/eip93/eip93-common.c
93
if (ring->read == ring->base_end)
drivers/crypto/inside-secure/eip93/eip93-common.c
94
ring->read = ring->base;
drivers/crypto/inside-secure/eip93/eip93-common.c
96
ring->read += ring->offset;
drivers/crypto/inside-secure/eip93/eip93-hash.c
262
scoped_guard(spinlock_bh, &eip93->ring->idr_lock)
drivers/crypto/inside-secure/eip93/eip93-hash.c
263
crypto_async_idr = idr_alloc(&eip93->ring->crypto_async_idr, async, 0,
drivers/crypto/inside-secure/eip93/eip93-hash.c
271
scoped_guard(spinlock_irqsave, &eip93->ring->write_lock)
drivers/crypto/inside-secure/eip93/eip93-main.c
210
scoped_guard(spinlock_irqsave, &eip93->ring->read_lock)
drivers/crypto/inside-secure/eip93/eip93-main.c
252
scoped_guard(spinlock_bh, &eip93->ring->idr_lock) {
drivers/crypto/inside-secure/eip93/eip93-main.c
253
async = idr_find(&eip93->ring->crypto_async_idr, crypto_idr);
drivers/crypto/inside-secure/eip93/eip93-main.c
254
idr_remove(&eip93->ring->crypto_async_idr, crypto_idr);
drivers/crypto/inside-secure/eip93/eip93-main.c
287
tasklet_schedule(&eip93->ring->done_task);
drivers/crypto/inside-secure/eip93/eip93-main.c
356
static int eip93_set_ring(struct eip93_device *eip93, struct eip93_desc_ring *ring)
drivers/crypto/inside-secure/eip93/eip93-main.c
358
ring->offset = sizeof(struct eip93_descriptor);
drivers/crypto/inside-secure/eip93/eip93-main.c
359
ring->base = dmam_alloc_coherent(eip93->dev,
drivers/crypto/inside-secure/eip93/eip93-main.c
361
&ring->base_dma, GFP_KERNEL);
drivers/crypto/inside-secure/eip93/eip93-main.c
362
if (!ring->base)
drivers/crypto/inside-secure/eip93/eip93-main.c
365
ring->write = ring->base;
drivers/crypto/inside-secure/eip93/eip93-main.c
366
ring->base_end = ring->base + sizeof(struct eip93_descriptor) * (EIP93_RING_NUM - 1);
drivers/crypto/inside-secure/eip93/eip93-main.c
367
ring->read = ring->base;
drivers/crypto/inside-secure/eip93/eip93-main.c
374
struct eip93_desc_ring *cdr = &eip93->ring->cdr;
drivers/crypto/inside-secure/eip93/eip93-main.c
375
struct eip93_desc_ring *rdr = &eip93->ring->rdr;
drivers/crypto/inside-secure/eip93/eip93-main.c
398
tasklet_kill(&eip93->ring->done_task);
drivers/crypto/inside-secure/eip93/eip93-main.c
408
idr_destroy(&eip93->ring->crypto_async_idr);
drivers/crypto/inside-secure/eip93/eip93-main.c
437
eip93->ring = devm_kcalloc(eip93->dev, 1, sizeof(*eip93->ring), GFP_KERNEL);
drivers/crypto/inside-secure/eip93/eip93-main.c
438
if (!eip93->ring)
drivers/crypto/inside-secure/eip93/eip93-main.c
446
tasklet_init(&eip93->ring->done_task, eip93_done_task, (unsigned long)eip93);
drivers/crypto/inside-secure/eip93/eip93-main.c
448
spin_lock_init(&eip93->ring->read_lock);
drivers/crypto/inside-secure/eip93/eip93-main.c
449
spin_lock_init(&eip93->ring->write_lock);
drivers/crypto/inside-secure/eip93/eip93-main.c
451
spin_lock_init(&eip93->ring->idr_lock);
drivers/crypto/inside-secure/eip93/eip93-main.c
452
idr_init(&eip93->ring->crypto_async_idr);
drivers/crypto/inside-secure/eip93/eip93-main.h
103
struct eip93_ring *ring;
drivers/crypto/inside-secure/safexcel.c
1001
safexcel_rdr_req_set(priv, ring, rdesc, async);
drivers/crypto/inside-secure/safexcel.c
1006
safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
drivers/crypto/inside-secure/safexcel.c
1012
int ring)
drivers/crypto/inside-secure/safexcel.c
1022
nreq = readl(EIP197_HIA_RDR(priv, ring) + EIP197_HIA_xDR_PROC_COUNT);
drivers/crypto/inside-secure/safexcel.c
1029
req = safexcel_rdr_req_get(priv, ring);
drivers/crypto/inside-secure/safexcel.c
1032
ndesc = ctx->handle_result(priv, ring, req,
drivers/crypto/inside-secure/safexcel.c
1054
EIP197_HIA_RDR(priv, ring) + EIP197_HIA_xDR_PROC_COUNT);
drivers/crypto/inside-secure/safexcel.c
1063
spin_lock_bh(&priv->ring[ring].lock);
drivers/crypto/inside-secure/safexcel.c
1065
priv->ring[ring].requests -= handled;
drivers/crypto/inside-secure/safexcel.c
1066
safexcel_try_push_requests(priv, ring);
drivers/crypto/inside-secure/safexcel.c
1068
if (!priv->ring[ring].requests)
drivers/crypto/inside-secure/safexcel.c
1069
priv->ring[ring].busy = false;
drivers/crypto/inside-secure/safexcel.c
1071
spin_unlock_bh(&priv->ring[ring].lock);
drivers/crypto/inside-secure/safexcel.c
1079
safexcel_dequeue(data->priv, data->ring);
drivers/crypto/inside-secure/safexcel.c
1084
int ring;
drivers/crypto/inside-secure/safexcel.c
1091
int ring = irq_data->ring, rc = IRQ_NONE;
drivers/crypto/inside-secure/safexcel.c
1094
status = readl(EIP197_HIA_AIC_R(priv) + EIP197_HIA_AIC_R_ENABLED_STAT(ring));
drivers/crypto/inside-secure/safexcel.c
1099
if (status & EIP197_RDR_IRQ(ring)) {
drivers/crypto/inside-secure/safexcel.c
1100
stat = readl(EIP197_HIA_RDR(priv, ring) + EIP197_HIA_xDR_STAT);
drivers/crypto/inside-secure/safexcel.c
1115
EIP197_HIA_RDR(priv, ring) + EIP197_HIA_xDR_STAT);
drivers/crypto/inside-secure/safexcel.c
1119
writel(status, EIP197_HIA_AIC_R(priv) + EIP197_HIA_AIC_R_ACK(ring));
drivers/crypto/inside-secure/safexcel.c
1128
int ring = irq_data->ring;
drivers/crypto/inside-secure/safexcel.c
1130
safexcel_handle_result_descriptor(priv, ring);
drivers/crypto/inside-secure/safexcel.c
1132
queue_work(priv->ring[ring].workqueue,
drivers/crypto/inside-secure/safexcel.c
1133
&priv->ring[ring].work_data.work);
drivers/crypto/inside-secure/safexcel.c
1610
priv->ring = devm_kcalloc(dev, priv->config.rings,
drivers/crypto/inside-secure/safexcel.c
1611
sizeof(*priv->ring),
drivers/crypto/inside-secure/safexcel.c
1613
if (!priv->ring)
drivers/crypto/inside-secure/safexcel.c
1622
&priv->ring[i].cdr,
drivers/crypto/inside-secure/safexcel.c
1623
&priv->ring[i].rdr);
drivers/crypto/inside-secure/safexcel.c
1629
priv->ring[i].rdr_req = devm_kcalloc(dev,
drivers/crypto/inside-secure/safexcel.c
1631
sizeof(*priv->ring[i].rdr_req),
drivers/crypto/inside-secure/safexcel.c
1633
if (!priv->ring[i].rdr_req) {
drivers/crypto/inside-secure/safexcel.c
1645
ring_irq->ring = i;
drivers/crypto/inside-secure/safexcel.c
1660
priv->ring[i].irq = irq;
drivers/crypto/inside-secure/safexcel.c
1661
priv->ring[i].work_data.priv = priv;
drivers/crypto/inside-secure/safexcel.c
1662
priv->ring[i].work_data.ring = i;
drivers/crypto/inside-secure/safexcel.c
1663
INIT_WORK(&priv->ring[i].work_data.work,
drivers/crypto/inside-secure/safexcel.c
1667
priv->ring[i].workqueue =
drivers/crypto/inside-secure/safexcel.c
1669
if (!priv->ring[i].workqueue) {
drivers/crypto/inside-secure/safexcel.c
1674
priv->ring[i].requests = 0;
drivers/crypto/inside-secure/safexcel.c
1675
priv->ring[i].busy = false;
drivers/crypto/inside-secure/safexcel.c
1677
crypto_init_queue(&priv->ring[i].queue,
drivers/crypto/inside-secure/safexcel.c
1680
spin_lock_init(&priv->ring[i].lock);
drivers/crypto/inside-secure/safexcel.c
1681
spin_lock_init(&priv->ring[i].queue_lock);
drivers/crypto/inside-secure/safexcel.c
1702
if (priv->ring[i].irq)
drivers/crypto/inside-secure/safexcel.c
1703
irq_set_affinity_hint(priv->ring[i].irq, NULL);
drivers/crypto/inside-secure/safexcel.c
1704
if (priv->ring[i].workqueue)
drivers/crypto/inside-secure/safexcel.c
1705
destroy_workqueue(priv->ring[i].workqueue);
drivers/crypto/inside-secure/safexcel.c
1811
irq_set_affinity_hint(priv->ring[i].irq, NULL);
drivers/crypto/inside-secure/safexcel.c
1812
destroy_workqueue(priv->ring[i].workqueue);
drivers/crypto/inside-secure/safexcel.c
1975
destroy_workqueue(priv->ring[i].workqueue);
drivers/crypto/inside-secure/safexcel.c
512
writel(lower_32_bits(priv->ring[i].cdr.base_dma),
drivers/crypto/inside-secure/safexcel.c
514
writel(upper_32_bits(priv->ring[i].cdr.base_dma),
drivers/crypto/inside-secure/safexcel.c
560
writel(lower_32_bits(priv->ring[i].rdr.base_dma),
drivers/crypto/inside-secure/safexcel.c
562
writel(upper_32_bits(priv->ring[i].rdr.base_dma),
drivers/crypto/inside-secure/safexcel.c
807
int ring)
drivers/crypto/inside-secure/safexcel.c
809
int coal = min_t(int, priv->ring[ring].requests, EIP197_MAX_BATCH_SZ);
drivers/crypto/inside-secure/safexcel.c
817
EIP197_HIA_RDR(priv, ring) + EIP197_HIA_xDR_THRESH);
drivers/crypto/inside-secure/safexcel.c
820
void safexcel_dequeue(struct safexcel_crypto_priv *priv, int ring)
drivers/crypto/inside-secure/safexcel.c
829
req = priv->ring[ring].req;
drivers/crypto/inside-secure/safexcel.c
830
backlog = priv->ring[ring].backlog;
drivers/crypto/inside-secure/safexcel.c
835
spin_lock_bh(&priv->ring[ring].queue_lock);
drivers/crypto/inside-secure/safexcel.c
836
backlog = crypto_get_backlog(&priv->ring[ring].queue);
drivers/crypto/inside-secure/safexcel.c
837
req = crypto_dequeue_request(&priv->ring[ring].queue);
drivers/crypto/inside-secure/safexcel.c
838
spin_unlock_bh(&priv->ring[ring].queue_lock);
drivers/crypto/inside-secure/safexcel.c
841
priv->ring[ring].req = NULL;
drivers/crypto/inside-secure/safexcel.c
842
priv->ring[ring].backlog = NULL;
drivers/crypto/inside-secure/safexcel.c
848
ret = ctx->send(req, ring, &commands, &results);
drivers/crypto/inside-secure/safexcel.c
871
priv->ring[ring].req = req;
drivers/crypto/inside-secure/safexcel.c
872
priv->ring[ring].backlog = backlog;
drivers/crypto/inside-secure/safexcel.c
878
spin_lock_bh(&priv->ring[ring].lock);
drivers/crypto/inside-secure/safexcel.c
880
priv->ring[ring].requests += nreq;
drivers/crypto/inside-secure/safexcel.c
882
if (!priv->ring[ring].busy) {
drivers/crypto/inside-secure/safexcel.c
883
safexcel_try_push_requests(priv, ring);
drivers/crypto/inside-secure/safexcel.c
884
priv->ring[ring].busy = true;
drivers/crypto/inside-secure/safexcel.c
887
spin_unlock_bh(&priv->ring[ring].lock);
drivers/crypto/inside-secure/safexcel.c
891
EIP197_HIA_RDR(priv, ring) + EIP197_HIA_xDR_PREP_COUNT);
drivers/crypto/inside-secure/safexcel.c
895
EIP197_HIA_CDR(priv, ring) + EIP197_HIA_xDR_PREP_COUNT);
drivers/crypto/inside-secure/safexcel.c
941
int ring,
drivers/crypto/inside-secure/safexcel.c
945
int i = safexcel_ring_rdr_rdesc_index(priv, ring, rdesc);
drivers/crypto/inside-secure/safexcel.c
947
priv->ring[ring].rdr_req[i] = req;
drivers/crypto/inside-secure/safexcel.c
951
safexcel_rdr_req_get(struct safexcel_crypto_priv *priv, int ring)
drivers/crypto/inside-secure/safexcel.c
953
int i = safexcel_ring_first_rdr_index(priv, ring);
drivers/crypto/inside-secure/safexcel.c
955
return priv->ring[ring].rdr_req[i];
drivers/crypto/inside-secure/safexcel.c
958
void safexcel_complete(struct safexcel_crypto_priv *priv, int ring)
drivers/crypto/inside-secure/safexcel.c
964
cdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].cdr);
drivers/crypto/inside-secure/safexcel.c
975
dma_addr_t ctxr_dma, int ring)
drivers/crypto/inside-secure/safexcel.c
983
cdesc = safexcel_add_cdesc(priv, ring, true, true, 0, 0, 0, ctxr_dma,
drivers/crypto/inside-secure/safexcel.c
994
rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
drivers/crypto/inside-secure/safexcel.h
691
int ring;
drivers/crypto/inside-secure/safexcel.h
834
struct safexcel_ring *ring;
drivers/crypto/inside-secure/safexcel.h
838
int (*send)(struct crypto_async_request *req, int ring,
drivers/crypto/inside-secure/safexcel.h
840
int (*handle_result)(struct safexcel_crypto_priv *priv, int ring,
drivers/crypto/inside-secure/safexcel.h
854
int ring;
drivers/crypto/inside-secure/safexcel.h
887
void safexcel_dequeue(struct safexcel_crypto_priv *priv, int ring);
drivers/crypto/inside-secure/safexcel.h
890
void safexcel_complete(struct safexcel_crypto_priv *priv, int ring);
drivers/crypto/inside-secure/safexcel.h
893
dma_addr_t ctxr_dma, int ring);
drivers/crypto/inside-secure/safexcel.h
899
struct safexcel_desc_ring *ring);
drivers/crypto/inside-secure/safexcel.h
901
struct safexcel_desc_ring *ring);
drivers/crypto/inside-secure/safexcel.h
914
int ring);
drivers/crypto/inside-secure/safexcel.h
916
int ring,
drivers/crypto/inside-secure/safexcel.h
919
int ring,
drivers/crypto/inside-secure/safexcel.h
923
safexcel_rdr_req_get(struct safexcel_crypto_priv *priv, int ring);
drivers/crypto/inside-secure/safexcel_cipher.c
1010
err = safexcel_handle_inv_result(priv, ring, async, sreq,
drivers/crypto/inside-secure/safexcel_cipher.c
1013
err = safexcel_handle_req_result(priv, ring, async, req->src,
drivers/crypto/inside-secure/safexcel_cipher.c
1023
int ring, int *commands, int *results)
drivers/crypto/inside-secure/safexcel_cipher.c
1029
ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
drivers/crypto/inside-secure/safexcel_cipher.c
1039
static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
drivers/crypto/inside-secure/safexcel_cipher.c
1051
ret = safexcel_cipher_send_inv(async, ring, commands, results);
drivers/crypto/inside-secure/safexcel_cipher.c
1062
ret = safexcel_send_req(async, ring, sreq, req->src,
drivers/crypto/inside-secure/safexcel_cipher.c
1071
static int safexcel_aead_send(struct crypto_async_request *async, int ring,
drivers/crypto/inside-secure/safexcel_cipher.c
1084
ret = safexcel_cipher_send_inv(async, ring, commands, results);
drivers/crypto/inside-secure/safexcel_cipher.c
1086
ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
drivers/crypto/inside-secure/safexcel_cipher.c
1101
int ring = ctx->base.ring;
drivers/crypto/inside-secure/safexcel_cipher.c
1108
spin_lock_bh(&priv->ring[ring].queue_lock);
drivers/crypto/inside-secure/safexcel_cipher.c
1109
crypto_enqueue_request(&priv->ring[ring].queue, base);
drivers/crypto/inside-secure/safexcel_cipher.c
1110
spin_unlock_bh(&priv->ring[ring].queue_lock);
drivers/crypto/inside-secure/safexcel_cipher.c
1112
queue_work(priv->ring[ring].workqueue,
drivers/crypto/inside-secure/safexcel_cipher.c
1113
&priv->ring[ring].work_data.work);
drivers/crypto/inside-secure/safexcel_cipher.c
1163
int ret, ring;
drivers/crypto/inside-secure/safexcel_cipher.c
1174
ctx->base.ring = safexcel_select_ring(priv);
drivers/crypto/inside-secure/safexcel_cipher.c
1182
ring = ctx->base.ring;
drivers/crypto/inside-secure/safexcel_cipher.c
1184
spin_lock_bh(&priv->ring[ring].queue_lock);
drivers/crypto/inside-secure/safexcel_cipher.c
1185
ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
drivers/crypto/inside-secure/safexcel_cipher.c
1186
spin_unlock_bh(&priv->ring[ring].queue_lock);
drivers/crypto/inside-secure/safexcel_cipher.c
1188
queue_work(priv->ring[ring].workqueue,
drivers/crypto/inside-secure/safexcel_cipher.c
1189
&priv->ring[ring].work_data.work);
drivers/crypto/inside-secure/safexcel_cipher.c
607
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
drivers/crypto/inside-secure/safexcel_cipher.c
627
rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
drivers/crypto/inside-secure/safexcel_cipher.c
641
safexcel_complete(priv, ring);
drivers/crypto/inside-secure/safexcel_cipher.c
673
static int safexcel_send_req(struct crypto_async_request *base, int ring,
drivers/crypto/inside-secure/safexcel_cipher.c
781
first_cdesc = safexcel_add_cdesc(priv, ring,
drivers/crypto/inside-secure/safexcel_cipher.c
802
cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
drivers/crypto/inside-secure/safexcel_cipher.c
849
rdesc = safexcel_add_rdesc(priv, ring, first, last,
drivers/crypto/inside-secure/safexcel_cipher.c
855
rdesc = safexcel_add_rdesc(priv, ring, first, last,
drivers/crypto/inside-secure/safexcel_cipher.c
878
rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
drivers/crypto/inside-secure/safexcel_cipher.c
888
safexcel_rdr_req_set(priv, ring, first_rdesc, base);
drivers/crypto/inside-secure/safexcel_cipher.c
896
safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
drivers/crypto/inside-secure/safexcel_cipher.c
899
safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
drivers/crypto/inside-secure/safexcel_cipher.c
918
int ring,
drivers/crypto/inside-secure/safexcel_cipher.c
933
rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
drivers/crypto/inside-secure/safexcel_cipher.c
947
safexcel_complete(priv, ring);
drivers/crypto/inside-secure/safexcel_cipher.c
958
ring = safexcel_select_ring(priv);
drivers/crypto/inside-secure/safexcel_cipher.c
959
ctx->base.ring = ring;
drivers/crypto/inside-secure/safexcel_cipher.c
961
spin_lock_bh(&priv->ring[ring].queue_lock);
drivers/crypto/inside-secure/safexcel_cipher.c
962
enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
drivers/crypto/inside-secure/safexcel_cipher.c
963
spin_unlock_bh(&priv->ring[ring].queue_lock);
drivers/crypto/inside-secure/safexcel_cipher.c
968
queue_work(priv->ring[ring].workqueue,
drivers/crypto/inside-secure/safexcel_cipher.c
969
&priv->ring[ring].work_data.work);
drivers/crypto/inside-secure/safexcel_cipher.c
977
int ring,
drivers/crypto/inside-secure/safexcel_cipher.c
987
err = safexcel_handle_inv_result(priv, ring, async, sreq,
drivers/crypto/inside-secure/safexcel_cipher.c
990
err = safexcel_handle_req_result(priv, ring, async, req->src,
drivers/crypto/inside-secure/safexcel_cipher.c
999
int ring,
drivers/crypto/inside-secure/safexcel_hash.c
227
int ring,
drivers/crypto/inside-secure/safexcel_hash.c
240
rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
drivers/crypto/inside-secure/safexcel_hash.c
249
safexcel_complete(priv, ring);
drivers/crypto/inside-secure/safexcel_hash.c
307
static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
drivers/crypto/inside-secure/safexcel_hash.c
402
first_cdesc = safexcel_add_cdesc(priv, ring, 1,
drivers/crypto/inside-secure/safexcel_hash.c
442
cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
drivers/crypto/inside-secure/safexcel_hash.c
476
rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
drivers/crypto/inside-secure/safexcel_hash.c
483
safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);
drivers/crypto/inside-secure/safexcel_hash.c
503
safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
drivers/crypto/inside-secure/safexcel_hash.c
516
int ring,
drivers/crypto/inside-secure/safexcel_hash.c
528
rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
drivers/crypto/inside-secure/safexcel_hash.c
537
safexcel_complete(priv, ring);
drivers/crypto/inside-secure/safexcel_hash.c
547
ring = safexcel_select_ring(priv);
drivers/crypto/inside-secure/safexcel_hash.c
548
ctx->base.ring = ring;
drivers/crypto/inside-secure/safexcel_hash.c
550
spin_lock_bh(&priv->ring[ring].queue_lock);
drivers/crypto/inside-secure/safexcel_hash.c
551
enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
drivers/crypto/inside-secure/safexcel_hash.c
552
spin_unlock_bh(&priv->ring[ring].queue_lock);
drivers/crypto/inside-secure/safexcel_hash.c
557
queue_work(priv->ring[ring].workqueue,
drivers/crypto/inside-secure/safexcel_hash.c
558
&priv->ring[ring].work_data.work);
drivers/crypto/inside-secure/safexcel_hash.c
565
static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
drivers/crypto/inside-secure/safexcel_hash.c
577
err = safexcel_handle_inv_result(priv, ring, async,
drivers/crypto/inside-secure/safexcel_hash.c
580
err = safexcel_handle_req_result(priv, ring, async,
drivers/crypto/inside-secure/safexcel_hash.c
588
int ring, int *commands, int *results)
drivers/crypto/inside-secure/safexcel_hash.c
595
ctx->base.ctxr_dma, ring);
drivers/crypto/inside-secure/safexcel_hash.c
606
int ring, int *commands, int *results)
drivers/crypto/inside-secure/safexcel_hash.c
613
ret = safexcel_ahash_send_inv(async, ring, commands, results);
drivers/crypto/inside-secure/safexcel_hash.c
615
ret = safexcel_ahash_send_req(async, ring, commands, results);
drivers/crypto/inside-secure/safexcel_hash.c
627
int ring = ctx->base.ring;
drivers/crypto/inside-secure/safexcel_hash.c
642
spin_lock_bh(&priv->ring[ring].queue_lock);
drivers/crypto/inside-secure/safexcel_hash.c
643
crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
drivers/crypto/inside-secure/safexcel_hash.c
644
spin_unlock_bh(&priv->ring[ring].queue_lock);
drivers/crypto/inside-secure/safexcel_hash.c
646
queue_work(priv->ring[ring].workqueue,
drivers/crypto/inside-secure/safexcel_hash.c
647
&priv->ring[ring].work_data.work);
drivers/crypto/inside-secure/safexcel_hash.c
692
int ret, ring;
drivers/crypto/inside-secure/safexcel_hash.c
719
ctx->base.ring = safexcel_select_ring(priv);
drivers/crypto/inside-secure/safexcel_hash.c
728
ring = ctx->base.ring;
drivers/crypto/inside-secure/safexcel_hash.c
730
spin_lock_bh(&priv->ring[ring].queue_lock);
drivers/crypto/inside-secure/safexcel_hash.c
731
ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
drivers/crypto/inside-secure/safexcel_hash.c
732
spin_unlock_bh(&priv->ring[ring].queue_lock);
drivers/crypto/inside-secure/safexcel_hash.c
734
queue_work(priv->ring[ring].workqueue,
drivers/crypto/inside-secure/safexcel_hash.c
735
&priv->ring[ring].work_data.work);
drivers/crypto/inside-secure/safexcel_ring.c
103
struct safexcel_desc_ring *ring,
drivers/crypto/inside-secure/safexcel_ring.c
106
void *ptr = ring->write;
drivers/crypto/inside-secure/safexcel_ring.c
109
*rtoken = ring->write + ring->shoffset;
drivers/crypto/inside-secure/safexcel_ring.c
111
if ((ring->write == ring->read - ring->offset) ||
drivers/crypto/inside-secure/safexcel_ring.c
112
(ring->read == ring->base && ring->write == ring->base_end))
drivers/crypto/inside-secure/safexcel_ring.c
115
if (ring->write == ring->base_end)
drivers/crypto/inside-secure/safexcel_ring.c
116
ring->write = ring->base;
drivers/crypto/inside-secure/safexcel_ring.c
118
ring->write += ring->offset;
drivers/crypto/inside-secure/safexcel_ring.c
124
struct safexcel_desc_ring *ring)
drivers/crypto/inside-secure/safexcel_ring.c
126
void *ptr = ring->read;
drivers/crypto/inside-secure/safexcel_ring.c
128
if (ring->write == ring->read)
drivers/crypto/inside-secure/safexcel_ring.c
131
if (ring->read == ring->base_end)
drivers/crypto/inside-secure/safexcel_ring.c
132
ring->read = ring->base;
drivers/crypto/inside-secure/safexcel_ring.c
134
ring->read += ring->offset;
drivers/crypto/inside-secure/safexcel_ring.c
140
int ring)
drivers/crypto/inside-secure/safexcel_ring.c
142
struct safexcel_desc_ring *rdr = &priv->ring[ring].rdr;
drivers/crypto/inside-secure/safexcel_ring.c
148
int ring)
drivers/crypto/inside-secure/safexcel_ring.c
150
struct safexcel_desc_ring *rdr = &priv->ring[ring].rdr;
drivers/crypto/inside-secure/safexcel_ring.c
156
int ring,
drivers/crypto/inside-secure/safexcel_ring.c
159
struct safexcel_desc_ring *rdr = &priv->ring[ring].rdr;
drivers/crypto/inside-secure/safexcel_ring.c
165
struct safexcel_desc_ring *ring)
drivers/crypto/inside-secure/safexcel_ring.c
167
if (ring->write == ring->read)
drivers/crypto/inside-secure/safexcel_ring.c
170
if (ring->write == ring->base) {
drivers/crypto/inside-secure/safexcel_ring.c
171
ring->write = ring->base_end;
drivers/crypto/inside-secure/safexcel_ring.c
172
ring->shwrite = ring->shbase_end;
drivers/crypto/inside-secure/safexcel_ring.c
174
ring->write -= ring->offset;
drivers/crypto/inside-secure/safexcel_ring.c
175
ring->shwrite -= ring->shoffset;
drivers/crypto/inside-secure/safexcel_ring.c
189
cdesc = safexcel_ring_next_cwptr(priv, &priv->ring[ring_id].cdr,
drivers/crypto/inside-secure/safexcel_ring.c
232
rdesc = safexcel_ring_next_rwptr(priv, &priv->ring[ring_id].rdr,
drivers/crypto/inside-secure/safexcel_ring.c
78
struct safexcel_desc_ring *ring,
drivers/crypto/inside-secure/safexcel_ring.c
82
void *ptr = ring->write;
drivers/crypto/inside-secure/safexcel_ring.c
85
*atoken = ring->shwrite;
drivers/crypto/inside-secure/safexcel_ring.c
87
if ((ring->write == ring->read - ring->offset) ||
drivers/crypto/inside-secure/safexcel_ring.c
88
(ring->read == ring->base && ring->write == ring->base_end))
drivers/crypto/inside-secure/safexcel_ring.c
91
if (ring->write == ring->base_end) {
drivers/crypto/inside-secure/safexcel_ring.c
92
ring->write = ring->base;
drivers/crypto/inside-secure/safexcel_ring.c
93
ring->shwrite = ring->shbase;
drivers/crypto/inside-secure/safexcel_ring.c
95
ring->write += ring->offset;
drivers/crypto/inside-secure/safexcel_ring.c
96
ring->shwrite += ring->shoffset;
drivers/crypto/intel/qat/qat_common/adf_accel_devices.h
165
u32 ring);
drivers/crypto/intel/qat/qat_common/adf_accel_devices.h
167
u32 ring, u32 value);
drivers/crypto/intel/qat/qat_common/adf_accel_devices.h
169
u32 ring);
drivers/crypto/intel/qat/qat_common/adf_accel_devices.h
171
u32 ring, u32 value);
drivers/crypto/intel/qat/qat_common/adf_accel_devices.h
184
u32 ring);
drivers/crypto/intel/qat/qat_common/adf_accel_devices.h
186
u32 ring, u32 value);
drivers/crypto/intel/qat/qat_common/adf_accel_devices.h
188
u32 ring);
drivers/crypto/intel/qat/qat_common/adf_accel_devices.h
190
u32 ring, dma_addr_t addr);
drivers/crypto/intel/qat/qat_common/adf_common_drv.h
94
void adf_update_ring_arb(struct adf_etr_ring_data *ring);
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.c
11
static u32 read_csr_ring_head(void __iomem *csr_base_addr, u32 bank, u32 ring)
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.c
13
return READ_CSR_RING_HEAD(csr_base_addr, bank, ring);
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.c
16
static void write_csr_ring_head(void __iomem *csr_base_addr, u32 bank, u32 ring,
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.c
19
WRITE_CSR_RING_HEAD(csr_base_addr, bank, ring, value);
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.c
22
static u32 read_csr_ring_tail(void __iomem *csr_base_addr, u32 bank, u32 ring)
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.c
24
return READ_CSR_RING_TAIL(csr_base_addr, bank, ring);
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.c
27
static void write_csr_ring_tail(void __iomem *csr_base_addr, u32 bank, u32 ring,
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.c
30
WRITE_CSR_RING_TAIL(csr_base_addr, bank, ring, value);
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.c
39
u32 ring, u32 value)
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.c
41
WRITE_CSR_RING_CONFIG(csr_base_addr, bank, ring, value);
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.c
44
static void write_csr_ring_base(void __iomem *csr_base_addr, u32 bank, u32 ring,
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.c
47
WRITE_CSR_RING_BASE(csr_base_addr, bank, ring, addr);
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.h
30
#define READ_CSR_RING_HEAD(csr_base_addr, bank, ring) \
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.h
32
ADF_RING_CSR_RING_HEAD + ((ring) << 2))
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.h
33
#define READ_CSR_RING_TAIL(csr_base_addr, bank, ring) \
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.h
35
ADF_RING_CSR_RING_TAIL + ((ring) << 2))
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.h
39
#define WRITE_CSR_RING_CONFIG(csr_base_addr, bank, ring, value) \
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.h
41
ADF_RING_CSR_RING_CONFIG + ((ring) << 2), value)
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.h
42
#define WRITE_CSR_RING_BASE(csr_base_addr, bank, ring, value) \
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.h
48
ADF_RING_CSR_RING_LBASE + ((ring) << 2), l_base); \
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.h
50
ADF_RING_CSR_RING_UBASE + ((ring) << 2), u_base); \
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.h
53
#define WRITE_CSR_RING_HEAD(csr_base_addr, bank, ring, value) \
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.h
55
ADF_RING_CSR_RING_HEAD + ((ring) << 2), value)
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.h
56
#define WRITE_CSR_RING_TAIL(csr_base_addr, bank, ring, value) \
drivers/crypto/intel/qat/qat_common/adf_gen2_hw_csr_data.h
58
ADF_RING_CSR_RING_TAIL + ((ring) << 2), value)
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.c
102
static void write_csr_ring_base(void __iomem *csr_base_addr, u32 bank, u32 ring,
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.c
105
WRITE_CSR_RING_BASE(csr_base_addr, bank, ring, addr);
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.c
11
static u32 read_csr_ring_head(void __iomem *csr_base_addr, u32 bank, u32 ring)
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.c
13
return READ_CSR_RING_HEAD(csr_base_addr, bank, ring);
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.c
16
static void write_csr_ring_head(void __iomem *csr_base_addr, u32 bank, u32 ring,
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.c
19
WRITE_CSR_RING_HEAD(csr_base_addr, bank, ring, value);
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.c
22
static u32 read_csr_ring_tail(void __iomem *csr_base_addr, u32 bank, u32 ring)
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.c
24
return READ_CSR_RING_TAIL(csr_base_addr, bank, ring);
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.c
27
static void write_csr_ring_tail(void __iomem *csr_base_addr, u32 bank, u32 ring,
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.c
30
WRITE_CSR_RING_TAIL(csr_base_addr, bank, ring, value);
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.c
85
u32 ring)
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.c
87
return READ_CSR_RING_CONFIG(csr_base_addr, bank, ring);
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.c
90
static void write_csr_ring_config(void __iomem *csr_base_addr, u32 bank, u32 ring,
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.c
93
WRITE_CSR_RING_CONFIG(csr_base_addr, bank, ring, value);
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.c
97
u32 ring)
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.c
99
return READ_CSR_RING_BASE(csr_base_addr, bank, ring);
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.h
101
static inline u64 read_base(void __iomem *csr_base_addr, u32 bank, u32 ring)
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.h
110
ADF_RING_CSR_RING_LBASE + (ring << 2));
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.h
112
ADF_RING_CSR_RING_UBASE + (ring << 2));
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.h
117
#define READ_CSR_RING_BASE(csr_base_addr, bank, ring) \
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.h
118
read_base((csr_base_addr) + ADF_RING_CSR_ADDR_OFFSET, (bank), (ring))
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.h
120
#define WRITE_CSR_RING_HEAD(csr_base_addr, bank, ring, value) \
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.h
123
ADF_RING_CSR_RING_HEAD + ((ring) << 2), value)
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.h
124
#define WRITE_CSR_RING_TAIL(csr_base_addr, bank, ring, value) \
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.h
127
ADF_RING_CSR_RING_TAIL + ((ring) << 2), value)
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.h
37
#define READ_CSR_RING_HEAD(csr_base_addr, bank, ring) \
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.h
40
ADF_RING_CSR_RING_HEAD + ((ring) << 2))
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.h
41
#define READ_CSR_RING_TAIL(csr_base_addr, bank, ring) \
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.h
44
ADF_RING_CSR_RING_TAIL + ((ring) << 2))
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.h
76
#define READ_CSR_RING_CONFIG(csr_base_addr, bank, ring) \
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.h
79
ADF_RING_CSR_RING_CONFIG + ((ring) << 2))
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.h
80
#define WRITE_CSR_RING_CONFIG(csr_base_addr, bank, ring, value) \
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.h
83
ADF_RING_CSR_RING_CONFIG + ((ring) << 2), value)
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.h
84
#define WRITE_CSR_RING_BASE(csr_base_addr, bank, ring, value) \
drivers/crypto/intel/qat/qat_common/adf_gen4_hw_csr_data.h
88
u32 _ring = ring; \
drivers/crypto/intel/qat/qat_common/adf_hw_arbiter.c
48
void adf_update_ring_arb(struct adf_etr_ring_data *ring)
drivers/crypto/intel/qat/qat_common/adf_hw_arbiter.c
50
struct adf_accel_dev *accel_dev = ring->bank->accel_dev;
drivers/crypto/intel/qat/qat_common/adf_hw_arbiter.c
65
arben_tx = (ring->bank->ring_mask & tx_ring_mask) >> 0;
drivers/crypto/intel/qat/qat_common/adf_hw_arbiter.c
66
arben_rx = (ring->bank->ring_mask & rx_ring_mask) >> shift;
drivers/crypto/intel/qat/qat_common/adf_hw_arbiter.c
69
csr_ops->write_csr_ring_srv_arb_en(ring->bank->csr_addr,
drivers/crypto/intel/qat/qat_common/adf_hw_arbiter.c
70
ring->bank->bank_number, arben);
drivers/crypto/intel/qat/qat_common/adf_sysfs.c
285
unsigned int ring;
drivers/crypto/intel/qat/qat_common/adf_sysfs.c
291
ret = kstrtouint(buf, 10, &ring);
drivers/crypto/intel/qat/qat_common/adf_sysfs.c
296
if (ring >= num_rings) {
drivers/crypto/intel/qat/qat_common/adf_sysfs.c
304
accel_dev->sysfs.ring_num = ring;
drivers/crypto/intel/qat/qat_common/adf_transport.c
101
ring->tail = adf_modulo(ring->tail +
drivers/crypto/intel/qat/qat_common/adf_transport.c
102
ADF_MSG_SIZE_TO_BYTES(ring->msg_size),
drivers/crypto/intel/qat/qat_common/adf_transport.c
103
ADF_RING_SIZE_MODULO(ring->ring_size));
drivers/crypto/intel/qat/qat_common/adf_transport.c
104
csr_ops->write_csr_ring_tail(ring->bank->csr_addr,
drivers/crypto/intel/qat/qat_common/adf_transport.c
105
ring->bank->bank_number, ring->ring_number,
drivers/crypto/intel/qat/qat_common/adf_transport.c
106
ring->tail);
drivers/crypto/intel/qat/qat_common/adf_transport.c
107
spin_unlock_bh(&ring->lock);
drivers/crypto/intel/qat/qat_common/adf_transport.c
112
static int adf_handle_response(struct adf_etr_ring_data *ring)
drivers/crypto/intel/qat/qat_common/adf_transport.c
114
struct adf_hw_csr_ops *csr_ops = GET_CSR_OPS(ring->bank->accel_dev);
drivers/crypto/intel/qat/qat_common/adf_transport.c
116
u32 *msg = (u32 *)((uintptr_t)ring->base_addr + ring->head);
drivers/crypto/intel/qat/qat_common/adf_transport.c
119
ring->callback((u32 *)msg);
drivers/crypto/intel/qat/qat_common/adf_transport.c
120
atomic_dec(ring->inflights);
drivers/crypto/intel/qat/qat_common/adf_transport.c
122
ring->head = adf_modulo(ring->head +
drivers/crypto/intel/qat/qat_common/adf_transport.c
123
ADF_MSG_SIZE_TO_BYTES(ring->msg_size),
drivers/crypto/intel/qat/qat_common/adf_transport.c
124
ADF_RING_SIZE_MODULO(ring->ring_size));
drivers/crypto/intel/qat/qat_common/adf_transport.c
126
msg = (u32 *)((uintptr_t)ring->base_addr + ring->head);
drivers/crypto/intel/qat/qat_common/adf_transport.c
129
csr_ops->write_csr_ring_head(ring->bank->csr_addr,
drivers/crypto/intel/qat/qat_common/adf_transport.c
130
ring->bank->bank_number,
drivers/crypto/intel/qat/qat_common/adf_transport.c
131
ring->ring_number, ring->head);
drivers/crypto/intel/qat/qat_common/adf_transport.c
136
static void adf_configure_tx_ring(struct adf_etr_ring_data *ring)
drivers/crypto/intel/qat/qat_common/adf_transport.c
138
struct adf_hw_csr_ops *csr_ops = GET_CSR_OPS(ring->bank->accel_dev);
drivers/crypto/intel/qat/qat_common/adf_transport.c
139
u32 ring_config = BUILD_RING_CONFIG(ring->ring_size);
drivers/crypto/intel/qat/qat_common/adf_transport.c
141
csr_ops->write_csr_ring_config(ring->bank->csr_addr,
drivers/crypto/intel/qat/qat_common/adf_transport.c
142
ring->bank->bank_number,
drivers/crypto/intel/qat/qat_common/adf_transport.c
143
ring->ring_number, ring_config);
drivers/crypto/intel/qat/qat_common/adf_transport.c
147
static void adf_configure_rx_ring(struct adf_etr_ring_data *ring)
drivers/crypto/intel/qat/qat_common/adf_transport.c
149
struct adf_hw_csr_ops *csr_ops = GET_CSR_OPS(ring->bank->accel_dev);
drivers/crypto/intel/qat/qat_common/adf_transport.c
151
BUILD_RESP_RING_CONFIG(ring->ring_size,
drivers/crypto/intel/qat/qat_common/adf_transport.c
155
csr_ops->write_csr_ring_config(ring->bank->csr_addr,
drivers/crypto/intel/qat/qat_common/adf_transport.c
156
ring->bank->bank_number,
drivers/crypto/intel/qat/qat_common/adf_transport.c
157
ring->ring_number, ring_config);
drivers/crypto/intel/qat/qat_common/adf_transport.c
160
static int adf_init_ring(struct adf_etr_ring_data *ring)
drivers/crypto/intel/qat/qat_common/adf_transport.c
162
struct adf_etr_bank_data *bank = ring->bank;
drivers/crypto/intel/qat/qat_common/adf_transport.c
168
ADF_SIZE_TO_RING_SIZE_IN_BYTES(ring->ring_size);
drivers/crypto/intel/qat/qat_common/adf_transport.c
171
ring->base_addr = dma_alloc_coherent(&GET_DEV(accel_dev),
drivers/crypto/intel/qat/qat_common/adf_transport.c
172
ring_size_bytes, &ring->dma_addr,
drivers/crypto/intel/qat/qat_common/adf_transport.c
174
if (!ring->base_addr)
drivers/crypto/intel/qat/qat_common/adf_transport.c
177
memset(ring->base_addr, 0x7F, ring_size_bytes);
drivers/crypto/intel/qat/qat_common/adf_transport.c
179
if (adf_check_ring_alignment(ring->dma_addr, ring_size_bytes)) {
drivers/crypto/intel/qat/qat_common/adf_transport.c
182
ring->base_addr, ring->dma_addr);
drivers/crypto/intel/qat/qat_common/adf_transport.c
183
ring->base_addr = NULL;
drivers/crypto/intel/qat/qat_common/adf_transport.c
187
if (hw_data->tx_rings_mask & (1 << ring->ring_number))
drivers/crypto/intel/qat/qat_common/adf_transport.c
188
adf_configure_tx_ring(ring);
drivers/crypto/intel/qat/qat_common/adf_transport.c
191
adf_configure_rx_ring(ring);
drivers/crypto/intel/qat/qat_common/adf_transport.c
193
ring_base = csr_ops->build_csr_ring_base_addr(ring->dma_addr,
drivers/crypto/intel/qat/qat_common/adf_transport.c
194
ring->ring_size);
drivers/crypto/intel/qat/qat_common/adf_transport.c
196
csr_ops->write_csr_ring_base(ring->bank->csr_addr,
drivers/crypto/intel/qat/qat_common/adf_transport.c
197
ring->bank->bank_number, ring->ring_number,
drivers/crypto/intel/qat/qat_common/adf_transport.c
199
spin_lock_init(&ring->lock);
drivers/crypto/intel/qat/qat_common/adf_transport.c
203
static void adf_cleanup_ring(struct adf_etr_ring_data *ring)
drivers/crypto/intel/qat/qat_common/adf_transport.c
206
ADF_SIZE_TO_RING_SIZE_IN_BYTES(ring->ring_size);
drivers/crypto/intel/qat/qat_common/adf_transport.c
209
if (ring->base_addr) {
drivers/crypto/intel/qat/qat_common/adf_transport.c
210
memset(ring->base_addr, 0x7F, ring_size_bytes);
drivers/crypto/intel/qat/qat_common/adf_transport.c
211
dma_free_coherent(&GET_DEV(ring->bank->accel_dev),
drivers/crypto/intel/qat/qat_common/adf_transport.c
212
ring_size_bytes, ring->base_addr,
drivers/crypto/intel/qat/qat_common/adf_transport.c
213
ring->dma_addr);
drivers/crypto/intel/qat/qat_common/adf_transport.c
226
struct adf_etr_ring_data *ring;
drivers/crypto/intel/qat/qat_common/adf_transport.c
267
ring = &bank->rings[ring_num];
drivers/crypto/intel/qat/qat_common/adf_transport.c
268
ring->ring_number = ring_num;
drivers/crypto/intel/qat/qat_common/adf_transport.c
269
ring->bank = bank;
drivers/crypto/intel/qat/qat_common/adf_transport.c
270
ring->callback = callback;
drivers/crypto/intel/qat/qat_common/adf_transport.c
271
ring->msg_size = ADF_BYTES_TO_MSG_SIZE(msg_size);
drivers/crypto/intel/qat/qat_common/adf_transport.c
272
ring->ring_size = adf_verify_ring_size(msg_size, num_msgs);
drivers/crypto/intel/qat/qat_common/adf_transport.c
273
ring->head = 0;
drivers/crypto/intel/qat/qat_common/adf_transport.c
274
ring->tail = 0;
drivers/crypto/intel/qat/qat_common/adf_transport.c
275
max_inflights = ADF_MAX_INFLIGHTS(ring->ring_size, ring->msg_size);
drivers/crypto/intel/qat/qat_common/adf_transport.c
276
ring->threshold = ADF_PERCENT(max_inflights, ADF_MAX_RING_THRESHOLD);
drivers/crypto/intel/qat/qat_common/adf_transport.c
277
atomic_set(ring->inflights, 0);
drivers/crypto/intel/qat/qat_common/adf_transport.c
278
ret = adf_init_ring(ring);
drivers/crypto/intel/qat/qat_common/adf_transport.c
283
adf_update_ring_arb(ring);
drivers/crypto/intel/qat/qat_common/adf_transport.c
285
if (adf_ring_debugfs_add(ring, ring_name)) {
drivers/crypto/intel/qat/qat_common/adf_transport.c
294
adf_enable_ring_irq(bank, ring->ring_number);
drivers/crypto/intel/qat/qat_common/adf_transport.c
295
*ring_ptr = ring;
drivers/crypto/intel/qat/qat_common/adf_transport.c
298
adf_cleanup_ring(ring);
drivers/crypto/intel/qat/qat_common/adf_transport.c
300
adf_update_ring_arb(ring);
drivers/crypto/intel/qat/qat_common/adf_transport.c
304
void adf_remove_ring(struct adf_etr_ring_data *ring)
drivers/crypto/intel/qat/qat_common/adf_transport.c
306
struct adf_etr_bank_data *bank = ring->bank;
drivers/crypto/intel/qat/qat_common/adf_transport.c
310
adf_disable_ring_irq(bank, ring->ring_number);
drivers/crypto/intel/qat/qat_common/adf_transport.c
315
ring->ring_number, 0);
drivers/crypto/intel/qat/qat_common/adf_transport.c
317
ring->ring_number, 0);
drivers/crypto/intel/qat/qat_common/adf_transport.c
318
adf_ring_debugfs_rm(ring);
drivers/crypto/intel/qat/qat_common/adf_transport.c
319
adf_unreserve_ring(bank, ring->ring_number);
drivers/crypto/intel/qat/qat_common/adf_transport.c
321
adf_update_ring_arb(ring);
drivers/crypto/intel/qat/qat_common/adf_transport.c
322
adf_cleanup_ring(ring);
drivers/crypto/intel/qat/qat_common/adf_transport.c
392
struct adf_etr_ring_data *ring;
drivers/crypto/intel/qat/qat_common/adf_transport.c
40
static int adf_reserve_ring(struct adf_etr_bank_data *bank, u32 ring)
drivers/crypto/intel/qat/qat_common/adf_transport.c
425
ring = &bank->rings[i];
drivers/crypto/intel/qat/qat_common/adf_transport.c
427
ring->inflights =
drivers/crypto/intel/qat/qat_common/adf_transport.c
43
if (bank->ring_mask & (1 << ring)) {
drivers/crypto/intel/qat/qat_common/adf_transport.c
431
if (!ring->inflights)
drivers/crypto/intel/qat/qat_common/adf_transport.c
440
ring->inflights = tx_ring->inflights;
drivers/crypto/intel/qat/qat_common/adf_transport.c
456
ring = &bank->rings[i];
drivers/crypto/intel/qat/qat_common/adf_transport.c
457
kfree(ring->inflights);
drivers/crypto/intel/qat/qat_common/adf_transport.c
458
ring->inflights = NULL;
drivers/crypto/intel/qat/qat_common/adf_transport.c
47
bank->ring_mask |= (1 << ring);
drivers/crypto/intel/qat/qat_common/adf_transport.c
52
static void adf_unreserve_ring(struct adf_etr_bank_data *bank, u32 ring)
drivers/crypto/intel/qat/qat_common/adf_transport.c
530
struct adf_etr_ring_data *ring = &bank->rings[i];
drivers/crypto/intel/qat/qat_common/adf_transport.c
533
adf_cleanup_ring(ring);
drivers/crypto/intel/qat/qat_common/adf_transport.c
536
kfree(ring->inflights);
drivers/crypto/intel/qat/qat_common/adf_transport.c
55
bank->ring_mask &= ~(1 << ring);
drivers/crypto/intel/qat/qat_common/adf_transport.c
59
static void adf_enable_ring_irq(struct adf_etr_bank_data *bank, u32 ring)
drivers/crypto/intel/qat/qat_common/adf_transport.c
64
bank->irq_mask |= (1 << ring);
drivers/crypto/intel/qat/qat_common/adf_transport.c
72
static void adf_disable_ring_irq(struct adf_etr_bank_data *bank, u32 ring)
drivers/crypto/intel/qat/qat_common/adf_transport.c
77
bank->irq_mask &= ~(1 << ring);
drivers/crypto/intel/qat/qat_common/adf_transport.c
83
bool adf_ring_nearly_full(struct adf_etr_ring_data *ring)
drivers/crypto/intel/qat/qat_common/adf_transport.c
85
return atomic_read(ring->inflights) > ring->threshold;
drivers/crypto/intel/qat/qat_common/adf_transport.c
88
int adf_send_message(struct adf_etr_ring_data *ring, u32 *msg)
drivers/crypto/intel/qat/qat_common/adf_transport.c
90
struct adf_hw_csr_ops *csr_ops = GET_CSR_OPS(ring->bank->accel_dev);
drivers/crypto/intel/qat/qat_common/adf_transport.c
92
if (atomic_add_return(1, ring->inflights) >
drivers/crypto/intel/qat/qat_common/adf_transport.c
93
ADF_MAX_INFLIGHTS(ring->ring_size, ring->msg_size)) {
drivers/crypto/intel/qat/qat_common/adf_transport.c
94
atomic_dec(ring->inflights);
drivers/crypto/intel/qat/qat_common/adf_transport.c
97
spin_lock_bh(&ring->lock);
drivers/crypto/intel/qat/qat_common/adf_transport.c
98
memcpy((void *)((uintptr_t)ring->base_addr + ring->tail), msg,
drivers/crypto/intel/qat/qat_common/adf_transport.c
99
ADF_MSG_SIZE_TO_BYTES(ring->msg_size));
drivers/crypto/intel/qat/qat_common/adf_transport.h
17
bool adf_ring_nearly_full(struct adf_etr_ring_data *ring);
drivers/crypto/intel/qat/qat_common/adf_transport.h
18
int adf_send_message(struct adf_etr_ring_data *ring, u32 *msg);
drivers/crypto/intel/qat/qat_common/adf_transport.h
19
void adf_remove_ring(struct adf_etr_ring_data *ring);
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
108
ring->ring_number);
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
111
ring->bank->bank_debug_dir,
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
112
ring, &adf_ring_debug_fops);
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
113
ring->ring_debug = ring_debug;
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
117
void adf_ring_debugfs_rm(struct adf_etr_ring_data *ring)
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
119
if (ring->ring_debug) {
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
120
debugfs_remove(ring->ring_debug->debug);
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
121
kfree(ring->ring_debug);
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
122
ring->ring_debug = NULL;
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
13
#define ADF_RING_NUM_MSGS(ring) \
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
14
(ADF_SIZE_TO_RING_SIZE_IN_BYTES(ring->ring_size) / \
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
15
ADF_MSG_SIZE_TO_BYTES(ring->msg_size))
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
162
struct adf_etr_ring_data *ring = &bank->rings[ring_id];
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
170
ring->ring_number);
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
172
ring->ring_number);
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
177
ring->ring_number, head, tail,
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
178
(empty & 1 << ring->ring_number) >>
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
179
ring->ring_number);
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
19
struct adf_etr_ring_data *ring = sfile->private;
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
20
unsigned int num_msg = ADF_RING_NUM_MSGS(ring);
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
30
return ring->base_addr +
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
31
(ADF_MSG_SIZE_TO_BYTES(ring->msg_size) * (*pos)++);
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
36
struct adf_etr_ring_data *ring = sfile->private;
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
37
unsigned int num_msg = ADF_RING_NUM_MSGS(ring);
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
45
return ring->base_addr + (ADF_MSG_SIZE_TO_BYTES(ring->msg_size) * val);
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
50
struct adf_etr_ring_data *ring = sfile->private;
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
51
struct adf_etr_bank_data *bank = ring->bank;
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
53
void __iomem *csr = ring->bank->csr_addr;
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
59
ring->ring_number);
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
61
ring->ring_number);
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
66
ring->ring_debug->ring_name);
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
68
ring->ring_number, ring->bank->bank_number);
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
70
head, tail, (empty & 1 << ring->ring_number)
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
71
>> ring->ring_number);
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
73
(long long)ADF_SIZE_TO_RING_SIZE_IN_BYTES(ring->ring_size),
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
74
ADF_MSG_SIZE_TO_BYTES(ring->msg_size));
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
79
v, ADF_MSG_SIZE_TO_BYTES(ring->msg_size), false);
drivers/crypto/intel/qat/qat_common/adf_transport_debug.c
97
int adf_ring_debugfs_add(struct adf_etr_ring_data *ring, const char *name)
drivers/crypto/intel/qat/qat_common/adf_transport_internal.h
55
int adf_ring_debugfs_add(struct adf_etr_ring_data *ring, const char *name);
drivers/crypto/intel/qat/qat_common/adf_transport_internal.h
56
void adf_ring_debugfs_rm(struct adf_etr_ring_data *ring);
drivers/crypto/intel/qat/qat_common/adf_transport_internal.h
65
static inline int adf_ring_debugfs_add(struct adf_etr_ring_data *ring,
drivers/crypto/intel/qat/qat_common/adf_transport_internal.h
71
#define adf_ring_debugfs_rm(ring) do {} while (0)
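Note: the adf_transport_debug.c entries above divide the ring's byte size by its message size to get the message count, and the seq_file iterator steps through ring->base_addr one message at a time. A minimal sketch of that indexing, with invented names (not the QAT driver's structures), assuming a contiguous buffer of fixed-size messages:

        #include <stddef.h>
        #include <stdint.h>

        struct demo_ring {
                uint8_t *base;          /* start of ring storage */
                size_t   size_bytes;    /* total ring size in bytes */
                size_t   msg_bytes;     /* size of one message */
        };

        static size_t demo_ring_num_msgs(const struct demo_ring *r)
        {
                return r->size_bytes / r->msg_bytes;
        }

        static void *demo_ring_msg(const struct demo_ring *r, size_t idx)
        {
                if (idx >= demo_ring_num_msgs(r))
                        return NULL;    /* iterator stops past the last message */
                return r->base + idx * r->msg_bytes;
        }
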
drivers/dma/amd/qdma/qdma.c
1000
"amd-qdma-queue", ring);
drivers/dma/amd/qdma/qdma.c
952
struct qdma_intr_ring *ring;
drivers/dma/amd/qdma/qdma.c
966
ring = &qdev->qintr_rings[i];
drivers/dma/amd/qdma/qdma.c
967
ring->qdev = qdev;
drivers/dma/amd/qdma/qdma.c
968
ring->msix_id = qdev->err_irq_idx + i + 1;
drivers/dma/amd/qdma/qdma.c
969
ring->ridx = i;
drivers/dma/amd/qdma/qdma.c
970
ring->color = 1;
drivers/dma/amd/qdma/qdma.c
971
ring->base = dmam_alloc_coherent(pdata->dma_dev,
drivers/dma/amd/qdma/qdma.c
973
&ring->dev_base, GFP_KERNEL);
drivers/dma/amd/qdma/qdma.c
974
if (!ring->base) {
drivers/dma/amd/qdma/qdma.c
978
intr_ctxt.agg_base = QDMA_INTR_RING_BASE(ring->dev_base);
drivers/dma/amd/qdma/qdma.c
980
intr_ctxt.vec = ring->msix_id;
drivers/dma/amd/qdma/qdma.c
984
QDMA_CTXT_CLEAR, ring->ridx, NULL);
drivers/dma/amd/qdma/qdma.c
992
QDMA_CTXT_WRITE, ring->ridx, ctxt);
drivers/dma/ioat/dma.c
376
struct ioat_ring_ent **ring;
drivers/dma/ioat/dma.c
381
ring = kzalloc_objs(*ring, total_descs, flags);
drivers/dma/ioat/dma.c
382
if (!ring)
drivers/dma/ioat/dma.c
406
kfree(ring);
drivers/dma/ioat/dma.c
412
ring[i] = ioat_alloc_ring_ent(c, i, flags);
drivers/dma/ioat/dma.c
413
if (!ring[i]) {
drivers/dma/ioat/dma.c
417
ioat_free_ring_ent(ring[i], c);
drivers/dma/ioat/dma.c
429
kfree(ring);
drivers/dma/ioat/dma.c
432
set_desc_id(ring[i], i);
drivers/dma/ioat/dma.c
437
struct ioat_ring_ent *next = ring[i+1];
drivers/dma/ioat/dma.c
438
struct ioat_dma_descriptor *hw = ring[i]->hw;
drivers/dma/ioat/dma.c
442
ring[i]->hw->next = ring[0]->txd.phys;
drivers/dma/ioat/dma.c
455
return ring;
drivers/dma/ioat/dma.h
135
struct ioat_ring_ent **ring;
drivers/dma/ioat/dma.h
345
return ioat_chan->ring[idx & (ioat_ring_size(ioat_chan) - 1)];
drivers/dma/ioat/init.c
608
if (!ioat_chan->ring)
drivers/dma/ioat/init.c
651
kfree(ioat_chan->ring);
drivers/dma/ioat/init.c
652
ioat_chan->ring = NULL;
drivers/dma/ioat/init.c
670
struct ioat_ring_ent **ring;
drivers/dma/ioat/init.c
677
if (ioat_chan->ring)
drivers/dma/ioat/init.c
697
ring = ioat_alloc_ring(c, order, GFP_NOWAIT);
drivers/dma/ioat/init.c
698
if (!ring)
drivers/dma/ioat/init.c
703
ioat_chan->ring = ring;
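Note: the ioat entries above chain descriptors by storing each slot's bus address into the previous slot's hardware "next" field and pointing the last slot back at slot 0. A sketch of that circular linking under invented names (not the ioat structures):

        #include <stddef.h>
        #include <stdint.h>

        struct demo_desc {
                uint64_t next_phys;     /* bus address of the next descriptor */
                uint64_t self_phys;     /* bus address of this descriptor */
        };

        static void demo_link_ring(struct demo_desc *ring, size_t n)
        {
                for (size_t i = 0; i + 1 < n; i++)
                        ring[i].next_phys = ring[i + 1].self_phys;
                if (n)
                        ring[n - 1].next_phys = ring[0].self_phys; /* wrap to slot 0 */
        }

Linking the last entry back to the first lets the hardware walk the chain indefinitely while software reuses slots behind the completion pointer.
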
drivers/dma/mediatek/mtk-hsdma.c
185
struct mtk_hsdma_ring ring;
drivers/dma/mediatek/mtk-hsdma.c
317
struct mtk_hsdma_ring *ring = &pc->ring;
drivers/dma/mediatek/mtk-hsdma.c
326
pc->sz_ring = 2 * MTK_DMA_SIZE * sizeof(*ring->txd);
drivers/dma/mediatek/mtk-hsdma.c
327
ring->txd = dma_alloc_coherent(hsdma2dev(hsdma), pc->sz_ring,
drivers/dma/mediatek/mtk-hsdma.c
328
&ring->tphys, GFP_NOWAIT);
drivers/dma/mediatek/mtk-hsdma.c
329
if (!ring->txd)
drivers/dma/mediatek/mtk-hsdma.c
332
ring->rxd = &ring->txd[MTK_DMA_SIZE];
drivers/dma/mediatek/mtk-hsdma.c
333
ring->rphys = ring->tphys + MTK_DMA_SIZE * sizeof(*ring->txd);
drivers/dma/mediatek/mtk-hsdma.c
334
ring->cur_tptr = 0;
drivers/dma/mediatek/mtk-hsdma.c
335
ring->cur_rptr = MTK_DMA_SIZE - 1;
drivers/dma/mediatek/mtk-hsdma.c
337
ring->cb = kzalloc_objs(*ring->cb, MTK_DMA_SIZE, GFP_NOWAIT);
drivers/dma/mediatek/mtk-hsdma.c
338
if (!ring->cb) {
drivers/dma/mediatek/mtk-hsdma.c
358
mtk_dma_write(hsdma, MTK_HSDMA_TX_BASE, ring->tphys);
drivers/dma/mediatek/mtk-hsdma.c
360
mtk_dma_write(hsdma, MTK_HSDMA_TX_CPU, ring->cur_tptr);
drivers/dma/mediatek/mtk-hsdma.c
362
mtk_dma_write(hsdma, MTK_HSDMA_RX_BASE, ring->rphys);
drivers/dma/mediatek/mtk-hsdma.c
364
mtk_dma_write(hsdma, MTK_HSDMA_RX_CPU, ring->cur_rptr);
drivers/dma/mediatek/mtk-hsdma.c
379
kfree(ring->cb);
drivers/dma/mediatek/mtk-hsdma.c
383
pc->sz_ring, ring->txd, ring->tphys);
drivers/dma/mediatek/mtk-hsdma.c
390
struct mtk_hsdma_ring *ring = &pc->ring;
drivers/dma/mediatek/mtk-hsdma.c
405
kfree(ring->cb);
drivers/dma/mediatek/mtk-hsdma.c
408
pc->sz_ring, ring->txd, ring->tphys);
drivers/dma/mediatek/mtk-hsdma.c
415
struct mtk_hsdma_ring *ring = &pc->ring;
drivers/dma/mediatek/mtk-hsdma.c
449
txd = &ring->txd[ring->cur_tptr];
drivers/dma/mediatek/mtk-hsdma.c
454
rxd = &ring->rxd[ring->cur_tptr];
drivers/dma/mediatek/mtk-hsdma.c
459
ring->cb[ring->cur_tptr].vd = &hvd->vd;
drivers/dma/mediatek/mtk-hsdma.c
462
ring->cur_tptr = MTK_HSDMA_NEXT_DESP_IDX(ring->cur_tptr,
drivers/dma/mediatek/mtk-hsdma.c
476
prev = MTK_HSDMA_LAST_DESP_IDX(ring->cur_tptr, MTK_DMA_SIZE);
drivers/dma/mediatek/mtk-hsdma.c
477
ring->cb[prev].flag = MTK_HSDMA_VDESC_FINISHED;
drivers/dma/mediatek/mtk-hsdma.c
487
mtk_dma_write(hsdma, MTK_HSDMA_TX_CPU, ring->cur_tptr);
drivers/dma/mediatek/mtk-hsdma.c
559
next = MTK_HSDMA_NEXT_DESP_IDX(pc->ring.cur_rptr,
drivers/dma/mediatek/mtk-hsdma.c
561
rxd = &pc->ring.rxd[next];
drivers/dma/mediatek/mtk-hsdma.c
571
cb = &pc->ring.cb[next];
drivers/dma/mediatek/mtk-hsdma.c
611
pc->ring.cur_rptr = next;
drivers/dma/mediatek/mtk-hsdma.c
621
mtk_dma_write(hsdma, MTK_HSDMA_RX_CPU, pc->ring.cur_rptr);
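Note: the mtk-hsdma entries above carve one coherent allocation into back-to-back TX and RX descriptor arrays, offsetting the RX CPU pointer and RX bus address by the size of the TX half. A small illustration of that split, with names that are mine rather than the driver's:

        #include <stddef.h>
        #include <stdint.h>

        struct demo_desc { uint32_t word[4]; };  /* one hardware descriptor */

        struct demo_split_ring {
                struct demo_desc *txd;   /* CPU view of the TX half */
                struct demo_desc *rxd;   /* CPU view of the RX half */
                uint64_t tphys;          /* bus address of the TX half */
                uint64_t rphys;          /* bus address of the RX half */
        };

        static void demo_split(struct demo_split_ring *r, struct demo_desc *cpu,
                               uint64_t bus, size_t n_per_ring)
        {
                r->txd = cpu;
                r->rxd = &cpu[n_per_ring];                   /* second half */
                r->tphys = bus;
                r->rphys = bus + n_per_ring * sizeof(*cpu);  /* same byte offset */
        }
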
drivers/dma/qcom/gpi.c
1255
struct gpi_ring *ring = &chan->ch_ring;
drivers/dma/qcom/gpi.c
1271
GPII_n_CH_k_CNTXT_0(ring->el_size, 0, chan->dir, GPI_CHTYPE_PROTO_GPI));
drivers/dma/qcom/gpi.c
1272
gpi_write_reg(gpii, chan->ch_cntxt_base_reg + CNTXT_1_R_LENGTH, ring->len);
drivers/dma/qcom/gpi.c
1273
gpi_write_reg(gpii, chan->ch_cntxt_base_reg + CNTXT_2_RING_BASE_LSB, ring->phys_addr);
drivers/dma/qcom/gpi.c
1275
upper_32_bits(ring->phys_addr));
drivers/dma/qcom/gpi.c
1277
upper_32_bits(ring->phys_addr));
drivers/dma/qcom/gpi.c
1293
struct gpi_ring *ring = &gpii->ev_ring;
drivers/dma/qcom/gpi.c
1306
GPII_n_EV_k_CNTXT_0(ring->el_size, GPI_INTTYPE_IRQ, GPI_CHTYPE_GPI_EV));
drivers/dma/qcom/gpi.c
1307
gpi_write_reg(gpii, base + CNTXT_1_R_LENGTH, ring->len);
drivers/dma/qcom/gpi.c
1308
gpi_write_reg(gpii, base + CNTXT_2_RING_BASE_LSB, lower_32_bits(ring->phys_addr));
drivers/dma/qcom/gpi.c
1309
gpi_write_reg(gpii, base + CNTXT_3_RING_BASE_MSB, upper_32_bits(ring->phys_addr));
drivers/dma/qcom/gpi.c
1311
upper_32_bits(ring->phys_addr));
drivers/dma/qcom/gpi.c
1320
ring->wp = (ring->base + ring->len - ring->el_size);
drivers/dma/qcom/gpi.c
1329
gpi_write_ev_db(gpii, ring, ring->wp);
drivers/dma/qcom/gpi.c
1335
static int gpi_ring_num_elements_avail(const struct gpi_ring * const ring)
drivers/dma/qcom/gpi.c
1339
if (ring->wp < ring->rp) {
drivers/dma/qcom/gpi.c
1340
elements = ((ring->rp - ring->wp) / ring->el_size) - 1;
drivers/dma/qcom/gpi.c
1342
elements = (ring->rp - ring->base) / ring->el_size;
drivers/dma/qcom/gpi.c
1343
elements += ((ring->base + ring->len - ring->wp) / ring->el_size) - 1;
drivers/dma/qcom/gpi.c
1349
static int gpi_ring_add_element(struct gpi_ring *ring, void **wp)
drivers/dma/qcom/gpi.c
1351
if (gpi_ring_num_elements_avail(ring) <= 0)
drivers/dma/qcom/gpi.c
1354
*wp = ring->wp;
drivers/dma/qcom/gpi.c
1355
ring->wp += ring->el_size;
drivers/dma/qcom/gpi.c
1356
if (ring->wp >= (ring->base + ring->len))
drivers/dma/qcom/gpi.c
1357
ring->wp = ring->base;
drivers/dma/qcom/gpi.c
1365
static void gpi_ring_recycle_ev_element(struct gpi_ring *ring)
drivers/dma/qcom/gpi.c
1368
ring->wp += ring->el_size;
drivers/dma/qcom/gpi.c
1369
if (ring->wp >= (ring->base + ring->len))
drivers/dma/qcom/gpi.c
1370
ring->wp = ring->base;
drivers/dma/qcom/gpi.c
1373
ring->rp += ring->el_size;
drivers/dma/qcom/gpi.c
1374
if (ring->rp >= (ring->base + ring->len))
drivers/dma/qcom/gpi.c
1375
ring->rp = ring->base;
drivers/dma/qcom/gpi.c
1381
static void gpi_free_ring(struct gpi_ring *ring,
drivers/dma/qcom/gpi.c
1384
dma_free_coherent(gpii->gpi_dev->dev, ring->alloc_size,
drivers/dma/qcom/gpi.c
1385
ring->pre_aligned, ring->dma_handle);
drivers/dma/qcom/gpi.c
1386
memset(ring, 0, sizeof(*ring));
drivers/dma/qcom/gpi.c
1390
static int gpi_alloc_ring(struct gpi_ring *ring, u32 elements,
drivers/dma/qcom/gpi.c
1401
ring->alloc_size = (len + (len - 1));
drivers/dma/qcom/gpi.c
1405
ring->alloc_size);
drivers/dma/qcom/gpi.c
1407
ring->pre_aligned = dma_alloc_coherent(gpii->gpi_dev->dev,
drivers/dma/qcom/gpi.c
1408
ring->alloc_size,
drivers/dma/qcom/gpi.c
1409
&ring->dma_handle, GFP_KERNEL);
drivers/dma/qcom/gpi.c
1410
if (!ring->pre_aligned) {
drivers/dma/qcom/gpi.c
1412
ring->alloc_size);
drivers/dma/qcom/gpi.c
1417
ring->phys_addr = (ring->dma_handle + (len - 1)) & ~(len - 1);
drivers/dma/qcom/gpi.c
1418
ring->base = ring->pre_aligned + (ring->phys_addr - ring->dma_handle);
drivers/dma/qcom/gpi.c
1419
ring->rp = ring->base;
drivers/dma/qcom/gpi.c
1420
ring->wp = ring->base;
drivers/dma/qcom/gpi.c
1421
ring->len = len;
drivers/dma/qcom/gpi.c
1422
ring->el_size = el_size;
drivers/dma/qcom/gpi.c
1423
ring->elements = ring->len / ring->el_size;
drivers/dma/qcom/gpi.c
1424
memset(ring->base, 0, ring->len);
drivers/dma/qcom/gpi.c
1425
ring->configured = true;
drivers/dma/qcom/gpi.c
1432
&ring->dma_handle, &ring->phys_addr, ring->len,
drivers/dma/qcom/gpi.c
1433
ring->el_size, ring->elements);
drivers/dma/qcom/gpi.c
537
static void gpi_ring_recycle_ev_element(struct gpi_ring *ring);
drivers/dma/qcom/gpi.c
538
static int gpi_ring_add_element(struct gpi_ring *ring, void **wp);
drivers/dma/qcom/gpi.c
551
static inline phys_addr_t to_physical(const struct gpi_ring *const ring,
drivers/dma/qcom/gpi.c
554
return ring->phys_addr + (addr - ring->base);
drivers/dma/qcom/gpi.c
557
static inline void *to_virtual(const struct gpi_ring *const ring, phys_addr_t addr)
drivers/dma/qcom/gpi.c
559
return ring->base + (addr - ring->phys_addr);
drivers/dma/qcom/gpi.c
718
struct gpi_ring *ring, void *wp)
drivers/dma/qcom/gpi.c
723
p_wp = to_physical(ring, wp);
drivers/dma/qcom/gpi.c
729
struct gpi_ring *ring, void *wp)
drivers/dma/qcom/gpi.c
733
p_wp = ring->phys_addr + (wp - ring->base);
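Note: the gpi_alloc_ring() lines above over-allocate by len - 1 bytes so the ring can be placed on a len-aligned bus address, then offset the CPU pointer by the same amount. A hedged sketch of that alignment trick with invented names, assuming len is a power of two so the mask arithmetic holds:

        #include <stdint.h>

        struct demo_ring_map {
                uint64_t dma_handle;    /* bus address returned by the allocator */
                uint64_t phys_addr;     /* aligned bus address actually used */
                uint8_t *pre_aligned;   /* CPU pointer returned by the allocator */
                uint8_t *base;          /* CPU pointer matching phys_addr */
        };

        static void demo_align_ring(struct demo_ring_map *m, uint64_t len)
        {
                /* round the bus address up to the next multiple of len */
                m->phys_addr = (m->dma_handle + (len - 1)) & ~(len - 1);
                /* shift the CPU view by the same number of bytes */
                m->base = m->pre_aligned + (m->phys_addr - m->dma_handle);
        }
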
drivers/dma/ti/k3-udma.c
654
struct k3_ring *ring = NULL;
drivers/dma/ti/k3-udma.c
659
ring = uc->rflow->fd_ring;
drivers/dma/ti/k3-udma.c
663
ring = uc->tchan->t_ring;
drivers/dma/ti/k3-udma.c
678
return k3_ringacc_ring_push(ring, &paddr);
drivers/dma/ti/k3-udma.c
694
struct k3_ring *ring = NULL;
drivers/dma/ti/k3-udma.c
699
ring = uc->rflow->r_ring;
drivers/dma/ti/k3-udma.c
703
ring = uc->tchan->tc_ring;
drivers/dma/ti/k3-udma.c
709
ret = k3_ringacc_ring_pop(ring, addr);
drivers/dma/xgene-dma.c
1032
static void xgene_dma_wr_ring_state(struct xgene_dma_ring *ring)
drivers/dma/xgene-dma.c
1036
iowrite32(ring->num, ring->pdma->csr_ring + XGENE_DMA_RING_STATE);
drivers/dma/xgene-dma.c
1039
iowrite32(ring->state[i], ring->pdma->csr_ring +
drivers/dma/xgene-dma.c
1043
static void xgene_dma_clr_ring_state(struct xgene_dma_ring *ring)
drivers/dma/xgene-dma.c
1045
memset(ring->state, 0, sizeof(u32) * XGENE_DMA_RING_NUM_CONFIG);
drivers/dma/xgene-dma.c
1046
xgene_dma_wr_ring_state(ring);
drivers/dma/xgene-dma.c
1049
static void xgene_dma_setup_ring(struct xgene_dma_ring *ring)
drivers/dma/xgene-dma.c
1051
void *ring_cfg = ring->state;
drivers/dma/xgene-dma.c
1052
u64 addr = ring->desc_paddr;
drivers/dma/xgene-dma.c
1055
ring->slots = ring->size / XGENE_DMA_RING_WQ_DESC_SIZE;
drivers/dma/xgene-dma.c
1058
xgene_dma_clr_ring_state(ring);
drivers/dma/xgene-dma.c
1063
if (ring->owner == XGENE_DMA_RING_OWNER_DMA) {
drivers/dma/xgene-dma.c
1076
XGENE_DMA_RING_SIZE_SET(ring_cfg, ring->cfgsize);
drivers/dma/xgene-dma.c
1079
xgene_dma_wr_ring_state(ring);
drivers/dma/xgene-dma.c
1082
iowrite32(XGENE_DMA_RING_ID_SETUP(ring->id),
drivers/dma/xgene-dma.c
1083
ring->pdma->csr_ring + XGENE_DMA_RING_ID);
drivers/dma/xgene-dma.c
1086
iowrite32(XGENE_DMA_RING_ID_BUF_SETUP(ring->num),
drivers/dma/xgene-dma.c
1087
ring->pdma->csr_ring + XGENE_DMA_RING_ID_BUF);
drivers/dma/xgene-dma.c
1089
if (ring->owner != XGENE_DMA_RING_OWNER_CPU)
drivers/dma/xgene-dma.c
1093
for (i = 0; i < ring->slots; i++) {
drivers/dma/xgene-dma.c
1096
desc = &ring->desc_hw[i];
drivers/dma/xgene-dma.c
1101
val = ioread32(ring->pdma->csr_ring + XGENE_DMA_RING_NE_INT_MODE);
drivers/dma/xgene-dma.c
1102
XGENE_DMA_RING_NE_INT_MODE_SET(val, ring->buf_num);
drivers/dma/xgene-dma.c
1103
iowrite32(val, ring->pdma->csr_ring + XGENE_DMA_RING_NE_INT_MODE);
drivers/dma/xgene-dma.c
1106
static void xgene_dma_clear_ring(struct xgene_dma_ring *ring)
drivers/dma/xgene-dma.c
1110
if (ring->owner == XGENE_DMA_RING_OWNER_CPU) {
drivers/dma/xgene-dma.c
1112
val = ioread32(ring->pdma->csr_ring +
drivers/dma/xgene-dma.c
1114
XGENE_DMA_RING_NE_INT_MODE_RESET(val, ring->buf_num);
drivers/dma/xgene-dma.c
1115
iowrite32(val, ring->pdma->csr_ring +
drivers/dma/xgene-dma.c
1120
ring_id = XGENE_DMA_RING_ID_SETUP(ring->id);
drivers/dma/xgene-dma.c
1121
iowrite32(ring_id, ring->pdma->csr_ring + XGENE_DMA_RING_ID);
drivers/dma/xgene-dma.c
1123
iowrite32(0, ring->pdma->csr_ring + XGENE_DMA_RING_ID_BUF);
drivers/dma/xgene-dma.c
1124
xgene_dma_clr_ring_state(ring);
drivers/dma/xgene-dma.c
1127
static void xgene_dma_set_ring_cmd(struct xgene_dma_ring *ring)
drivers/dma/xgene-dma.c
1129
ring->cmd_base = ring->pdma->csr_ring_cmd +
drivers/dma/xgene-dma.c
1130
XGENE_DMA_RING_CMD_BASE_OFFSET((ring->num -
drivers/dma/xgene-dma.c
1133
ring->cmd = ring->cmd_base + XGENE_DMA_RING_CMD_OFFSET;
drivers/dma/xgene-dma.c
1165
static void xgene_dma_delete_ring_one(struct xgene_dma_ring *ring)
drivers/dma/xgene-dma.c
1168
xgene_dma_clear_ring(ring);
drivers/dma/xgene-dma.c
1171
if (ring->desc_vaddr) {
drivers/dma/xgene-dma.c
1172
dma_free_coherent(ring->pdma->dev, ring->size,
drivers/dma/xgene-dma.c
1173
ring->desc_vaddr, ring->desc_paddr);
drivers/dma/xgene-dma.c
1174
ring->desc_vaddr = NULL;
drivers/dma/xgene-dma.c
1185
struct xgene_dma_ring *ring,
drivers/dma/xgene-dma.c
1191
ring->pdma = chan->pdma;
drivers/dma/xgene-dma.c
1192
ring->cfgsize = cfgsize;
drivers/dma/xgene-dma.c
1193
ring->num = chan->pdma->ring_num++;
drivers/dma/xgene-dma.c
1194
ring->id = XGENE_DMA_RING_ID_GET(ring->owner, ring->buf_num);
drivers/dma/xgene-dma.c
1199
ring->size = ret;
drivers/dma/xgene-dma.c
1202
ring->desc_vaddr = dma_alloc_coherent(chan->dev, ring->size,
drivers/dma/xgene-dma.c
1203
&ring->desc_paddr, GFP_KERNEL);
drivers/dma/xgene-dma.c
1204
if (!ring->desc_vaddr) {
drivers/dma/xgene-dma.c
1210
xgene_dma_set_ring_cmd(ring);
drivers/dma/xgene-dma.c
1211
xgene_dma_setup_ring(ring);
drivers/dma/xgene-dma.c
597
struct xgene_dma_ring *ring = &chan->tx_ring;
drivers/dma/xgene-dma.c
601
desc_hw = &ring->desc_hw[ring->head];
drivers/dma/xgene-dma.c
607
if (++ring->head == ring->slots)
drivers/dma/xgene-dma.c
608
ring->head = 0;
drivers/dma/xgene-dma.c
618
desc_hw = &ring->desc_hw[ring->head];
drivers/dma/xgene-dma.c
620
if (++ring->head == ring->slots)
drivers/dma/xgene-dma.c
621
ring->head = 0;
drivers/dma/xgene-dma.c
632
2 : 1, ring->cmd);
drivers/dma/xgene-dma.c
689
struct xgene_dma_ring *ring = &chan->rx_ring;
drivers/dma/xgene-dma.c
705
desc_hw = &ring->desc_hw[ring->head];
drivers/dma/xgene-dma.c
712
if (++ring->head == ring->slots)
drivers/dma/xgene-dma.c
713
ring->head = 0;
drivers/dma/xgene-dma.c
740
iowrite32(-1, ring->cmd);
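Note: the xgene-dma entries above derive the slot count from the ring size divided by the descriptor size and wrap the head index with an explicit compare rather than a mask, so the slot count need not be a power of two. A sketch of that wrap, names mine:

        #include <stddef.h>

        struct demo_wq_ring {
                size_t head;    /* next slot to fill */
                size_t slots;   /* total descriptor slots */
        };

        static size_t demo_ring_next_slot(struct demo_wq_ring *r)
        {
                size_t slot = r->head;

                if (++r->head == r->slots)
                        r->head = 0;    /* wrap without a power-of-two requirement */
                return slot;
        }
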
drivers/gpib/tnt4882/mite.h
43
struct mite_dma_chain ring[MITE_RING_SIZE];
drivers/gpu/drm/amd/amdgpu/amdgpu.h
1521
struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu.h
1523
struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu.h
1534
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu.h
1537
ssize_t amdgpu_get_soft_full_reset_mask(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu.h
622
void (*flush_hdp)(struct amdgpu_device *adev, struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu.h
625
struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c
654
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c
660
ring = &adev->gfx.compute_ring[0];
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c
663
ring = &adev->sdma.instance[0].ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c
666
ring = &adev->sdma.instance[1].ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c
688
ret = amdgpu_ib_schedule(ring, 1, ib, job, &f);
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c
821
struct amdgpu_ring *kiq_ring = &kiq->ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c
823
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c
836
ring = kzalloc_obj(*ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c
837
if (!ring) {
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c
843
ring->doorbell_index = doorbell_off;
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c
844
ring->funcs = ring_funcs;
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c
854
kiq->pmf->kiq_unmap_queues(kiq_ring, ring, RESET_QUEUES, 0, 0);
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c
868
kfree(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_arcturus.c
290
struct amdgpu_ring *ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_arcturus.c
292
if (!amdgpu_ring_sched_ready(ring))
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_arcturus.c
297
drm_sched_stop(&ring->sched, NULL);
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_arcturus.c
298
r = amdgpu_fence_wait_empty(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_arcturus.c
302
drm_sched_start(&ring->sched, 0);
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gfx_v10.c
294
struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gfx_v10_3.c
280
struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gfx_v11.c
265
struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gfx_v9.c
305
struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[inst].ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
295
struct amdgpu_ring *ring = &adev->cper.ring_buf;
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
318
amdgpu_cper_ring_write(ring, fatal, fatal->record_length);
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
327
struct amdgpu_ring *ring = &adev->cper.ring_buf;
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
343
amdgpu_cper_ring_write(ring, bp_threshold, bp_threshold->record_length);
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
371
struct amdgpu_ring *ring = &adev->cper.ring_buf;
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
420
amdgpu_cper_ring_write(ring, corrected, corrected->record_length);
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
426
static bool amdgpu_cper_is_hdr(struct amdgpu_ring *ring, u64 pos)
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
430
chdr = (struct cper_hdr *)&(ring->ring[pos]);
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
434
static u32 amdgpu_cper_ring_get_ent_sz(struct amdgpu_ring *ring, u64 pos)
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
440
chdr = (struct cper_hdr *)&(ring->ring[pos]);
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
441
chunk = ring->ring_size - (pos << 2);
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
449
if (ring->count_dw)
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
452
for (p = pos + 1; p <= ring->buf_mask; p++) {
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
453
chdr = (struct cper_hdr *)&(ring->ring[p]);
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
467
void amdgpu_cper_ring_write(struct amdgpu_ring *ring, void *src, int count)
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
474
if (count >= ring->ring_size - 4) {
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
475
dev_err(ring->adev->dev,
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
477
count, ring->ring_size - 4);
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
482
mutex_lock(&ring->adev->cper.ring_lock);
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
484
wptr_old = ring->wptr;
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
485
rptr = *ring->rptr_cpu_addr & ring->ptr_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
488
ent_sz = amdgpu_cper_ring_get_ent_sz(ring, ring->wptr);
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
491
memcpy(&ring->ring[ring->wptr], s, chunk);
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
493
ring->wptr += (chunk >> 2);
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
494
ring->wptr &= ring->ptr_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
499
if (ring->count_dw < rec_cnt_dw)
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
500
ring->count_dw = 0;
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
503
if (((wptr_old < rptr) && (rptr <= ring->wptr)) ||
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
504
((ring->wptr < wptr_old) && (wptr_old < rptr)) ||
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
505
((rptr <= ring->wptr) && (ring->wptr < wptr_old))) {
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
506
pos = (ring->wptr + 1) & ring->ptr_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
509
ent_sz = amdgpu_cper_ring_get_ent_sz(ring, pos);
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
512
rptr &= ring->ptr_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
513
*ring->rptr_cpu_addr = rptr;
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
516
} while (!amdgpu_cper_is_hdr(ring, rptr));
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
519
if (ring->count_dw >= rec_cnt_dw)
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
520
ring->count_dw -= rec_cnt_dw;
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
521
mutex_unlock(&ring->adev->cper.ring_lock);
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
524
static u64 amdgpu_cper_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
526
return *(ring->rptr_cpu_addr);
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
529
static u64 amdgpu_cper_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
531
return ring->wptr;
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
544
struct amdgpu_ring *ring = &(adev->cper.ring_buf);
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
548
ring->adev = NULL;
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
549
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
550
ring->use_doorbell = false;
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
551
ring->no_scheduler = true;
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
552
ring->funcs = &cper_ring_funcs;
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
554
sprintf(ring->name, "cper");
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.c
555
return amdgpu_ring_init(adev, ring, CPER_MAX_RING_SIZE, NULL, 0,
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.h
100
void amdgpu_cper_ring_write(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_cper.h
65
void *ring[CPER_MAX_ALLOWED_COUNT];
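Note: the amdgpu_cper_ring_write() lines above advance the write pointer in dwords and AND it with ptr_mask after each chunk, which wraps a power-of-two ring without a branch. A minimal sketch of that write-pointer wrap (invented names, and without the record-boundary and read-pointer handling the driver layers on top):

        #include <stdint.h>

        struct demo_dw_ring {
                uint32_t *buf;          /* ring storage, size_dw entries */
                uint64_t  wptr;         /* write offset in dwords */
                uint64_t  ptr_mask;     /* size_dw - 1, size_dw a power of two */
        };

        static void demo_ring_write(struct demo_dw_ring *r, const uint32_t *src,
                                    uint64_t count_dw)
        {
                for (uint64_t i = 0; i < count_dw; i++) {
                        r->buf[r->wptr] = src[i];
                        r->wptr = (r->wptr + 1) & r->ptr_mask;  /* wrap in place */
                }
        }

The mask-based wrap is why the ring size must be a power of two; compare the explicit-compare wrap sketched after the xgene-dma entries.
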
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
1022
struct amdgpu_ring *ring = amdgpu_job_ring(job);
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
1023
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
1028
if (!ring->funcs->parse_cs && !ring->funcs->patch_cs_in_place)
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
1058
if (ring->funcs->parse_cs) {
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
1062
r = amdgpu_ring_parse_cs(ring, p, job, ib);
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
1070
r = amdgpu_ring_patch_cs_in_place(ring, p, job, ib);
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
1112
struct amdgpu_ring *ring = to_amdgpu_ring(sched);
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
1114
if (amdgpu_vmid_uses_reserved(vm, ring->vm_hub))
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
1505
wait->in.ring, &entity);
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
1553
user->ring, &entity);
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
345
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
355
ring = amdgpu_job_ring(job);
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
359
if (ring->no_user_submission)
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
363
if (p->uf_bo && ring->funcs->no_user_fence)
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
388
r = amdgpu_ib_get(p->adev, vm, ring->funcs->parse_cs ?
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
424
deps[i].ring, &entity);
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
83
chunk_ib->ring, &entity);
drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c
205
const u32 ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c
257
if (cmpxchg(&ctx->entities[hw_ip][ring], NULL, entity))
drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c
434
u32 ring, struct drm_sched_entity **entity)
drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c
452
if (ring >= amdgpu_ctx_num_entities[hw_ip]) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c
454
"invalid ring: %d %d\n", hw_ip, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c
458
if (ctx->entities[hw_ip][ring] == NULL) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c
459
r = amdgpu_ctx_init_entity(ctx, hw_ip, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c
464
ctx_entity = &ctx->entities[hw_ip][ring]->entity;
drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.h
77
u32 ring, struct drm_sched_entity **entity);
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1659
struct amdgpu_ring *ring = adev->rings[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1661
if (!amdgpu_ring_sched_ready(ring))
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1663
drm_sched_wqueue_stop(&ring->sched);
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1675
struct amdgpu_ring *ring = adev->rings[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1677
if (!amdgpu_ring_sched_ready(ring))
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1679
drm_sched_wqueue_start(&ring->sched);
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1790
static void amdgpu_ib_preempt_fences_swap(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1793
struct amdgpu_fence_driver *drv = &ring->fence_drv;
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1796
last_seq = atomic_read(&ring->fence_drv.last_seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1797
sync_seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1848
static void amdgpu_ib_preempt_mark_partial_job(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1854
struct amdgpu_fence_driver *drv = &ring->fence_drv;
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1855
struct drm_gpu_scheduler *sched = &ring->sched;
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1858
if (ring->funcs->type != AMDGPU_RING_TYPE_GFX)
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1891
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1898
ring = adev->rings[val];
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1900
if (!amdgpu_ring_sched_ready(ring) ||
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1901
!ring->funcs->preempt_ib)
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1905
if (ring->trail_seq != le32_to_cpu(*ring->trail_fence_cpu_addr))
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1908
length = ring->fence_drv.num_fences_mask + 1;
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1919
drm_sched_wqueue_stop(&ring->sched);
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1922
r = amdgpu_ring_preempt_ib(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1924
drm_warn(adev_to_drm(adev), "failed to preempt ring %d\n", ring->idx);
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1928
amdgpu_fence_process(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1930
if (atomic_read(&ring->fence_drv.last_seq) !=
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1931
ring->fence_drv.sync_seq) {
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1932
drm_info(adev_to_drm(adev), "ring %d was preempted\n", ring->idx);
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1934
amdgpu_ib_preempt_mark_partial_job(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1937
amdgpu_ib_preempt_fences_swap(ring, fences);
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1939
amdgpu_fence_driver_force_completion(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1942
amdgpu_ib_preempt_job_recovery(&ring->sched);
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1945
amdgpu_fence_wait_empty(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1953
drm_sched_wqueue_start(&ring->sched);
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
2054
struct amdgpu_ring *ring = adev->rings[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
2056
if (!ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
2059
amdgpu_debugfs_ring_init(adev, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
268
if (coredump->ring) {
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
271
coredump->ring->funcs->type,
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
272
coredump->ring->name);
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
297
struct amdgpu_ring *ring = coredump->adev->rings[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
299
drm_printf(&p, "ring name: %s\n", ring->name);
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
301
amdgpu_ring_get_rptr(ring),
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
302
amdgpu_ring_get_wptr(ring),
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
303
ring->buf_mask);
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
305
ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
309
while (j < ring->ring_size) {
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
310
drm_printf(&p, "0x%x \t 0x%x\n", j, ring->ring[j / 4]);
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
354
coredump->ring = to_amdgpu_ring(s_job->sched);
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.h
40
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
2930
struct amdgpu_ring *ring = adev->rings[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
2933
if (!ring || ring->no_scheduler)
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
2936
switch (ring->funcs->type) {
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
2952
args.credit_limit = ring->num_hw_submission;
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
2953
args.score = ring->sched_score;
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
2954
args.name = ring->name;
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
2956
r = drm_sched_init(&ring->sched, &args);
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
2960
ring->name);
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
2963
r = amdgpu_uvd_entity_init(adev, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
2967
ring->name);
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
2970
r = amdgpu_vce_entity_init(adev, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
2974
ring->name);
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
5661
struct amdgpu_ring *ring = adev->rings[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
5663
if (!amdgpu_ring_sched_ready(ring))
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
5666
if (amdgpu_fence_count_emitted(ring))
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
5829
struct amdgpu_ring *ring = adev->rings[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
5831
if (!amdgpu_ring_sched_ready(ring))
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
5835
amdgpu_fence_driver_force_completion(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
6328
struct amdgpu_ring *ring = tmp_adev->rings[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
6330
if (!amdgpu_ring_sched_ready(ring))
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
6333
drm_sched_stop(&ring->sched, job ? &job->base : NULL);
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
6336
amdgpu_job_stop_all_jobs_on_sched(&ring->sched);
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
6412
struct amdgpu_ring *ring = tmp_adev->rings[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
6414
if (!amdgpu_ring_sched_ready(ring))
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
6417
drm_sched_start(&ring->sched, 0);
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
7317
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
7326
if (ring && ring->funcs->emit_hdp_flush) {
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
7327
amdgpu_ring_emit_hdp_flush(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
7331
if (!ring && amdgpu_sriov_runtime(adev)) {
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
7336
amdgpu_hdp_flush(adev, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
7340
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
7349
amdgpu_hdp_invalidate(adev, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
7498
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
7501
struct amdgpu_isolation *isolation = &adev->isolation[ring->xcp_id];
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
7511
if (ring->funcs->type != AMDGPU_RING_TYPE_GFX &&
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
7512
ring->funcs->type != AMDGPU_RING_TYPE_COMPUTE)
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
7562
dep = amdgpu_sync_peek_fence(&isolation->prev, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
7645
ssize_t amdgpu_get_soft_full_reset_mask(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
7649
if (!ring || !ring->adev)
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
7652
if (amdgpu_device_should_recover_gpu(ring->adev))
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
7655
if (unlikely(!ring->adev->debug_disable_soft_recovery) &&
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
7656
!amdgpu_sriov_vf(ring->adev) && ring->funcs->soft_recovery)
drivers/gpu/drm/amd/amdgpu/amdgpu_drv.c
2844
struct amdgpu_ring *ring = adev->rings[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_drv.c
2846
if (ring && ring->sched.ready) {
drivers/gpu/drm/amd/amdgpu/amdgpu_drv.c
2847
ret = amdgpu_fence_wait_empty(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
112
int amdgpu_fence_emit(struct amdgpu_ring *ring, struct amdgpu_fence *af,
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
115
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
122
af->ring = ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
124
seq = ++ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
126
&ring->fence_drv.lock,
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
127
adev->fence_context + ring->idx, seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
130
amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr,
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
135
ptr = &ring->fence_drv.fences[seq & ring->fence_drv.num_fences_mask];
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
172
int amdgpu_fence_emit_polling(struct amdgpu_ring *ring, uint32_t *s,
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
181
seq = ++ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
182
r = amdgpu_fence_wait_polling(ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
183
seq - ring->fence_drv.num_fences_mask,
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
188
amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr,
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
203
static void amdgpu_fence_schedule_fallback(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
205
mod_timer(&ring->fence_drv.fallback_timer,
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
220
bool amdgpu_fence_process(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
222
struct amdgpu_fence_driver *drv = &ring->fence_drv;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
223
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
227
last_seq = atomic_read(&ring->fence_drv.last_seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
228
seq = amdgpu_fence_read(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
232
if (timer_delete(&ring->fence_drv.fallback_timer) &&
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
233
seq != ring->fence_drv.sync_seq)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
234
amdgpu_fence_schedule_fallback(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
280
struct amdgpu_ring *ring = timer_container_of(ring, t,
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
283
if (amdgpu_fence_process(ring))
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
284
dev_warn(ring->adev->dev,
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
286
ring->name);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
297
int amdgpu_fence_wait_empty(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
299
uint64_t seq = READ_ONCE(ring->fence_drv.sync_seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
306
ptr = &ring->fence_drv.fences[seq & ring->fence_drv.num_fences_mask];
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
330
signed long amdgpu_fence_wait_polling(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
335
while ((int32_t)(wait_seq - amdgpu_fence_read(ring)) > 0 && timeout > 0) {
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
350
unsigned int amdgpu_fence_count_emitted(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
358
emitted -= atomic_read(&ring->fence_drv.last_seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
359
emitted += READ_ONCE(ring->fence_drv.sync_seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
370
u64 amdgpu_fence_last_unsignaled_time_us(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
372
struct amdgpu_fence_driver *drv = &ring->fence_drv;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
376
last_seq = atomic_read(&ring->fence_drv.last_seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
377
sync_seq = READ_ONCE(ring->fence_drv.sync_seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
401
void amdgpu_fence_update_start_timestamp(struct amdgpu_ring *ring, uint32_t seq, ktime_t timestamp)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
403
struct amdgpu_fence_driver *drv = &ring->fence_drv;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
427
int amdgpu_fence_driver_start_ring(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
431
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
434
if (ring->funcs->type != AMDGPU_RING_TYPE_UVD) {
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
435
ring->fence_drv.cpu_addr = ring->fence_cpu_addr;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
436
ring->fence_drv.gpu_addr = ring->fence_gpu_addr;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
440
ring->fence_drv.cpu_addr = adev->uvd.inst[ring->me].cpu_addr + index;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
441
ring->fence_drv.gpu_addr = adev->uvd.inst[ring->me].gpu_addr + index;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
443
amdgpu_fence_write(ring, atomic_read(&ring->fence_drv.last_seq));
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
445
ring->fence_drv.irq_src = irq_src;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
446
ring->fence_drv.irq_type = irq_type;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
447
ring->fence_drv.initialized = true;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
450
ring->name, ring->fence_drv.gpu_addr);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
463
int amdgpu_fence_driver_init_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
465
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
470
if (!is_power_of_2(ring->num_hw_submission))
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
473
ring->fence_drv.cpu_addr = NULL;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
474
ring->fence_drv.gpu_addr = 0;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
475
ring->fence_drv.sync_seq = 0;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
476
atomic_set(&ring->fence_drv.last_seq, 0);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
477
ring->fence_drv.initialized = false;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
479
timer_setup(&ring->fence_drv.fallback_timer, amdgpu_fence_fallback, 0);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
481
ring->fence_drv.num_fences_mask = ring->num_hw_submission * 2 - 1;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
482
spin_lock_init(&ring->fence_drv.lock);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
483
ring->fence_drv.fences = kcalloc(ring->num_hw_submission * 2, sizeof(void *),
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
486
if (!ring->fence_drv.fences)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
520
static bool amdgpu_fence_need_ring_interrupt_restore(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
522
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
525
switch (ring->funcs->type) {
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
558
struct amdgpu_ring *ring = adev->rings[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
560
if (!ring || !ring->fence_drv.initialized)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
565
r = amdgpu_fence_wait_empty(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
570
amdgpu_fence_driver_force_completion(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
573
ring->fence_drv.irq_src &&
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
574
amdgpu_fence_need_ring_interrupt_restore(ring))
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
575
amdgpu_irq_put(adev, ring->fence_drv.irq_src,
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
576
ring->fence_drv.irq_type);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
578
timer_delete_sync(&ring->fence_drv.fallback_timer);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
588
struct amdgpu_ring *ring = adev->rings[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
590
if (!ring || !ring->fence_drv.initialized || !ring->fence_drv.irq_src)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
605
struct amdgpu_ring *ring = adev->rings[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
607
if (!ring || !ring->fence_drv.initialized)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
616
if (ring->sched.ops)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
617
drm_sched_fini(&ring->sched);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
619
for (j = 0; j <= ring->fence_drv.num_fences_mask; ++j)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
620
dma_fence_put(ring->fence_drv.fences[j]);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
621
kfree(ring->fence_drv.fences);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
622
ring->fence_drv.fences = NULL;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
623
ring->fence_drv.initialized = false;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
63
static void amdgpu_fence_write(struct amdgpu_ring *ring, u32 seq)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
644
struct amdgpu_ring *ring = adev->rings[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
646
if (!ring || !ring->fence_drv.initialized)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
65
struct amdgpu_fence_driver *drv = &ring->fence_drv;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
650
if (ring->fence_drv.irq_src &&
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
651
amdgpu_fence_need_ring_interrupt_restore(ring))
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
652
amdgpu_irq_get(adev, ring->fence_drv.irq_src,
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
653
ring->fence_drv.irq_type);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
664
void amdgpu_fence_driver_set_error(struct amdgpu_ring *ring, int error)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
666
struct amdgpu_fence_driver *drv = &ring->fence_drv;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
687
void amdgpu_fence_driver_force_completion(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
689
amdgpu_fence_driver_set_error(ring, -ECANCELED);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
690
amdgpu_fence_write(ring, ring->fence_drv.sync_seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
691
amdgpu_fence_process(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
721
struct amdgpu_ring *ring = af->ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
726
last_seq = amdgpu_fence_read(ring) & ring->fence_drv.num_fences_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
727
seq = ring->fence_drv.sync_seq & ring->fence_drv.num_fences_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
730
spin_lock_irqsave(&ring->fence_drv.lock, flags);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
733
last_seq &= ring->fence_drv.num_fences_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
735
ptr = &ring->fence_drv.fences[last_seq];
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
751
spin_unlock_irqrestore(&ring->fence_drv.lock, flags);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
755
amdgpu_fence_driver_force_completion(af->ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
756
af->ring->ring_backup_entries_to_copy = 0;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
762
af->wptr = af->ring->wptr;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
765
static void amdgpu_ring_backup_unprocessed_command(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
768
unsigned int first_idx = start_wptr & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
769
unsigned int last_idx = end_wptr & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
773
for (i = first_idx; i != last_idx; ++i, i &= ring->buf_mask)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
774
ring->ring_backup[ring->ring_backup_entries_to_copy++] = ring->ring[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
777
void amdgpu_ring_backup_unprocessed_commands(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
786
last_seq = amdgpu_fence_read(ring) & ring->fence_drv.num_fences_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
787
seq = ring->fence_drv.sync_seq & ring->fence_drv.num_fences_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
788
wptr = ring->fence_drv.signalled_wptr;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
789
ring->ring_backup_entries_to_copy = 0;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
79
static u32 amdgpu_fence_read(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
793
last_seq &= ring->fence_drv.num_fences_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
795
ptr = &ring->fence_drv.fences[last_seq];
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
807
amdgpu_ring_backup_unprocessed_command(ring, wptr,
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
81
struct amdgpu_fence_driver *drv = &ring->fence_drv;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
811
amdgpu_ring_backup_unprocessed_command(ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
833
return (const char *)to_amdgpu_fence(f)->ring->name;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
846
if (!timer_pending(&to_amdgpu_fence(f)->ring->fence_drv.fallback_timer))
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
847
amdgpu_fence_schedule_fallback(to_amdgpu_fence(f)->ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
897
struct amdgpu_ring *ring = adev->rings[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
899
if (!ring || !ring->fence_drv.initialized)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
902
amdgpu_fence_process(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
904
seq_printf(m, "--- ring %d (%s) ---\n", i, ring->name);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
906
atomic_read(&ring->fence_drv.last_seq));
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
908
ring->fence_drv.sync_seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
910
if (ring->funcs->type == AMDGPU_RING_TYPE_GFX ||
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
911
ring->funcs->type == AMDGPU_RING_TYPE_SDMA) {
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
913
le32_to_cpu(*ring->trail_fence_cpu_addr));
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
915
ring->trail_seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
918
if (ring->funcs->type != AMDGPU_RING_TYPE_GFX)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
923
le32_to_cpu(*(ring->fence_drv.cpu_addr + 2)));
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
926
le32_to_cpu(*(ring->fence_drv.cpu_addr + 4)));
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
929
le32_to_cpu(*(ring->fence_drv.cpu_addr + 6)));
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
94
af->fence_wptr_start = af->ring->wptr;
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
99
af->fence_wptr_end = af->ring->wptr;
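Note: the amdgpu_fence.c entries above size the fence array at twice the hardware submission count and index it with seq & num_fences_mask, which is why num_hw_submission must be a power of two. A sketch of that slot-reuse pattern, under made-up names and sizes:

        #include <stdint.h>

        #define DEMO_HW_SUBMISSIONS 64u              /* must be a power of two */
        #define DEMO_NUM_SLOTS      (2u * DEMO_HW_SUBMISSIONS)
        #define DEMO_SLOT_MASK      (DEMO_NUM_SLOTS - 1u)

        struct demo_fence_ring {
                void    *slots[DEMO_NUM_SLOTS];      /* one pointer per in-flight fence */
                uint32_t sync_seq;                   /* last emitted sequence number */
        };

        static unsigned int demo_slot_for(uint32_t seq)
        {
                return seq & DEMO_SLOT_MASK;         /* sequence number -> slot index */
        }

        static void demo_store_fence(struct demo_fence_ring *fr, void *fence)
        {
                fr->slots[demo_slot_for(++fr->sync_seq)] = fence;
        }

Doubling the slot count relative to the submission limit leaves room so a slot is only rewritten after the fence previously stored there is known to have signalled.
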
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1070
struct amdgpu_ring *ring = &kiq->ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1075
if (adev->mes.ring[0].sched.ready)
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1078
BUG_ON(!ring->funcs->emit_rreg);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1085
r = amdgpu_ring_alloc(ring, 32);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1089
amdgpu_ring_emit_rreg(ring, reg, reg_val_offs);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1090
r = amdgpu_fence_emit_polling(ring, &seq, MAX_KIQ_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1094
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1097
r = amdgpu_fence_wait_polling(ring, seq, MAX_KIQ_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1116
r = amdgpu_fence_wait_polling(ring, seq, MAX_KIQ_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1128
amdgpu_ring_undo(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1144
struct amdgpu_ring *ring = &kiq->ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1146
BUG_ON(!ring->funcs->emit_wreg);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1151
if (adev->mes.ring[0].sched.ready) {
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1157
r = amdgpu_ring_alloc(ring, 32);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1161
amdgpu_ring_emit_wreg(ring, reg, v);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1162
r = amdgpu_fence_emit_polling(ring, &seq, MAX_KIQ_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1166
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1169
r = amdgpu_fence_wait_polling(ring, seq, MAX_KIQ_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1188
r = amdgpu_fence_wait_polling(ring, seq, MAX_KIQ_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1197
amdgpu_ring_undo(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1204
void amdgpu_gfx_get_hdp_flush_mask(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1208
if (!ring || !hdp_flush_mask || !reg_mem_engine) {
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1213
const struct nbio_hdp_flush_reg *nbio_hf_reg = ring->adev->nbio.hdp_flush_reg;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1215
switch (ring->funcs->type) {
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1217
*hdp_flush_mask = nbio_hf_reg->ref_and_mask_cp0 << ring->pipe;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1221
*hdp_flush_mask = nbio_hf_reg->ref_and_mask_cp2 << ring->pipe;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1233
DRM_ERROR("%s:unsupported ring type %d\n", __func__, ring->funcs->type);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1244
struct amdgpu_ring *ring = &kiq->ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1249
if (adev->enable_mes_kiq && adev->mes.ring[0].sched.ready)
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1252
if (!ring->funcs->emit_hdp_flush) {
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1257
r = amdgpu_ring_alloc(ring, 32);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1261
amdgpu_ring_emit_hdp_flush(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1262
r = amdgpu_fence_emit_polling(ring, &seq, MAX_KIQ_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1266
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1269
r = amdgpu_fence_wait_polling(ring, seq, MAX_KIQ_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1288
r = amdgpu_fence_wait_polling(ring, seq, MAX_KIQ_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1299
amdgpu_ring_undo(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1565
static int amdgpu_gfx_run_cleaner_shader_job(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1567
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1568
struct drm_gpu_scheduler *sched = &ring->sched;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1592
r = amdgpu_job_alloc_with_ib(ring->adev, &entity, owner,
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1603
for (i = 0; i <= ring->funcs->align_mask; ++i)
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1604
ib->ptr[i] = ring->funcs->nop;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1605
ib->length_dw = ring->funcs->align_mask + 1;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1626
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1637
ring = &adev->gfx.compute_ring[i + xcc_id * adev->gfx.num_compute_rings];
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1638
if ((ring->xcp_id == xcp_id) && ring->sched.ready) {
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
1639
r = amdgpu_gfx_run_cleaner_shader_job(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
172
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
174
int queue = ring->queue;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
175
int pipe = ring->pipe;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
182
int me = ring->me;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
186
if (ring == &adev->gfx.gfx_ring[bit])
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
194
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
200
ring == &adev->gfx.compute_ring[0])
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2240
void amdgpu_gfx_enforce_isolation_ring_begin_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2242
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2249
if (ring->xcp_id == AMDGPU_XCP_NO_PARTITION)
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2252
idx = ring->xcp_id;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2280
void amdgpu_gfx_enforce_isolation_ring_end_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2282
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2289
if (ring->xcp_id == AMDGPU_XCP_NO_PARTITION)
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2292
idx = ring->xcp_id;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2341
void amdgpu_gfx_profile_ring_begin_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2343
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2378
void amdgpu_gfx_profile_ring_end_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2380
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2385
atomic_dec(&ring->adev->gfx.total_submission_cnt);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2387
schedule_delayed_work(&ring->adev->gfx.idle_work, GFX_PROFILE_IDLE_TIMEOUT);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2467
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2477
ring = &adev->gfx.gfx_ring[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2479
ring->sched.ready = true;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2481
ring->sched.ready = false;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2493
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2498
ring = &adev->gfx.gfx_ring[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2499
if (ring->sched.ready)
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2537
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2547
ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2549
ring->sched.ready = true;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2551
ring->sched.ready = false;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2564
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2569
ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2570
if (ring->sched.ready)
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
272
struct amdgpu_ring *ring, int xcc_id)
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
295
ring->me = mec + 1;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
296
ring->pipe = pipe;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
297
ring->queue = queue;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
310
struct amdgpu_ring *ring = &kiq->ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
315
ring->adev = NULL;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
316
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
317
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
318
ring->xcc_id = xcc_id;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
319
ring->vm_hub = AMDGPU_GFXHUB(xcc_id);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
320
ring->doorbell_index =
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
325
r = amdgpu_gfx_kiq_acquire(adev, ring, xcc_id);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
329
ring->eop_gpu_addr = kiq->eop_gpu_addr;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
330
ring->no_scheduler = true;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
331
snprintf(ring->name, sizeof(ring->name), "kiq_%hhu.%hhu.%hhu.%hhu",
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
332
(unsigned char)xcc_id, (unsigned char)ring->me,
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
333
(unsigned char)ring->pipe, (unsigned char)ring->queue);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
334
r = amdgpu_ring_init(adev, ring, 1024, irq, AMDGPU_CP_KIQ_IRQ_DRIVER0,
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
342
void amdgpu_gfx_kiq_free_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
344
amdgpu_ring_fini(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
386
struct amdgpu_ring *ring = &kiq->ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
398
if (!adev->enable_mes_kiq && !ring->mqd_obj) {
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
407
&ring->mqd_obj,
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
408
&ring->mqd_gpu_addr,
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
409
&ring->mqd_ptr);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
419
"no memory to create MQD backup for ring %s\n", ring->name);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
427
ring = &adev->gfx.gfx_ring[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
428
if (!ring->mqd_obj) {
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
430
PAGE_SIZE, domain, &ring->mqd_obj,
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
431
&ring->mqd_gpu_addr, &ring->mqd_ptr);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
437
ring->mqd_size = gfx_mqd_size;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
441
dev_warn(adev->dev, "no memory to create MQD backup for ring %s\n", ring->name);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
451
ring = &adev->gfx.compute_ring[j];
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
452
if (!ring->mqd_obj) {
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
454
PAGE_SIZE, domain, &ring->mqd_obj,
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
455
&ring->mqd_gpu_addr, &ring->mqd_ptr);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
461
ring->mqd_size = compute_mqd_size;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
465
dev_warn(adev->dev, "no memory to create MQD backup for ring %s\n", ring->name);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
476
struct amdgpu_ring *ring = NULL;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
482
ring = &adev->gfx.gfx_ring[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
484
amdgpu_bo_free_kernel(&ring->mqd_obj,
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
485
&ring->mqd_gpu_addr,
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
486
&ring->mqd_ptr);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
492
ring = &adev->gfx.compute_ring[j];
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
494
amdgpu_bo_free_kernel(&ring->mqd_obj,
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
495
&ring->mqd_gpu_addr,
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
496
&ring->mqd_ptr);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
499
ring = &kiq->ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
501
amdgpu_bo_free_kernel(&ring->mqd_obj,
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
502
&ring->mqd_gpu_addr,
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
503
&ring->mqd_ptr);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
509
struct amdgpu_ring *kiq_ring = &kiq->ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
559
struct amdgpu_ring *kiq_ring = &kiq->ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
578
if (!adev->gfx.kiq[0].ring.sched.ready || amdgpu_in_reset(adev))
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
626
struct amdgpu_ring *kiq_ring = &kiq->ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
665
struct amdgpu_ring *kiq_ring = &kiq->ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
729
struct amdgpu_ring *kiq_ring = &kiq->ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h
133
struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h
135
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h
139
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h
161
struct amdgpu_ring ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h
361
void (*get_hdp_flush_mask)(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h
577
void amdgpu_gfx_kiq_free_ring(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h
601
struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h
603
struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h
622
void amdgpu_gfx_get_hdp_flush_mask(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h
646
void amdgpu_gfx_enforce_isolation_ring_begin_use(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h
647
void amdgpu_gfx_enforce_isolation_ring_end_use(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h
650
void amdgpu_gfx_profile_ring_begin_use(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h
651
void amdgpu_gfx_profile_ring_end_use(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
641
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
662
ring = adev->rings[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
663
vmhub = ring->vm_hub;
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
665
if (ring == &adev->mes.ring[0] ||
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
666
ring == &adev->mes.ring[1] ||
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
667
ring == &adev->umsch_mm.ring ||
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
668
ring == &adev->cper.ring_buf)
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
672
if (amdgpu_sdma_is_shared_inv_eng(adev, ring))
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
678
ring->name);
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
682
ring->vm_inv_eng = inv_eng - 1;
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
683
vm_inv_engs[vmhub] &= ~(1 << ring->vm_inv_eng);
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
686
ring->name, ring->vm_inv_eng, ring->vm_hub);
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
695
shared_ring = amdgpu_sdma_get_shared_ring(adev, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
697
shared_ring->vm_inv_eng = ring->vm_inv_eng;
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
699
ring->name, ring->vm_inv_eng, shared_ring->name, ring->vm_hub);
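The two assignments to vm_inv_eng above treat vm_inv_engs[vmhub] as a free-engine bitmap: inv_eng appears to be a 1-based bit position (hence the "- 1"), and clearing that bit marks the invalidation engine as taken. A hedged, self-contained imitation of that allocator using ffs(); the initial bitmap value and the loop count are made up for illustration:

#include <stdio.h>
#include <strings.h>	/* ffs() */

int main(void)
{
	unsigned int vm_inv_engs = 0x1FFA5;	/* made-up free-engine bitmap */

	for (int i = 0; i < 4; i++) {
		int inv_eng = ffs(vm_inv_engs);	/* 1-based lowest free bit */

		if (!inv_eng) {
			printf("no VM invalidation engine left\n");
			break;
		}
		vm_inv_engs &= ~(1u << (inv_eng - 1));	/* mark it used */
		printf("assigned engine %d, bitmap now 0x%X\n",
		       inv_eng - 1, vm_inv_engs);
	}
	return 0;
}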
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
710
struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
718
!ring->sched.ready) {
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
746
r = amdgpu_job_alloc_with_ib(ring->adev, &adev->mman.default_entity.base,
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
755
job->ibs->ptr[job->ibs->length_dw++] = ring->funcs->nop;
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
756
amdgpu_ring_pad_ib(ring, &job->ibs[0]);
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
774
struct amdgpu_ring *ring = &adev->gfx.kiq[inst].ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
787
if (!adev->gmc.flush_pasid_uses_kiq || !ring->sched.ready) {
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
819
r = amdgpu_ring_alloc(ring, ndw);
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
825
kiq->pmf->kiq_invalidate_tlbs(ring, pasid, 2, all_hub);
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
828
kiq->pmf->kiq_invalidate_tlbs(ring, pasid, 0, all_hub);
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
830
kiq->pmf->kiq_invalidate_tlbs(ring, pasid, flush_type, all_hub);
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
831
r = amdgpu_fence_emit_polling(ring, &seq, MAX_KIQ_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
833
amdgpu_ring_undo(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
838
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
841
r = amdgpu_fence_wait_polling(ring, seq, MAX_KIQ_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
847
r = amdgpu_fence_wait_polling(ring, seq, MAX_KIQ_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
868
struct amdgpu_ring *ring = &kiq->ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
873
if (adev->mes.ring[MES_PIPE_INST(xcc_inst, 0)].sched.ready) {
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
880
amdgpu_ring_alloc(ring, 32);
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
881
amdgpu_ring_emit_reg_write_reg_wait(ring, reg0, reg1,
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
883
r = amdgpu_fence_emit_polling(ring, &seq, MAX_KIQ_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
887
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
890
r = amdgpu_fence_wait_polling(ring, seq, MAX_KIQ_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
901
r = amdgpu_fence_wait_polling(ring, seq, MAX_KIQ_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
910
amdgpu_ring_undo(ring);
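Both amdgpu_gmc.c paths above follow the same KIQ convention visible in the excerpts: reserve dwords with amdgpu_ring_alloc(), emit the request plus a polled fence, commit, then spin with amdgpu_fence_wait_polling(), undoing the reservation if emission fails. A hedged pseudo-driver sketch of that ordering; every helper below is a stub standing in for the amdgpu functions named in the listing, and the error codes are illustrative:

#include <stdio.h>
#include <stdint.h>

/* Illustrative stubs for the helpers named in the amdgpu_gmc.c excerpts. */
static int  ring_alloc(unsigned int ndw)         { (void)ndw; return 0; }
static void emit_invalidate_tlbs(uint16_t pasid) { (void)pasid; }
static int  fence_emit_polling(uint32_t *seq)    { *seq = 42; return 0; }
static void ring_undo(void)                      { }
static void ring_commit(void)                    { }
static int  fence_wait_polling(uint32_t seq)     { (void)seq; return 1; }

/* Order of operations as it appears in the excerpts:
 * reserve -> emit request -> emit polled fence -> commit -> busy-wait. */
static int kiq_flush_pasid(uint16_t pasid)
{
	uint32_t seq;
	int r;

	r = ring_alloc(32);
	if (r)
		return r;

	emit_invalidate_tlbs(pasid);
	r = fence_emit_polling(&seq);
	if (r) {
		ring_undo();		/* roll back the reserved dwords */
		return r;
	}

	ring_commit();			/* publish wptr to the KIQ */
	if (fence_wait_polling(seq) < 1)
		return -110;		/* timed out; value is illustrative */
	return 0;
}

int main(void)
{
	printf("kiq_flush_pasid -> %d\n", kiq_flush_pasid(1));
	return 0;
}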
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.h
158
uint64_t (*emit_flush_gpu_tlb)(struct amdgpu_ring *ring, unsigned vmid,
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.h
161
void (*emit_pasid_mapping)(struct amdgpu_ring *ring, unsigned vmid,
drivers/gpu/drm/amd/amdgpu/amdgpu_hdp.c
52
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_hdp.c
54
if (!ring || !ring->funcs->emit_wreg) {
drivers/gpu/drm/amd/amdgpu/amdgpu_hdp.c
62
amdgpu_ring_emit_wreg(ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_hdp.c
70
void amdgpu_hdp_invalidate(struct amdgpu_device *adev, struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_hdp.c
73
adev->asic_funcs->invalidate_hdp(adev, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_hdp.c
75
adev->hdp.funcs->invalidate_hdp(adev, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_hdp.c
78
void amdgpu_hdp_flush(struct amdgpu_device *adev, struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_hdp.c
81
adev->asic_funcs->flush_hdp(adev, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_hdp.c
83
adev->hdp.funcs->flush_hdp(adev, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_hdp.h
32
void (*flush_hdp)(struct amdgpu_device *adev, struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_hdp.h
34
struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_hdp.h
48
struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_hdp.h
50
struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_hdp.h
52
struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
124
int amdgpu_ib_schedule(struct amdgpu_ring *ring, unsigned int num_ibs,
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
128
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
177
if (!ring->sched.ready) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
178
dev_err(adev->dev, "couldn't schedule ib on ring <%s>\n", ring->name);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
190
(!ring->funcs->secure_submission_supported)) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
191
dev_err(adev->dev, "secure submissions not supported on ring <%s>\n", ring->name);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
196
alloc_size = ring->funcs->emit_frame_size + num_ibs *
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
197
ring->funcs->emit_ib_size;
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
199
r = amdgpu_ring_alloc(ring, alloc_size);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
205
need_ctx_switch = ring->current_ctx != fence_ctx;
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
206
if (ring->funcs->emit_pipeline_sync && job &&
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
208
need_ctx_switch || amdgpu_vm_need_pipeline_sync(ring, job))) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
218
if ((ib->flags & AMDGPU_IB_FLAG_EMIT_MEM_SYNC) && ring->funcs->emit_mem_sync)
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
219
ring->funcs->emit_mem_sync(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
221
if (ring->funcs->emit_wave_limit &&
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
222
ring->hw_prio == AMDGPU_GFX_PIPE_PRIO_HIGH)
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
223
ring->funcs->emit_wave_limit(ring, true);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
225
if (ring->funcs->insert_start)
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
226
ring->funcs->insert_start(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
229
r = amdgpu_vm_flush(ring, job, need_pipe_sync);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
231
amdgpu_ring_undo(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
236
amdgpu_ring_ib_begin(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
238
if (ring->funcs->emit_gfx_shadow && adev->gfx.cp_gfx_shadow)
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
239
amdgpu_ring_emit_gfx_shadow(ring, shadow_va, csa_va, gds_va,
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
242
if (ring->funcs->init_cond_exec)
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
243
cond_exec = amdgpu_ring_init_cond_exec(ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
244
ring->cond_exe_gpu_addr);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
246
amdgpu_device_flush_hdp(adev, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
251
if (job && ring->funcs->emit_cntxcntl) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
254
amdgpu_ring_emit_cntxcntl(ring, status);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
260
if (job && ring->funcs->emit_frame_cntl) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
262
amdgpu_ring_emit_frame_cntl(ring, true, secure);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
268
if (job && ring->funcs->emit_frame_cntl) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
270
amdgpu_ring_emit_frame_cntl(ring, false, secure);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
272
amdgpu_ring_emit_frame_cntl(ring, true, secure);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
276
amdgpu_ring_emit_ib(ring, job, ib, status);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
280
if (job && ring->funcs->emit_frame_cntl)
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
281
amdgpu_ring_emit_frame_cntl(ring, false, secure);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
283
amdgpu_device_invalidate_hdp(adev, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
290
amdgpu_ring_emit_fence(ring, job->uf_addr, job->uf_sequence,
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
294
if (ring->funcs->emit_gfx_shadow && ring->funcs->init_cond_exec &&
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
296
amdgpu_ring_emit_gfx_shadow(ring, 0, 0, 0, false, 0);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
297
amdgpu_ring_init_cond_exec(ring, ring->cond_exe_gpu_addr);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
300
r = amdgpu_fence_emit(ring, af, fence_flags);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
304
amdgpu_vmid_reset(adev, ring->vm_hub, job->vmid);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
305
amdgpu_ring_undo(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
313
if (ring->funcs->insert_end)
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
314
ring->funcs->insert_end(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
316
amdgpu_ring_patch_cond_exec(ring, cond_exec);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
318
ring->current_ctx = fence_ctx;
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
319
if (job && ring->funcs->emit_switch_buffer)
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
320
amdgpu_ring_emit_switch_buffer(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
322
if (ring->funcs->emit_wave_limit &&
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
323
ring->hw_prio == AMDGPU_GFX_PIPE_PRIO_HIGH)
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
324
ring->funcs->emit_wave_limit(ring, false);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
334
amdgpu_ring_ib_end(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
335
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
436
struct amdgpu_ring *ring = adev->rings[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
442
if (!ring->sched.ready || !ring->funcs->test_ib)
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
446
ring->funcs->type == AMDGPU_RING_TYPE_KIQ)
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
450
if (ring->funcs->type == AMDGPU_RING_TYPE_UVD ||
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
451
ring->funcs->type == AMDGPU_RING_TYPE_VCE ||
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
452
ring->funcs->type == AMDGPU_RING_TYPE_UVD_ENC ||
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
453
ring->funcs->type == AMDGPU_RING_TYPE_VCN_DEC ||
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
454
ring->funcs->type == AMDGPU_RING_TYPE_VCN_ENC ||
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
455
ring->funcs->type == AMDGPU_RING_TYPE_VCN_JPEG)
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
460
r = amdgpu_ring_test_ib(ring, tmo);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
463
ring->name);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
467
ring->sched.ready = false;
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
469
ring->name, r);
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
471
if (ring == &adev->gfx.gfx_ring[0]) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
207
static int amdgpu_vmid_grab_idle(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
211
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
212
unsigned vmhub = ring->vm_hub;
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
216
if (!dma_fence_is_signaled(ring->vmid_wait)) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
217
*fence = dma_fence_get(ring->vmid_wait);
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
225
NULL : ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
237
dma_fence_put(ring->vmid_wait);
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
238
ring->vmid_wait = dma_fence_get(*fence);
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
257
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
262
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
263
unsigned vmhub = ring->vm_hub;
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
264
uint64_t fence_context = adev->fence_context + ring->idx;
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
286
ring = NULL;
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
290
tmp = amdgpu_sync_peek_fence(&(*id)->active, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
322
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
326
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
327
unsigned vmhub = ring->vm_hub;
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
329
uint64_t fence_context = adev->fence_context + ring->idx;
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
384
int amdgpu_vmid_grab(struct amdgpu_vm *vm, struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
387
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
388
unsigned vmhub = ring->vm_hub;
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
395
r = amdgpu_vmid_grab_idle(ring, &idle, fence);
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
400
r = amdgpu_vmid_grab_reserved(vm, ring, job, &id, fence);
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
404
r = amdgpu_vmid_grab_used(vm, ring, job, &id);
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
443
trace_amdgpu_vm_grab_id(vm, ring, job);
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.h
86
int amdgpu_vmid_grab(struct amdgpu_vm *vm, struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c
120
if (!ih->ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c
129
(void *)ih->ring, ih->gpu_addr);
drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c
130
ih->ring = NULL;
drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c
133
(void **)&ih->ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c
157
ih->ring[wptr++] = cpu_to_le32(iv[i]);
drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c
271
dw[0] = le32_to_cpu(ih->ring[ring_index + 0]);
drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c
272
dw[1] = le32_to_cpu(ih->ring[ring_index + 1]);
drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c
273
dw[2] = le32_to_cpu(ih->ring[ring_index + 2]);
drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c
274
dw[3] = le32_to_cpu(ih->ring[ring_index + 3]);
drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c
275
dw[4] = le32_to_cpu(ih->ring[ring_index + 4]);
drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c
276
dw[5] = le32_to_cpu(ih->ring[ring_index + 5]);
drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c
277
dw[6] = le32_to_cpu(ih->ring[ring_index + 6]);
drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c
278
dw[7] = le32_to_cpu(ih->ring[ring_index + 7]);
drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c
308
dw1 = le32_to_cpu(ih->ring[ring_index + 1]);
drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c
309
dw2 = le32_to_cpu(ih->ring[ring_index + 2]);
drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c
59
if (ih->ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c
65
ih->ring = dma_alloc_coherent(adev->dev, ih->ring_size + 8,
drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c
67
if (ih->ring == NULL)
drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c
72
ih->wptr_cpu = &ih->ring[ih->ring_size / 4];
drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c
74
ih->rptr_cpu = &ih->ring[(ih->ring_size / 4) + 1];
drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c
91
(void **)&ih->ring);
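The "+ 8" in the coherent allocation above lines up with the two pointer assignments that follow it: the two extra 32-bit words placed directly after ring_size bytes of ring data apparently hold CPU-side wptr/rptr shadows. A hedged sketch of that layout, with plain calloc() standing in for dma_alloc_coherent() and an invented ring size:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

int main(void)
{
	uint32_t ring_size = 256;	/* bytes of IV data; illustrative only */
	uint32_t *ring, *wptr_cpu, *rptr_cpu;

	/* The driver uses dma_alloc_coherent(); calloc() is a stand-in.
	 * ring_size bytes of entries plus 8 bytes for the two shadows. */
	ring = calloc(1, ring_size + 8);
	if (!ring)
		return 1;

	wptr_cpu = &ring[ring_size / 4];	/* first extra dword */
	rptr_cpu = &ring[ring_size / 4 + 1];	/* second extra dword */

	*wptr_cpu = 32;
	*rptr_cpu = 0;
	printf("ring dwords: %u, wptr shadow at index %u, rptr at %u\n",
	       ring_size / 4, ring_size / 4, ring_size / 4 + 1);
	free(ring);
	return 0;
}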
drivers/gpu/drm/amd/amdgpu/amdgpu_ih.h
59
uint32_t *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_irq.c
481
entry.iv_entry = (const uint32_t *)&ih->ring[ring_index];
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
116
amdgpu_ring_is_reset_type_supported(ring, AMDGPU_RESET_TYPE_SOFT_RESET) &&
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
117
amdgpu_ring_soft_recovery(ring, job->vmid, s_job->s_fence->parent)) {
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
124
job->base.sched->name, atomic_read(&ring->fence_drv.last_seq),
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
125
ring->fence_drv.sync_seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
127
ti = amdgpu_vm_get_task_info_pasid(ring->adev, job->pasid);
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
135
amdgpu_ring_is_reset_type_supported(ring, AMDGPU_RESET_TYPE_PER_QUEUE) &&
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
136
ring->funcs->reset) {
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
140
drm_sched_wqueue_stop(&ring->sched);
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
141
r = amdgpu_ring_reset(ring, job->vmid, job->hw_fence);
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
144
drm_sched_wqueue_start(&ring->sched);
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
145
atomic_inc(&ring->adev->gpu_reset_counter);
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
147
ring->sched.name);
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
154
dev_err(adev->dev, "Ring %s reset failed\n", ring->sched.name);
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
160
if (amdgpu_device_should_recover_gpu(ring->adev)) {
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
175
r = amdgpu_device_gpu_recover(ring->adev, job, &reset_context);
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
179
drm_sched_suspend_timeout(&ring->sched);
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
373
int amdgpu_job_submit_direct(struct amdgpu_job *job, struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
378
job->base.sched = &ring->sched;
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
379
r = amdgpu_ib_schedule(ring, job->num_ibs, job->ibs, job, fence);
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
392
struct amdgpu_ring *ring = to_amdgpu_ring(s_entity->rq->sched);
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
402
fence = amdgpu_device_switch_gang(ring->adev, job->gang_submit);
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
407
fence = amdgpu_device_enforce_isolation(ring->adev, ring, job);
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
412
r = amdgpu_vmid_grab(job->vm, ring, job, &fence);
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
414
dev_err(ring->adev->dev, "Error getting VM ID (%d)\n", r);
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
429
struct amdgpu_ring *ring = to_amdgpu_ring(sched_job->sched);
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
430
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
447
ring->name);
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
449
r = amdgpu_ib_schedule(ring, job->num_ibs, job->ibs, job,
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
454
ring->name);
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
90
struct amdgpu_ring *ring = to_amdgpu_ring(s_job->sched);
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
94
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_job.h
129
int amdgpu_job_submit_direct(struct amdgpu_job *job, struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
133
void amdgpu_jpeg_ring_begin_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
135
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
146
void amdgpu_jpeg_ring_end_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
148
atomic_dec(&ring->adev->jpeg.total_submission_cnt);
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
149
schedule_delayed_work(&ring->adev->jpeg.idle_work, JPEG_IDLE_TIMEOUT);
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
152
int amdgpu_jpeg_dec_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
154
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
163
r = amdgpu_ring_alloc(ring, 3);
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
167
WREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe], 0xCAFEDEAD);
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
169
RREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe]);
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
171
amdgpu_ring_write(ring, PACKET0(adev->jpeg.internal.jpeg_pitch[ring->pipe], 0));
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
172
amdgpu_ring_write(ring, 0xABADCAFE);
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
173
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
176
tmp = RREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe]);
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
188
static int amdgpu_jpeg_dec_set_reg(struct amdgpu_ring *ring, uint32_t handle,
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
191
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
198
r = amdgpu_job_alloc_with_ib(ring->adev, NULL, NULL, ib_size_dw * 4,
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
206
ib->ptr[0] = PACKETJ(adev->jpeg.internal.jpeg_pitch[ring->pipe], 0, 0, PACKETJ_TYPE0);
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
214
r = amdgpu_job_submit_direct(job, ring, &f);
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
229
int amdgpu_jpeg_dec_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
231
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
237
r = amdgpu_jpeg_dec_set_reg(ring, 1, &fence);
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
253
tmp = RREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe]);
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
362
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
373
ring = &adev->jpeg.inst[i].ring_dec[j];
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
375
ring->sched.ready = true;
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
377
ring->sched.ready = false;
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
390
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
396
ring = &adev->jpeg.inst[i].ring_dec[j];
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
397
if (ring->sched.ready)
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.h
159
void amdgpu_jpeg_ring_begin_use(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.h
160
void amdgpu_jpeg_ring_end_use(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.h
162
int amdgpu_jpeg_dec_ring_test_ring(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.h
163
int amdgpu_jpeg_dec_ring_test_ib(struct amdgpu_ring *ring, long timeout);
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
481
if (adev->sdma.instance[i].ring.sched.ready &&
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
482
!adev->sdma.instance[i].ring.no_user_submission)
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
499
if (adev->uvd.inst[i].ring.sched.ready &&
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
500
!adev->uvd.inst[i].ring.no_user_submission)
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
509
if (adev->vce.ring[i].sched.ready &&
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
510
!adev->vce.ring[i].no_user_submission)
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
574
if (adev->vpe.ring.sched.ready &&
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
575
!adev->vpe.ring.no_user_submission)
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.c
333
struct amdgpu_ring *ring, uint32_t xcc_id)
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.c
341
queue_input.queue_type = ring->funcs->type;
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.c
342
queue_input.doorbell_offset = ring->doorbell_index;
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.c
343
queue_input.pipe_id = ring->pipe;
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.c
344
queue_input.queue_id = ring->queue;
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.c
345
queue_input.mqd_addr = amdgpu_bo_gpu_offset(ring->mqd_obj);
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.c
346
queue_input.wptr_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.c
358
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.c
367
queue_input.queue_type = ring->funcs->type;
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.c
368
queue_input.doorbell_offset = ring->doorbell_index;
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.c
369
queue_input.pipe_id = ring->pipe;
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.c
370
queue_input.queue_id = ring->queue;
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.c
384
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.c
395
queue_input.queue_type = ring->funcs->type;
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.c
396
queue_input.doorbell_offset = ring->doorbell_index;
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.c
397
queue_input.me_id = ring->me;
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.c
398
queue_input.pipe_id = ring->pipe;
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.c
399
queue_input.queue_id = ring->queue;
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.c
400
queue_input.mqd_addr = ring->mqd_obj ? amdgpu_bo_gpu_offset(ring->mqd_obj) : 0;
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.c
401
queue_input.wptr_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.c
405
if (ring->funcs->type == AMDGPU_RING_TYPE_GFX)
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.c
577
mes_ring = &adev->mes.ring[0];
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.h
198
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.h
212
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.h
436
struct amdgpu_ring *ring, uint32_t xcc_id);
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.h
438
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.h
442
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_mes.h
95
struct amdgpu_ring ring[AMDGPU_MAX_MES_INST_PIPES];
drivers/gpu/drm/amd/amdgpu/amdgpu_mes_ctx.h
61
uint8_t ring[PAGE_SIZE * 4];
drivers/gpu/drm/amd/amdgpu/amdgpu_mes_ctx.h
78
uint8_t ring[PAGE_SIZE * 4];
drivers/gpu/drm/amd/amdgpu/amdgpu_mes_ctx.h
92
uint8_t ring[PAGE_SIZE * 4];
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3448
struct psp_ring *ring = &psp->km_ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3449
struct psp_gfx_rb_frame *ring_buffer_start = ring->ring_mem;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3451
ring->ring_size / sizeof(struct psp_gfx_rb_frame) - 1;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3453
uint32_t ring_size_dw = ring->ring_size / 4;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
59
struct psp_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
62
ring = &psp->km_ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
64
ring->ring_type = ring_type;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
67
ring->ring_size = 0x1000;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
68
ret = amdgpu_bo_create_kernel(adev, ring->ring_size, PAGE_SIZE,
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
72
&ring->ring_mem_mc_addr,
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
73
(void **)&ring->ring_mem);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
75
ring->ring_size = 0;
drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c
2527
memcpy(&entry, &data->ring[data->rptr],
drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c
2585
memcpy(&data->ring[data->wptr], info->entry,
drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c
2612
kfree(data->ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c
2651
data->ring = kmalloc(data->ring_size, GFP_KERNEL);
drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c
2652
if (!data->ring) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ras.h
678
unsigned char *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
101
if (ring->funcs->begin_use)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
102
ring->funcs->begin_use(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
117
static void amdgpu_ring_alloc_reemit(struct amdgpu_ring *ring, unsigned int ndw)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
121
ndw = (ndw + ring->funcs->align_mask) & ~ring->funcs->align_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
123
ring->count_dw = ndw;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
124
ring->wptr_old = ring->wptr;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
126
if (ring->funcs->begin_use)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
127
ring->funcs->begin_use(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
138
void amdgpu_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
142
occupied = ring->wptr & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
143
chunk1 = ring->buf_mask + 1 - occupied;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
148
memset32(&ring->ring[occupied], ring->funcs->nop, chunk1);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
151
memset32(ring->ring, ring->funcs->nop, chunk2);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
153
ring->wptr += count;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
154
ring->wptr &= ring->ptr_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
155
ring->count_dw -= count;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
166
void amdgpu_ring_generic_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
168
u32 align_mask = ring->funcs->align_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
174
memset32(&ib->ptr[ib->length_dw], ring->funcs->nop, count);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
189
void amdgpu_ring_commit(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
193
if (ring->count_dw < 0)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
194
drm_err(adev_to_drm(ring->adev), "writing more dwords to the ring than expected!\n");
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
197
count = ring->funcs->align_mask + 1 -
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
198
(ring->wptr & ring->funcs->align_mask);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
199
count &= ring->funcs->align_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
202
ring->funcs->insert_nop(ring, count);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
205
amdgpu_ring_set_wptr(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
207
if (ring->funcs->end_use)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
208
ring->funcs->end_use(ring);
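The padding arithmetic in the amdgpu_ring_commit() lines above brings the write pointer up to the next (align_mask + 1)-dword boundary before it is handed to the hardware, emitting zero NOPs when it is already aligned. A quick standalone check of that formula; the align_mask value is made up:

#include <stdint.h>
#include <stdio.h>

/* Pad count as computed in the commit excerpt above. */
static uint32_t pad_to_align(uint64_t wptr, uint32_t align_mask)
{
	uint32_t count = align_mask + 1 - (wptr & align_mask);

	return count & align_mask;
}

int main(void)
{
	/* align_mask = 7 means packets start on 8-dword boundaries. */
	for (uint64_t wptr = 0; wptr < 10; wptr++)
		printf("wptr=%llu -> %u NOP dwords of padding\n",
		       (unsigned long long)wptr, pad_to_align(wptr, 7));
	return 0;
}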
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
218
void amdgpu_ring_undo(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
220
ring->wptr = ring->wptr_old;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
222
if (ring->funcs->end_use)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
223
ring->funcs->end_use(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
226
#define amdgpu_ring_get_gpu_addr(ring, offset) \
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
227
(ring->adev->wb.gpu_addr + offset * 4)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
229
#define amdgpu_ring_get_cpu_addr(ring, offset) \
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
230
(&ring->adev->wb.wb[offset])
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
246
int amdgpu_ring_init(struct amdgpu_device *adev, struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
263
if (ring->funcs->type == AMDGPU_RING_TYPE_KIQ)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
265
if (ring->funcs->type == AMDGPU_RING_TYPE_MES)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
267
else if (ring == &adev->sdma.instance[0].page)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
270
if (ring->adev == NULL) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
274
ring->adev = adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
275
ring->num_hw_submission = sched_hw_submission;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
276
ring->sched_score = sched_score;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
277
ring->vmid_wait = dma_fence_get_stub();
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
279
ring->idx = adev->num_rings++;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
280
adev->rings[ring->idx] = ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
282
r = amdgpu_fence_driver_init_ring(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
287
r = amdgpu_device_wb_get(adev, &ring->rptr_offs);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
293
r = amdgpu_device_wb_get(adev, &ring->wptr_offs);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
299
r = amdgpu_device_wb_get(adev, &ring->fence_offs);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
305
r = amdgpu_device_wb_get(adev, &ring->trail_fence_offs);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
311
r = amdgpu_device_wb_get(adev, &ring->cond_exe_offs);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
317
ring->fence_gpu_addr =
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
318
amdgpu_ring_get_gpu_addr(ring, ring->fence_offs);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
319
ring->fence_cpu_addr =
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
320
amdgpu_ring_get_cpu_addr(ring, ring->fence_offs);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
322
ring->rptr_gpu_addr =
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
323
amdgpu_ring_get_gpu_addr(ring, ring->rptr_offs);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
324
ring->rptr_cpu_addr =
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
325
amdgpu_ring_get_cpu_addr(ring, ring->rptr_offs);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
327
ring->wptr_gpu_addr =
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
328
amdgpu_ring_get_gpu_addr(ring, ring->wptr_offs);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
329
ring->wptr_cpu_addr =
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
330
amdgpu_ring_get_cpu_addr(ring, ring->wptr_offs);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
332
ring->trail_fence_gpu_addr =
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
333
amdgpu_ring_get_gpu_addr(ring, ring->trail_fence_offs);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
334
ring->trail_fence_cpu_addr =
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
335
amdgpu_ring_get_cpu_addr(ring, ring->trail_fence_offs);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
337
ring->cond_exe_gpu_addr =
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
338
amdgpu_ring_get_gpu_addr(ring, ring->cond_exe_offs);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
339
ring->cond_exe_cpu_addr =
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
340
amdgpu_ring_get_cpu_addr(ring, ring->cond_exe_offs);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
343
*ring->cond_exe_cpu_addr = 1;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
345
if (ring->funcs->type != AMDGPU_RING_TYPE_CPER) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
346
r = amdgpu_fence_driver_start_ring(ring, irq_src, irq_type);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
352
max_ibs_dw = ring->funcs->emit_frame_size +
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
353
amdgpu_ring_max_ibs(ring->funcs->type) * ring->funcs->emit_ib_size;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
354
max_ibs_dw = (max_ibs_dw + ring->funcs->align_mask) & ~ring->funcs->align_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
359
ring->ring_size = roundup_pow_of_two(max_dw * 4 * sched_hw_submission);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
361
ring->ring_size = roundup_pow_of_two(max_dw * 4);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
362
ring->count_dw = (ring->ring_size - 4) >> 2;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
364
ring->wptr = *ring->rptr_cpu_addr = 0;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
367
ring->buf_mask = (ring->ring_size / 4) - 1;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
368
ring->ptr_mask = ring->funcs->support_64bit_ptrs ?
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
369
0xffffffffffffffff : ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
371
ring->cached_rptr = 0;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
373
if (!ring->ring_backup) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
374
ring->ring_backup = kvzalloc(ring->ring_size, GFP_KERNEL);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
375
if (!ring->ring_backup)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
380
if (ring->ring_obj == NULL) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
381
r = amdgpu_bo_create_kernel(adev, ring->ring_size + ring->funcs->extra_bytes,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
384
&ring->ring_obj,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
385
&ring->gpu_addr,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
386
(void **)&ring->ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
389
kvfree(ring->ring_backup);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
392
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
395
ring->max_dw = max_dw;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
396
ring->hw_prio = hw_prio;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
398
if (!ring->no_scheduler && ring->funcs->type < AMDGPU_HW_IP_NUM) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
399
hw_ip = ring->funcs->type;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
402
&ring->sched;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
415
void amdgpu_ring_fini(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
419
if (!(ring->adev) || !(ring->adev->rings[ring->idx]))
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
422
ring->sched.ready = false;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
424
amdgpu_device_wb_free(ring->adev, ring->rptr_offs);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
425
amdgpu_device_wb_free(ring->adev, ring->wptr_offs);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
427
amdgpu_device_wb_free(ring->adev, ring->cond_exe_offs);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
428
amdgpu_device_wb_free(ring->adev, ring->fence_offs);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
430
amdgpu_bo_free_kernel(&ring->ring_obj,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
431
&ring->gpu_addr,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
432
(void **)&ring->ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
433
kvfree(ring->ring_backup);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
434
ring->ring_backup = NULL;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
436
dma_fence_put(ring->vmid_wait);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
437
ring->vmid_wait = NULL;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
438
ring->me = 0;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
453
void amdgpu_ring_emit_reg_write_reg_wait_helper(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
457
amdgpu_ring_emit_wreg(ring, reg0, ref);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
458
amdgpu_ring_emit_reg_wait(ring, reg1, mask, mask);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
470
bool amdgpu_ring_soft_recovery(struct amdgpu_ring *ring, unsigned int vmid,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
479
if (amdgpu_sriov_vf(ring->adev) || !ring->funcs->soft_recovery || !fence)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
489
ring->funcs->soft_recovery(ring, vmid);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
494
atomic_inc(&ring->adev->gpu_reset_counter);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
508
struct amdgpu_ring *ring = file_inode(f)->i_private;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
538
r = amdgpu_ras_mgr_handle_ras_cmd(ring->adev,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
552
r = amdgpu_ras_mgr_handle_ras_cmd(ring->adev, RAS_CMD__GET_CPER_RECORD,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
574
struct amdgpu_ring *ring = file_inode(f)->i_private;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
580
if (ring->funcs->type == AMDGPU_RING_TYPE_CPER && amdgpu_uniras_enabled(ring->adev))
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
589
if (ring->funcs->type == AMDGPU_RING_TYPE_CPER)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
590
mutex_lock(&ring->adev->cper.ring_lock);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
592
early[0] = amdgpu_ring_get_rptr(ring) & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
593
early[1] = amdgpu_ring_get_wptr(ring) & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
594
early[2] = ring->wptr & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
608
if (ring->funcs->type != AMDGPU_RING_TYPE_CPER) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
610
if (*pos >= (ring->ring_size + 12))
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
613
value = ring->ring[(*pos - 12)/4];
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
627
size = ring->ring_size - (early[0] - early[1]);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
633
value = ring->ring[p];
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
644
p &= ring->ptr_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
649
if (ring->funcs->type == AMDGPU_RING_TYPE_CPER)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
650
mutex_unlock(&ring->adev->cper.ring_lock);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
658
struct amdgpu_ring *ring = file_inode(f)->i_private;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
663
if (ring->funcs->type == AMDGPU_RING_TYPE_CPER)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
664
amdgpu_virt_req_ras_cper_dump(ring->adev, false);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
684
struct amdgpu_ring *ring = file_inode(f)->i_private;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
685
ssize_t bytes = min_t(ssize_t, ring->mqd_size - *pos, size);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
686
void *from = ((u8 *)ring->mqd_ptr) + *pos;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
688
if (*pos > ring->mqd_size)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
706
struct amdgpu_ring *ring = data;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
708
amdgpu_fence_driver_set_error(ring, val);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
718
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
725
sprintf(name, "amdgpu_ring_%s", ring->name);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
727
debugfs_create_file_size(name, S_IFREG | 0444, root, ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
729
ring->ring_size + 12);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
731
debugfs_create_file_size(name, S_IFREG | 0444, root, ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
733
ring->ring_size + 12);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
735
if (ring->mqd_obj) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
736
sprintf(name, "amdgpu_mqd_%s", ring->name);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
737
debugfs_create_file_size(name, S_IFREG | 0444, root, ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
739
ring->mqd_size);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
742
sprintf(name, "amdgpu_error_%s", ring->name);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
743
debugfs_create_file(name, 0200, root, ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
758
int amdgpu_ring_test_helper(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
760
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
763
r = amdgpu_ring_test_ring(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
766
ring->name, r);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
769
ring->name);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
771
ring->sched.ready = !r;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
776
static void amdgpu_ring_to_mqd_prop(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
779
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
780
bool is_high_prio_compute = ring->funcs->type == AMDGPU_RING_TYPE_COMPUTE &&
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
781
amdgpu_gfx_is_high_priority_compute_queue(adev, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
782
bool is_high_prio_gfx = ring->funcs->type == AMDGPU_RING_TYPE_GFX &&
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
783
amdgpu_gfx_is_high_priority_graphics_queue(adev, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
787
prop->mqd_gpu_addr = ring->mqd_gpu_addr;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
788
prop->hqd_base_gpu_addr = ring->gpu_addr;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
789
prop->rptr_gpu_addr = ring->rptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
790
prop->wptr_gpu_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
791
prop->queue_size = ring->ring_size;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
792
prop->eop_gpu_addr = ring->eop_gpu_addr;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
793
prop->use_doorbell = ring->use_doorbell;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
794
prop->doorbell_index = ring->doorbell_index;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
800
prop->hqd_active = ring->funcs->type == AMDGPU_RING_TYPE_KIQ;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
809
int amdgpu_ring_init_mqd(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
811
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
815
amdgpu_ring_to_mqd_prop(ring, &prop);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
817
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
819
if (ring->funcs->type == AMDGPU_RING_TYPE_KIQ)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
822
mqd_mgr = &adev->mqds[ring->funcs->type];
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
824
return mqd_mgr->init_mqd(adev, ring->mqd_ptr, &prop);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
827
void amdgpu_ring_ib_begin(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
829
if (ring->is_sw_ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
830
amdgpu_sw_ring_ib_begin(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
833
void amdgpu_ring_ib_end(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
835
if (ring->is_sw_ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
836
amdgpu_sw_ring_ib_end(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
839
void amdgpu_ring_ib_on_emit_cntl(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
841
if (ring->is_sw_ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
842
amdgpu_sw_ring_ib_mark_offset(ring, AMDGPU_MUX_OFFSET_TYPE_CONTROL);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
845
void amdgpu_ring_ib_on_emit_ce(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
847
if (ring->is_sw_ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
848
amdgpu_sw_ring_ib_mark_offset(ring, AMDGPU_MUX_OFFSET_TYPE_CE);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
851
void amdgpu_ring_ib_on_emit_de(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
853
if (ring->is_sw_ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
854
amdgpu_sw_ring_ib_mark_offset(ring, AMDGPU_MUX_OFFSET_TYPE_DE);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
857
bool amdgpu_ring_sched_ready(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
859
if (!ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
86
int amdgpu_ring_alloc(struct amdgpu_ring *ring, unsigned int ndw)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
862
if (ring->no_scheduler || !drm_sched_wqueue_ready(&ring->sched))
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
868
void amdgpu_ring_reset_helper_begin(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
872
amdgpu_ring_backup_unprocessed_commands(ring, guilty_fence);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
875
int amdgpu_ring_reset_helper_end(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
882
r = amdgpu_ring_test_ring(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
890
if (ring->ring_backup_entries_to_copy) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
891
amdgpu_ring_alloc_reemit(ring, ring->ring_backup_entries_to_copy);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
892
for (i = 0; i < ring->ring_backup_entries_to_copy; i++)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
893
amdgpu_ring_write(ring, ring->ring_backup[i]);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
894
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
899
bool amdgpu_ring_is_reset_type_supported(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
90
ndw = (ndw + ring->funcs->align_mask) & ~ring->funcs->align_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
902
switch (ring->funcs->type) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
904
if (ring->adev->gfx.gfx_supported_reset & reset_type)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
908
if (ring->adev->gfx.compute_supported_reset & reset_type)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
912
if (ring->adev->sdma.supported_reset & reset_type)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
917
if (ring->adev->vcn.supported_reset & reset_type)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
921
if (ring->adev->jpeg.supported_reset & reset_type)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
95
if (WARN_ON_ONCE(ndw > ring->max_dw))
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
98
ring->count_dw = ndw;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c
99
ring->wptr_old = ring->wptr;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
146
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
162
void amdgpu_fence_driver_set_error(struct amdgpu_ring *ring, int error);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
163
void amdgpu_fence_driver_force_completion(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
167
int amdgpu_fence_driver_init_ring(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
168
int amdgpu_fence_driver_start_ring(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
175
int amdgpu_fence_emit(struct amdgpu_ring *ring, struct amdgpu_fence *af,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
177
int amdgpu_fence_emit_polling(struct amdgpu_ring *ring, uint32_t *s,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
179
bool amdgpu_fence_process(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
180
int amdgpu_fence_wait_empty(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
181
signed long amdgpu_fence_wait_polling(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
184
unsigned amdgpu_fence_count_emitted(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
188
u64 amdgpu_fence_last_unsignaled_time_us(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
189
void amdgpu_fence_update_start_timestamp(struct amdgpu_ring *ring, uint32_t seq,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
234
u64 (*get_rptr)(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
235
u64 (*get_wptr)(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
236
void (*set_wptr)(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
248
void (*emit_ib)(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
252
void (*emit_fence)(struct amdgpu_ring *ring, uint64_t addr,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
254
void (*emit_pipeline_sync)(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
255
void (*emit_vm_flush)(struct amdgpu_ring *ring, unsigned vmid,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
257
void (*emit_hdp_flush)(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
258
void (*emit_gds_switch)(struct amdgpu_ring *ring, uint32_t vmid,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
263
int (*test_ring)(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
264
int (*test_ib)(struct amdgpu_ring *ring, long timeout);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
266
void (*insert_nop)(struct amdgpu_ring *ring, uint32_t count);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
267
void (*insert_start)(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
268
void (*insert_end)(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
270
void (*pad_ib)(struct amdgpu_ring *ring, struct amdgpu_ib *ib);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
271
unsigned (*init_cond_exec)(struct amdgpu_ring *ring, uint64_t addr);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
273
void (*begin_use)(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
274
void (*end_use)(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
275
void (*emit_switch_buffer) (struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
276
void (*emit_cntxcntl) (struct amdgpu_ring *ring, uint32_t flags);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
277
void (*emit_gfx_shadow)(struct amdgpu_ring *ring, u64 shadow_va, u64 csa_va,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
279
void (*emit_rreg)(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
281
void (*emit_wreg)(struct amdgpu_ring *ring, uint32_t reg, uint32_t val);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
282
void (*emit_reg_wait)(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
284
void (*emit_reg_write_reg_wait)(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
287
void (*emit_frame_cntl)(struct amdgpu_ring *ring, bool start,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
290
void (*soft_recovery)(struct amdgpu_ring *ring, unsigned vmid);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
291
int (*preempt_ib)(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
292
void (*emit_mem_sync)(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
293
void (*emit_wave_limit)(struct amdgpu_ring *ring, bool enable);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
294
void (*patch_cntl)(struct amdgpu_ring *ring, unsigned offset);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
295
void (*patch_ce)(struct amdgpu_ring *ring, unsigned offset);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
296
void (*patch_de)(struct amdgpu_ring *ring, unsigned offset);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
297
int (*reset)(struct amdgpu_ring *ring, unsigned int vmid,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
299
void (*emit_cleaner_shader)(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
312
uint32_t *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
460
int amdgpu_ring_alloc(struct amdgpu_ring *ring, unsigned ndw);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
461
void amdgpu_ring_ib_begin(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
462
void amdgpu_ring_ib_end(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
463
void amdgpu_ring_ib_on_emit_cntl(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
464
void amdgpu_ring_ib_on_emit_ce(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
465
void amdgpu_ring_ib_on_emit_de(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
467
void amdgpu_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
468
void amdgpu_ring_generic_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
469
void amdgpu_ring_commit(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
470
void amdgpu_ring_undo(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
471
int amdgpu_ring_init(struct amdgpu_device *adev, struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
475
void amdgpu_ring_fini(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
476
void amdgpu_ring_emit_reg_write_reg_wait_helper(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
479
bool amdgpu_ring_soft_recovery(struct amdgpu_ring *ring, unsigned int vmid,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
482
static inline void amdgpu_ring_set_preempt_cond_exec(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
485
*ring->cond_exe_cpu_addr = cond_exec;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
488
static inline void amdgpu_ring_clear_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
490
memset32(ring->ring, ring->funcs->nop, ring->buf_mask + 1);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
493
static inline void amdgpu_ring_write(struct amdgpu_ring *ring, uint32_t v)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
495
ring->ring[ring->wptr++ & ring->buf_mask] = v;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
496
ring->wptr &= ring->ptr_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
497
ring->count_dw--;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
500
static inline void amdgpu_ring_write_multiple(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
505
occupied = ring->wptr & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
506
chunk1 = ring->buf_mask + 1 - occupied;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
513
memcpy(&ring->ring[occupied], src, chunk1);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
517
memcpy(ring->ring, src, chunk2);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
520
ring->wptr += count_dw;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
521
ring->wptr &= ring->ptr_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
522
ring->count_dw -= count_dw;
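The inline writers listed just above (amdgpu_ring.h:493-522) wrap dword writes around the ring buffer by masking the write pointer with buf_mask. A minimal, self-contained sketch of that wrap logic, using a hypothetical simplified ring_buf type in place of the real struct amdgpu_ring; same semantics, not the kernel implementation:

#include <stdint.h>
#include <string.h>

/* Simplified stand-in for the ring fields used by the inline writers above. */
struct ring_buf {
	uint32_t *ring;      /* backing store, buf_mask + 1 dwords long */
	uint64_t wptr;       /* write pointer, counted in dwords */
	uint64_t buf_mask;   /* buffer size in dwords minus one (power of two) */
	uint64_t ptr_mask;   /* mask applied to wptr after each write */
	int count_dw;        /* dwords still reserved by the last allocation */
};

/* Single-dword write: indexing with buf_mask makes the store wrap in place. */
static inline void ring_write(struct ring_buf *r, uint32_t v)
{
	r->ring[r->wptr++ & r->buf_mask] = v;
	r->wptr &= r->ptr_mask;
	r->count_dw--;
}

/* Multi-dword write: at most two memcpy chunks, split at the wrap point. */
static inline void ring_write_multiple(struct ring_buf *r, const uint32_t *src,
				       unsigned int count_dw)
{
	uint64_t occupied = r->wptr & r->buf_mask;
	uint64_t chunk1 = r->buf_mask + 1 - occupied;   /* dwords up to the end */

	if (chunk1 > count_dw)
		chunk1 = count_dw;
	memcpy(&r->ring[occupied], src, chunk1 * sizeof(uint32_t));
	if (count_dw > chunk1)                           /* wrapped remainder */
		memcpy(r->ring, src + chunk1,
		       (count_dw - chunk1) * sizeof(uint32_t));

	r->wptr = (r->wptr + count_dw) & r->ptr_mask;
	r->count_dw -= count_dw;
}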
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
532
static inline void amdgpu_ring_patch_cond_exec(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
537
if (!ring->funcs->init_cond_exec)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
540
WARN_ON(offset > ring->buf_mask);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
541
WARN_ON(ring->ring[offset] != 0);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
543
cur = (ring->wptr - 1) & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
545
cur += ring->ring_size >> 2;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
546
ring->ring[offset] = cur - offset;
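The patch helper listed above (amdgpu_ring.h:532-546) rewrites the placeholder dword of a conditional-execute packet with the distance, in dwords, from that placeholder to the current write pointer, adding one full ring length when the writes have wrapped past it. A hedged sketch of that arithmetic, reusing the simplified ring_buf type from the earlier example; the asserts stand in for the WARN_ON checks:

#include <assert.h>

/*
 * Patch the size dword of a conditional-execute packet that was reserved
 * earlier at 'offset'. If the write pointer has wrapped past the placeholder,
 * add one full ring length so the distance stays positive.
 */
static void ring_patch_cond_exec(struct ring_buf *r, unsigned int offset)
{
	uint64_t cur;

	assert(offset <= r->buf_mask);        /* placeholder must lie inside the ring */
	assert(r->ring[offset] == 0);         /* it was reserved as a zero dword */

	cur = (r->wptr - 1) & r->buf_mask;    /* index of the last dword written */
	if (cur < offset)                     /* wrapped: count through the end */
		cur += r->buf_mask + 1;
	r->ring[offset] = cur - offset;       /* dwords covered by the cond-exec */
}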
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
549
int amdgpu_ring_test_helper(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
552
struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
554
int amdgpu_ring_init_mqd(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
572
int amdgpu_ib_schedule(struct amdgpu_ring *ring, unsigned num_ibs,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
578
bool amdgpu_ring_sched_ready(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
579
void amdgpu_ring_backup_unprocessed_commands(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
581
void amdgpu_ring_reset_helper_begin(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
583
int amdgpu_ring_reset_helper_end(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
585
bool amdgpu_ring_is_reset_type_supported(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
105
amdgpu_fence_update_start_timestamp(e->ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
109
le32_to_cpu(*(e->ring->fence_drv.cpu_addr + 2))) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
110
if (chunk->cntl_offset <= e->ring->buf_mask)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
111
amdgpu_ring_patch_cntl(e->ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
113
if (chunk->ce_offset <= e->ring->buf_mask)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
114
amdgpu_ring_patch_ce(e->ring, chunk->ce_offset);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
115
if (chunk->de_offset <= e->ring->buf_mask)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
116
amdgpu_ring_patch_de(e->ring, chunk->de_offset);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
118
amdgpu_ring_mux_copy_pkt_from_sw_ring(mux, e->ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
150
int amdgpu_ring_mux_init(struct amdgpu_ring_mux *mux, struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
153
mux->real_ring = ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
195
int amdgpu_ring_mux_add_sw_ring(struct amdgpu_ring_mux *mux, struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
205
ring->entry_index = mux->num_ring_entries;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
206
e->ring = ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
213
void amdgpu_ring_mux_set_wptr(struct amdgpu_ring_mux *mux, struct amdgpu_ring *ring, u64 wptr)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
219
if (ring->hw_prio <= AMDGPU_RING_PRIO_DEFAULT)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
222
e = amdgpu_ring_mux_sw_entry(mux, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
230
if (ring->hw_prio <= AMDGPU_RING_PRIO_DEFAULT && mux->pending_trailing_fence_signaled) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
237
if (ring->hw_prio <= AMDGPU_RING_PRIO_DEFAULT && e->sw_cptr < mux->wptr_resubmit)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
243
if (ring->hw_prio > AMDGPU_RING_PRIO_DEFAULT || mux->wptr_resubmit < wptr) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
244
amdgpu_ring_mux_copy_pkt_from_sw_ring(mux, ring, e->sw_cptr, wptr);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
253
u64 amdgpu_ring_mux_get_wptr(struct amdgpu_ring_mux *mux, struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
257
e = amdgpu_ring_mux_sw_entry(mux, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
282
u64 amdgpu_ring_mux_get_rptr(struct amdgpu_ring_mux *mux, struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
287
e = amdgpu_ring_mux_sw_entry(mux, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
305
e->sw_rptr = (e->sw_cptr + offset) & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
316
u64 amdgpu_sw_ring_get_rptr_gfx(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
318
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
321
WARN_ON(!ring->is_sw_ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
322
return amdgpu_ring_mux_get_rptr(mux, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
325
u64 amdgpu_sw_ring_get_wptr_gfx(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
327
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
330
WARN_ON(!ring->is_sw_ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
331
return amdgpu_ring_mux_get_wptr(mux, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
334
void amdgpu_sw_ring_set_wptr_gfx(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
336
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
339
WARN_ON(!ring->is_sw_ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
340
amdgpu_ring_mux_set_wptr(mux, ring, ring->wptr);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
344
void amdgpu_sw_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
346
WARN_ON(!ring->is_sw_ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
364
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
369
ring = mux->ring_entry[i].ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
370
if (ring->hw_prio > AMDGPU_RING_PRIO_DEFAULT &&
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
371
amdgpu_fence_count_emitted(ring) > 0)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
373
if (ring->hw_prio <= AMDGPU_RING_PRIO_DEFAULT &&
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
374
amdgpu_fence_last_unsignaled_time_us(ring) >
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
393
void amdgpu_sw_ring_ib_begin(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
395
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
398
WARN_ON(!ring->is_sw_ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
399
if (adev->gfx.mcbp && ring->hw_prio > AMDGPU_RING_PRIO_DEFAULT) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
405
amdgpu_ring_mux_start_ib(mux, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
408
void amdgpu_sw_ring_ib_end(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
410
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
413
WARN_ON(!ring->is_sw_ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
414
if (adev->gfx.mcbp && ring->hw_prio > AMDGPU_RING_PRIO_DEFAULT)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
416
amdgpu_ring_mux_end_ib(mux, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
419
void amdgpu_sw_ring_ib_mark_offset(struct amdgpu_ring *ring, enum amdgpu_ring_mux_offset_type type)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
421
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
425
if (ring->hw_prio > AMDGPU_RING_PRIO_DEFAULT)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
428
offset = ring->wptr & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
430
amdgpu_ring_mux_ib_mark_offset(mux, ring, offset, type);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
433
void amdgpu_ring_mux_start_ib(struct amdgpu_ring_mux *mux, struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
44
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
442
e = amdgpu_ring_mux_sw_entry(mux, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
454
chunk->start = ring->wptr;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
456
chunk->cntl_offset = ring->buf_mask + 1;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
457
chunk->de_offset = ring->buf_mask + 1;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
458
chunk->ce_offset = ring->buf_mask + 1;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
46
return ring->entry_index < mux->ring_entry_size ?
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
462
static void scan_and_remove_signaled_chunk(struct amdgpu_ring_mux *mux, struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
468
e = amdgpu_ring_mux_sw_entry(mux, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
47
&mux->ring_entry[ring->entry_index] : NULL;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
474
last_seq = atomic_read(&ring->fence_drv.last_seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
485
struct amdgpu_ring *ring, u64 offset,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
491
e = amdgpu_ring_mux_sw_entry(mux, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
519
void amdgpu_ring_mux_end_ib(struct amdgpu_ring_mux *mux, struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
52
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
524
e = amdgpu_ring_mux_sw_entry(mux, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
536
chunk->end = ring->wptr;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
537
chunk->sync_seq = READ_ONCE(ring->fence_drv.sync_seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
539
scan_and_remove_signaled_chunk(mux, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
545
struct amdgpu_ring *ring = NULL;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
556
if (e->ring->hw_prio <= AMDGPU_RING_PRIO_DEFAULT) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
557
ring = e->ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
562
if (!ring) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
567
amdgpu_fence_process(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
568
if (amdgpu_fence_count_emitted(ring) > 0) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
570
mux->seqno_to_resubmit = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
58
start = s_start & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
59
end = s_end & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
66
amdgpu_ring_alloc(real_ring, (ring->ring_size >> 2) + end - start);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
67
amdgpu_ring_write_multiple(real_ring, (void *)&ring->ring[start],
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
68
(ring->ring_size >> 2) - start);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
69
amdgpu_ring_write_multiple(real_ring, (void *)&ring->ring[0], end);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
72
amdgpu_ring_write_multiple(real_ring, (void *)&ring->ring[start], end - start);
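The mux copy path listed above (amdgpu_ring_mux.c:58-72) masks the software ring's start and end pointers and replays the span onto the real ring as one or two writes, depending on whether it wraps. A sketch of that split using the simplified helpers from the earlier example; the real code reserves space on the real ring with amdgpu_ring_alloc() before copying:

/*
 * Replay a software-ring span [s_start, s_end) onto the real ring.
 * The span is masked into the software ring first; if it wraps past the end
 * of the buffer it is emitted as two writes, otherwise as one.
 */
static void copy_pkt_from_sw_ring(struct ring_buf *real, const struct ring_buf *sw,
				  uint64_t s_start, uint64_t s_end)
{
	uint64_t start = s_start & sw->buf_mask;
	uint64_t end = s_end & sw->buf_mask;

	if (start == end)
		return;

	if (start > end) {   /* the span wraps: tail of the buffer, then the head */
		ring_write_multiple(real, &sw->ring[start],
				    (sw->buf_mask + 1) - start);
		ring_write_multiple(real, &sw->ring[0], end);
	} else {             /* contiguous span */
		ring_write_multiple(real, &sw->ring[start], end - start);
	}
}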
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
88
if (mux->ring_entry[i].ring->hw_prio <= AMDGPU_RING_PRIO_DEFAULT) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
99
last_seq = atomic_read(&e->ring->fence_drv.last_seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.h
104
int amdgpu_ring_mux_init(struct amdgpu_ring_mux *mux, struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.h
107
int amdgpu_ring_mux_add_sw_ring(struct amdgpu_ring_mux *mux, struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.h
108
void amdgpu_ring_mux_set_wptr(struct amdgpu_ring_mux *mux, struct amdgpu_ring *ring, u64 wptr);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.h
109
u64 amdgpu_ring_mux_get_wptr(struct amdgpu_ring_mux *mux, struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.h
110
u64 amdgpu_ring_mux_get_rptr(struct amdgpu_ring_mux *mux, struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.h
111
void amdgpu_ring_mux_start_ib(struct amdgpu_ring_mux *mux, struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.h
112
void amdgpu_ring_mux_end_ib(struct amdgpu_ring_mux *mux, struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.h
113
void amdgpu_ring_mux_ib_mark_offset(struct amdgpu_ring_mux *mux, struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.h
117
u64 amdgpu_sw_ring_get_rptr_gfx(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.h
118
u64 amdgpu_sw_ring_get_wptr_gfx(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.h
119
void amdgpu_sw_ring_set_wptr_gfx(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.h
120
void amdgpu_sw_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.h
121
void amdgpu_sw_ring_ib_begin(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.h
122
void amdgpu_sw_ring_ib_end(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.h
123
void amdgpu_sw_ring_ib_mark_offset(struct amdgpu_ring *ring, enum amdgpu_ring_mux_offset_type type);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.h
44
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_rlc.h
261
struct amdgpu_ring *ring, unsigned vmid);
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
355
struct amdgpu_ring *ring, *page = NULL;
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
377
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
381
ring->sched.ready = true;
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
383
ring->sched.ready = false;
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
40
struct amdgpu_sdma_instance *amdgpu_sdma_get_instance_from_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
402
struct amdgpu_ring *ring, *page = NULL;
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
416
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
42
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
420
if (ring->sched.ready)
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
46
if (ring == &adev->sdma.instance[i].ring ||
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
47
ring == &adev->sdma.instance[i].page)
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
501
struct amdgpu_ring *amdgpu_sdma_get_shared_ring(struct amdgpu_device *adev, struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
504
(ring->me < adev->sdma.num_instances) &&
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
505
(ring == &adev->sdma.instance[ring->me].ring))
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
506
return &adev->sdma.instance[ring->me].page;
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
519
bool amdgpu_sdma_is_shared_inv_eng(struct amdgpu_device *adev, struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
521
int i = ring->me;
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
529
return (ring == &adev->sdma.instance[i].page);
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
53
int amdgpu_sdma_get_index_from_ring(struct amdgpu_ring *ring, uint32_t *index)
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
55
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
558
struct amdgpu_ring *gfx_ring = &sdma_instance->ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
59
if (ring == &adev->sdma.instance[i].ring ||
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
60
ring == &adev->sdma.instance[i].page) {
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
69
uint64_t amdgpu_sdma_get_csa_mc_addr(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
72
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.c
81
r = amdgpu_sdma_get_index_from_ring(ring, &index);
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.h
195
amdgpu_sdma_get_instance_from_ring(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.h
196
int amdgpu_sdma_get_index_from_ring(struct amdgpu_ring *ring, uint32_t *index);
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.h
197
uint64_t amdgpu_sdma_get_csa_mc_addr(struct amdgpu_ring *ring, unsigned vmid);
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.h
214
bool amdgpu_sdma_is_shared_inv_eng(struct amdgpu_device *adev, struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.h
216
struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.h
59
int (*stop_kernel_queue)(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.h
60
int (*start_kernel_queue)(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.h
70
struct amdgpu_ring ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c
317
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c
331
if (ring && s_fence) {
drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c
335
if (s_fence->sched == &ring->sched) {
drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c
71
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c
73
ring = container_of(s_fence->sched, struct amdgpu_ring, sched);
drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c
74
return ring->adev == adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_sync.h
57
struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
149
__field(u32, ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
156
__entry->ring = to_amdgpu_ring(job->base.entity->rq->sched)->idx;
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
162
__entry->bo_list, __entry->ring, __entry->dw,
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
174
__string(ring, to_amdgpu_ring(job->base.sched)->name)
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
182
__assign_str(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
187
__entry->seqno, __get_str(ring), __entry->num_ibs)
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
197
__string(ring, to_amdgpu_ring(job->base.sched)->name)
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
205
__assign_str(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
210
__entry->seqno, __get_str(ring), __entry->num_ibs)
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
215
TP_PROTO(struct amdgpu_vm *vm, struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
217
TP_ARGS(vm, ring, job),
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
220
__string(ring, ring->name)
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
221
__field(u32, ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
230
__assign_str(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
232
__entry->vm_hub = ring->vm_hub,
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
237
__entry->pasid, __get_str(ring), __entry->vmid,
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
413
TP_PROTO(struct amdgpu_ring *ring, unsigned vmid,
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
415
TP_ARGS(ring, vmid, pd_addr),
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
417
__string(ring, ring->name)
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
424
__assign_str(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
426
__entry->vm_hub = ring->vm_hub;
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
430
__get_str(ring), __entry->vmid,
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
474
TP_PROTO(struct amdgpu_ring *ring, struct dma_fence *fence),
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
475
TP_ARGS(ring, fence),
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
477
__string(ring, ring->name)
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
482
__assign_str(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
485
TP_printk("ring=%s, seqno=%Lu", __get_str(ring), __entry->seqno)
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
549
__string(ring, sched_job->base.sched->name)
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
556
__assign_str(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
562
__get_str(ring), __entry->ctx, __entry->seqno)
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c
168
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c
170
ring = adev->mman.buffer_funcs_ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c
171
amdgpu_ring_pad_ib(ring, &job->ibs[0]);
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c
2303
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c
2306
ring = adev->mman.buffer_funcs_ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c
2307
sched = &ring->sched;
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c
2400
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c
2406
ring = adev->mman.buffer_funcs_ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c
2408
if (!ring->sched.ready) {
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
107
struct amdgpu_ring *ring = &umsch->ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
109
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
110
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
111
ring->no_scheduler = true;
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
112
ring->doorbell_index = (AMDGPU_NAVI10_DOORBELL64_VCN0_1 << 1) + 6;
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
114
snprintf(ring->name, sizeof(ring->name), "umsch");
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
116
return amdgpu_ring_init(adev, ring, 1024, NULL, 0, AMDGPU_RING_PRIO_DEFAULT, NULL);
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
122
struct amdgpu_device *adev = umsch->ring.adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
180
struct amdgpu_device *adev = umsch->ring.adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
212
struct amdgpu_device *adev = umsch->ring.adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
243
struct amdgpu_device *adev = umsch->ring.adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
327
adev->umsch_mm.ring.funcs = &umsch_v4_0_ring_funcs;
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
371
amdgpu_ring_fini(&adev->umsch_mm.ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
39
struct amdgpu_ring *ring = &umsch->ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
41
if (amdgpu_ring_alloc(ring, ndws))
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
44
amdgpu_ring_write_multiple(ring, pkt, ndws);
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
45
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
52
struct amdgpu_ring *ring = &umsch->ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
53
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
56
r = amdgpu_fence_wait_polling(ring, ring->fence_drv.sync_seq, adev->usec_timeout);
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
59
ring->fence_drv.sync_seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
66
static void umsch_mm_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
68
struct amdgpu_umsch_mm *umsch = (struct amdgpu_umsch_mm *)ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
69
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
71
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
72
WDOORBELL32(ring->doorbell_index, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
74
WREG32(umsch->rb_wptr, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
77
static u64 umsch_mm_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
79
struct amdgpu_umsch_mm *umsch = (struct amdgpu_umsch_mm *)ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
80
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
85
static u64 umsch_mm_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
87
struct amdgpu_umsch_mm *umsch = (struct amdgpu_umsch_mm *)ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.c
88
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_umsch_mm.h
134
struct amdgpu_ring ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
1126
static int amdgpu_uvd_send_msg(struct amdgpu_ring *ring, struct amdgpu_bo *bo,
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
1129
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
1137
r = amdgpu_job_alloc_with_ib(ring->adev, &adev->uvd.entity,
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
1146
offset = adev->reg_offset[UVD_HWIP][ring->me][1];
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
1170
r = amdgpu_job_submit_direct(job, ring, &f);
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
1202
int amdgpu_uvd_get_create_msg(struct amdgpu_ring *ring, uint32_t handle,
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
1205
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
1226
return amdgpu_uvd_send_msg(ring, bo, true, fence);
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
1230
int amdgpu_uvd_get_destroy_msg(struct amdgpu_ring *ring, uint32_t handle,
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
1233
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
1255
r = amdgpu_uvd_send_msg(ring, bo, direct, fence);
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
1272
fences += amdgpu_fence_count_emitted(&adev->uvd.inst[i].ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
1293
void amdgpu_uvd_ring_begin_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
1295
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
1315
void amdgpu_uvd_ring_end_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
1317
if (!amdgpu_sriov_vf(ring->adev))
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
1318
schedule_delayed_work(&ring->adev->uvd.idle_work, UVD_IDLE_TIMEOUT);
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
1329
int amdgpu_uvd_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
1334
r = amdgpu_uvd_get_create_msg(ring, 1, &fence);
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
1345
r = amdgpu_uvd_get_destroy_msg(ring, 1, true, &fence);
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
386
amdgpu_ring_fini(&adev->uvd.inst[j].ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
405
int amdgpu_uvd_entity_init(struct amdgpu_device *adev, struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
407
if (ring == &adev->uvd.inst[0].ring) {
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
408
struct drm_gpu_scheduler *sched = &ring->sched;
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
515
amdgpu_fence_driver_force_completion(&adev->uvd.inst[i].ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
523
struct amdgpu_ring *ring = &adev->uvd.inst[0].ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
532
r = amdgpu_uvd_get_destroy_msg(ring, handle, false,
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.h
45
struct amdgpu_ring ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.h
76
int amdgpu_uvd_entity_init(struct amdgpu_device *adev, struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.h
80
int amdgpu_uvd_get_create_msg(struct amdgpu_ring *ring, uint32_t handle,
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.h
82
int amdgpu_uvd_get_destroy_msg(struct amdgpu_ring *ring, uint32_t handle,
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.h
89
void amdgpu_uvd_ring_begin_use(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.h
90
void amdgpu_uvd_ring_end_use(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.h
91
int amdgpu_uvd_ring_test_ib(struct amdgpu_ring *ring, long timeout);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1110
void amdgpu_vce_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1115
amdgpu_ring_write(ring, VCE_CMD_IB);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1116
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1117
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1118
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1130
void amdgpu_vce_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1135
amdgpu_ring_write(ring, VCE_CMD_FENCE);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1136
amdgpu_ring_write(ring, addr);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1137
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1138
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1139
amdgpu_ring_write(ring, VCE_CMD_TRAP);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1140
amdgpu_ring_write(ring, VCE_CMD_END);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1149
int amdgpu_vce_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1151
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1160
r = amdgpu_ring_alloc(ring, 16);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1164
rptr = amdgpu_ring_get_rptr(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1166
amdgpu_ring_write(ring, VCE_CMD_END);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1167
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1170
if (amdgpu_ring_get_rptr(ring) != rptr)
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1188
int amdgpu_vce_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1194
if (ring != &ring->adev->vce.ring[0])
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1197
r = amdgpu_vce_get_create_msg(ring, 1, NULL);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1201
r = amdgpu_vce_get_destroy_msg(ring, 1, true, &fence);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1216
enum amdgpu_ring_priority_level amdgpu_vce_get_ring_prio(int ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
1218
switch (ring) {
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
259
amdgpu_ring_fini(&adev->vce.ring[i]);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
278
int amdgpu_vce_entity_init(struct amdgpu_device *adev, struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
280
if (ring == &adev->vce.ring[0]) {
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
281
struct drm_gpu_scheduler *sched = &ring->sched;
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
363
count += amdgpu_fence_count_emitted(&adev->vce.ring[i]);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
387
void amdgpu_vce_ring_begin_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
389
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
419
void amdgpu_vce_ring_end_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
421
if (!amdgpu_sriov_vf(ring->adev))
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
422
schedule_delayed_work(&ring->adev->vce.idle_work, VCE_IDLE_TIMEOUT);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
435
struct amdgpu_ring *ring = &adev->vce.ring[0];
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
444
r = amdgpu_vce_get_destroy_msg(ring, handle, false, NULL);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
480
static int amdgpu_vce_get_create_msg(struct amdgpu_ring *ring, uint32_t handle,
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
491
r = amdgpu_job_alloc_with_ib(ring->adev, &ring->adev->vce.entity,
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
500
r = amdgpu_ib_get(ring->adev, NULL, AMDGPU_GPU_PAGE_SIZE * 2,
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
516
if ((ring->adev->vce.fw_version >> 24) >= 52)
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
531
if ((ring->adev->vce.fw_version >> 24) >= 52) {
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
547
r = amdgpu_job_submit_direct(job, ring, &f);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
572
static int amdgpu_vce_get_destroy_msg(struct amdgpu_ring *ring, uint32_t handle,
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
581
r = amdgpu_job_alloc_with_ib(ring->adev, &ring->adev->vce.entity,
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
614
r = amdgpu_job_submit_direct(job, ring, &f);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
91
static int amdgpu_vce_get_create_msg(struct amdgpu_ring *ring, uint32_t handle,
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
93
static int amdgpu_vce_get_destroy_msg(struct amdgpu_ring *ring, uint32_t handle,
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.h
48
struct amdgpu_ring ring[AMDGPU_MAX_VCE_RINGS];
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.h
60
int amdgpu_vce_entity_init(struct amdgpu_device *adev, struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.h
70
void amdgpu_vce_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job,
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.h
72
void amdgpu_vce_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.h
74
int amdgpu_vce_ring_test_ring(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.h
75
int amdgpu_vce_ring_test_ib(struct amdgpu_ring *ring, long timeout);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.h
76
void amdgpu_vce_ring_begin_use(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.h
77
void amdgpu_vce_ring_end_use(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.h
78
unsigned amdgpu_vce_ring_get_emit_ib_size(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.h
79
unsigned amdgpu_vce_ring_get_dma_frame_size(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.h
80
enum amdgpu_ring_priority_level amdgpu_vce_get_ring_prio(int ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1003
if (adev->vcn.inst[ring->me].using_unified_queue)
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1006
r = amdgpu_job_alloc_with_ib(ring->adev, NULL, NULL,
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1017
if (adev->vcn.inst[ring->me].using_unified_queue)
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1039
if (adev->vcn.inst[ring->me].using_unified_queue)
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1042
r = amdgpu_job_submit_direct(job, ring, &f);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1057
int amdgpu_vcn_enc_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1059
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1071
r = amdgpu_vcn_enc_get_create_msg(ring, 1, &ib, NULL);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1075
r = amdgpu_vcn_enc_get_destroy_msg(ring, 1, &ib, &fence);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1092
int amdgpu_vcn_unified_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1094
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1099
r = amdgpu_vcn_enc_ring_test_ib(ring, timeout);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1104
r = amdgpu_vcn_dec_sw_ring_test_ib(ring, timeout);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1110
enum amdgpu_ring_priority_level amdgpu_vcn_get_enc_ring_prio(int ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1112
switch (ring) {
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1402
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1411
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1413
ring->sched.ready = true;
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1415
ring->sched.ready = false;
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1427
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1432
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1433
if (ring->sched.ready)
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1546
int amdgpu_vcn_ring_reset(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1550
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1552
if (adev->vcn.inst[ring->me].using_unified_queue)
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
1555
return amdgpu_vcn_reset_engine(adev, ring->me);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
501
void amdgpu_vcn_ring_begin_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
503
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
504
struct amdgpu_vcn_inst *vcn_inst = &adev->vcn.inst[ring->me];
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
518
if (ring->funcs->type == AMDGPU_RING_TYPE_VCN_ENC) {
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
540
void amdgpu_vcn_ring_end_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
542
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
545
if (ring->adev->pg_flags & AMD_PG_SUPPORT_VCN_DPG &&
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
546
ring->funcs->type == AMDGPU_RING_TYPE_VCN_ENC &&
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
547
!adev->vcn.inst[ring->me].using_unified_queue)
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
548
atomic_dec(&ring->adev->vcn.inst[ring->me].dpg_enc_submission_cnt);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
550
atomic_dec(&ring->adev->vcn.inst[ring->me].total_submission_cnt);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
552
schedule_delayed_work(&ring->adev->vcn.inst[ring->me].idle_work,
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
556
int amdgpu_vcn_dec_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
558
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
567
WREG32(adev->vcn.inst[ring->me].external.scratch9, 0xCAFEDEAD);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
568
r = amdgpu_ring_alloc(ring, 3);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
571
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.scratch9, 0));
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
572
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
573
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
575
tmp = RREG32(adev->vcn.inst[ring->me].external.scratch9);
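The VCN decode ring test listed above (amdgpu_vcn.c:556-575) follows the common amdgpu smoke-test pattern: seed a scratch register with a poison value, push a register-write packet through the ring, then poll the register until the new value appears. A loose, hypothetical sketch of that flow; the register accessors, packet word, and timeout handling here are placeholders, not the real VCN interface:

#include <errno.h>

#define POISON  0xCAFEDEAD
#define PATTERN 0xDEADBEEF

/* Hypothetical simplified ring smoke test built on the ring_buf sketch above. */
static int ring_smoke_test(struct ring_buf *r,
			   void (*reg_write)(uint32_t reg, uint32_t val),
			   uint32_t (*reg_read)(uint32_t reg),
			   uint32_t scratch_reg, uint32_t scratch_pkt,
			   unsigned int timeout_us)
{
	unsigned int i;

	reg_write(scratch_reg, POISON);     /* seed the scratch register */

	ring_write(r, scratch_pkt);         /* packet addressing the scratch register */
	ring_write(r, PATTERN);             /* payload the engine should land there */
	/* a real test would commit the ring and ring the doorbell here */

	for (i = 0; i < timeout_us; i++) {  /* poll until the engine writes it back */
		if (reg_read(scratch_reg) == PATTERN)
			return 0;
		/* udelay(1) in the real code */
	}
	return -ETIMEDOUT;
}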
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
587
int amdgpu_vcn_dec_sw_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
589
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
597
r = amdgpu_ring_alloc(ring, 16);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
601
rptr = amdgpu_ring_get_rptr(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
603
amdgpu_ring_write(ring, VCN_DEC_SW_CMD_END);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
604
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
607
if (amdgpu_ring_get_rptr(ring) != rptr)
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
618
static int amdgpu_vcn_dec_send_msg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
623
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
629
r = amdgpu_job_alloc_with_ib(ring->adev, NULL, NULL,
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
636
ib->ptr[0] = PACKET0(adev->vcn.inst[ring->me].internal.data0, 0);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
638
ib->ptr[2] = PACKET0(adev->vcn.inst[ring->me].internal.data1, 0);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
640
ib->ptr[4] = PACKET0(adev->vcn.inst[ring->me].internal.cmd, 0);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
643
ib->ptr[i] = PACKET0(adev->vcn.inst[ring->me].internal.nop, 0);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
648
r = amdgpu_job_submit_direct(job, ring, &f);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
667
static int amdgpu_vcn_dec_get_create_msg(struct amdgpu_ring *ring, uint32_t handle,
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
670
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
702
static int amdgpu_vcn_dec_get_destroy_msg(struct amdgpu_ring *ring, uint32_t handle,
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
705
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
729
int amdgpu_vcn_dec_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
735
r = amdgpu_vcn_dec_get_create_msg(ring, 1, &ib);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
739
r = amdgpu_vcn_dec_send_msg(ring, &ib, NULL);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
742
r = amdgpu_vcn_dec_get_destroy_msg(ring, 1, &ib);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
746
r = amdgpu_vcn_dec_send_msg(ring, &ib, &fence);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
791
static int amdgpu_vcn_dec_sw_send_msg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
797
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
806
if (adev->vcn.inst[ring->me].using_unified_queue)
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
809
r = amdgpu_job_alloc_with_ib(ring->adev, NULL, NULL,
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
819
if (adev->vcn.inst[ring->me].using_unified_queue) {
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
838
if (adev->vcn.inst[ring->me].using_unified_queue)
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
841
r = amdgpu_job_submit_direct(job, ring, &f);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
860
int amdgpu_vcn_dec_sw_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
866
r = amdgpu_vcn_dec_get_create_msg(ring, 1, &ib);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
870
r = amdgpu_vcn_dec_sw_send_msg(ring, &ib, NULL);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
873
r = amdgpu_vcn_dec_get_destroy_msg(ring, 1, &ib);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
877
r = amdgpu_vcn_dec_sw_send_msg(ring, &ib, &fence);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
892
int amdgpu_vcn_enc_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
894
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
902
r = amdgpu_ring_alloc(ring, 16);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
906
rptr = amdgpu_ring_get_rptr(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
908
amdgpu_ring_write(ring, VCN_ENC_CMD_END);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
909
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
912
if (amdgpu_ring_get_rptr(ring) != rptr)
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
923
static int amdgpu_vcn_enc_get_create_msg(struct amdgpu_ring *ring, uint32_t handle,
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
928
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
936
if (adev->vcn.inst[ring->me].using_unified_queue)
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
939
r = amdgpu_job_alloc_with_ib(ring->adev, NULL, NULL,
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
950
if (adev->vcn.inst[ring->me].using_unified_queue)
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
972
if (adev->vcn.inst[ring->me].using_unified_queue)
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
975
r = amdgpu_job_submit_direct(job, ring, &f);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
990
static int amdgpu_vcn_enc_get_destroy_msg(struct amdgpu_ring *ring, uint32_t handle,
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
995
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.h
524
void amdgpu_vcn_ring_begin_use(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.h
525
void amdgpu_vcn_ring_end_use(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.h
530
int amdgpu_vcn_dec_ring_test_ring(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.h
531
int amdgpu_vcn_dec_ring_test_ib(struct amdgpu_ring *ring, long timeout);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.h
532
int amdgpu_vcn_dec_sw_ring_test_ring(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.h
533
int amdgpu_vcn_dec_sw_ring_test_ib(struct amdgpu_ring *ring, long timeout);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.h
534
int amdgpu_vcn_unified_ring_test_ib(struct amdgpu_ring *ring, long timeout);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.h
536
int amdgpu_vcn_enc_ring_test_ring(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.h
537
int amdgpu_vcn_enc_ring_test_ib(struct amdgpu_ring *ring, long timeout);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.h
539
enum amdgpu_ring_priority_level amdgpu_vcn_get_enc_ring_prio(int ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.h
563
int amdgpu_vcn_ring_reset(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_virt.c
1208
adev->mes.ring[0].sched.ready = false;
drivers/gpu/drm/amd/amdgpu/amdgpu_virt.c
1803
struct amdgpu_ring *ring = &adev->cper.ring_buf;
drivers/gpu/drm/amd/amdgpu/amdgpu_virt.c
1837
amdgpu_cper_ring_write(ring, entry, entry->record_length);
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
702
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
719
ring = adev->rings[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
720
if (ring->funcs->type == AMDGPU_RING_TYPE_COMPUTE)
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
722
ring->has_compute_vm_bug = has_compute_vm_bug;
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
724
ring->has_compute_vm_bug = false;
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
737
bool amdgpu_vm_need_pipeline_sync(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
740
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
741
unsigned vmhub = ring->vm_hub;
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
747
if (job->vm_needs_flush || ring->has_compute_vm_bug)
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
750
if (ring->funcs->emit_gds_switch && job->gds_switch_needed)
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
771
int amdgpu_vm_flush(struct amdgpu_ring *ring, struct amdgpu_job *job,
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
774
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
775
struct amdgpu_isolation *isolation = &adev->isolation[ring->xcp_id];
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
776
unsigned vmhub = ring->vm_hub;
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
780
bool gds_switch_needed = ring->funcs->emit_gds_switch &&
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
802
gds_switch_needed &= !!ring->funcs->emit_gds_switch;
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
803
vm_flush_needed &= !!ring->funcs->emit_vm_flush &&
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
806
ring->funcs->emit_wreg;
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
810
ring->funcs->emit_cleaner_shader && job->base.s_fence &&
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
817
amdgpu_ring_ib_begin(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
818
if (ring->funcs->init_cond_exec)
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
819
patch = amdgpu_ring_init_cond_exec(ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
820
ring->cond_exe_gpu_addr);
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
823
amdgpu_ring_emit_pipeline_sync(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
826
ring->funcs->emit_cleaner_shader(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
829
trace_amdgpu_vm_flush(ring, job->vmid, job->vm_pd_addr);
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
830
amdgpu_ring_emit_vm_flush(ring, job->vmid, job->vm_pd_addr);
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
834
amdgpu_gmc_emit_pasid_mapping(ring, job->vmid, job->pasid);
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
837
adev->gfx.rlc.funcs->update_spm_vmid(adev, ring->xcc_id, ring, job->vmid);
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
839
if (ring->funcs->emit_gds_switch &&
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
841
amdgpu_ring_emit_gds_switch(ring, job->vmid, job->gds_base,
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
848
r = amdgpu_fence_emit(ring, job->hw_vm_fence, 0);
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
878
trace_amdgpu_cleaner_shader(ring, fence);
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
886
amdgpu_ring_patch_cond_exec(ring, patch);
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
889
if (ring->funcs->emit_switch_buffer) {
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
890
amdgpu_ring_emit_switch_buffer(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
891
amdgpu_ring_emit_switch_buffer(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
894
amdgpu_ring_ib_end(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h
518
int amdgpu_vm_flush(struct amdgpu_ring *ring, struct amdgpu_job *job, bool need_pipe_sync);
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h
579
bool amdgpu_vm_need_pipeline_sync(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c
110
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c
113
ring = container_of(p->vm->delayed.rq->sched, struct amdgpu_ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c
117
amdgpu_ring_pad_ib(ring, ib);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
123
struct amdgpu_device *adev = vpe->ring.adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
233
struct amdgpu_device *adev = vpe->ring.adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
275
struct amdgpu_ring *ring = &vpe->ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
278
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
279
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
280
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
281
ring->doorbell_index = (adev->doorbell_index.vpe_ring << 1);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
282
snprintf(ring->name, 4, "vpe");
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
284
ret = amdgpu_ring_init(adev, ring, 1024, &vpe->trap_irq, 0,
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
294
amdgpu_ring_fini(&vpe->ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
351
fences += amdgpu_fence_count_emitted(&adev->vpe.ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
409
amdgpu_get_soft_full_reset_mask(&adev->vpe.ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
485
static void vpe_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
491
amdgpu_ring_write(ring, ring->funcs->nop |
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
494
amdgpu_ring_write(ring, ring->funcs->nop);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
497
static uint64_t vpe_get_csa_mc_addr(struct amdgpu_ring *ring, uint32_t vmid)
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
499
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
512
static void vpe_ring_emit_pred_exec(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
516
if (!ring->adev->vpe.collaborate_mode)
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
519
amdgpu_ring_write(ring, VPE_CMD_HEADER(VPE_CMD_OPCODE_PRED_EXE, 0) |
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
521
amdgpu_ring_write(ring, exec_count & 0x1fff);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
524
static void vpe_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
530
uint64_t csa_mc_addr = vpe_get_csa_mc_addr(ring, vmid);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
532
amdgpu_ring_write(ring, VPE_CMD_HEADER(VPE_CMD_OPCODE_INDIRECT, 0) |
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
536
amdgpu_ring_write(ring, ib->gpu_addr & 0xffffffe0);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
537
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
538
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
539
amdgpu_ring_write(ring, lower_32_bits(csa_mc_addr));
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
540
amdgpu_ring_write(ring, upper_32_bits(csa_mc_addr));
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
543
static void vpe_ring_emit_fence(struct amdgpu_ring *ring, uint64_t addr,
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
550
amdgpu_ring_write(ring, VPE_CMD_HEADER(VPE_CMD_OPCODE_FENCE, 0));
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
553
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
554
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
555
amdgpu_ring_write(ring, i == 0 ? lower_32_bits(seq) : upper_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
561
amdgpu_ring_write(ring, VPE_CMD_HEADER(VPE_CMD_OPCODE_TRAP, 0));
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
562
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
567
static void vpe_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
569
uint32_t seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
570
uint64_t addr = ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
572
vpe_ring_emit_pred_exec(ring, 0, 6);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
575
amdgpu_ring_write(ring, VPE_CMD_HEADER(VPE_CMD_OPCODE_POLL_REGMEM,
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
579
amdgpu_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
580
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
581
amdgpu_ring_write(ring, seq); /* reference */
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
582
amdgpu_ring_write(ring, 0xffffffff); /* mask */
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
583
amdgpu_ring_write(ring, VPE_CMD_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
587
static void vpe_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val)
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
589
vpe_ring_emit_pred_exec(ring, 0, 3);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
591
amdgpu_ring_write(ring, VPE_CMD_HEADER(VPE_CMD_OPCODE_REG_WRITE, 0));
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
592
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
593
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
596
static void vpe_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
599
vpe_ring_emit_pred_exec(ring, 0, 6);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
601
amdgpu_ring_write(ring, VPE_CMD_HEADER(VPE_CMD_OPCODE_POLL_REGMEM,
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
605
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
606
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
607
amdgpu_ring_write(ring, val); /* reference */
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
608
amdgpu_ring_write(ring, mask); /* mask */
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
609
amdgpu_ring_write(ring, VPE_CMD_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
613
static void vpe_ring_emit_vm_flush(struct amdgpu_ring *ring, unsigned int vmid,
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
616
amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
619
static unsigned int vpe_ring_init_cond_exec(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
624
amdgpu_ring_write(ring, VPE_CMD_HEADER(VPE_CMD_OPCODE_COND_EXE, 0));
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
625
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
626
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
627
amdgpu_ring_write(ring, 1);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
628
ret = ring->wptr & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
629
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
634
static int vpe_ring_preempt_ib(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
636
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
642
amdgpu_ring_set_preempt_cond_exec(ring, false);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
645
ring->trail_seq += 1;
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
646
amdgpu_ring_alloc(ring, 10);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
647
vpe_ring_emit_fence(ring, ring->trail_fence_gpu_addr, ring->trail_seq, 0);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
648
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
651
WREG32(vpe_get_reg_offset(vpe, ring->me, preempt_reg), 1);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
655
if (ring->trail_seq ==
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
656
le32_to_cpu(*(ring->trail_fence_cpu_addr)))
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
663
dev_err(adev->dev, "ring %d failed to be preempted\n", ring->idx);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
667
WREG32(vpe_get_reg_offset(vpe, ring->me, preempt_reg), 0);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
670
amdgpu_ring_set_preempt_cond_exec(ring, true);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
702
static uint64_t vpe_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
704
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
708
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
709
rptr = atomic64_read((atomic64_t *)ring->rptr_cpu_addr);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
712
rptr = RREG32(vpe_get_reg_offset(vpe, ring->me, vpe->regs.queue0_rb_rptr_hi));
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
714
rptr |= RREG32(vpe_get_reg_offset(vpe, ring->me, vpe->regs.queue0_rb_rptr_lo));
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
715
dev_dbg(adev->dev, "rptr before shift [%i] == 0x%016llx\n", ring->me, rptr);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
721
static uint64_t vpe_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
723
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
727
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
728
wptr = atomic64_read((atomic64_t *)ring->wptr_cpu_addr);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
731
wptr = RREG32(vpe_get_reg_offset(vpe, ring->me, vpe->regs.queue0_rb_wptr_hi));
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
733
wptr |= RREG32(vpe_get_reg_offset(vpe, ring->me, vpe->regs.queue0_rb_wptr_lo));
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
734
dev_dbg(adev->dev, "wptr before shift [%i] == 0x%016llx\n", ring->me, wptr);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
740
static void vpe_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
742
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
745
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
750
ring->wptr_offs,
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
751
lower_32_bits(ring->wptr << 2),
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
752
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
753
atomic64_set((atomic64_t *)ring->wptr_cpu_addr, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
754
WDOORBELL64(ring->doorbell_index, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
756
WDOORBELL64(ring->doorbell_index + 4, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
764
lower_32_bits(ring->wptr << 2),
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
765
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
767
lower_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
769
upper_32_bits(ring->wptr << 2));
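The VPE get/set write-pointer entries keep ring->wptr in dwords and shift it left by two to form the byte offset published through the doorbell or the RB_WPTR registers. The sketch below shows that conversion and the lower/upper 32-bit split, assuming nothing beyond the shift shown in the listing; lo32/hi32 are local stand-ins, not the kernel helpers.

/* Hedged sketch: dword-to-byte conversion (wptr << 2) and the 32/32 split
 * used when the VPE write pointer is published. Values are invented. */
#include <stdint.h>
#include <stdio.h>

static uint32_t lo32(uint64_t v) { return (uint32_t)v; }
static uint32_t hi32(uint64_t v) { return (uint32_t)(v >> 32); }

int main(void)
{
	uint64_t wptr_dw = 0x40000001ULL;        /* write pointer, in dwords    */
	uint64_t wptr_bytes = wptr_dw << 2;      /* doorbell takes a byte offset */

	printf("doorbell lo=0x%08x hi=0x%08x\n",
	       lo32(wptr_bytes), hi32(wptr_bytes));
	return 0;
}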
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
774
static int vpe_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
776
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
791
ret = amdgpu_ring_alloc(ring, 4);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
793
dev_err(adev->dev, "dma failed to lock ring %d (%d).\n", ring->idx, ret);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
797
amdgpu_ring_write(ring, VPE_CMD_HEADER(VPE_CMD_OPCODE_FENCE, 0));
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
798
amdgpu_ring_write(ring, lower_32_bits(wb_addr));
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
799
amdgpu_ring_write(ring, upper_32_bits(wb_addr));
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
800
amdgpu_ring_write(ring, test_pattern);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
801
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
816
static int vpe_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
818
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
849
ret = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
870
static void vpe_ring_begin_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
872
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
895
static void vpe_ring_end_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
897
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
902
static int vpe_ring_reset(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
906
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
909
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
920
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.c
996
adev->vpe.ring.funcs = &vpe_ring_funcs;
drivers/gpu/drm/amd/amdgpu/amdgpu_vpe.h
64
struct amdgpu_ring ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
469
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
476
ring = to_amdgpu_ring(entity->entity.rq->sched);
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
477
atomic_dec(&adev->xcp_mgr->xcp[ring->xcp_id].ref_cnt);
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
524
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
530
ring->xcp_id = AMDGPU_XCP_NO_PARTITION;
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
531
if (ring->funcs->type == AMDGPU_RING_TYPE_COMPUTE)
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
532
adev->gfx.enforce_isolation[0].xcp_id = ring->xcp_id;
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
534
(ring->funcs->type == AMDGPU_RING_TYPE_CPER))
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
539
switch (ring->funcs->type) {
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
554
dev_err(adev->dev, "Not support ring type %d!", ring->funcs->type);
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
560
ring->xcp_id = xcp_id;
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
561
dev_dbg(adev->dev, "ring:%s xcp_id :%u", ring->name,
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
562
ring->xcp_id);
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
563
if (ring->funcs->type == AMDGPU_RING_TYPE_COMPUTE)
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
571
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
577
.gpu_sched[ring->funcs->type][ring->hw_prio].num_scheds;
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
578
adev->xcp_mgr->xcp[sel_xcp_id].gpu_sched[ring->funcs->type][ring->hw_prio]
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
579
.sched[(*num_gpu_sched)++] = &ring->sched;
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
581
ring->name, sel_xcp_id, ring->funcs->type,
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
582
ring->hw_prio, *num_gpu_sched);
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
587
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
599
ring = adev->rings[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
600
if (!ring || !ring->sched.ready || ring->no_scheduler)
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
603
amdgpu_xcp_gpu_sched_update(adev, ring, ring->xcp_id);
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
608
if ((ring->funcs->type == AMDGPU_RING_TYPE_VCN_ENC ||
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
609
ring->funcs->type == AMDGPU_RING_TYPE_VCN_JPEG) &&
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
611
amdgpu_xcp_gpu_sched_update(adev, ring, ring->xcp_id + 1);
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
622
struct amdgpu_ring *ring = adev->rings[i];
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
624
if (ring->funcs->type == AMDGPU_RING_TYPE_COMPUTE ||
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
625
ring->funcs->type == AMDGPU_RING_TYPE_KIQ)
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
626
amdgpu_set_xcp_id(adev, ring->xcc_id, ring);
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
628
amdgpu_set_xcp_id(adev, ring->me, ring);
drivers/gpu/drm/amd/amdgpu/cik.c
1858
static void cik_flush_hdp(struct amdgpu_device *adev, struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/cik.c
1860
if (!ring || !ring->funcs->emit_wreg) {
drivers/gpu/drm/amd/amdgpu/cik.c
1864
amdgpu_ring_emit_wreg(ring, mmHDP_MEM_COHERENCY_FLUSH_CNTL, 1);
drivers/gpu/drm/amd/amdgpu/cik.c
1869
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/cik.c
1871
if (!ring || !ring->funcs->emit_wreg) {
drivers/gpu/drm/amd/amdgpu/cik.c
1875
amdgpu_ring_emit_wreg(ring, mmHDP_DEBUG0, 1);
drivers/gpu/drm/amd/amdgpu/cik_ih.c
264
dw[0] = le32_to_cpu(ih->ring[ring_index + 0]);
drivers/gpu/drm/amd/amdgpu/cik_ih.c
265
dw[1] = le32_to_cpu(ih->ring[ring_index + 1]);
drivers/gpu/drm/amd/amdgpu/cik_ih.c
266
dw[2] = le32_to_cpu(ih->ring[ring_index + 2]);
drivers/gpu/drm/amd/amdgpu/cik_ih.c
267
dw[3] = le32_to_cpu(ih->ring[ring_index + 3]);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
1144
amdgpu_fence_process(&adev->sdma.instance[0].ring);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
1157
amdgpu_fence_process(&adev->sdma.instance[1].ring);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
1180
drm_sched_fault(&adev->sdma.instance[instance_id].ring.sched);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
1253
adev->sdma.instance[i].ring.funcs = &cik_sdma_ring_funcs;
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
1254
adev->sdma.instance[i].ring.me = i;
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
1337
adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring;
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
1355
&adev->sdma.instance[i].ring.sched;
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
161
static uint64_t cik_sdma_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
165
rptr = *ring->rptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
177
static uint64_t cik_sdma_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
179
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
181
return (RREG32(mmSDMA0_GFX_RB_WPTR + sdma_offsets[ring->me]) & 0x3fffc) >> 2;
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
191
static void cik_sdma_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
193
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
195
WREG32(mmSDMA0_GFX_RB_WPTR + sdma_offsets[ring->me],
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
196
(ring->wptr << 2) & 0x3fffc);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
199
static void cik_sdma_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
201
struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
206
amdgpu_ring_write(ring, ring->funcs->nop |
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
209
amdgpu_ring_write(ring, ring->funcs->nop);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
222
static void cik_sdma_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
231
cik_sdma_ring_insert_nop(ring, (4 - lower_32_bits(ring->wptr)) & 7);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
233
amdgpu_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_INDIRECT_BUFFER, 0, extra_bits));
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
234
amdgpu_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
235
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xffffffff);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
236
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
247
static void cik_sdma_ring_emit_hdp_flush(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
253
if (ring->me == 0)
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
258
amdgpu_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_POLL_REG_MEM, 0, extra_bits));
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
259
amdgpu_ring_write(ring, mmGPU_HDP_FLUSH_DONE << 2);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
260
amdgpu_ring_write(ring, mmGPU_HDP_FLUSH_REQ << 2);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
261
amdgpu_ring_write(ring, ref_and_mask); /* reference */
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
262
amdgpu_ring_write(ring, ref_and_mask); /* mask */
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
263
amdgpu_ring_write(ring, (0xfff << 16) | 10); /* retry count, poll interval */
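The cik_sdma HDP-flush entry ends its POLL_REG_MEM packet with (0xfff << 16) | 10, which the inline comment describes as a retry count and a poll interval packed into one dword. The sketch below packs and unpacks that word, taking the field split from that comment rather than from a register specification.

/* Hedged sketch: packing the "retry count, poll interval" dword that ends the
 * SDMA POLL_REG_MEM packet above. Field widths are assumptions. */
#include <stdint.h>
#include <stdio.h>

static uint32_t poll_word(uint32_t retry, uint32_t interval)
{
	return ((retry & 0xfff) << 16) | (interval & 0xffff);
}

int main(void)
{
	uint32_t w = poll_word(0xfff, 10);       /* matches the HDP flush entry */

	printf("word=0x%08x retry=%u interval=%u\n",
	       w, (w >> 16) & 0xfff, w & 0xffff);
	return 0;
}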
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
278
static void cik_sdma_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
283
amdgpu_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_FENCE, 0, 0));
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
284
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
285
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
286
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
291
amdgpu_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_FENCE, 0, 0));
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
292
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
293
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
294
amdgpu_ring_write(ring, upper_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
298
amdgpu_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_TRAP, 0, 0));
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
428
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
434
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
454
rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
470
upper_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFF);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
472
((ring->rptr_gpu_addr) & 0xFFFFFFFC));
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
476
WREG32(mmSDMA0_GFX_RB_BASE + sdma_offsets[i], ring->gpu_addr >> 8);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
477
WREG32(mmSDMA0_GFX_RB_BASE_HI + sdma_offsets[i], ring->gpu_addr >> 40);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
479
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
480
WREG32(mmSDMA0_GFX_RB_WPTR + sdma_offsets[i], ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
497
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
498
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
600
static int cik_sdma_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
602
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
617
r = amdgpu_ring_alloc(ring, 5);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
621
amdgpu_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_WRITE, SDMA_WRITE_SUB_OPCODE_LINEAR, 0));
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
622
amdgpu_ring_write(ring, lower_32_bits(gpu_addr));
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
623
amdgpu_ring_write(ring, upper_32_bits(gpu_addr));
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
624
amdgpu_ring_write(ring, 1); /* number of DWs to follow */
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
625
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
626
amdgpu_ring_commit(ring);
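The cik_sdma_ring_test_ring entries emit five dwords: a linear WRITE packet header, the destination address split into low and high halves, a count of data dwords, and the 0xDEADBEEF test pattern the CPU later polls for. The sketch below assembles that sequence into an array; the opcode values and the header field layout are placeholders, not the real SDMA encoding.

/* Hedged sketch: the five dwords of the cik_sdma ring-test write packet.
 * Opcode values and the packet header layout are invented placeholders. */
#include <stdint.h>
#include <stdio.h>

#define FAKE_OP_WRITE        2u   /* placeholder, not the real SDMA opcode   */
#define FAKE_SUB_OP_LINEAR   0u   /* placeholder sub-opcode                  */

static uint32_t sdma_pkt(uint32_t op, uint32_t sub, uint32_t extra)
{
	return (extra << 16) | (sub << 8) | op;   /* assumed field layout       */
}

int main(void)
{
	uint64_t gpu_addr = 0x0000000123457000ULL;  /* made-up writeback address */
	uint32_t ib[5];
	int i;

	ib[0] = sdma_pkt(FAKE_OP_WRITE, FAKE_SUB_OP_LINEAR, 0);
	ib[1] = (uint32_t)gpu_addr;                 /* lower_32_bits(gpu_addr)   */
	ib[2] = (uint32_t)(gpu_addr >> 32);         /* upper_32_bits(gpu_addr)   */
	ib[3] = 1;                                  /* number of DWs to follow   */
	ib[4] = 0xDEADBEEF;                         /* test pattern to poll for  */

	for (i = 0; i < 5; i++)
		printf("dw%d = 0x%08x\n", i, ib[i]);
	return 0;
}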
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
652
static int cik_sdma_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
654
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
682
r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
798
static void cik_sdma_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib)
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
800
struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
822
static void cik_sdma_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
824
uint32_t seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
825
uint64_t addr = ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
828
amdgpu_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_POLL_REG_MEM, 0,
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
832
amdgpu_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
833
amdgpu_ring_write(ring, upper_32_bits(addr) & 0xffffffff);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
834
amdgpu_ring_write(ring, seq); /* reference */
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
835
amdgpu_ring_write(ring, 0xffffffff); /* mask */
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
836
amdgpu_ring_write(ring, (0xfff << 16) | 4); /* retry count, poll interval */
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
849
static void cik_sdma_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
855
amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
857
amdgpu_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_POLL_REG_MEM, 0, extra_bits));
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
858
amdgpu_ring_write(ring, mmVM_INVALIDATE_REQUEST << 2);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
859
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
860
amdgpu_ring_write(ring, 0); /* reference */
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
861
amdgpu_ring_write(ring, 0); /* mask */
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
862
amdgpu_ring_write(ring, (0xfff << 16) | 10); /* retry count, poll interval */
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
865
static void cik_sdma_ring_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
868
amdgpu_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_SRBM_WRITE, 0, 0xf000));
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
869
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
870
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
943
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
966
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
967
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
968
sprintf(ring->name, "sdma%d", i);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
969
r = amdgpu_ring_init(adev, ring, 1024,
drivers/gpu/drm/amd/amdgpu/cik_sdma.c
987
amdgpu_ring_fini(&adev->sdma.instance[i].ring);
drivers/gpu/drm/amd/amdgpu/cz_ih.c
253
dw[0] = le32_to_cpu(ih->ring[ring_index + 0]);
drivers/gpu/drm/amd/amdgpu/cz_ih.c
254
dw[1] = le32_to_cpu(ih->ring[ring_index + 1]);
drivers/gpu/drm/amd/amdgpu/cz_ih.c
255
dw[2] = le32_to_cpu(ih->ring[ring_index + 2]);
drivers/gpu/drm/amd/amdgpu/cz_ih.c
256
dw[3] = le32_to_cpu(ih->ring[ring_index + 3]);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
3693
static void gfx_v10_0_ring_emit_ce_meta(struct amdgpu_ring *ring, bool resume);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
3694
static void gfx_v10_0_ring_emit_de_meta(struct amdgpu_ring *ring, bool resume);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
3695
static void gfx_v10_0_ring_emit_frame_cntl(struct amdgpu_ring *ring, bool start, bool secure);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
3699
static void gfx_v10_0_ring_invalidate_tlbs(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
3727
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
3729
uint64_t mqd_addr = amdgpu_bo_gpu_offset(ring->mqd_obj);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
3730
uint64_t wptr_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
3733
switch (ring->funcs->type) {
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
3752
PACKET3_MAP_QUEUES_QUEUE(ring->queue) |
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
3753
PACKET3_MAP_QUEUES_PIPE(ring->pipe) |
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
3754
PACKET3_MAP_QUEUES_ME((ring->me == 1 ? 0 : 1)) |
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
3759
amdgpu_ring_write(kiq_ring, PACKET3_MAP_QUEUES_DOORBELL_OFFSET(ring->doorbell_index));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
3767
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
3771
uint32_t eng_sel = ring->funcs->type == AMDGPU_RING_TYPE_GFX ? 4 : 0;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
3780
PACKET3_UNMAP_QUEUES_DOORBELL_OFFSET0(ring->doorbell_index));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
3794
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
3798
uint32_t eng_sel = ring->funcs->type == AMDGPU_RING_TYPE_GFX ? 4 : 0;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
3806
PACKET3_QUERY_STATUS_DOORBELL_OFFSET(ring->doorbell_index) |
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4000
static void gfx_v10_0_write_data_to_reg(struct amdgpu_ring *ring, int eng_sel,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4003
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4004
amdgpu_ring_write(ring, WRITE_DATA_ENGINE_SEL(eng_sel) |
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4006
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4007
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4008
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4011
static void gfx_v10_0_wait_reg_mem(struct amdgpu_ring *ring, int eng_sel,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4016
amdgpu_ring_write(ring, PACKET3(PACKET3_WAIT_REG_MEM, 5));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4017
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4026
amdgpu_ring_write(ring, addr0);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4027
amdgpu_ring_write(ring, addr1);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4028
amdgpu_ring_write(ring, ref);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4029
amdgpu_ring_write(ring, mask);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4030
amdgpu_ring_write(ring, inv); /* poll interval */
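gfx_v10_0_wait_reg_mem writes a WAIT_REG_MEM header followed by a control word, two address dwords, a reference, a mask, and a poll interval; the CP then retries the masked compare until it passes. The sketch below models that compare loop in software; the equal-compare choice and the give-up policy are assumptions.

/* Hedged sketch: the compare loop a WAIT_REG_MEM packet asks the CP to run -
 * read a location, mask it, compare against a reference, retry. */
#include <stdint.h>
#include <stdio.h>

static int wait_mem(const volatile uint32_t *addr, uint32_t ref,
		    uint32_t mask, unsigned int retries)
{
	while (retries--) {
		if ((*addr & mask) == ref)
			return 0;        /* condition met                      */
		/* real hardware would wait "poll interval" cycles here        */
	}
	return -1;                       /* gave up, like an engine timeout    */
}

int main(void)
{
	uint32_t slot = 0xDEADBEEF;      /* pretend fence/writeback location    */

	printf("wait(all bits) -> %d\n", wait_mem(&slot, 0xDEADBEEF, 0xffffffff, 8));
	printf("wait(low byte) -> %d\n", wait_mem(&slot, 0x000000EF, 0x000000ff, 8));
	printf("wait(mismatch) -> %d\n", wait_mem(&slot, 0x12345678, 0xffffffff, 8));
	return 0;
}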
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4033
static int gfx_v10_0_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4035
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4042
r = amdgpu_ring_alloc(ring, 3);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4045
ring->idx, r);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4049
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_UCONFIG_REG, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4050
amdgpu_ring_write(ring, scratch -
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4052
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4053
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4071
static int gfx_v10_0_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4073
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4104
r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4653
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4657
ring = &adev->gfx.gfx_ring[ring_id];
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4659
ring->me = me;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4660
ring->pipe = pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4661
ring->queue = queue;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4663
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4664
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4667
ring->doorbell_index = adev->doorbell_index.gfx_ring0 << 1;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4669
ring->doorbell_index = adev->doorbell_index.gfx_ring1 << 1;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4670
ring->vm_hub = AMDGPU_GFXHUB(0);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4671
sprintf(ring->name, "gfx_%d.%d.%d", ring->me, ring->pipe, ring->queue);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4673
irq_type = AMDGPU_CP_IRQ_GFX_ME0_PIPE0_EOP + ring->pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4674
hw_prio = amdgpu_gfx_is_high_priority_graphics_queue(adev, ring) ?
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4676
return amdgpu_ring_init(adev, ring, 1024, &adev->gfx.eop_irq, irq_type,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4684
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4687
ring = &adev->gfx.compute_ring[ring_id];
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4690
ring->me = mec + 1;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4691
ring->pipe = pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4692
ring->queue = queue;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4694
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4695
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4696
ring->doorbell_index = (adev->doorbell_index.mec_ring0 + ring_id) << 1;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4697
ring->eop_gpu_addr = adev->gfx.mec.hpd_eop_gpu_addr
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4699
ring->vm_hub = AMDGPU_GFXHUB(0);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4700
sprintf(ring->name, "comp_%d.%d.%d", ring->me, ring->pipe, ring->queue);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4703
+ ((ring->me - 1) * adev->gfx.mec.num_pipe_per_mec)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4704
+ ring->pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4705
hw_prio = amdgpu_gfx_is_high_priority_compute_queue(adev, ring) ?
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
4708
return amdgpu_ring_init(adev, ring, 1024, &adev->gfx.eop_irq, irq_type,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
5033
amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq[0].ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6366
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6379
ring = &adev->gfx.gfx_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6380
r = amdgpu_ring_alloc(ring, gfx_v10_0_get_csb_size(adev) + 4);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6386
amdgpu_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6387
amdgpu_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6389
amdgpu_ring_write(ring, PACKET3(PACKET3_CONTEXT_CONTROL, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6390
amdgpu_ring_write(ring, 0x80000000);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6391
amdgpu_ring_write(ring, 0x80000000);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6396
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6399
amdgpu_ring_write(ring, ext->reg_index -
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6402
amdgpu_ring_write(ring, ext->extent[i]);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6409
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6410
amdgpu_ring_write(ring, ctx_reg_offset);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6411
amdgpu_ring_write(ring, adev->gfx.config.pa_sc_tile_steering_override);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6413
amdgpu_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6414
amdgpu_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6416
amdgpu_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6417
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6419
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_BASE, 2));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6420
amdgpu_ring_write(ring, PACKET3_BASE_INDEX(CE_PARTITION_BASE));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6421
amdgpu_ring_write(ring, 0x8000);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6422
amdgpu_ring_write(ring, 0x8000);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6424
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6429
ring = &adev->gfx.gfx_ring[1];
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6430
r = amdgpu_ring_alloc(ring, 2);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6436
amdgpu_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6437
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6439
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6456
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6462
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6464
DOORBELL_OFFSET, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6483
DOORBELL_RANGE_LOWER_Sienna_Cichlid, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6491
DOORBELL_RANGE_LOWER, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6502
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6518
ring = &adev->gfx.gfx_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6519
rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6528
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6529
WREG32_SOC15(GC, 0, mmCP_RB0_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6530
WREG32_SOC15(GC, 0, mmCP_RB0_WPTR_HI, upper_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6533
rptr_addr = ring->rptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6538
wptr_gpu_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6547
rb_addr = ring->gpu_addr >> 8;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6553
gfx_v10_0_cp_gfx_set_doorbell(adev, ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6561
ring = &adev->gfx.gfx_ring[1];
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6562
rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6567
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6568
WREG32_SOC15(GC, 0, mmCP_RB1_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6569
WREG32_SOC15(GC, 0, mmCP_RB1_WPTR_HI, upper_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6571
rptr_addr = ring->rptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6575
wptr_gpu_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6584
rb_addr = ring->gpu_addr >> 8;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6589
gfx_v10_0_cp_gfx_set_doorbell(adev, ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6641
adev->gfx.kiq[0].ring.sched.ready = false;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6716
static void gfx_v10_0_kiq_setting(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6719
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6733
tmp |= (ring->me << 5) | (ring->pipe << 3) | (ring->queue);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6739
tmp |= (ring->me << 5) | (ring->pipe << 3) | (ring->queue);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6846
static int gfx_v10_0_kgq_init_queue(struct amdgpu_ring *ring, bool reset)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6848
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6849
struct v10_gfx_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6850
int mqd_idx = ring - &adev->gfx.gfx_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6855
nv_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6856
amdgpu_ring_init_mqd(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6863
if (ring->doorbell_index == adev->doorbell_index.gfx_ring0 << 1)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6864
gfx_v10_0_cp_gfx_set_doorbell(adev, ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6872
nv_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6873
if (ring->doorbell_index == adev->doorbell_index.gfx_ring0 << 1)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6874
gfx_v10_0_cp_gfx_set_doorbell(adev, ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6882
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6883
atomic64_set((atomic64_t *)ring->wptr_cpu_addr, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
6884
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7023
static int gfx_v10_0_kiq_init_register(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7025
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7026
struct v10_compute_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7100
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7126
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7132
static int gfx_v10_0_kiq_init_queue(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7134
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7135
struct v10_compute_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7137
gfx_v10_0_kiq_setting(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7145
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7146
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7149
nv_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7150
gfx_v10_0_kiq_init_register(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7156
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7158
nv_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7159
amdgpu_ring_init_mqd(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7160
gfx_v10_0_kiq_init_register(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7171
static int gfx_v10_0_kcq_init_queue(struct amdgpu_ring *ring, bool restore)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7173
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7174
struct v10_compute_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7175
int mqd_idx = ring - &adev->gfx.compute_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7180
nv_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7181
amdgpu_ring_init_mqd(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7192
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7193
atomic64_set((atomic64_t *)ring->wptr_cpu_addr, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7194
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7202
gfx_v10_0_kiq_init_queue(&adev->gfx.kiq[0].ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7225
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7260
ring = &adev->gfx.gfx_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7261
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7267
ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7268
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7762
static void gfx_v10_0_ring_emit_gds_switch(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7768
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7771
gfx_v10_0_write_data_to_reg(ring, 0, false,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7776
gfx_v10_0_write_data_to_reg(ring, 0, false,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7781
gfx_v10_0_write_data_to_reg(ring, 0, false,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
7786
gfx_v10_0_write_data_to_reg(ring, 0, false,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8323
struct amdgpu_ring *ring, unsigned int vmid)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8544
static u64 gfx_v10_0_ring_get_rptr_gfx(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8547
return *(uint32_t *)ring->rptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8550
static u64 gfx_v10_0_ring_get_wptr_gfx(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8552
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8556
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8557
wptr = atomic64_read((atomic64_t *)ring->wptr_cpu_addr);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8566
static void gfx_v10_0_ring_set_wptr_gfx(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8568
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8570
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8572
atomic64_set((atomic64_t *)ring->wptr_cpu_addr,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8573
ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8574
WDOORBELL64(ring->doorbell_index, ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8577
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8579
upper_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8583
static u64 gfx_v10_0_ring_get_rptr_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8586
return *(uint32_t *)ring->rptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8589
static u64 gfx_v10_0_ring_get_wptr_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8594
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8595
wptr = atomic64_read((atomic64_t *)ring->wptr_cpu_addr);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8601
static void gfx_v10_0_ring_set_wptr_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8603
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8605
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8606
atomic64_set((atomic64_t *)ring->wptr_cpu_addr,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8607
ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8608
WDOORBELL64(ring->doorbell_index, ring->wptr);
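Both set_wptr paths above first mirror the new write pointer into the CPU-visible slot with a 64-bit atomic store and only then write the doorbell. The sketch below shows that publish order; the rationale (the engine may fetch the pointer from memory once the doorbell fires) is inferred, and the doorbell is modeled as just another variable.

/* Hedged sketch: publish order used by the set_wptr paths above - update the
 * memory copy of the write pointer first, then notify the engine. The
 * ordering rationale is an inference, not taken from the listing. */
#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

static _Atomic uint64_t wptr_shadow;   /* stands in for *wptr_cpu_addr        */
static _Atomic uint64_t doorbell;      /* stands in for WDOORBELL64(...)      */

static void set_wptr(uint64_t wptr)
{
	atomic_store(&wptr_shadow, wptr);          /* memory copy first         */
	atomic_store(&doorbell, wptr);             /* then ring the doorbell    */
}

int main(void)
{
	set_wptr(128);
	printf("shadow=%llu doorbell=%llu\n",
	       (unsigned long long)atomic_load(&wptr_shadow),
	       (unsigned long long)atomic_load(&doorbell));
	return 0;
}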
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8614
static void gfx_v10_0_ring_emit_hdp_flush(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8616
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8624
adev->gfx.funcs->get_hdp_flush_mask(ring, &ref_and_mask, &reg_mem_engine);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8625
gfx_v10_0_wait_reg_mem(ring, reg_mem_engine, 0, 1,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8631
static void gfx_v10_0_ring_emit_ib_gfx(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8646
if (ring->adev->gfx.mcbp && (ib->flags & AMDGPU_IB_FLAG_PREEMPT)) {
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8653
gfx_v10_0_ring_emit_de_meta(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8654
(!amdgpu_sriov_vf(ring->adev) && flags & AMDGPU_IB_PREEMPTED) ? true : false);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8657
amdgpu_ring_write(ring, header);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8659
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8664
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8665
amdgpu_ring_write(ring, control);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8668
static void gfx_v10_0_ring_emit_ib_compute(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8687
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8688
amdgpu_ring_write(ring, mmGDS_COMPUTE_MAX_WAVE_ID);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8689
amdgpu_ring_write(ring, ring->adev->gds.gds_compute_max_wave_id);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8692
amdgpu_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8694
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8699
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8700
amdgpu_ring_write(ring, control);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8703
static void gfx_v10_0_ring_emit_fence(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8710
amdgpu_ring_write(ring, PACKET3(PACKET3_RELEASE_MEM, 6));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8711
amdgpu_ring_write(ring, (PACKET3_RELEASE_MEM_GCR_SEQ |
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8718
amdgpu_ring_write(ring, (PACKET3_RELEASE_MEM_DATA_SEL(write64bit ? 2 : 1) |
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8729
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8730
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8731
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8732
amdgpu_ring_write(ring, upper_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8733
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8736
static void gfx_v10_0_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8738
int usepfp = (ring->funcs->type == AMDGPU_RING_TYPE_GFX);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8739
uint32_t seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8740
uint64_t addr = ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8742
gfx_v10_0_wait_reg_mem(ring, usepfp, 1, 0, lower_32_bits(addr),
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8746
static void gfx_v10_0_ring_invalidate_tlbs(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8750
amdgpu_ring_write(ring, PACKET3(PACKET3_INVALIDATE_TLBS, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8751
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8758
static void gfx_v10_0_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8761
amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8764
if (ring->funcs->type == AMDGPU_RING_TYPE_GFX) {
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8766
amdgpu_ring_write(ring, PACKET3(PACKET3_PFP_SYNC_ME, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8767
amdgpu_ring_write(ring, 0x0);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8771
static void gfx_v10_0_ring_emit_fence_kiq(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8774
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8780
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8781
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8783
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8784
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8785
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8789
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8790
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8792
amdgpu_ring_write(ring, SOC15_REG_OFFSET(GC, 0, mmCPC_INT_STATUS));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8793
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8794
amdgpu_ring_write(ring, 0x20000000); /* src_id is 178 */
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8798
static void gfx_v10_0_ring_emit_sb(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8800
amdgpu_ring_write(ring, PACKET3(PACKET3_SWITCH_BUFFER, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8801
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8804
static void gfx_v10_0_ring_emit_cntxcntl(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8809
if (ring->adev->gfx.mcbp)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8810
gfx_v10_0_ring_emit_ce_meta(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8811
(!amdgpu_sriov_vf(ring->adev) && flags & AMDGPU_IB_PREEMPTED) ? true : false);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8833
amdgpu_ring_write(ring, PACKET3(PACKET3_CONTEXT_CONTROL, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8834
amdgpu_ring_write(ring, dw2);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8835
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8838
static unsigned int gfx_v10_0_ring_emit_init_cond_exec(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8843
amdgpu_ring_write(ring, PACKET3(PACKET3_COND_EXEC, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8844
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8845
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8847
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8848
ret = ring->wptr & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8850
amdgpu_ring_write(ring, 0);
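Both the VPE and gfx cond-exec entries derive the patch offset as ring->wptr & ring->buf_mask: with a power-of-two ring size, masking the ever-growing write pointer is a cheap modulo into a slot index. A tiny sketch of that masking follows, with invented sizes.

/* Hedged sketch: why "wptr & buf_mask" yields the current ring slot when the
 * ring size is a power of two. Sizes and increments are made up. */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	const uint32_t ring_size = 1024;          /* entries, power of two      */
	const uint32_t buf_mask = ring_size - 1;  /* 0x3ff                      */
	uint64_t wptr = 0;
	int i;

	for (i = 0; i < 3; i++) {
		wptr += 600;                      /* pretend we queued 600 dwords */
		printf("wptr=%llu slot=%llu\n",
		       (unsigned long long)wptr,
		       (unsigned long long)(wptr & buf_mask));
	}
	return 0;
}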
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8855
static int gfx_v10_0_ring_preempt_ib(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8858
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8860
struct amdgpu_ring *kiq_ring = &kiq->ring;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8874
amdgpu_ring_set_preempt_cond_exec(ring, false);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8877
kiq->pmf->kiq_unmap_queues(kiq_ring, ring, PREEMPT_QUEUES_NO_UNMAP,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8878
ring->trail_fence_gpu_addr,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8879
++ring->trail_seq);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8886
if (ring->trail_seq ==
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8887
le32_to_cpu(*(ring->trail_fence_cpu_addr)))
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8894
DRM_ERROR("ring %d failed to preempt ib\n", ring->idx);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8898
amdgpu_ring_set_preempt_cond_exec(ring, true);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8902
static void gfx_v10_0_ring_emit_ce_meta(struct amdgpu_ring *ring, bool resume)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8904
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8913
ce_payload_gpu_addr = amdgpu_csa_vaddr(ring->adev) + offset;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8916
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, cnt));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8917
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(2) |
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8921
amdgpu_ring_write(ring, lower_32_bits(ce_payload_gpu_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8922
amdgpu_ring_write(ring, upper_32_bits(ce_payload_gpu_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8925
amdgpu_ring_write_multiple(ring, ce_payload_cpu_addr,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8928
amdgpu_ring_write_multiple(ring, (void *)&ce_payload,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8932
static void gfx_v10_0_ring_emit_de_meta(struct amdgpu_ring *ring, bool resume)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8934
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8941
de_payload_gpu_addr = amdgpu_csa_vaddr(ring->adev) + offset;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8944
gds_addr = ALIGN(amdgpu_csa_vaddr(ring->adev) +
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8952
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, cnt));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8953
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(1) |
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8957
amdgpu_ring_write(ring, lower_32_bits(de_payload_gpu_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8958
amdgpu_ring_write(ring, upper_32_bits(de_payload_gpu_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8961
amdgpu_ring_write_multiple(ring, de_payload_cpu_addr,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8964
amdgpu_ring_write_multiple(ring, (void *)&de_payload,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8968
static void gfx_v10_0_ring_emit_frame_cntl(struct amdgpu_ring *ring, bool start,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8973
amdgpu_ring_write(ring, PACKET3(PACKET3_FRAME_CONTROL, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8974
amdgpu_ring_write(ring, v | FRAME_CMD(start ? 0 : 1));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8977
static void gfx_v10_0_ring_emit_rreg(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8980
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8982
amdgpu_ring_write(ring, PACKET3(PACKET3_COPY_DATA, 4));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8983
amdgpu_ring_write(ring, 0 | /* src: register*/
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8986
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8987
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8988
amdgpu_ring_write(ring, lower_32_bits(adev->wb.gpu_addr +
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8990
amdgpu_ring_write(ring, upper_32_bits(adev->wb.gpu_addr +
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8994
static void gfx_v10_0_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8999
switch (ring->funcs->type) {
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9010
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9011
amdgpu_ring_write(ring, cmd);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9012
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9013
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9014
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9017
static void gfx_v10_0_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9020
gfx_v10_0_wait_reg_mem(ring, 0, 0, 0, reg, 0, val, mask, 0x20);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9023
static void gfx_v10_0_ring_emit_reg_write_reg_wait(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9027
int usepfp = (ring->funcs->type == AMDGPU_RING_TYPE_GFX);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9028
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9034
gfx_v10_0_wait_reg_mem(ring, usepfp, 0, 1, reg0, reg1,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9037
amdgpu_ring_emit_reg_write_reg_wait_helper(ring, reg0, reg1,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9184
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9202
ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9207
if ((ring->me == me_id) &&
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9208
(ring->pipe == pipe_id) &&
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9209
(ring->queue == queue_id))
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9210
amdgpu_fence_process(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9345
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9355
ring = &adev->gfx.gfx_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9356
if (ring->me == me_id && ring->pipe == pipe_id &&
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9357
ring->queue == queue_id)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9358
drm_sched_fault(&ring->sched);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9364
ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9365
if (ring->me == me_id && ring->pipe == pipe_id &&
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9366
ring->queue == queue_id)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9367
drm_sched_fault(&ring->sched);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9408
struct amdgpu_ring *ring = &(adev->gfx.kiq[0].ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9410
if (ring->me == 1)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9414
target += ring->pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9452
struct amdgpu_ring *ring = &(adev->gfx.kiq[0].ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9460
amdgpu_fence_process(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9464
static void gfx_v10_0_emit_mem_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9477
amdgpu_ring_write(ring, PACKET3(PACKET3_ACQUIRE_MEM, 6));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9478
amdgpu_ring_write(ring, 0); /* CP_COHER_CNTL */
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9479
amdgpu_ring_write(ring, 0xffffffff); /* CP_COHER_SIZE */
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9480
amdgpu_ring_write(ring, 0xffffff); /* CP_COHER_SIZE_HI */
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9481
amdgpu_ring_write(ring, 0); /* CP_COHER_BASE */
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9482
amdgpu_ring_write(ring, 0); /* CP_COHER_BASE_HI */
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9483
amdgpu_ring_write(ring, 0x0000000A); /* POLL_INTERVAL */
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9484
amdgpu_ring_write(ring, gcr_cntl); /* GCR_CNTL */
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9487
static void gfx_v10_ring_insert_nop(struct amdgpu_ring *ring, uint32_t num_nop)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9491
amdgpu_ring_write(ring, ring->funcs->nop);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9496
amdgpu_ring_write(ring, PACKET3(PACKET3_NOP, min(num_nop - 2, 0x3ffe)));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9499
amdgpu_ring_insert_nop(ring, num_nop - 1);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9502
static int gfx_v10_0_reset_kgq(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9506
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9508
struct amdgpu_ring *kiq_ring = &kiq->ring;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9517
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9526
addr = amdgpu_bo_gpu_offset(ring->mqd_obj) +
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9529
if (ring->pipe == 0)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9530
tmp = REG_SET_FIELD(tmp, CP_VMID_RESET, PIPE0_QUEUES, 1 << ring->queue);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9532
tmp = REG_SET_FIELD(tmp, CP_VMID_RESET, PIPE1_QUEUES, 1 << ring->queue);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9547
r = gfx_v10_0_kgq_init_queue(ring, true);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9559
kiq->pmf->kiq_map_queues(kiq_ring, ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9566
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9569
static int gfx_v10_0_reset_kcq(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9573
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9575
struct amdgpu_ring *kiq_ring = &kiq->ring;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9582
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9591
kiq->pmf->kiq_unmap_queues(kiq_ring, ring, RESET_QUEUES,
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9602
nv_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9618
r = gfx_v10_0_kcq_init_queue(ring, true);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9629
kiq->pmf->kiq_map_queues(kiq_ring, ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9636
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9779
static void gfx_v10_0_ring_emit_cleaner_shader(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9782
amdgpu_ring_write(ring, PACKET3(PACKET3_RUN_CLEANER_SHADER, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9783
amdgpu_ring_write(ring, 0); /* RESERVED field, programmed to zero */
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9786
static void gfx_v10_0_ring_begin_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9788
amdgpu_gfx_profile_ring_begin_use(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9790
amdgpu_gfx_enforce_isolation_ring_begin_use(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9793
static void gfx_v10_0_ring_end_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9795
amdgpu_gfx_profile_ring_end_use(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9797
amdgpu_gfx_enforce_isolation_ring_end_use(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
9952
adev->gfx.kiq[0].ring.funcs = &gfx_v10_0_ring_funcs_kiq;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1138
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1142
ring = &adev->gfx.gfx_ring[ring_id];
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1144
ring->me = me;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1145
ring->pipe = pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1146
ring->queue = queue;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1148
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1149
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1151
ring->no_scheduler = true;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1152
ring->no_user_submission = true;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1156
ring->doorbell_index = adev->doorbell_index.gfx_ring0 << 1;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1158
ring->doorbell_index = adev->doorbell_index.gfx_ring1 << 1;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1159
ring->vm_hub = AMDGPU_GFXHUB(0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1160
sprintf(ring->name, "gfx_%d.%d.%d", ring->me, ring->pipe, ring->queue);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1162
irq_type = AMDGPU_CP_IRQ_GFX_ME0_PIPE0_EOP + ring->pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1163
hw_prio = amdgpu_gfx_is_high_priority_graphics_queue(adev, ring) ?
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1165
return amdgpu_ring_init(adev, ring, 1024, &adev->gfx.eop_irq, irq_type,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1174
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1177
ring = &adev->gfx.compute_ring[ring_id];
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1180
ring->me = mec + 1;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1181
ring->pipe = pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1182
ring->queue = queue;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1184
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1185
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1186
ring->doorbell_index = (adev->doorbell_index.mec_ring0 + ring_id) << 1;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1187
ring->eop_gpu_addr = adev->gfx.mec.hpd_eop_gpu_addr
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1189
ring->vm_hub = AMDGPU_GFXHUB(0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1190
sprintf(ring->name, "comp_%d.%d.%d", ring->me, ring->pipe, ring->queue);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1193
+ ((ring->me - 1) * adev->gfx.mec.num_pipe_per_mec)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1194
+ ring->pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1195
hw_prio = amdgpu_gfx_is_high_priority_compute_queue(adev, ring) ?
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1198
r = amdgpu_ring_init(adev, ring, 1024, &adev->gfx.eop_irq, irq_type,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
1934
amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq[0].ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
338
static void gfx_v11_0_ring_emit_de_meta(struct amdgpu_ring *ring, bool resume);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
339
static void gfx_v11_0_ring_emit_frame_cntl(struct amdgpu_ring *ring, bool start, bool secure);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
340
static void gfx_v11_0_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
343
static void gfx_v11_0_ring_invalidate_tlbs(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3616
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3630
ring = &adev->gfx.gfx_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3631
r = amdgpu_ring_alloc(ring, gfx_v11_0_get_csb_size(adev));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3637
amdgpu_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3638
amdgpu_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3640
amdgpu_ring_write(ring, PACKET3(PACKET3_CONTEXT_CONTROL, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3641
amdgpu_ring_write(ring, 0x80000000);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3642
amdgpu_ring_write(ring, 0x80000000);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3647
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3650
amdgpu_ring_write(ring, ext->reg_index -
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3653
amdgpu_ring_write(ring, ext->extent[i]);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3660
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3661
amdgpu_ring_write(ring, ctx_reg_offset);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3662
amdgpu_ring_write(ring, adev->gfx.config.pa_sc_tile_steering_override);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3664
amdgpu_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3665
amdgpu_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3667
amdgpu_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3668
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3670
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3675
ring = &adev->gfx.gfx_ring[1];
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3676
r = amdgpu_ring_alloc(ring, 2);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3682
amdgpu_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3683
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3685
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3702
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3707
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3709
DOORBELL_OFFSET, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3719
DOORBELL_RANGE_LOWER, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
372
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3728
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
374
uint64_t mqd_addr = amdgpu_bo_gpu_offset(ring->mqd_obj);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3744
ring = &adev->gfx.gfx_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3745
rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
375
uint64_t wptr_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3751
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3752
WREG32_SOC15(GC, 0, regCP_RB0_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3753
WREG32_SOC15(GC, 0, regCP_RB0_WPTR_HI, upper_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3756
rptr_addr = ring->rptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3761
wptr_gpu_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3770
rb_addr = ring->gpu_addr >> 8;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3776
gfx_v11_0_cp_gfx_set_doorbell(adev, ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
378
switch (ring->funcs->type) {
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3784
ring = &adev->gfx.gfx_ring[1];
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3785
rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3790
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3791
WREG32_SOC15(GC, 0, regCP_RB1_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3792
WREG32_SOC15(GC, 0, regCP_RB1_WPTR_HI, upper_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3794
rptr_addr = ring->rptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3798
wptr_gpu_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3807
rb_addr = ring->gpu_addr >> 8;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
3812
gfx_v11_0_cp_gfx_set_doorbell(adev, ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
400
PACKET3_MAP_QUEUES_QUEUE(ring->queue) |
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
401
PACKET3_MAP_QUEUES_PIPE(ring->pipe) |
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4054
static void gfx_v11_0_kiq_setting(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4057
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4062
tmp |= (ring->me << 5) | (ring->pipe << 3) | (ring->queue);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
407
amdgpu_ring_write(kiq_ring, PACKET3_MAP_QUEUES_DOORBELL_OFFSET(ring->doorbell_index));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
415
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4196
static int gfx_v11_0_kgq_init_queue(struct amdgpu_ring *ring, bool reset)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4198
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4199
struct v11_gfx_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
420
uint32_t eng_sel = ring->funcs->type == AMDGPU_RING_TYPE_GFX ? 4 : 0;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4200
int mqd_idx = ring - &adev->gfx.gfx_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4205
soc21_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4206
amdgpu_ring_init_mqd(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4216
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4217
atomic64_set((atomic64_t *)ring->wptr_cpu_addr, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4218
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
422
if (adev->enable_mes && !adev->gfx.kiq[0].ring.sched.ready) {
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
423
amdgpu_mes_unmap_legacy_queue(adev, ring, action,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
435
PACKET3_UNMAP_QUEUES_DOORBELL_OFFSET0(ring->doorbell_index));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4379
static int gfx_v11_0_kiq_init_register(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4381
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4382
struct v11_compute_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4457
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4483
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4489
static int gfx_v11_0_kiq_init_queue(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
449
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4491
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4492
struct v11_compute_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4494
gfx_v11_0_kiq_setting(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4502
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4503
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4506
soc21_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4507
gfx_v11_0_kiq_init_register(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4513
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4515
soc21_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4516
amdgpu_ring_init_mqd(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4517
gfx_v11_0_kiq_init_register(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4528
static int gfx_v11_0_kcq_init_queue(struct amdgpu_ring *ring, bool reset)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
453
uint32_t eng_sel = ring->funcs->type == AMDGPU_RING_TYPE_GFX ? 4 : 0;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4530
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4531
struct v11_compute_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4532
int mqd_idx = ring - &adev->gfx.compute_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4537
soc21_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4538
amdgpu_ring_init_mqd(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4549
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4550
atomic64_set((atomic64_t *)ring->wptr_cpu_addr, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4551
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4559
gfx_v11_0_kiq_init_queue(&adev->gfx.kiq[0].ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4582
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
461
PACKET3_QUERY_STATUS_DOORBELL_OFFSET(ring->doorbell_index) |
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4631
ring = &adev->gfx.gfx_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4633
r = amdgpu_ring_test_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4641
ring = &adev->gfx.gfx_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4642
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4649
ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
4650
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5144
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5148
ring = &adev->gfx.gfx_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5149
r = amdgpu_ring_test_ib(ring, tmo);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
515
static void gfx_v11_0_write_data_to_reg(struct amdgpu_ring *ring, int eng_sel,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5155
ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5156
r = amdgpu_ring_test_ib(ring, tmo);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
518
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
519
amdgpu_ring_write(ring, WRITE_DATA_ENGINE_SEL(eng_sel) |
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5202
static void gfx_v11_0_ring_emit_gds_switch(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5208
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
521
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5211
gfx_v11_0_write_data_to_reg(ring, 0, false,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5216
gfx_v11_0_write_data_to_reg(ring, 0, false,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
522
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5221
gfx_v11_0_write_data_to_reg(ring, 0, false,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5226
gfx_v11_0_write_data_to_reg(ring, 0, false,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
523
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
526
static void gfx_v11_0_wait_reg_mem(struct amdgpu_ring *ring, int eng_sel,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
531
amdgpu_ring_write(ring, PACKET3(PACKET3_WAIT_REG_MEM, 5));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
532
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
541
amdgpu_ring_write(ring, addr0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
542
amdgpu_ring_write(ring, addr1);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
543
amdgpu_ring_write(ring, ref);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
544
amdgpu_ring_write(ring, mask);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
545
amdgpu_ring_write(ring, inv); /* poll interval */
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
548
static void gfx_v11_ring_insert_nop(struct amdgpu_ring *ring, uint32_t num_nop)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
552
amdgpu_ring_write(ring, ring->funcs->nop);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
557
amdgpu_ring_write(ring, PACKET3(PACKET3_NOP, min(num_nop - 2, 0x3ffe)));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5585
struct amdgpu_ring *ring, unsigned vmid)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
560
amdgpu_ring_insert_nop(ring, num_nop - 1);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5607
if (ring
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5610
&& ((ring->funcs->type == AMDGPU_RING_TYPE_GFX)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5611
|| (ring->funcs->type == AMDGPU_RING_TYPE_COMPUTE))) {
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5612
amdgpu_ring_emit_wreg(ring, reg, data);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
563
static int gfx_v11_0_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
565
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
572
r = amdgpu_ring_alloc(ring, 5);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
575
ring->idx, r);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5776
static u64 gfx_v11_0_ring_get_rptr_gfx(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5779
return *(uint32_t *)ring->rptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5782
static u64 gfx_v11_0_ring_get_wptr_gfx(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5784
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5788
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5789
wptr = atomic64_read((atomic64_t *)ring->wptr_cpu_addr);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
579
if (ring->funcs->type == AMDGPU_RING_TYPE_KIQ) {
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5798
static void gfx_v11_0_ring_set_wptr_gfx(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
580
gfx_v11_0_ring_emit_wreg(ring, scratch, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5800
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5802
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5804
atomic64_set((atomic64_t *)ring->wptr_cpu_addr,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5805
ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5806
WDOORBELL64(ring->doorbell_index, ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5809
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5811
upper_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5815
static u64 gfx_v11_0_ring_get_rptr_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5818
return *(uint32_t *)ring->rptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
582
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_UCONFIG_REG, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5821
static u64 gfx_v11_0_ring_get_wptr_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5826
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5827
wptr = atomic64_read((atomic64_t *)ring->wptr_cpu_addr);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
583
amdgpu_ring_write(ring, scratch -
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5833
static void gfx_v11_0_ring_set_wptr_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5835
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5838
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5839
atomic64_set((atomic64_t *)ring->wptr_cpu_addr,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5840
ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5841
WDOORBELL64(ring->doorbell_index, ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5847
static void gfx_v11_0_ring_emit_hdp_flush(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5849
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
585
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5857
adev->gfx.funcs->get_hdp_flush_mask(ring, &ref_and_mask, &reg_mem_engine);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5858
gfx_v11_0_wait_reg_mem(ring, reg_mem_engine, 0, 1,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5864
static void gfx_v11_0_ring_emit_ib_gfx(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
587
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5876
if (ring->adev->gfx.mcbp && (ib->flags & AMDGPU_IB_FLAG_PREEMPT)) {
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5882
if (vmid && !ring->adev->gfx.rs64_enable)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5883
gfx_v11_0_ring_emit_de_meta(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5884
!amdgpu_sriov_vf(ring->adev) && (flags & AMDGPU_IB_PREEMPTED));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5887
amdgpu_ring_write(ring, header);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5889
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5894
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5895
amdgpu_ring_write(ring, control);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5898
static void gfx_v11_0_ring_emit_ib_compute(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5917
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5918
amdgpu_ring_write(ring, regGDS_COMPUTE_MAX_WAVE_ID);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5919
amdgpu_ring_write(ring, ring->adev->gds.gds_compute_max_wave_id);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5922
amdgpu_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5924
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5929
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5930
amdgpu_ring_write(ring, control);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5933
static void gfx_v11_0_ring_emit_fence(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5940
amdgpu_ring_write(ring, PACKET3(PACKET3_RELEASE_MEM, 6));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5941
amdgpu_ring_write(ring, (PACKET3_RELEASE_MEM_GCR_SEQ |
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5948
amdgpu_ring_write(ring, (PACKET3_RELEASE_MEM_DATA_SEL(write64bit ? 2 : 1) |
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5959
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5960
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5961
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5962
amdgpu_ring_write(ring, upper_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5963
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5966
static void gfx_v11_0_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5968
int usepfp = (ring->funcs->type == AMDGPU_RING_TYPE_GFX);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5969
uint32_t seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5970
uint64_t addr = ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5972
gfx_v11_0_wait_reg_mem(ring, usepfp, 1, 0, lower_32_bits(addr),
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5976
static void gfx_v11_0_ring_invalidate_tlbs(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5980
amdgpu_ring_write(ring, PACKET3(PACKET3_INVALIDATE_TLBS, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5981
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5988
static void gfx_v11_0_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5991
amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5994
if (ring->funcs->type == AMDGPU_RING_TYPE_GFX) {
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5996
amdgpu_ring_write(ring, PACKET3(PACKET3_PFP_SYNC_ME, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
5997
amdgpu_ring_write(ring, 0x0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6003
ring->set_q_mode_offs = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6004
ring->set_q_mode_ptr = NULL;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6007
static void gfx_v11_0_ring_emit_fence_kiq(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6010
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6016
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6017
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6019
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6020
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6021
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6025
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6026
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6028
amdgpu_ring_write(ring, SOC15_REG_OFFSET(GC, 0, regCPC_INT_STATUS));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6029
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6030
amdgpu_ring_write(ring, 0x20000000); /* src_id is 178 */
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6034
static void gfx_v11_0_ring_emit_cntxcntl(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
604
static int gfx_v11_0_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6049
amdgpu_ring_write(ring, PACKET3(PACKET3_CONTEXT_CONTROL, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6050
amdgpu_ring_write(ring, dw2);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6051
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6054
static unsigned gfx_v11_0_ring_emit_init_cond_exec(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6059
amdgpu_ring_write(ring, PACKET3(PACKET3_COND_EXEC, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
606
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6060
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6061
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6063
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6064
ret = ring->wptr & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6066
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6071
static void gfx_v11_0_ring_emit_gfx_shadow(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6076
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6079
if (!adev->gfx.cp_gfx_shadow || !ring->ring_obj)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6094
amdgpu_ring_write(ring, PACKET3(PACKET3_NOP, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6095
offs = ring->wptr & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6102
amdgpu_ring_write(ring, shadow_va ? 1 : 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6103
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6105
if (ring->set_q_mode_offs) {
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6108
addr = amdgpu_bo_gpu_offset(ring->ring_obj);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6109
addr += ring->set_q_mode_offs << 2;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6110
end = gfx_v11_0_ring_emit_init_cond_exec(ring, addr);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6120
addr = amdgpu_bo_gpu_offset(ring->ring_obj);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6122
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6123
amdgpu_ring_write(ring, WRITE_DATA_DST_SEL(5) | WR_CONFIRM);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6124
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6125
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6126
amdgpu_ring_write(ring, 0x1);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6129
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_Q_PREEMPTION_MODE, 7));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6130
amdgpu_ring_write(ring, lower_32_bits(shadow_va));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6131
amdgpu_ring_write(ring, upper_32_bits(shadow_va));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6132
amdgpu_ring_write(ring, lower_32_bits(gds_va));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6133
amdgpu_ring_write(ring, upper_32_bits(gds_va));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6134
amdgpu_ring_write(ring, lower_32_bits(csa_va));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6135
amdgpu_ring_write(ring, upper_32_bits(csa_va));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6136
amdgpu_ring_write(ring, shadow_va ?
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6138
amdgpu_ring_write(ring, init_shadow ?
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6141
if (ring->set_q_mode_offs)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6142
amdgpu_ring_patch_cond_exec(ring, end);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6151
if (ring->set_q_mode_ptr && ring->set_q_mode_token == token)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6152
*ring->set_q_mode_ptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6154
ring->set_q_mode_token = token;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6156
ring->set_q_mode_ptr = &ring->ring[ring->set_q_mode_offs];
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6159
ring->set_q_mode_offs = offs;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
616
ring->funcs->type == AMDGPU_RING_TYPE_KIQ)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6162
static int gfx_v11_0_ring_preempt_ib(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6165
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6167
struct amdgpu_ring *kiq_ring = &kiq->ring;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6184
amdgpu_ring_set_preempt_cond_exec(ring, false);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6187
kiq->pmf->kiq_unmap_queues(kiq_ring, ring, PREEMPT_QUEUES_NO_UNMAP,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6188
ring->trail_fence_gpu_addr,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6189
++ring->trail_seq);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6196
if (ring->trail_seq ==
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6197
le32_to_cpu(*(ring->trail_fence_cpu_addr)))
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6204
DRM_ERROR("ring %d failed to preempt ib\n", ring->idx);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6208
amdgpu_ring_set_preempt_cond_exec(ring, true);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6212
static void gfx_v11_0_ring_emit_de_meta(struct amdgpu_ring *ring, bool resume)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6214
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6221
de_payload_gpu_addr = amdgpu_csa_vaddr(ring->adev) + offset;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6224
gds_addr = ALIGN(amdgpu_csa_vaddr(ring->adev) +
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6232
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, cnt));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6233
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(1) |
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6237
amdgpu_ring_write(ring, lower_32_bits(de_payload_gpu_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6238
amdgpu_ring_write(ring, upper_32_bits(de_payload_gpu_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6241
amdgpu_ring_write_multiple(ring, de_payload_cpu_addr,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6244
amdgpu_ring_write_multiple(ring, (void *)&de_payload,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6248
static void gfx_v11_0_ring_emit_frame_cntl(struct amdgpu_ring *ring, bool start,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6253
amdgpu_ring_write(ring, PACKET3(PACKET3_FRAME_CONTROL, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6254
amdgpu_ring_write(ring, v | FRAME_CMD(start ? 0 : 1));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6257
static void gfx_v11_0_ring_emit_rreg(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6260
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6262
amdgpu_ring_write(ring, PACKET3(PACKET3_COPY_DATA, 4));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6263
amdgpu_ring_write(ring, 0 | /* src: register*/
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6266
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6267
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6268
amdgpu_ring_write(ring, lower_32_bits(adev->wb.gpu_addr +
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6270
amdgpu_ring_write(ring, upper_32_bits(adev->wb.gpu_addr +
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6274
static void gfx_v11_0_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6279
switch (ring->funcs->type) {
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6290
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6291
amdgpu_ring_write(ring, cmd);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6292
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6293
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6294
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6297
static void gfx_v11_0_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6300
gfx_v11_0_wait_reg_mem(ring, 0, 0, 0, reg, 0, val, mask, 0x20);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6303
static void gfx_v11_0_ring_emit_reg_write_reg_wait(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6307
int usepfp = (ring->funcs->type == AMDGPU_RING_TYPE_GFX);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6309
gfx_v11_0_wait_reg_mem(ring, usepfp, 0, 1, reg0, reg1,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
642
r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6452
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6482
ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6487
if ((ring->me == me_id) &&
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6488
(ring->pipe == pipe_id) &&
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6489
(ring->queue == queue_id))
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6490
amdgpu_fence_process(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6626
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6637
ring = &adev->gfx.gfx_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6638
if (ring->me == me_id && ring->pipe == pipe_id &&
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6639
ring->queue == queue_id)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6640
drm_sched_fault(&ring->sched);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6646
ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6647
if (ring->me == me_id && ring->pipe == pipe_id &&
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6648
ring->queue == queue_id)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6649
drm_sched_fault(&ring->sched);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6703
struct amdgpu_ring *ring = &(adev->gfx.kiq[0].ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6706
target += ring->pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6740
static void gfx_v11_0_emit_mem_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6753
amdgpu_ring_write(ring, PACKET3(PACKET3_ACQUIRE_MEM, 6));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6754
amdgpu_ring_write(ring, 0); /* CP_COHER_CNTL */
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6755
amdgpu_ring_write(ring, 0xffffffff); /* CP_COHER_SIZE */
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6756
amdgpu_ring_write(ring, 0xffffff); /* CP_COHER_SIZE_HI */
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6757
amdgpu_ring_write(ring, 0); /* CP_COHER_BASE */
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6758
amdgpu_ring_write(ring, 0); /* CP_COHER_BASE_HI */
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6759
amdgpu_ring_write(ring, 0x0000000A); /* POLL_INTERVAL */
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6760
amdgpu_ring_write(ring, gcr_cntl); /* GCR_CNTL */
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6771
static int gfx_v11_reset_gfx_pipe(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6773
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6782
soc21_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6784
switch (ring->pipe) {
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6818
dev_info(adev->dev, "The ring %s pipe reset to the ME firmware start PC: %s\n", ring->name,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6826
static int gfx_v11_0_reset_kgq(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6830
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6834
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6836
r = amdgpu_mes_reset_legacy_queue(ring->adev, ring, vmid, use_mmio, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6840
r = gfx_v11_reset_gfx_pipe(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6846
r = gfx_v11_0_kgq_init_queue(ring, true);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6852
r = amdgpu_mes_map_legacy_queue(adev, ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6859
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6862
static int gfx_v11_0_reset_compute_pipe(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6865
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6874
soc21_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6881
switch (ring->pipe) {
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6914
if (ring->me == 1) {
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6915
switch (ring->pipe) {
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6945
switch (ring->pipe) {
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6984
dev_info(adev->dev, "The ring %s pipe resets to MEC FW start PC: %s\n", ring->name,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6992
static int gfx_v11_0_reset_kcq(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6996
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
6999
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
7001
r = amdgpu_mes_reset_legacy_queue(ring->adev, ring, vmid, true, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
7004
r = gfx_v11_0_reset_compute_pipe(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
7009
r = gfx_v11_0_kcq_init_queue(ring, true);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
7014
r = amdgpu_mes_map_legacy_queue(adev, ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
7020
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
7165
static void gfx_v11_0_ring_emit_cleaner_shader(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
7168
amdgpu_ring_write(ring, PACKET3(PACKET3_RUN_CLEANER_SHADER, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
7169
amdgpu_ring_write(ring, 0); /* RESERVED field, programmed to zero */
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
7172
static void gfx_v11_0_ring_begin_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
7174
amdgpu_gfx_profile_ring_begin_use(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
7176
amdgpu_gfx_enforce_isolation_ring_begin_use(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
7179
static void gfx_v11_0_ring_end_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
7181
amdgpu_gfx_profile_ring_end_use(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
7183
amdgpu_gfx_enforce_isolation_ring_end_use(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
7338
adev->gfx.kiq[0].ring.funcs = &gfx_v11_0_ring_funcs_kiq;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
1003
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
1006
ring = &adev->gfx.compute_ring[ring_id];
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
1009
ring->me = mec + 1;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
1010
ring->pipe = pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
1011
ring->queue = queue;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
1013
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
1014
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
1015
ring->doorbell_index = (adev->doorbell_index.mec_ring0 + ring_id) << 1;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
1016
ring->eop_gpu_addr = adev->gfx.mec.hpd_eop_gpu_addr
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
1018
ring->vm_hub = AMDGPU_GFXHUB(0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
1019
sprintf(ring->name, "comp_%d.%d.%d", ring->me, ring->pipe, ring->queue);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
1022
+ ((ring->me - 1) * adev->gfx.mec.num_pipe_per_mec)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
1023
+ ring->pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
1024
hw_prio = amdgpu_gfx_is_high_priority_compute_queue(adev, ring) ?
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
1027
r = amdgpu_ring_init(adev, ring, 1024, &adev->gfx.eop_irq, irq_type,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
1644
amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq[0].ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
2691
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
2696
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
2698
DOORBELL_OFFSET, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
2708
DOORBELL_RANGE_LOWER, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
2717
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
2733
ring = &adev->gfx.gfx_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
2734
rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
2740
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
2741
WREG32_SOC15(GC, 0, regCP_RB0_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
2742
WREG32_SOC15(GC, 0, regCP_RB0_WPTR_HI, upper_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
2745
rptr_addr = ring->rptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
2750
wptr_gpu_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
2759
rb_addr = ring->gpu_addr >> 8;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
2765
gfx_v12_0_cp_gfx_set_doorbell(adev, ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
2805
adev->gfx.kiq[0].ring.sched.ready = enable;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
281
static void gfx_v12_0_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
284
static void gfx_v12_0_ring_invalidate_tlbs(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
2942
static void gfx_v12_0_kiq_setting(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
2945
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
2950
tmp |= (ring->me << 5) | (ring->pipe << 3) | (ring->queue);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3067
static int gfx_v12_0_kgq_init_queue(struct amdgpu_ring *ring, bool reset)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3069
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
307
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3070
struct v12_gfx_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3071
int mqd_idx = ring - &adev->gfx.gfx_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3076
soc24_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3077
amdgpu_ring_init_mqd(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3087
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3088
atomic64_set((atomic64_t *)ring->wptr_cpu_addr, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3089
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
309
uint64_t mqd_addr = amdgpu_bo_gpu_offset(ring->mqd_obj);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
310
uint64_t wptr_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
313
switch (ring->funcs->type) {
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3249
static int gfx_v12_0_kiq_init_register(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3251
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3252
struct v12_compute_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3327
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
335
PACKET3_MAP_QUEUES_QUEUE(ring->queue) |
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3353
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3359
static int gfx_v12_0_kiq_init_queue(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
336
PACKET3_MAP_QUEUES_PIPE(ring->pipe) |
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3361
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3362
struct v12_compute_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3365
gfx_v12_0_kiq_setting(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3373
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3374
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3377
soc24_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3378
gfx_v12_0_kiq_init_register(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3384
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3386
soc24_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3387
amdgpu_ring_init_mqd(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3388
gfx_v12_0_kiq_init_register(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3399
static int gfx_v12_0_kcq_init_queue(struct amdgpu_ring *ring, bool reset)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3401
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3402
struct v12_compute_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3403
int mqd_idx = ring - &adev->gfx.compute_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3408
soc24_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3409
amdgpu_ring_init_mqd(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
342
amdgpu_ring_write(kiq_ring, PACKET3_MAP_QUEUES_DOORBELL_OFFSET(ring->doorbell_index));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3420
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3421
atomic64_set((atomic64_t *)ring->wptr_cpu_addr, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3422
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3430
gfx_v12_0_kiq_init_queue(&adev->gfx.kiq[0].ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3431
adev->gfx.kiq[0].ring.sched.ready = true;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3454
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3499
ring = &adev->gfx.gfx_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
350
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3500
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3506
ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3507
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
355
uint32_t eng_sel = ring->funcs->type == AMDGPU_RING_TYPE_GFX ? 4 : 0;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
357
if (adev->enable_mes && !adev->gfx.kiq[0].ring.sched.ready) {
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
358
amdgpu_mes_unmap_legacy_queue(adev, ring, action,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
370
PACKET3_UNMAP_QUEUES_DOORBELL_OFFSET0(ring->doorbell_index));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
384
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
387
uint32_t eng_sel = ring->funcs->type == AMDGPU_RING_TYPE_GFX ? 4 : 0;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
395
PACKET3_QUERY_STATUS_DOORBELL_OFFSET(ring->doorbell_index) |
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3965
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3984
if (ring
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3986
&& ((ring->funcs->type == AMDGPU_RING_TYPE_GFX)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3987
|| (ring->funcs->type == AMDGPU_RING_TYPE_COMPUTE))) {
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
3989
amdgpu_ring_emit_wreg(ring, reg, data);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
429
static void gfx_v12_0_wait_reg_mem(struct amdgpu_ring *ring, int eng_sel,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4321
static u64 gfx_v12_0_ring_get_rptr_gfx(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4324
return *(uint32_t *)ring->rptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4327
static u64 gfx_v12_0_ring_get_wptr_gfx(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4329
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4333
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4334
wptr = atomic64_read((atomic64_t *)ring->wptr_cpu_addr);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
434
amdgpu_ring_write(ring, PACKET3(PACKET3_WAIT_REG_MEM, 5));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4343
static void gfx_v12_0_ring_set_wptr_gfx(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4345
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4347
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4349
atomic64_set((atomic64_t *)ring->wptr_cpu_addr,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
435
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4350
ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4351
WDOORBELL64(ring->doorbell_index, ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4354
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4356
upper_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4360
static u64 gfx_v12_0_ring_get_rptr_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4363
return *(uint32_t *)ring->rptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4366
static u64 gfx_v12_0_ring_get_wptr_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4371
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4372
wptr = atomic64_read((atomic64_t *)ring->wptr_cpu_addr);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4378
static void gfx_v12_0_ring_set_wptr_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4380
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4383
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4384
atomic64_set((atomic64_t *)ring->wptr_cpu_addr,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4385
ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4386
WDOORBELL64(ring->doorbell_index, ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4392
static void gfx_v12_0_ring_emit_hdp_flush(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4394
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4402
adev->gfx.funcs->get_hdp_flush_mask(ring, &ref_and_mask, &reg_mem_engine);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4403
gfx_v12_0_wait_reg_mem(ring, reg_mem_engine, 0, 1,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4409
static void gfx_v12_0_ring_emit_ib_gfx(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4421
amdgpu_ring_write(ring, header);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4423
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4428
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4429
amdgpu_ring_write(ring, control);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4432
static void gfx_v12_0_ring_emit_ib_compute(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
444
amdgpu_ring_write(ring, addr0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4440
amdgpu_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4442
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4447
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4448
amdgpu_ring_write(ring, control);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
445
amdgpu_ring_write(ring, addr1);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4451
static void gfx_v12_0_ring_emit_fence(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4458
amdgpu_ring_write(ring, PACKET3(PACKET3_RELEASE_MEM, 6));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4459
amdgpu_ring_write(ring, (PACKET3_RELEASE_MEM_GCR_SEQ |
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
446
amdgpu_ring_write(ring, ref);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4464
amdgpu_ring_write(ring, (PACKET3_RELEASE_MEM_DATA_SEL(write64bit ? 2 : 1) |
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
447
amdgpu_ring_write(ring, mask);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4475
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4476
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4477
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4478
amdgpu_ring_write(ring, upper_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4479
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
448
amdgpu_ring_write(ring, inv); /* poll interval */
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4482
static void gfx_v12_0_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4484
int usepfp = (ring->funcs->type == AMDGPU_RING_TYPE_GFX);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4485
uint32_t seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4486
uint64_t addr = ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4488
gfx_v12_0_wait_reg_mem(ring, usepfp, 1, 0, lower_32_bits(addr),
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4492
static void gfx_v12_0_ring_invalidate_tlbs(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4496
amdgpu_ring_write(ring, PACKET3(PACKET3_INVALIDATE_TLBS, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4497
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4504
static void gfx_v12_0_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4507
amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
451
static int gfx_v12_0_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4510
if (ring->funcs->type == AMDGPU_RING_TYPE_GFX) {
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4512
amdgpu_ring_write(ring, PACKET3(PACKET3_PFP_SYNC_ME, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4513
amdgpu_ring_write(ring, 0x0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4517
static void gfx_v12_0_ring_emit_fence_kiq(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4520
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4526
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4527
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4529
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
453
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4530
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4531
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4535
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4536
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4538
amdgpu_ring_write(ring, SOC15_REG_OFFSET(GC, 0, regCPC_INT_STATUS));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4539
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4540
amdgpu_ring_write(ring, 0x20000000); /* src_id is 178 */
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4544
static void gfx_v12_0_ring_emit_cntxcntl(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4559
amdgpu_ring_write(ring, PACKET3(PACKET3_CONTEXT_CONTROL, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4560
amdgpu_ring_write(ring, dw2);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4561
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4564
static unsigned gfx_v12_0_ring_emit_init_cond_exec(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4569
amdgpu_ring_write(ring, PACKET3(PACKET3_COND_EXEC, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4570
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4571
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4573
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4574
ret = ring->wptr & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4576
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4581
static int gfx_v12_0_ring_preempt_ib(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4584
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4586
struct amdgpu_ring *kiq_ring = &kiq->ring;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
460
r = amdgpu_ring_alloc(ring, 5);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4603
amdgpu_ring_set_preempt_cond_exec(ring, false);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4606
kiq->pmf->kiq_unmap_queues(kiq_ring, ring, PREEMPT_QUEUES_NO_UNMAP,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4607
ring->trail_fence_gpu_addr,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4608
++ring->trail_seq);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4615
if (ring->trail_seq ==
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4616
le32_to_cpu(*(ring->trail_fence_cpu_addr)))
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4623
DRM_ERROR("ring %d failed to preempt ib\n", ring->idx);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4627
amdgpu_ring_set_preempt_cond_exec(ring, true);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4631
static void gfx_v12_0_ring_emit_rreg(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4634
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4636
amdgpu_ring_write(ring, PACKET3(PACKET3_COPY_DATA, 4));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4637
amdgpu_ring_write(ring, 0 | /* src: register*/
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
464
ring->idx, r);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4640
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4641
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4642
amdgpu_ring_write(ring, lower_32_bits(adev->wb.gpu_addr +
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4644
amdgpu_ring_write(ring, upper_32_bits(adev->wb.gpu_addr +
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4648
static void gfx_v12_0_ring_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4654
switch (ring->funcs->type) {
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4665
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4666
amdgpu_ring_write(ring, cmd);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4667
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4668
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4669
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4672
static void gfx_v12_0_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4675
gfx_v12_0_wait_reg_mem(ring, 0, 0, 0, reg, 0, val, mask, 0x20);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4678
static void gfx_v12_0_ring_emit_reg_write_reg_wait(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
468
if (ring->funcs->type == AMDGPU_RING_TYPE_KIQ) {
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4682
int usepfp = (ring->funcs->type == AMDGPU_RING_TYPE_GFX);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4684
gfx_v12_0_wait_reg_mem(ring, usepfp, 0, 1, reg0, reg1,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
469
gfx_v12_0_ring_emit_wreg(ring, scratch, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
471
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_UCONFIG_REG, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
472
amdgpu_ring_write(ring, scratch -
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
474
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
476
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4818
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4848
ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4853
if ((ring->me == me_id) &&
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4854
(ring->pipe == pipe_id) &&
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4855
(ring->queue == queue_id))
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4856
amdgpu_fence_process(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
493
static int gfx_v12_0_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
495
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
4992
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5003
ring = &adev->gfx.gfx_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5004
if (ring->me == me_id && ring->pipe == pipe_id &&
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5005
ring->queue == queue_id)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5006
drm_sched_fault(&ring->sched);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5012
ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5013
if (ring->me == me_id && ring->pipe == pipe_id &&
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5014
ring->queue == queue_id)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5015
drm_sched_fault(&ring->sched);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
505
ring->funcs->type == AMDGPU_RING_TYPE_KIQ)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5052
static void gfx_v12_0_emit_mem_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5065
amdgpu_ring_write(ring, PACKET3(PACKET3_ACQUIRE_MEM, 6));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5066
amdgpu_ring_write(ring, 0); /* CP_COHER_CNTL */
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5067
amdgpu_ring_write(ring, 0xffffffff); /* CP_COHER_SIZE */
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5068
amdgpu_ring_write(ring, 0xffffff); /* CP_COHER_SIZE_HI */
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5069
amdgpu_ring_write(ring, 0); /* CP_COHER_BASE */
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5070
amdgpu_ring_write(ring, 0); /* CP_COHER_BASE_HI */
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5071
amdgpu_ring_write(ring, 0x0000000A); /* POLL_INTERVAL */
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5072
amdgpu_ring_write(ring, gcr_cntl); /* GCR_CNTL */
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5075
static void gfx_v12_ring_insert_nop(struct amdgpu_ring *ring, uint32_t num_nop)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5079
amdgpu_ring_write(ring, ring->funcs->nop);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5084
amdgpu_ring_write(ring, PACKET3(PACKET3_NOP, min(num_nop - 2, 0x3ffe)));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5087
amdgpu_ring_insert_nop(ring, num_nop - 1);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5090
static void gfx_v12_0_ring_emit_cleaner_shader(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5093
amdgpu_ring_write(ring, PACKET3(PACKET3_RUN_CLEANER_SHADER, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5094
amdgpu_ring_write(ring, 0); /* RESERVED field, programmed to zero */
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5234
static int gfx_v12_reset_gfx_pipe(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5236
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5245
soc24_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5247
switch (ring->pipe) {
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5281
dev_info(adev->dev, "The ring %s pipe reset: %s\n", ring->name,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5290
static int gfx_v12_0_reset_kgq(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5294
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5298
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5300
r = amdgpu_mes_reset_legacy_queue(ring->adev, ring, vmid, use_mmio, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5303
r = gfx_v12_reset_gfx_pipe(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5309
r = gfx_v12_0_kgq_init_queue(ring, true);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
531
r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5315
r = amdgpu_mes_map_legacy_queue(adev, ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5322
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5325
static int gfx_v12_0_reset_compute_pipe(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5327
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5336
soc24_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5342
switch (ring->pipe) {
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5375
switch (ring->pipe) {
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5402
dev_info(adev->dev, "The ring %s pipe resets: %s\n", ring->name,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5408
static int gfx_v12_0_reset_kcq(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5412
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5415
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5417
r = amdgpu_mes_reset_legacy_queue(ring->adev, ring, vmid, true, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5420
r = gfx_v12_0_reset_compute_pipe(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5425
r = gfx_v12_0_kcq_init_queue(ring, true);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5430
r = amdgpu_mes_map_legacy_queue(adev, ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5436
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5439
static void gfx_v12_0_ring_begin_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5441
amdgpu_gfx_profile_ring_begin_use(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5443
amdgpu_gfx_enforce_isolation_ring_begin_use(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5446
static void gfx_v12_0_ring_end_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5448
amdgpu_gfx_profile_ring_end_use(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5450
amdgpu_gfx_enforce_isolation_ring_end_use(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
5592
adev->gfx.kiq[0].ring.funcs = &gfx_v12_0_ring_funcs_kiq;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
971
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
974
ring = &adev->gfx.gfx_ring[ring_id];
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
976
ring->me = me;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
977
ring->pipe = pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
978
ring->queue = queue;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
980
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
981
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
984
ring->doorbell_index = adev->doorbell_index.gfx_ring0 << 1;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
986
ring->doorbell_index = adev->doorbell_index.gfx_ring1 << 1;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
987
ring->vm_hub = AMDGPU_GFXHUB(0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
988
sprintf(ring->name, "gfx_%d.%d.%d", ring->me, ring->pipe, ring->queue);
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
990
irq_type = AMDGPU_CP_IRQ_GFX_ME0_PIPE0_EOP + ring->pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v12_0.c
991
r = amdgpu_ring_init(adev, ring, 1024, &adev->gfx.eop_irq, irq_type,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
111
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
113
uint64_t mqd_addr = amdgpu_bo_gpu_offset(ring->mqd_obj);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
114
uint64_t wptr_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
117
switch (ring->funcs->type) {
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
1280
amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq[i].ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
135
PACKET3_MAP_QUEUES_QUEUE(ring->queue) |
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
136
PACKET3_MAP_QUEUES_PIPE(ring->pipe) |
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
142
amdgpu_ring_write(kiq_ring, PACKET3_MAP_QUEUES_DOORBELL_OFFSET(ring->doorbell_index));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
150
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
155
uint32_t eng_sel = ring->funcs->type == AMDGPU_RING_TYPE_GFX ? 4 : 0;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
157
if (adev->enable_mes && !adev->gfx.kiq[0].ring.sched.ready) {
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
158
amdgpu_mes_unmap_legacy_queue(adev, ring, action, gpu_addr,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
170
PACKET3_UNMAP_QUEUES_DOORBELL_OFFSET0(ring->doorbell_index));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
184
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
187
uint32_t eng_sel = ring->funcs->type == AMDGPU_RING_TYPE_GFX ? 4 : 0;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
1917
adev->gfx.kiq[xcc_id].ring.sched.ready = enable;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
195
PACKET3_QUERY_STATUS_DOORBELL_OFFSET(ring->doorbell_index) |
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2081
static void gfx_v12_1_xcc_kiq_setting(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2085
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2090
tmp |= (ring->me << 5) | (ring->pipe << 3) | (ring->queue);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2246
static int gfx_v12_1_xcc_kiq_init_register(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2249
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2250
struct v12_1_compute_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
233
static void gfx_v12_1_wait_reg_mem(struct amdgpu_ring *ring, int eng_sel,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2343
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2349
static int gfx_v12_1_xcc_kiq_init_queue(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2352
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2353
struct v12_1_compute_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2355
gfx_v12_1_xcc_kiq_setting(ring, xcc_id);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2363
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2364
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2367
soc_v1_0_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0, GET_INST(GC, xcc_id));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2368
gfx_v12_1_xcc_kiq_init_register(ring, xcc_id);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2374
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2376
soc_v1_0_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0, GET_INST(GC, xcc_id));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2377
amdgpu_ring_init_mqd(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2378
gfx_v12_1_xcc_kiq_init_register(ring, xcc_id);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2389
static int gfx_v12_1_xcc_kcq_init_queue(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2392
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2393
struct v12_1_compute_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2394
int mqd_idx = ring - &adev->gfx.compute_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2399
soc_v1_0_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0, GET_INST(GC, xcc_id));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2400
amdgpu_ring_init_mqd(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2411
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2412
atomic64_set((atomic64_t *)ring->wptr_cpu_addr, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2413
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2422
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2425
ring = &adev->gfx.kiq[xcc_id].ring;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2427
r = amdgpu_bo_reserve(ring->mqd_obj, false);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
243
amdgpu_ring_write(ring, PACKET3(PACKET3_WAIT_REG_MEM, 5));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2431
r = amdgpu_bo_kmap(ring->mqd_obj, (void **)&ring->mqd_ptr);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2433
amdgpu_bo_unreserve(ring->mqd_obj);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2437
gfx_v12_1_xcc_kiq_init_queue(ring, xcc_id);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2438
amdgpu_bo_kunmap(ring->mqd_obj);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2439
ring->mqd_ptr = NULL;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
244
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2440
amdgpu_bo_unreserve(ring->mqd_obj);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2441
ring->sched.ready = true;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2448
struct amdgpu_ring *ring = NULL;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2455
ring = &adev->gfx.compute_ring[i + xcc_id * adev->gfx.num_compute_rings];
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2457
r = amdgpu_bo_reserve(ring->mqd_obj, false);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2460
r = amdgpu_bo_kmap(ring->mqd_obj, (void **)&ring->mqd_ptr);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2462
r = gfx_v12_1_xcc_kcq_init_queue(ring, xcc_id);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2463
amdgpu_bo_kunmap(ring->mqd_obj);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2464
ring->mqd_ptr = NULL;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2466
amdgpu_bo_unreserve(ring->mqd_obj);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2479
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2509
ring = &adev->gfx.compute_ring[i + xcc_id * adev->gfx.num_compute_rings];
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2510
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
253
amdgpu_ring_write(ring, addr0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
254
amdgpu_ring_write(ring, addr1);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
255
amdgpu_ring_write(ring, ref);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
256
amdgpu_ring_write(ring, mask);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
257
amdgpu_ring_write(ring, inv); /* poll interval */
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
260
static int gfx_v12_1_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
262
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
270
scratch_reg0_offset = SOC15_REG_OFFSET(GC, GET_INST(GC, ring->xcc_id), regSCRATCH_REG0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
274
r = amdgpu_ring_alloc(ring, 5);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
278
ring->idx, r);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
282
if (ring->funcs->type == AMDGPU_RING_TYPE_KIQ) {
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
283
gfx_v12_1_ring_emit_wreg(ring, xcc_offset, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
285
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_UCONFIG_REG, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
286
amdgpu_ring_write(ring, xcc_offset -
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
288
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
290
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2952
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2971
if (ring
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2973
&& ((ring->funcs->type == AMDGPU_RING_TYPE_GFX)
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2974
|| (ring->funcs->type == AMDGPU_RING_TYPE_COMPUTE))) {
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
2976
amdgpu_ring_emit_wreg(ring, reg, data);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
307
static int gfx_v12_1_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
309
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
319
ring->funcs->type == AMDGPU_RING_TYPE_KIQ)
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3283
static u64 gfx_v12_1_ring_get_rptr_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3286
return *(uint32_t *)ring->rptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3289
static u64 gfx_v12_1_ring_get_wptr_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3294
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3295
wptr = atomic64_read((atomic64_t *)ring->wptr_cpu_addr);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3301
static void gfx_v12_1_ring_set_wptr_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3303
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3306
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3307
atomic64_set((atomic64_t *)ring->wptr_cpu_addr,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3308
ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3309
WDOORBELL64(ring->doorbell_index, ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3315
static void gfx_v12_1_ring_emit_ib_compute(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3334
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3335
amdgpu_ring_write(ring, regGDS_COMPUTE_MAX_WAVE_ID);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3338
amdgpu_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3340
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3345
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3346
amdgpu_ring_write(ring, control);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3349
static void gfx_v12_1_ring_emit_fence(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3356
amdgpu_ring_write(ring, PACKET3(PACKET3_RELEASE_MEM, 6));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3357
amdgpu_ring_write(ring, (PACKET3_RELEASE_MEM_GCR_SEQ(1) |
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3364
amdgpu_ring_write(ring, (PACKET3_RELEASE_MEM_DATA_SEL(write64bit ? 2 : 1) |
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3375
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3376
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3377
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3378
amdgpu_ring_write(ring, upper_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3379
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3382
static void gfx_v12_1_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3384
int usepfp = (ring->funcs->type == AMDGPU_RING_TYPE_GFX);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3385
uint32_t seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3386
uint64_t addr = ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3388
gfx_v12_1_wait_reg_mem(ring, usepfp, 1, 0, lower_32_bits(addr),
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3392
static void gfx_v12_1_ring_invalidate_tlbs(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3396
amdgpu_ring_write(ring, PACKET3(PACKET3_INVALIDATE_TLBS, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3397
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3404
static void gfx_v12_1_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3407
amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3410
if (ring->funcs->type == AMDGPU_RING_TYPE_GFX) {
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3412
amdgpu_ring_write(ring, PACKET3(PACKET3_PFP_SYNC_ME, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3413
amdgpu_ring_write(ring, 0x0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3417
static void gfx_v12_1_ring_emit_fence_kiq(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3420
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3426
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3427
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3429
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3430
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3431
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3435
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3436
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3438
amdgpu_ring_write(ring, SOC15_REG_OFFSET(GC, GET_INST(GC, 0), regCPC_INT_STATUS));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3439
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3440
amdgpu_ring_write(ring, 0x20000000); /* src_id is 178 */
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3444
static void gfx_v12_1_ring_emit_rreg(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3447
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
345
r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3451
amdgpu_ring_write(ring, PACKET3(PACKET3_COPY_DATA, 4));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3452
amdgpu_ring_write(ring, 0 | /* src: register*/
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3455
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3456
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3457
amdgpu_ring_write(ring, lower_32_bits(adev->wb.gpu_addr +
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3459
amdgpu_ring_write(ring, upper_32_bits(adev->wb.gpu_addr +
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3463
static void gfx_v12_1_ring_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3471
switch (ring->funcs->type) {
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3479
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3480
amdgpu_ring_write(ring, cmd);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3481
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3482
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3483
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3486
static void gfx_v12_1_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3489
gfx_v12_1_wait_reg_mem(ring, 0, 0, 0, reg, 0, val, mask, 0x20);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3492
static void gfx_v12_1_ring_emit_reg_write_reg_wait(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3496
int usepfp = (ring->funcs->type == AMDGPU_RING_TYPE_GFX);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3498
gfx_v12_1_wait_reg_mem(ring, usepfp, 0, 1, reg0, reg1,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3608
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3642
ring = &adev->gfx.compute_ring
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3649
if ((ring->me == me_id) &&
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3650
(ring->pipe == pipe_id) &&
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3651
(ring->queue == queue_id))
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3652
amdgpu_fence_process(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3711
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3725
ring = &adev->gfx.gfx_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3727
if (ring->me == me_id && ring->pipe == pipe_id)
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3728
drm_sched_fault(&ring->sched);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3734
ring = &adev->gfx.compute_ring
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3737
if (ring->me == me_id && ring->pipe == pipe_id &&
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3738
ring->queue == queue_id)
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3739
drm_sched_fault(&ring->sched);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3766
static void gfx_v12_1_emit_mem_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3777
amdgpu_ring_write(ring, PACKET3(PACKET3_ACQUIRE_MEM, 6));
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3778
amdgpu_ring_write(ring, 0); /* CP_COHER_CNTL */
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3779
amdgpu_ring_write(ring, 0xffffffff); /* CP_COHER_SIZE */
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3780
amdgpu_ring_write(ring, 0xffffff); /* CP_COHER_SIZE_HI */
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3781
amdgpu_ring_write(ring, 0); /* CP_COHER_BASE */
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3782
amdgpu_ring_write(ring, 0); /* CP_COHER_BASE_HI */
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3783
amdgpu_ring_write(ring, 0x0000000A); /* POLL_INTERVAL */
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3784
amdgpu_ring_write(ring, gcr_cntl); /* GCR_CNTL */
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
3867
adev->gfx.kiq[i].ring.funcs = &gfx_v12_1_ring_funcs_kiq;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
771
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
775
ring = &adev->gfx.compute_ring[xcc_id * adev->gfx.num_compute_rings +
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
779
ring->xcc_id = xcc_id;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
780
ring->me = mec + 1;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
781
ring->pipe = pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
782
ring->queue = queue;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
784
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
785
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
788
ring->doorbell_index = (xcc_doorbell_start + ring_id) << 1;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
789
ring->eop_gpu_addr = adev->gfx.mec.hpd_eop_gpu_addr +
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
792
ring->vm_hub = AMDGPU_GFXHUB(xcc_id);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
793
sprintf(ring->name, "comp_%d.%d.%d.%d",
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
794
ring->xcc_id, ring->me, ring->pipe, ring->queue);
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
797
+ ((ring->me - 1) * adev->gfx.mec.num_pipe_per_mec)
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
798
+ ring->pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
799
hw_prio = amdgpu_gfx_is_high_priority_compute_queue(adev, ring) ?
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
802
r = amdgpu_ring_init(adev, ring, 1024, &adev->gfx.eop_irq, irq_type,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
82
static void gfx_v12_1_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/gfx_v12_1.c
85
static void gfx_v12_1_ring_invalidate_tlbs(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1788
static int gfx_v6_0_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1790
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1797
r = amdgpu_ring_alloc(ring, 3);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1801
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1802
amdgpu_ring_write(ring, mmSCRATCH_REG0 - PACKET3_SET_CONFIG_REG_START);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1803
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1804
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1818
static void gfx_v6_0_ring_emit_vgt_flush(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1820
amdgpu_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1821
amdgpu_ring_write(ring, EVENT_TYPE(VGT_FLUSH) |
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1825
static void gfx_v6_0_ring_emit_fence(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1831
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1832
amdgpu_ring_write(ring, (mmCP_COHER_CNTL2 - PACKET3_SET_CONFIG_REG_START));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1833
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1834
amdgpu_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1835
amdgpu_ring_write(ring, PACKET3_TCL1_ACTION_ENA |
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1839
amdgpu_ring_write(ring, 0xFFFFFFFF);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1840
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1841
amdgpu_ring_write(ring, 10); /* poll interval */
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1843
amdgpu_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1844
amdgpu_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_TS_EVENT) | EVENT_INDEX(5));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1845
amdgpu_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1846
amdgpu_ring_write(ring, (upper_32_bits(addr) & 0xffff) |
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1849
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1850
amdgpu_ring_write(ring, upper_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1853
static void gfx_v6_0_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1863
amdgpu_ring_write(ring, PACKET3(PACKET3_SWITCH_BUFFER, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1864
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1874
amdgpu_ring_write(ring, header);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1875
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1880
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1881
amdgpu_ring_write(ring, control);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1894
static int gfx_v6_0_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1896
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
1913
r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2008
struct amdgpu_ring *ring = &adev->gfx.gfx_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2011
r = amdgpu_ring_alloc(ring, 7 + 4);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2016
amdgpu_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2017
amdgpu_ring_write(ring, 0x1);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2018
amdgpu_ring_write(ring, 0x0);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2019
amdgpu_ring_write(ring, adev->gfx.config.max_hw_contexts - 1);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2020
amdgpu_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2021
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2022
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2024
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_BASE, 2));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2025
amdgpu_ring_write(ring, PACKET3_BASE_INDEX(CE_PARTITION_BASE));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2026
amdgpu_ring_write(ring, 0xc000);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2027
amdgpu_ring_write(ring, 0xe000);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2028
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2032
r = amdgpu_ring_alloc(ring, gfx_v6_0_get_csb_size(adev) + 10);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2038
amdgpu_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2039
amdgpu_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2044
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2046
amdgpu_ring_write(ring, ext->reg_index - PACKET3_SET_CONTEXT_REG_START);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2048
amdgpu_ring_write(ring, ext->extent[i]);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2053
amdgpu_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2054
amdgpu_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2056
amdgpu_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2057
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2059
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2060
amdgpu_ring_write(ring, 0x00000316);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2061
amdgpu_ring_write(ring, 0x0000000e);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2062
amdgpu_ring_write(ring, 0x00000010);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2064
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2071
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2088
ring = &adev->gfx.gfx_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2089
rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2099
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2100
WREG32(mmCP_RB0_WPTR, ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2103
rptr_addr = ring->rptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2112
WREG32(mmCP_RB0_BASE, ring->gpu_addr >> 8);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2116
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2123
static u64 gfx_v6_0_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2125
return *ring->rptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2128
static u64 gfx_v6_0_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2130
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2132
if (ring == &adev->gfx.gfx_ring[0])
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2134
else if (ring == &adev->gfx.compute_ring[0])
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2136
else if (ring == &adev->gfx.compute_ring[1])
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2142
static void gfx_v6_0_ring_set_wptr_gfx(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2144
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2146
WREG32(mmCP_RB0_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2150
static void gfx_v6_0_ring_set_wptr_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2152
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2154
if (ring == &adev->gfx.compute_ring[0]) {
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2155
WREG32(mmCP_RB1_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2157
} else if (ring == &adev->gfx.compute_ring[1]) {
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2158
WREG32(mmCP_RB2_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2168
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2177
ring = &adev->gfx.compute_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2178
rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2186
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2187
WREG32(mmCP_RB1_WPTR, ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2189
rptr_addr = ring->rptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2195
WREG32(mmCP_RB1_BASE, ring->gpu_addr >> 8);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2197
ring = &adev->gfx.compute_ring[1];
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2198
rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2206
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2207
WREG32(mmCP_RB2_WPTR, ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2208
rptr_addr = ring->rptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2214
WREG32(mmCP_RB2_BASE, ring->gpu_addr >> 8);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2286
static void gfx_v6_0_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2288
int usepfp = (ring->funcs->type == AMDGPU_RING_TYPE_GFX);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2289
uint32_t seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2290
uint64_t addr = ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2292
amdgpu_ring_write(ring, PACKET3(PACKET3_WAIT_REG_MEM, 5));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2293
amdgpu_ring_write(ring, (WAIT_REG_MEM_MEM_SPACE(1) | /* memory */
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2296
amdgpu_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2297
amdgpu_ring_write(ring, upper_32_bits(addr) & 0xffffffff);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2298
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2299
amdgpu_ring_write(ring, 0xffffffff);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2300
amdgpu_ring_write(ring, 4); /* poll interval */
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2304
amdgpu_ring_write(ring, PACKET3(PACKET3_SWITCH_BUFFER, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2305
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2306
amdgpu_ring_write(ring, PACKET3(PACKET3_SWITCH_BUFFER, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2307
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2311
static void gfx_v6_0_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2314
int usepfp = (ring->funcs->type == AMDGPU_RING_TYPE_GFX);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2316
amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2319
amdgpu_ring_write(ring, PACKET3(PACKET3_WAIT_REG_MEM, 5));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2320
amdgpu_ring_write(ring, (WAIT_REG_MEM_FUNCTION(0) | /* always */
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2322
amdgpu_ring_write(ring, mmVM_INVALIDATE_REQUEST);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2323
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2324
amdgpu_ring_write(ring, 0); /* ref */
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2325
amdgpu_ring_write(ring, 0); /* mask */
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2326
amdgpu_ring_write(ring, 0x20); /* poll interval */
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2330
amdgpu_ring_write(ring, PACKET3(PACKET3_PFP_SYNC_ME, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2331
amdgpu_ring_write(ring, 0x0);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2334
amdgpu_ring_write(ring, PACKET3(PACKET3_SWITCH_BUFFER, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2335
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2336
amdgpu_ring_write(ring, PACKET3(PACKET3_SWITCH_BUFFER, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2337
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2341
static void gfx_v6_0_ring_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2344
int usepfp = (ring->funcs->type == AMDGPU_RING_TYPE_GFX);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2346
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2347
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(usepfp) |
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2349
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2350
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2351
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2933
static void gfx_v6_ring_emit_cntxcntl(struct amdgpu_ring *ring, uint32_t flags)
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2936
gfx_v6_0_ring_emit_vgt_flush(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2937
amdgpu_ring_write(ring, PACKET3(PACKET3_CONTEXT_CONTROL, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2938
amdgpu_ring_write(ring, 0x80000000);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
2939
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3042
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3071
ring = &adev->gfx.gfx_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3072
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3073
sprintf(ring->name, "gfx");
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3074
r = amdgpu_ring_init(adev, ring, 2048,
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3089
ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3090
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3091
ring->use_doorbell = false;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3092
ring->doorbell_index = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3093
ring->me = 1;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3094
ring->pipe = i;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3095
ring->queue = i;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3096
sprintf(ring->name, "comp_%d.%d.%d", ring->me, ring->pipe, ring->queue);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3097
irq_type = AMDGPU_CP_IRQ_COMPUTE_MEC1_PIPE0_EOP + ring->pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3098
r = amdgpu_ring_init(adev, ring, 1024,
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3214
int ring,
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3220
if (ring == 0) {
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3233
if (ring == 0) {
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3345
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3349
ring = &adev->gfx.gfx_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3353
ring = &adev->gfx.compute_ring[entry->ring_id - 1];
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3358
drm_sched_fault(&ring->sched);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3426
static void gfx_v6_0_emit_mem_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3428
amdgpu_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3429
amdgpu_ring_write(ring, PACKET3_TCL1_ACTION_ENA |
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3433
amdgpu_ring_write(ring, 0xffffffff); /* CP_COHER_SIZE */
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3434
amdgpu_ring_write(ring, 0); /* CP_COHER_BASE */
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
3435
amdgpu_ring_write(ring, 0x0000000A); /* poll interval */
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2033
static int gfx_v7_0_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2035
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2041
r = amdgpu_ring_alloc(ring, 3);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2045
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_UCONFIG_REG, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2046
amdgpu_ring_write(ring, mmSCRATCH_REG0 - PACKET3_SET_UCONFIG_REG_START);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2047
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2048
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2068
static void gfx_v7_0_ring_emit_hdp_flush(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2072
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2079
adev->gfx.funcs->get_hdp_flush_mask(ring, &ref_and_mask, &usepfp);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2080
amdgpu_ring_write(ring, PACKET3(PACKET3_WAIT_REG_MEM, 5));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2081
amdgpu_ring_write(ring, (WAIT_REG_MEM_OPERATION(1) | /* write, wait, write */
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2084
amdgpu_ring_write(ring, mmGPU_HDP_FLUSH_REQ);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2085
amdgpu_ring_write(ring, mmGPU_HDP_FLUSH_DONE);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2086
amdgpu_ring_write(ring, ref_and_mask);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2087
amdgpu_ring_write(ring, ref_and_mask);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2088
amdgpu_ring_write(ring, 0x20); /* poll interval */
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2091
static void gfx_v7_0_ring_emit_vgt_flush(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2093
amdgpu_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2094
amdgpu_ring_write(ring, EVENT_TYPE(VS_PARTIAL_FLUSH) |
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2097
amdgpu_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2098
amdgpu_ring_write(ring, EVENT_TYPE(VGT_FLUSH) |
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2113
static void gfx_v7_0_ring_emit_fence_gfx(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2123
amdgpu_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2124
amdgpu_ring_write(ring, (EOP_TCL1_ACTION_EN |
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2128
amdgpu_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2129
amdgpu_ring_write(ring, (upper_32_bits(addr) & 0xffff) |
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2131
amdgpu_ring_write(ring, lower_32_bits(seq - 1));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2132
amdgpu_ring_write(ring, upper_32_bits(seq - 1));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2135
amdgpu_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2136
amdgpu_ring_write(ring, (EOP_TCL1_ACTION_EN |
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2141
amdgpu_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2142
amdgpu_ring_write(ring, (upper_32_bits(addr) & 0xffff) |
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2144
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2145
amdgpu_ring_write(ring, upper_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2159
static void gfx_v7_0_ring_emit_fence_compute(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2167
amdgpu_ring_write(ring, PACKET3(PACKET3_RELEASE_MEM, 5));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2168
amdgpu_ring_write(ring, (EOP_TCL1_ACTION_EN |
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2172
amdgpu_ring_write(ring, DATA_SEL(write64bit ? 2 : 1) | INT_SEL(int_sel ? 2 : 0));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2173
amdgpu_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2174
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2175
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2176
amdgpu_ring_write(ring, upper_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2196
static void gfx_v7_0_ring_emit_ib_gfx(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2206
amdgpu_ring_write(ring, PACKET3(PACKET3_SWITCH_BUFFER, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2207
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2217
amdgpu_ring_write(ring, header);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2218
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2223
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2224
amdgpu_ring_write(ring, control);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2227
static void gfx_v7_0_ring_emit_ib_compute(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2246
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2247
amdgpu_ring_write(ring, mmGDS_COMPUTE_MAX_WAVE_ID - PACKET3_SET_CONFIG_REG_START);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2248
amdgpu_ring_write(ring, ring->adev->gds.gds_compute_max_wave_id);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2251
amdgpu_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2252
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2257
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2258
amdgpu_ring_write(ring, control);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2261
static void gfx_v7_ring_emit_cntxcntl(struct amdgpu_ring *ring, uint32_t flags)
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2267
gfx_v7_0_ring_emit_vgt_flush(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2276
amdgpu_ring_write(ring, PACKET3(PACKET3_CONTEXT_CONTROL, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2277
amdgpu_ring_write(ring, dw2);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2278
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2291
static int gfx_v7_0_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2293
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2310
r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2454
struct amdgpu_ring *ring = &adev->gfx.gfx_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2466
r = amdgpu_ring_alloc(ring, gfx_v7_0_get_csb_size(adev) + 8);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2473
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_BASE, 2));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2474
amdgpu_ring_write(ring, PACKET3_BASE_INDEX(CE_PARTITION_BASE));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2475
amdgpu_ring_write(ring, 0x8000);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2476
amdgpu_ring_write(ring, 0x8000);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2479
amdgpu_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2480
amdgpu_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2482
amdgpu_ring_write(ring, PACKET3(PACKET3_CONTEXT_CONTROL, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2483
amdgpu_ring_write(ring, 0x80000000);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2484
amdgpu_ring_write(ring, 0x80000000);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2489
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2491
amdgpu_ring_write(ring, ext->reg_index - PACKET3_SET_CONTEXT_REG_START);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2493
amdgpu_ring_write(ring, ext->extent[i]);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2498
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2499
amdgpu_ring_write(ring, mmPA_SC_RASTER_CONFIG - PACKET3_SET_CONTEXT_REG_START);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2500
amdgpu_ring_write(ring, adev->gfx.config.rb_config[0][0].raster_config);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2501
amdgpu_ring_write(ring, adev->gfx.config.rb_config[0][0].raster_config_1);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2503
amdgpu_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2504
amdgpu_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2506
amdgpu_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2507
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2509
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2510
amdgpu_ring_write(ring, 0x00000316);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2511
amdgpu_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2512
amdgpu_ring_write(ring, 0x00000010); /* VGT_OUT_DEALLOC_CNTL */
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2514
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2530
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2550
ring = &adev->gfx.gfx_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2551
rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2560
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2561
WREG32(mmCP_RB0_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2564
rptr_addr = ring->rptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2574
rb_addr = ring->gpu_addr >> 8;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2580
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2587
static u64 gfx_v7_0_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2589
return *ring->rptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2592
static u64 gfx_v7_0_ring_get_wptr_gfx(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2594
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2599
static void gfx_v7_0_ring_set_wptr_gfx(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2601
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2603
WREG32(mmCP_RB0_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2607
static u64 gfx_v7_0_ring_get_wptr_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2610
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2613
static void gfx_v7_0_ring_set_wptr_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2615
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2618
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2619
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2714
struct amdgpu_ring *ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2716
amdgpu_bo_free_kernel(&ring->mqd_obj, NULL, NULL);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2818
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2835
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2849
hqd_gpu_addr = ring->gpu_addr >> 8;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2860
order_base_2(ring->ring_size / 8);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2876
wb_gpu_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2881
wb_gpu_addr = ring->rptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2887
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2893
(ring->doorbell_index <<
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2906
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2907
mqd->cp_hqd_pq_wptr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2965
struct amdgpu_ring *ring = &adev->gfx.compute_ring[ring_id];
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2968
AMDGPU_GEM_DOMAIN_GTT, &ring->mqd_obj,
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2976
cik_srbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2978
gfx_v7_0_mqd_init(adev, mqd, mqd_gpu_addr, ring);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2985
amdgpu_bo_kunmap(ring->mqd_obj);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
2986
amdgpu_bo_unreserve(ring->mqd_obj);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3003
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3027
ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3028
amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3098
static void gfx_v7_0_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3100
int usepfp = (ring->funcs->type == AMDGPU_RING_TYPE_GFX);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3101
uint32_t seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3102
uint64_t addr = ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3104
amdgpu_ring_write(ring, PACKET3(PACKET3_WAIT_REG_MEM, 5));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3105
amdgpu_ring_write(ring, (WAIT_REG_MEM_MEM_SPACE(1) | /* memory */
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3108
amdgpu_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3109
amdgpu_ring_write(ring, upper_32_bits(addr) & 0xffffffff);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3110
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3111
amdgpu_ring_write(ring, 0xffffffff);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3112
amdgpu_ring_write(ring, 4); /* poll interval */
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3116
amdgpu_ring_write(ring, PACKET3(PACKET3_SWITCH_BUFFER, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3117
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3118
amdgpu_ring_write(ring, PACKET3(PACKET3_SWITCH_BUFFER, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3119
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3139
static void gfx_v7_0_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3142
int usepfp = (ring->funcs->type == AMDGPU_RING_TYPE_GFX);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3144
amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3147
amdgpu_ring_write(ring, PACKET3(PACKET3_WAIT_REG_MEM, 5));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3148
amdgpu_ring_write(ring, (WAIT_REG_MEM_OPERATION(0) | /* wait */
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3151
amdgpu_ring_write(ring, mmVM_INVALIDATE_REQUEST);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3152
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3153
amdgpu_ring_write(ring, 0); /* ref */
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3154
amdgpu_ring_write(ring, 0); /* mask */
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3155
amdgpu_ring_write(ring, 0x20); /* poll interval */
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3160
amdgpu_ring_write(ring, PACKET3(PACKET3_PFP_SYNC_ME, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3161
amdgpu_ring_write(ring, 0x0);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3164
amdgpu_ring_write(ring, PACKET3(PACKET3_SWITCH_BUFFER, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3165
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3166
amdgpu_ring_write(ring, PACKET3(PACKET3_SWITCH_BUFFER, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3167
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3171
static void gfx_v7_0_ring_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3174
int usepfp = (ring->funcs->type == AMDGPU_RING_TYPE_GFX);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3176
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3177
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(usepfp) |
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3179
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3180
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3181
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3467
struct amdgpu_ring *ring, unsigned vmid)
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3954
static void gfx_v7_0_ring_emit_gds_switch(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3961
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3962
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3964
amdgpu_ring_write(ring, amdgpu_gds_reg_offset[vmid].mem_base);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3965
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3966
amdgpu_ring_write(ring, gds_base);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3969
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3970
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3972
amdgpu_ring_write(ring, amdgpu_gds_reg_offset[vmid].mem_size);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3973
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3974
amdgpu_ring_write(ring, gds_size);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3977
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3978
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3980
amdgpu_ring_write(ring, amdgpu_gds_reg_offset[vmid].gws);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3981
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3982
amdgpu_ring_write(ring, gws_size << GDS_GWS_VMID0__SIZE__SHIFT | gws_base);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3985
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3986
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3988
amdgpu_ring_write(ring, amdgpu_gds_reg_offset[vmid].oa);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3989
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3990
amdgpu_ring_write(ring, (1 << (oa_size + oa_base)) - (1 << oa_base));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3993
static void gfx_v7_0_ring_soft_recovery(struct amdgpu_ring *ring, unsigned vmid)
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
3995
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4079
static void gfx_v7_0_get_hdp_flush_mask(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4082
if (!ring || !ref_and_mask || !reg_mem_engine) {
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4087
if (ring->funcs->type == AMDGPU_RING_TYPE_COMPUTE ||
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4088
ring->funcs->type == AMDGPU_RING_TYPE_KIQ) {
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4089
switch (ring->me) {
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4091
*ref_and_mask = GPU_HDP_FLUSH_DONE__CP2_MASK << ring->pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4094
*ref_and_mask = GPU_HDP_FLUSH_DONE__CP6_MASK << ring->pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4312
struct amdgpu_ring *ring = &adev->gfx.compute_ring[ring_id];
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4315
ring->me = mec + 1;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4316
ring->pipe = pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4317
ring->queue = queue;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4319
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4320
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4321
ring->doorbell_index = adev->doorbell_index.mec_ring0 + ring_id;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4322
sprintf(ring->name, "comp_%d.%d.%d", ring->me, ring->pipe, ring->queue);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4325
+ ((ring->me - 1) * adev->gfx.mec.num_pipe_per_mec)
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4326
+ ring->pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4329
r = amdgpu_ring_init(adev, ring, 1024,
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4341
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4397
ring = &adev->gfx.gfx_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4398
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4399
sprintf(ring->name, "gfx");
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4400
r = amdgpu_ring_init(adev, ring, 1024,
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4782
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4795
ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4796
if ((ring->me == me_id) && (ring->pipe == pipe_id))
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4797
amdgpu_fence_process(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4807
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4820
ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4821
if ((ring->me == me_id) && (ring->pipe == pipe_id))
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4822
drm_sched_fault(&ring->sched);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4895
static void gfx_v7_0_emit_mem_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4897
amdgpu_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4898
amdgpu_ring_write(ring, PACKET3_TCL1_ACTION_ENA |
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4902
amdgpu_ring_write(ring, 0xffffffff); /* CP_COHER_SIZE */
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4903
amdgpu_ring_write(ring, 0); /* CP_COHER_BASE */
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4904
amdgpu_ring_write(ring, 0x0000000A); /* poll interval */
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4907
static void gfx_v7_0_emit_mem_sync_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4909
amdgpu_ring_write(ring, PACKET3(PACKET3_ACQUIRE_MEM, 5));
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4910
amdgpu_ring_write(ring, PACKET3_TCL1_ACTION_ENA |
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4914
amdgpu_ring_write(ring, 0xffffffff); /* CP_COHER_SIZE */
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4915
amdgpu_ring_write(ring, 0xff); /* CP_COHER_SIZE_HI */
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4916
amdgpu_ring_write(ring, 0); /* CP_COHER_BASE */
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4917
amdgpu_ring_write(ring, 0); /* CP_COHER_BASE_HI */
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
4918
amdgpu_ring_write(ring, 0x0000000A); /* poll interval */
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1476
struct amdgpu_ring *ring = &adev->gfx.compute_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1489
if (!ring->sched.ready)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1605
r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1851
struct amdgpu_ring *ring = &adev->gfx.compute_ring[ring_id];
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1854
ring = &adev->gfx.compute_ring[ring_id];
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1857
ring->me = mec + 1;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1858
ring->pipe = pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1859
ring->queue = queue;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1861
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1862
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1863
ring->doorbell_index = adev->doorbell_index.mec_ring0 + ring_id;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1864
ring->eop_gpu_addr = adev->gfx.mec.hpd_eop_gpu_addr
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1866
sprintf(ring->name, "comp_%d.%d.%d", ring->me, ring->pipe, ring->queue);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1869
+ ((ring->me - 1) * adev->gfx.mec.num_pipe_per_mec)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1870
+ ring->pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1872
hw_prio = amdgpu_gfx_is_high_priority_compute_queue(adev, ring) ?
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1875
r = amdgpu_ring_init(adev, ring, 1024, &adev->gfx.eop_irq, irq_type,
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1890
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1968
ring = &adev->gfx.gfx_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1969
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1970
sprintf(ring->name, "gfx");
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1973
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1974
ring->doorbell_index = adev->doorbell_index.gfx_ring0;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
1977
r = amdgpu_ring_init(adev, ring, 1024, &adev->gfx.eop_irq,
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
2045
amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq[0].ring);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4132
struct amdgpu_ring *ring = &adev->gfx.gfx_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4144
r = amdgpu_ring_alloc(ring, gfx_v8_0_get_csb_size(adev) + 4);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4151
amdgpu_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4152
amdgpu_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4154
amdgpu_ring_write(ring, PACKET3(PACKET3_CONTEXT_CONTROL, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4155
amdgpu_ring_write(ring, 0x80000000);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4156
amdgpu_ring_write(ring, 0x80000000);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4161
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4164
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4167
amdgpu_ring_write(ring, ext->extent[i]);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4172
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4173
amdgpu_ring_write(ring, mmPA_SC_RASTER_CONFIG - PACKET3_SET_CONTEXT_REG_START);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4174
amdgpu_ring_write(ring, adev->gfx.config.rb_config[0][0].raster_config);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4175
amdgpu_ring_write(ring, adev->gfx.config.rb_config[0][0].raster_config_1);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4177
amdgpu_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4178
amdgpu_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4180
amdgpu_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4181
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4184
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_BASE, 2));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4185
amdgpu_ring_write(ring, PACKET3_BASE_INDEX(CE_PARTITION_BASE));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4186
amdgpu_ring_write(ring, 0x8000);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4187
amdgpu_ring_write(ring, 0x8000);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4189
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4193
static void gfx_v8_0_set_cpg_door_bell(struct amdgpu_device *adev, struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4202
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4204
DOORBELL_OFFSET, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4229
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4241
ring = &adev->gfx.gfx_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4242
rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4254
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4255
WREG32(mmCP_RB0_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4258
rptr_addr = ring->rptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4262
wptr_gpu_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4268
rb_addr = ring->gpu_addr >> 8;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4272
gfx_v8_0_set_cpg_door_bell(adev, ring);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4274
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4286
adev->gfx.kiq[0].ring.sched.ready = false;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4292
static void gfx_v8_0_kiq_setting(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4295
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4300
tmp |= (ring->me << 5) | (ring->pipe << 3) | (ring->queue);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4306
struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4340
struct amdgpu_ring *ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4341
uint64_t mqd_addr = amdgpu_bo_gpu_offset(ring->mqd_obj);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4342
uint64_t wptr_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4350
PACKET3_MAP_QUEUES_DOORBELL_OFFSET(ring->doorbell_index) |
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4351
PACKET3_MAP_QUEUES_QUEUE(ring->queue) |
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4352
PACKET3_MAP_QUEUES_PIPE(ring->pipe) |
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4353
PACKET3_MAP_QUEUES_ME(ring->me == 1 ? 0 : 1)); /* doorbell */
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4386
static void gfx_v8_0_mqd_set_priority(struct amdgpu_ring *ring, struct vi_mqd *mqd)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4388
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4390
if (ring->funcs->type == AMDGPU_RING_TYPE_COMPUTE) {
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4391
if (amdgpu_gfx_is_high_priority_compute_queue(adev, ring)) {
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4399
static int gfx_v8_0_mqd_init(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4401
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4402
struct vi_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4413
mqd->dynamic_cu_mask_addr_lo = lower_32_bits(ring->mqd_gpu_addr
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4415
mqd->dynamic_cu_mask_addr_hi = upper_32_bits(ring->mqd_gpu_addr
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4417
eop_base_addr = ring->eop_gpu_addr >> 8;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4432
ring->use_doorbell ? 1 : 0);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4437
mqd->cp_mqd_base_addr_lo = ring->mqd_gpu_addr & 0xfffffffc;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4438
mqd->cp_mqd_base_addr_hi = upper_32_bits(ring->mqd_gpu_addr);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4446
hqd_gpu_addr = ring->gpu_addr >> 8;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4453
(order_base_2(ring->ring_size / 4) - 1));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4466
wb_gpu_addr = ring->rptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4472
wb_gpu_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4478
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4481
DOORBELL_OFFSET, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4494
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4495
mqd->cp_hqd_pq_wptr = ring->wptr;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4534
gfx_v8_0_mqd_set_priority(ring, mqd);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4540
if (ring->funcs->type == AMDGPU_RING_TYPE_KIQ)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4583
static int gfx_v8_0_kiq_init_queue(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4585
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4586
struct vi_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4588
gfx_v8_0_kiq_setting(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4596
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4597
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4599
vi_srbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4608
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4610
vi_srbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4611
gfx_v8_0_mqd_init(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4623
static int gfx_v8_0_kcq_init_queue(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4625
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4626
struct vi_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4627
int mqd_idx = ring - &adev->gfx.compute_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4634
vi_srbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4635
gfx_v8_0_mqd_init(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4646
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4647
atomic64_set((atomic64_t *)ring->wptr_cpu_addr, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4648
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4665
gfx_v8_0_kiq_init_queue(&adev->gfx.kiq[0].ring);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4689
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4692
ring = &adev->gfx.gfx_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4693
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4697
ring = &adev->gfx.kiq[0].ring;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4698
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4703
ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4704
amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4764
struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4771
struct amdgpu_ring *ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4779
amdgpu_ring_write(kiq_ring, PACKET3_UNMAP_QUEUES_DOORBELL_OFFSET0(ring->doorbell_index));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4978
struct amdgpu_ring *ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
4981
vi_srbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5073
struct amdgpu_ring *ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5076
vi_srbm_select(adev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5116
static void gfx_v8_0_ring_emit_gds_switch(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5123
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5124
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5126
amdgpu_ring_write(ring, amdgpu_gds_reg_offset[vmid].mem_base);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5127
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5128
amdgpu_ring_write(ring, gds_base);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5131
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5132
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5134
amdgpu_ring_write(ring, amdgpu_gds_reg_offset[vmid].mem_size);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5135
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5136
amdgpu_ring_write(ring, gds_size);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5139
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5140
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5142
amdgpu_ring_write(ring, amdgpu_gds_reg_offset[vmid].gws);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5143
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5144
amdgpu_ring_write(ring, gws_size << GDS_GWS_VMID0__SIZE__SHIFT | gws_base);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5147
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5148
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5150
amdgpu_ring_write(ring, amdgpu_gds_reg_offset[vmid].oa);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5151
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5152
amdgpu_ring_write(ring, (1 << (oa_size + oa_base)) - (1 << oa_base));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5223
static void gfx_v8_0_get_hdp_flush_mask(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5226
if (!ring || !ref_and_mask || !reg_mem_engine) {
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5231
if ((ring->funcs->type == AMDGPU_RING_TYPE_COMPUTE) ||
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5232
(ring->funcs->type == AMDGPU_RING_TYPE_KIQ)) {
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5233
switch (ring->me) {
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5235
*ref_and_mask = GPU_HDP_FLUSH_DONE__CP2_MASK << ring->pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5238
*ref_and_mask = GPU_HDP_FLUSH_DONE__CP6_MASK << ring->pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
5581
struct amdgpu_ring *ring, unsigned vmid)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6006
static u64 gfx_v8_0_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6008
return *ring->rptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6011
static u64 gfx_v8_0_ring_get_wptr_gfx(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6013
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6015
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6017
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6022
static void gfx_v8_0_ring_set_wptr_gfx(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6024
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6026
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6028
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6029
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6031
WREG32(mmCP_RB0_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6036
static void gfx_v8_0_ring_emit_hdp_flush(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6039
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6046
adev->gfx.funcs->get_hdp_flush_mask(ring, &ref_and_mask, &reg_mem_engine);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6047
amdgpu_ring_write(ring, PACKET3(PACKET3_WAIT_REG_MEM, 5));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6048
amdgpu_ring_write(ring, (WAIT_REG_MEM_OPERATION(1) | /* write, wait, write */
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6051
amdgpu_ring_write(ring, mmGPU_HDP_FLUSH_REQ);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6052
amdgpu_ring_write(ring, mmGPU_HDP_FLUSH_DONE);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6053
amdgpu_ring_write(ring, ref_and_mask);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6054
amdgpu_ring_write(ring, ref_and_mask);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6055
amdgpu_ring_write(ring, 0x20); /* poll interval */
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6058
static void gfx_v8_0_ring_emit_vgt_flush(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6060
amdgpu_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6061
amdgpu_ring_write(ring, EVENT_TYPE(VS_PARTIAL_FLUSH) |
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6064
amdgpu_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6065
amdgpu_ring_write(ring, EVENT_TYPE(VGT_FLUSH) |
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6069
static void gfx_v8_0_ring_emit_ib_gfx(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6084
if (amdgpu_sriov_vf(ring->adev) && (ib->flags & AMDGPU_IB_FLAG_PREEMPT)) {
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6088
gfx_v8_0_ring_emit_de_meta(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6091
amdgpu_ring_write(ring, header);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6092
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6097
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6098
amdgpu_ring_write(ring, control);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6101
static void gfx_v8_0_ring_emit_ib_compute(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6120
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6121
amdgpu_ring_write(ring, mmGDS_COMPUTE_MAX_WAVE_ID - PACKET3_SET_CONFIG_REG_START);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6122
amdgpu_ring_write(ring, ring->adev->gds.gds_compute_max_wave_id);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6125
amdgpu_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6126
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6131
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6132
amdgpu_ring_write(ring, control);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6135
static void gfx_v8_0_ring_emit_fence_gfx(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6145
amdgpu_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6146
amdgpu_ring_write(ring, (EOP_TCL1_ACTION_EN |
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6151
amdgpu_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6152
amdgpu_ring_write(ring, (upper_32_bits(addr) & 0xffff) |
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6154
amdgpu_ring_write(ring, lower_32_bits(seq - 1));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6155
amdgpu_ring_write(ring, upper_32_bits(seq - 1));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6159
amdgpu_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6160
amdgpu_ring_write(ring, (EOP_TCL1_ACTION_EN |
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6166
amdgpu_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6167
amdgpu_ring_write(ring, (upper_32_bits(addr) & 0xffff) |
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6169
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6170
amdgpu_ring_write(ring, upper_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6174
static void gfx_v8_0_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6176
int usepfp = (ring->funcs->type == AMDGPU_RING_TYPE_GFX);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6177
uint32_t seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6178
uint64_t addr = ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6180
amdgpu_ring_write(ring, PACKET3(PACKET3_WAIT_REG_MEM, 5));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6181
amdgpu_ring_write(ring, (WAIT_REG_MEM_MEM_SPACE(1) | /* memory */
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6184
amdgpu_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6185
amdgpu_ring_write(ring, upper_32_bits(addr) & 0xffffffff);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6186
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6187
amdgpu_ring_write(ring, 0xffffffff);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6188
amdgpu_ring_write(ring, 4); /* poll interval */
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6191
static void gfx_v8_0_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6194
int usepfp = (ring->funcs->type == AMDGPU_RING_TYPE_GFX);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6196
amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6199
amdgpu_ring_write(ring, PACKET3(PACKET3_WAIT_REG_MEM, 5));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6200
amdgpu_ring_write(ring, (WAIT_REG_MEM_OPERATION(0) | /* wait */
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6203
amdgpu_ring_write(ring, mmVM_INVALIDATE_REQUEST);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6204
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6205
amdgpu_ring_write(ring, 0); /* ref */
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6206
amdgpu_ring_write(ring, 0); /* mask */
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6207
amdgpu_ring_write(ring, 0x20); /* poll interval */
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6212
amdgpu_ring_write(ring, PACKET3(PACKET3_PFP_SYNC_ME, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6213
amdgpu_ring_write(ring, 0x0);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6217
static u64 gfx_v8_0_ring_get_wptr_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6219
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6222
static void gfx_v8_0_ring_set_wptr_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6224
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6227
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6228
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6231
static void gfx_v8_0_ring_emit_fence_compute(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6239
amdgpu_ring_write(ring, PACKET3(PACKET3_RELEASE_MEM, 5));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6240
amdgpu_ring_write(ring, (EOP_TCL1_ACTION_EN |
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6245
amdgpu_ring_write(ring, DATA_SEL(write64bit ? 2 : 1) | INT_SEL(int_sel ? 2 : 0));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6246
amdgpu_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6247
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6248
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6249
amdgpu_ring_write(ring, upper_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6252
static void gfx_v8_0_ring_emit_fence_kiq(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6259
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6260
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6262
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6263
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6264
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6268
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6269
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6271
amdgpu_ring_write(ring, mmCPC_INT_STATUS);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6272
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6273
amdgpu_ring_write(ring, 0x20000000); /* src_id is 178 */
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6277
static void gfx_v8_ring_emit_sb(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6279
amdgpu_ring_write(ring, PACKET3(PACKET3_SWITCH_BUFFER, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6280
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6283
static void gfx_v8_ring_emit_cntxcntl(struct amdgpu_ring *ring, uint32_t flags)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6287
if (amdgpu_sriov_vf(ring->adev))
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6288
gfx_v8_0_ring_emit_ce_meta(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6292
gfx_v8_0_ring_emit_vgt_flush(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6311
amdgpu_ring_write(ring, PACKET3(PACKET3_CONTEXT_CONTROL, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6312
amdgpu_ring_write(ring, dw2);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6313
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6316
static unsigned gfx_v8_0_ring_emit_init_cond_exec(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6321
amdgpu_ring_write(ring, PACKET3(PACKET3_COND_EXEC, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6322
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6323
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6325
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6326
ret = ring->wptr & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6328
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6332
static void gfx_v8_0_ring_emit_rreg(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6335
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6337
amdgpu_ring_write(ring, PACKET3(PACKET3_COPY_DATA, 4));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6338
amdgpu_ring_write(ring, 0 | /* src: register*/
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6341
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6342
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6343
amdgpu_ring_write(ring, lower_32_bits(adev->wb.gpu_addr +
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6345
amdgpu_ring_write(ring, upper_32_bits(adev->wb.gpu_addr +
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6349
static void gfx_v8_0_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6354
switch (ring->funcs->type) {
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6366
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6367
amdgpu_ring_write(ring, cmd);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6368
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6369
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6370
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6373
static void gfx_v8_0_ring_soft_recovery(struct amdgpu_ring *ring, unsigned vmid)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6375
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6581
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6595
ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6599
if ((ring->me == me_id) && (ring->pipe == pipe_id) && (ring->queue == queue_id))
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6600
amdgpu_fence_process(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6611
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6625
ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6626
if (ring->me == me_id && ring->pipe == pipe_id &&
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6627
ring->queue == queue_id)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6628
drm_sched_fault(&ring->sched);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6761
static void gfx_v8_0_emit_mem_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6763
amdgpu_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6764
amdgpu_ring_write(ring, PACKET3_TCL1_ACTION_ENA |
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6769
amdgpu_ring_write(ring, 0xffffffff); /* CP_COHER_SIZE */
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6770
amdgpu_ring_write(ring, 0); /* CP_COHER_BASE */
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6771
amdgpu_ring_write(ring, 0x0000000A); /* poll interval */
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6774
static void gfx_v8_0_emit_mem_sync_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6776
amdgpu_ring_write(ring, PACKET3(PACKET3_ACQUIRE_MEM, 5));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6777
amdgpu_ring_write(ring, PACKET3_TCL1_ACTION_ENA |
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6782
amdgpu_ring_write(ring, 0xffffffff); /* CP_COHER_SIZE */
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6783
amdgpu_ring_write(ring, 0xff); /* CP_COHER_SIZE_HI */
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6784
amdgpu_ring_write(ring, 0); /* CP_COHER_BASE */
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6785
amdgpu_ring_write(ring, 0); /* CP_COHER_BASE_HI */
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6786
amdgpu_ring_write(ring, 0x0000000A); /* poll interval */
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6792
static void gfx_v8_0_emit_wave_limit_cs(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6818
amdgpu_ring_emit_wreg(ring, wcl_cs_reg, val);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6823
static void gfx_v8_0_emit_wave_limit(struct amdgpu_ring *ring, bool enable)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6825
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6834
amdgpu_ring_emit_wreg(ring, mmSPI_WCL_PIPE_PERCENT_GFX, val);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6842
if (i != ring->pipe)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6843
gfx_v8_0_emit_wave_limit_cs(ring, i, enable);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
6980
adev->gfx.kiq[0].ring.funcs = &gfx_v8_0_ring_funcs_kiq;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
7146
static void gfx_v8_0_ring_emit_ce_meta(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
7155
if (ring->adev->virt.chained_ib_support) {
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
7156
ce_payload_addr = amdgpu_csa_vaddr(ring->adev) +
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
7160
ce_payload_addr = amdgpu_csa_vaddr(ring->adev) +
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
7165
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, cnt_ce));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
7166
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(2) |
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
7170
amdgpu_ring_write(ring, lower_32_bits(ce_payload_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
7171
amdgpu_ring_write(ring, upper_32_bits(ce_payload_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
7172
amdgpu_ring_write_multiple(ring, (void *)&ce_payload, cnt_ce - 2);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
7175
static void gfx_v8_0_ring_emit_de_meta(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
7184
csa_addr = amdgpu_csa_vaddr(ring->adev);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
7186
if (ring->adev->virt.chained_ib_support) {
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
7198
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, cnt_de));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
7199
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(1) |
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
7203
amdgpu_ring_write(ring, lower_32_bits(de_payload_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
7204
amdgpu_ring_write(ring, upper_32_bits(de_payload_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
7205
amdgpu_ring_write_multiple(ring, (void *)&de_payload, cnt_de - 2);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
730
static void gfx_v8_0_ring_emit_ce_meta(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
731
static void gfx_v8_0_ring_emit_de_meta(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
838
static int gfx_v8_0_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
840
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
846
r = amdgpu_ring_alloc(ring, 3);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
850
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_UCONFIG_REG, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
851
amdgpu_ring_write(ring, mmSCRATCH_REG0 - PACKET3_SET_UCONFIG_REG_START);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
852
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
853
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
868
static int gfx_v8_0_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
870
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
898
r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1010
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1014
uint32_t eng_sel = ring->funcs->type == AMDGPU_RING_TYPE_GFX ? 4 : 0;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1023
PACKET3_QUERY_STATUS_DOORBELL_OFFSET(ring->doorbell_index) |
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1160
static void gfx_v9_0_write_data_to_reg(struct amdgpu_ring *ring, int eng_sel,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1163
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1164
amdgpu_ring_write(ring, WRITE_DATA_ENGINE_SEL(eng_sel) |
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1167
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1168
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1169
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1172
static void gfx_v9_0_wait_reg_mem(struct amdgpu_ring *ring, int eng_sel,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1177
amdgpu_ring_write(ring, PACKET3(PACKET3_WAIT_REG_MEM, 5));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1178
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1187
amdgpu_ring_write(ring, addr0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1188
amdgpu_ring_write(ring, addr1);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1189
amdgpu_ring_write(ring, ref);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1190
amdgpu_ring_write(ring, mask);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1191
amdgpu_ring_write(ring, inv); /* poll interval */
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1194
static int gfx_v9_0_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1196
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1203
r = amdgpu_ring_alloc(ring, 3);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1207
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_UCONFIG_REG, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1208
amdgpu_ring_write(ring, scratch - PACKET3_SET_UCONFIG_REG_START);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1209
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1210
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1224
static int gfx_v9_0_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1226
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
1254
r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2158
struct amdgpu_ring *ring = &adev->gfx.compute_ring[ring_id];
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2161
ring = &adev->gfx.compute_ring[ring_id];
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2164
ring->me = mec + 1;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2165
ring->pipe = pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2166
ring->queue = queue;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2168
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2169
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2170
ring->doorbell_index = (adev->doorbell_index.mec_ring0 + ring_id) << 1;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2171
ring->eop_gpu_addr = adev->gfx.mec.hpd_eop_gpu_addr
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2173
ring->vm_hub = AMDGPU_GFXHUB(0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2174
sprintf(ring->name, "comp_%d.%d.%d", ring->me, ring->pipe, ring->queue);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2177
+ ((ring->me - 1) * adev->gfx.mec.num_pipe_per_mec)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2178
+ ring->pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2179
hw_prio = amdgpu_gfx_is_high_priority_compute_queue(adev, ring) ?
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2182
return amdgpu_ring_init(adev, ring, 1024, &adev->gfx.eop_irq, irq_type,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2218
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2334
ring = &adev->gfx.gfx_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2335
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2337
sprintf(ring->name, "gfx");
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2339
sprintf(ring->name, "gfx_%d", i);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2340
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2341
ring->doorbell_index = adev->doorbell_index.gfx_ring0 << 1;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2344
ring->no_scheduler = adev->gfx.mcbp;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2345
ring->vm_hub = AMDGPU_GFXHUB(0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2346
r = amdgpu_ring_init(adev, ring, 1024, &adev->gfx.eop_irq,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2356
ring = &adev->gfx.sw_gfx_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2357
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2358
sprintf(ring->name, amdgpu_sw_ring_name(i));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2359
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2360
ring->doorbell_index = adev->doorbell_index.gfx_ring0 << 1;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2361
ring->is_sw_ring = true;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2363
ring->vm_hub = AMDGPU_GFXHUB(0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2364
r = amdgpu_ring_init(adev, ring, 1024, &adev->gfx.eop_irq,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2369
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
2472
amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq[0].ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3324
struct amdgpu_ring *ring = &adev->gfx.gfx_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3343
r = amdgpu_ring_alloc(ring, gfx_v9_0_get_csb_size(adev) + 4 + 3);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3349
amdgpu_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3350
amdgpu_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3352
amdgpu_ring_write(ring, PACKET3(PACKET3_CONTEXT_CONTROL, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3353
amdgpu_ring_write(ring, 0x80000000);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3354
amdgpu_ring_write(ring, 0x80000000);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3359
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3362
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3365
amdgpu_ring_write(ring, ext->extent[i]);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3370
amdgpu_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3371
amdgpu_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3373
amdgpu_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3374
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3376
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_BASE, 2));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3377
amdgpu_ring_write(ring, PACKET3_BASE_INDEX(CE_PARTITION_BASE));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3378
amdgpu_ring_write(ring, 0x8000);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3379
amdgpu_ring_write(ring, 0x8000);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3381
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_UCONFIG_REG,1));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3384
amdgpu_ring_write(ring, tmp);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3385
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3387
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3394
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3406
ring = &adev->gfx.gfx_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3407
rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3416
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3417
WREG32_SOC15(GC, 0, mmCP_RB0_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3418
WREG32_SOC15(GC, 0, mmCP_RB0_WPTR_HI, upper_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3421
rptr_addr = ring->rptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3425
wptr_gpu_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3432
rb_addr = ring->gpu_addr >> 8;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3437
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3439
DOORBELL_OFFSET, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3448
DOORBELL_RANGE_LOWER, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3476
adev->gfx.kiq[0].ring.sched.ready = false;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3524
static void gfx_v9_0_kiq_setting(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3527
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3532
tmp |= (ring->me << 5) | (ring->pipe << 3) | (ring->queue);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3536
static void gfx_v9_0_mqd_set_priority(struct amdgpu_ring *ring, struct v9_mqd *mqd)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3538
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3540
if (ring->funcs->type == AMDGPU_RING_TYPE_COMPUTE) {
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3541
if (amdgpu_gfx_is_high_priority_compute_queue(adev, ring)) {
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3549
static int gfx_v9_0_mqd_init(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3551
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3552
struct v9_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3569
lower_32_bits(ring->mqd_gpu_addr
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3572
upper_32_bits(ring->mqd_gpu_addr
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3575
eop_base_addr = ring->eop_gpu_addr >> 8;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3589
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3591
DOORBELL_OFFSET, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3606
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3613
mqd->cp_mqd_base_addr_lo = ring->mqd_gpu_addr & 0xfffffffc;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3614
mqd->cp_mqd_base_addr_hi = upper_32_bits(ring->mqd_gpu_addr);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3622
hqd_gpu_addr = ring->gpu_addr >> 8;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3629
(order_base_2(ring->ring_size / 4) - 1));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3642
wb_gpu_addr = ring->rptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3648
wb_gpu_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3653
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3669
gfx_v9_0_mqd_set_priority(ring, mqd);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3675
if (ring->funcs->type == AMDGPU_RING_TYPE_KIQ)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3681
static int gfx_v9_0_kiq_init_register(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3683
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3684
struct v9_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3754
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3789
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3795
static int gfx_v9_0_kiq_fini_register(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3797
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3834
static int gfx_v9_0_kiq_init_queue(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3836
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3837
struct v9_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3840
gfx_v9_0_kiq_setting(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3854
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3855
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3858
soc15_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3859
gfx_v9_0_kiq_init_register(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3867
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3869
soc15_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3870
gfx_v9_0_mqd_init(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3871
gfx_v9_0_kiq_init_register(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3882
static int gfx_v9_0_kcq_init_queue(struct amdgpu_ring *ring, bool restore)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3884
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3885
struct v9_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3886
int mqd_idx = ring - &adev->gfx.compute_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3900
soc15_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3901
gfx_v9_0_mqd_init(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3912
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3913
atomic64_set((atomic64_t *)ring->wptr_cpu_addr, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3914
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3922
gfx_v9_0_kiq_init_queue(&adev->gfx.kiq[0].ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3944
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3981
ring = &adev->gfx.gfx_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3982
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3988
ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
3989
amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4083
soc15_grbm_select(adev, adev->gfx.kiq[0].ring.me,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4084
adev->gfx.kiq[0].ring.pipe,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4085
adev->gfx.kiq[0].ring.queue, 0, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4086
gfx_v9_0_kiq_fini_register(&adev->gfx.kiq[0].ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4206
struct amdgpu_ring *ring = &kiq->ring;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4208
BUG_ON(!ring->funcs->emit_rreg);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4215
amdgpu_ring_alloc(ring, 32);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4216
amdgpu_ring_write(ring, PACKET3(PACKET3_COPY_DATA, 4));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4217
amdgpu_ring_write(ring, 9 | /* src: register*/
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4221
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4222
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4223
amdgpu_ring_write(ring, lower_32_bits(adev->wb.gpu_addr +
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4225
amdgpu_ring_write(ring, upper_32_bits(adev->wb.gpu_addr +
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4227
r = amdgpu_fence_emit_polling(ring, &seq, MAX_KIQ_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4231
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4234
r = amdgpu_fence_wait_polling(ring, seq, MAX_KIQ_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4250
r = amdgpu_fence_wait_polling(ring, seq, MAX_KIQ_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4263
amdgpu_ring_undo(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4312
static void gfx_v9_0_ring_emit_gds_switch(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4318
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4321
gfx_v9_0_write_data_to_reg(ring, 0, false,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4326
gfx_v9_0_write_data_to_reg(ring, 0, false,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4331
gfx_v9_0_write_data_to_reg(ring, 0, false,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4336
gfx_v9_0_write_data_to_reg(ring, 0, false,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4579
struct amdgpu_ring *ring = &adev->gfx.compute_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4586
r = amdgpu_ring_alloc(ring, 7);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4589
ring->name, r);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4596
amdgpu_ring_write(ring, PACKET3(PACKET3_DMA_DATA, 5));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4597
amdgpu_ring_write(ring, (PACKET3_DMA_DATA_CP_SYNC |
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4601
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4602
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4603
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4604
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4605
amdgpu_ring_write(ring, PACKET3_DMA_DATA_CMD_RAW_WAIT |
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4608
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4611
if (ring->wptr == gfx_v9_0_ring_get_rptr_compute(ring))
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4626
struct amdgpu_ring *ring = &adev->gfx.compute_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4647
if (!ring->sched.ready)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
4776
r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5179
struct amdgpu_ring *ring, unsigned int vmid)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5348
static u64 gfx_v9_0_ring_get_rptr_gfx(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5350
return *ring->rptr_cpu_addr; /* gfx9 is 32bit rptr*/
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5353
static u64 gfx_v9_0_ring_get_wptr_gfx(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5355
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5359
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5360
wptr = atomic64_read((atomic64_t *)ring->wptr_cpu_addr);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5369
static void gfx_v9_0_ring_set_wptr_gfx(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5371
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5373
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5375
atomic64_set((atomic64_t *)ring->wptr_cpu_addr, ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5376
WDOORBELL64(ring->doorbell_index, ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5378
WREG32_SOC15(GC, 0, mmCP_RB0_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5379
WREG32_SOC15(GC, 0, mmCP_RB0_WPTR_HI, upper_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5383
static void gfx_v9_0_ring_emit_hdp_flush(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5385
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5393
adev->gfx.funcs->get_hdp_flush_mask(ring, &ref_and_mask, &reg_mem_engine);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5394
gfx_v9_0_wait_reg_mem(ring, reg_mem_engine, 0, 1,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5400
static void gfx_v9_0_ring_emit_ib_gfx(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5422
gfx_v9_0_ring_emit_de_meta(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5423
(!amdgpu_sriov_vf(ring->adev) &&
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5429
amdgpu_ring_write(ring, header);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5431
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5436
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5437
amdgpu_ring_ib_on_emit_cntl(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5438
amdgpu_ring_write(ring, control);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5441
static void gfx_v9_0_ring_patch_cntl(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5444
u32 control = ring->ring[offset];
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5447
ring->ring[offset] = control;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5450
static void gfx_v9_0_ring_patch_ce_meta(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5453
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5462
if (offset + (payload_size >> 2) <= ring->buf_mask + 1) {
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5463
memcpy((void *)&ring->ring[offset], ce_payload_cpu_addr, payload_size);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5465
memcpy((void *)&ring->ring[offset], ce_payload_cpu_addr,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5466
(ring->buf_mask + 1 - offset) << 2);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5467
payload_size -= (ring->buf_mask + 1 - offset) << 2;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5468
memcpy((void *)&ring->ring[0],
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5469
ce_payload_cpu_addr + ((ring->buf_mask + 1 - offset) << 2),
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5474
static void gfx_v9_0_ring_patch_de_meta(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5477
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5489
if (offset + (payload_size >> 2) <= ring->buf_mask + 1) {
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5490
memcpy((void *)&ring->ring[offset], de_payload_cpu_addr, payload_size);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5492
memcpy((void *)&ring->ring[offset], de_payload_cpu_addr,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5493
(ring->buf_mask + 1 - offset) << 2);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5494
payload_size -= (ring->buf_mask + 1 - offset) << 2;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5495
memcpy((void *)&ring->ring[0],
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5496
de_payload_cpu_addr + ((ring->buf_mask + 1 - offset) << 2),
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5501
static void gfx_v9_0_ring_emit_ib_compute(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5520
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5521
amdgpu_ring_write(ring, mmGDS_COMPUTE_MAX_WAVE_ID);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5522
amdgpu_ring_write(ring, ring->adev->gds.gds_compute_max_wave_id);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5525
amdgpu_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5527
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5532
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5533
amdgpu_ring_write(ring, control);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5536
static void gfx_v9_0_ring_emit_fence(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5546
amdgpu_ring_write(ring, PACKET3(PACKET3_RELEASE_MEM, 6));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5559
amdgpu_ring_write(ring, dw2);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5560
amdgpu_ring_write(ring, DATA_SEL(write64bit ? 2 : 1) | INT_SEL(int_sel ? 2 : 0));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5570
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5571
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5572
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5573
amdgpu_ring_write(ring, upper_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5574
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5577
static void gfx_v9_0_ring_emit_event_write(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5581
amdgpu_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5582
amdgpu_ring_write(ring, EVENT_TYPE(event_type) |
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5586
static void gfx_v9_0_emit_mem_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5596
amdgpu_ring_write(ring, PACKET3(PACKET3_ACQUIRE_MEM, 5));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5597
amdgpu_ring_write(ring, cp_coher_cntl); /* CP_COHER_CNTL */
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5598
amdgpu_ring_write(ring, 0xffffffff); /* CP_COHER_SIZE */
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5599
amdgpu_ring_write(ring, 0xffffff); /* CP_COHER_SIZE_HI */
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5600
amdgpu_ring_write(ring, 0); /* CP_COHER_BASE */
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5601
amdgpu_ring_write(ring, 0); /* CP_COHER_BASE_HI */
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5602
amdgpu_ring_write(ring, 0x0000000A); /* POLL_INTERVAL */
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5605
static void gfx_v9_0_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5607
if (ring->funcs->type == AMDGPU_RING_TYPE_GFX) {
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5608
gfx_v9_0_ring_emit_event_write(ring, VS_PARTIAL_FLUSH, 4);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5609
gfx_v9_0_ring_emit_event_write(ring, PS_PARTIAL_FLUSH, 4);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5611
gfx_v9_0_ring_emit_event_write(ring, CS_PARTIAL_FLUSH, 4);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5612
gfx_v9_0_emit_mem_sync(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5615
static void gfx_v9_0_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5618
amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5621
if (ring->funcs->type == AMDGPU_RING_TYPE_GFX) {
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5623
amdgpu_ring_write(ring, PACKET3(PACKET3_PFP_SYNC_ME, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5624
amdgpu_ring_write(ring, 0x0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5628
static u64 gfx_v9_0_ring_get_rptr_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5630
return *ring->rptr_cpu_addr; /* gfx9 hardware is 32bit rptr */
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5633
static u64 gfx_v9_0_ring_get_wptr_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5638
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5639
wptr = atomic64_read((atomic64_t *)ring->wptr_cpu_addr);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5645
static void gfx_v9_0_ring_set_wptr_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5647
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5650
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5651
atomic64_set((atomic64_t *)ring->wptr_cpu_addr, ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5652
WDOORBELL64(ring->doorbell_index, ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5658
static void gfx_v9_0_ring_emit_fence_kiq(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5661
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5667
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5668
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5670
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5671
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5672
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5676
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5677
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5679
amdgpu_ring_write(ring, SOC15_REG_OFFSET(GC, 0, mmCPC_INT_STATUS));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5680
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5681
amdgpu_ring_write(ring, 0x20000000); /* src_id is 178 */
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5685
static void gfx_v9_ring_emit_sb(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5687
amdgpu_ring_write(ring, PACKET3(PACKET3_SWITCH_BUFFER, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5688
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5691
static void gfx_v9_0_ring_emit_ce_meta(struct amdgpu_ring *ring, bool resume)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5693
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5702
ce_payload_gpu_addr = amdgpu_csa_vaddr(ring->adev) + offset;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5705
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, cnt));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5706
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(2) |
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5710
amdgpu_ring_write(ring, lower_32_bits(ce_payload_gpu_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5711
amdgpu_ring_write(ring, upper_32_bits(ce_payload_gpu_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5713
amdgpu_ring_ib_on_emit_ce(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5716
amdgpu_ring_write_multiple(ring, ce_payload_cpu_addr,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5719
amdgpu_ring_write_multiple(ring, (void *)&ce_payload,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5723
static int gfx_v9_0_ring_preempt_ib(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5726
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5728
struct amdgpu_ring *kiq_ring = &kiq->ring;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5742
amdgpu_ring_set_preempt_cond_exec(ring, false);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5744
ring->trail_seq += 1;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5745
amdgpu_ring_alloc(ring, 13);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5746
gfx_v9_0_ring_emit_fence(ring, ring->trail_fence_gpu_addr,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5747
ring->trail_seq, AMDGPU_FENCE_FLAG_EXEC | AMDGPU_FENCE_FLAG_INT);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5750
kiq->pmf->kiq_unmap_queues(kiq_ring, ring, PREEMPT_QUEUES_NO_UNMAP,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5751
ring->trail_fence_gpu_addr,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5752
ring->trail_seq);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5759
if (ring->trail_seq ==
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5760
le32_to_cpu(*ring->trail_fence_cpu_addr))
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5767
drm_warn(adev_to_drm(adev), "ring %d timeout to preempt ib\n", ring->idx);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5771
amdgpu_ring_emit_wreg(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5774
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5777
amdgpu_ring_set_preempt_cond_exec(ring, true);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5781
static void gfx_v9_0_ring_emit_de_meta(struct amdgpu_ring *ring, bool resume, bool usegds)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5783
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5790
de_payload_gpu_addr = amdgpu_csa_vaddr(ring->adev) + offset;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5793
gds_addr = ALIGN(amdgpu_csa_vaddr(ring->adev) +
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5803
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, cnt));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5804
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(1) |
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5808
amdgpu_ring_write(ring, lower_32_bits(de_payload_gpu_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5809
amdgpu_ring_write(ring, upper_32_bits(de_payload_gpu_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5811
amdgpu_ring_ib_on_emit_de(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5813
amdgpu_ring_write_multiple(ring, de_payload_cpu_addr,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5816
amdgpu_ring_write_multiple(ring, (void *)&de_payload,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5820
static void gfx_v9_0_ring_emit_frame_cntl(struct amdgpu_ring *ring, bool start,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5825
amdgpu_ring_write(ring, PACKET3(PACKET3_FRAME_CONTROL, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5826
amdgpu_ring_write(ring, v | FRAME_CMD(start ? 0 : 1));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5829
static void gfx_v9_ring_emit_cntxcntl(struct amdgpu_ring *ring, uint32_t flags)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5833
gfx_v9_0_ring_emit_ce_meta(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5834
(!amdgpu_sriov_vf(ring->adev) &&
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5857
amdgpu_ring_write(ring, PACKET3(PACKET3_CONTEXT_CONTROL, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5858
amdgpu_ring_write(ring, dw2);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5859
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5862
static unsigned gfx_v9_0_ring_emit_init_cond_exec(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5866
amdgpu_ring_write(ring, PACKET3(PACKET3_COND_EXEC, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5867
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5868
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5870
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5871
ret = ring->wptr & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5873
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5877
static void gfx_v9_0_ring_emit_rreg(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5880
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5882
amdgpu_ring_write(ring, PACKET3(PACKET3_COPY_DATA, 4));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5883
amdgpu_ring_write(ring, 0 | /* src: register*/
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5886
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5887
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5888
amdgpu_ring_write(ring, lower_32_bits(adev->wb.gpu_addr +
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5890
amdgpu_ring_write(ring, upper_32_bits(adev->wb.gpu_addr +
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5894
static void gfx_v9_0_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5899
switch (ring->funcs->type) {
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5910
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5911
amdgpu_ring_write(ring, cmd);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5912
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5913
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5914
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5917
static void gfx_v9_0_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5920
gfx_v9_0_wait_reg_mem(ring, 0, 0, 0, reg, 0, val, mask, 0x20);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5923
static void gfx_v9_0_ring_emit_reg_write_reg_wait(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5927
int usepfp = (ring->funcs->type == AMDGPU_RING_TYPE_GFX);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5928
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5929
bool fw_version_ok = (ring->funcs->type == AMDGPU_RING_TYPE_GFX) ?
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5933
gfx_v9_0_wait_reg_mem(ring, usepfp, 0, 1, reg0, reg1,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5936
amdgpu_ring_emit_reg_write_reg_wait_helper(ring, reg0, reg1,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5940
static void gfx_v9_0_ring_soft_recovery(struct amdgpu_ring *ring, unsigned vmid)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5942
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
6222
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
6244
ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
6248
if ((ring->me == me_id) && (ring->pipe == pipe_id) && (ring->queue == queue_id))
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
6249
amdgpu_fence_process(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
6260
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
6274
ring = &adev->gfx.compute_ring[i];
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
6275
if (ring->me == me_id && ring->pipe == pipe_id &&
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
6276
ring->queue == queue_id)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
6277
drm_sched_fault(&ring->sched);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7103
static void gfx_v9_0_emit_wave_limit_cs(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7106
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7131
amdgpu_ring_emit_wreg(ring, wcl_cs_reg, val);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7134
static void gfx_v9_0_emit_wave_limit(struct amdgpu_ring *ring, bool enable)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7136
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7146
amdgpu_ring_emit_wreg(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7156
if (i != ring->pipe)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7157
gfx_v9_0_emit_wave_limit_cs(ring, i, enable);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7162
static void gfx_v9_ring_insert_nop(struct amdgpu_ring *ring, uint32_t num_nop)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7166
amdgpu_ring_write(ring, ring->funcs->nop);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7171
amdgpu_ring_write(ring, PACKET3(PACKET3_NOP, min(num_nop - 2, 0x3ffe)));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7174
amdgpu_ring_insert_nop(ring, num_nop - 1);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7177
static void gfx_v9_0_ring_emit_wreg_me(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7183
switch (ring->funcs->type) {
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7191
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7192
amdgpu_ring_write(ring, cmd);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7193
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7194
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7195
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7198
static int gfx_v9_0_reset_kgq(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7202
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7204
struct amdgpu_ring *kiq_ring = &kiq->ring;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7209
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7228
if (amdgpu_ring_alloc(ring, 8 + 7 + 5 + 2 + 8 + 7))
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7231
ring->trail_seq++;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7232
gfx_v9_0_ring_emit_fence(ring, ring->trail_fence_gpu_addr,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7233
ring->trail_seq, AMDGPU_FENCE_FLAG_EXEC);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7235
gfx_v9_0_wait_reg_mem(ring, 0, 1, 0,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7236
lower_32_bits(ring->trail_fence_gpu_addr),
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7237
upper_32_bits(ring->trail_fence_gpu_addr),
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7238
ring->trail_seq, 0xffffffff, 4);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7240
gfx_v9_0_ring_emit_wreg_me(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7243
gfx_v9_0_ring_emit_event_write(ring, ENABLE_LEGACY_PIPELINE, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7245
ring->trail_seq++;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7246
gfx_v9_0_ring_emit_fence(ring, ring->trail_fence_gpu_addr,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7247
ring->trail_seq, AMDGPU_FENCE_FLAG_EXEC);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7249
gfx_v9_0_wait_reg_mem(ring, 1, 1, 0,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7250
lower_32_bits(ring->trail_fence_gpu_addr),
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7251
upper_32_bits(ring->trail_fence_gpu_addr),
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7252
ring->trail_seq, 0xffffffff, 4);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7253
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7255
r = amdgpu_ring_test_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7259
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7262
static int gfx_v9_0_reset_kcq(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7266
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7268
struct amdgpu_ring *kiq_ring = &kiq->ring;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7275
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7284
kiq->pmf->kiq_unmap_queues(kiq_ring, ring, RESET_QUEUES,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7297
soc15_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7313
r = gfx_v9_0_kcq_init_queue(ring, true);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7324
kiq->pmf->kiq_map_queues(kiq_ring, ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7332
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7426
static void gfx_v9_0_ring_emit_cleaner_shader(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7428
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7432
amdgpu_ring_write(ring, PACKET3(PACKET3_RUN_CLEANER_SHADER, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7434
amdgpu_ring_write(ring, PACKET3(PACKET3_RUN_CLEANER_SHADER_9_0, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7436
amdgpu_ring_write(ring, 0); /* RESERVED field, programmed to zero */
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7439
static void gfx_v9_0_ring_begin_use_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7441
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7445
amdgpu_gfx_enforce_isolation_ring_begin_use(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7455
static void gfx_v9_0_ring_end_use_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7457
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7468
amdgpu_gfx_enforce_isolation_ring_end_use(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
7680
adev->gfx.kiq[0].ring.funcs = &gfx_v9_0_ring_funcs_kiq;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
915
static void gfx_v9_0_ring_emit_de_meta(struct amdgpu_ring *ring, bool resume, bool usegds);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
916
static u64 gfx_v9_0_ring_get_rptr_compute(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
952
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
954
uint64_t mqd_addr = amdgpu_bo_gpu_offset(ring->mqd_obj);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
955
uint64_t wptr_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
956
uint32_t eng_sel = ring->funcs->type == AMDGPU_RING_TYPE_GFX ? 4 : 0;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
963
PACKET3_MAP_QUEUES_QUEUE(ring->queue) |
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
964
PACKET3_MAP_QUEUES_PIPE(ring->pipe) |
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
965
PACKET3_MAP_QUEUES_ME((ring->me == 1 ? 0 : 1)) |
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
974
PACKET3_MAP_QUEUES_DOORBELL_OFFSET(ring->doorbell_index));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
982
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
986
uint32_t eng_sel = ring->funcs->type == AMDGPU_RING_TYPE_GFX ? 4 : 0;
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
995
PACKET3_UNMAP_QUEUES_DOORBELL_OFFSET0(ring->doorbell_index));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
998
amdgpu_ring_write(kiq_ring, lower_32_bits(ring->wptr & ring->buf_mask));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_2.c
347
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_2.c
412
r = amdgpu_ib_schedule(ring, 1, ib, NULL, fence_ptr);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1001
+ ((ring->me - 1) * adev->gfx.mec.num_pipe_per_mec)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1002
+ ring->pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1003
hw_prio = amdgpu_gfx_is_high_priority_compute_queue(adev, ring) ?
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1006
return amdgpu_ring_init(adev, ring, 1024, &adev->gfx.eop_irq, irq_type,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1198
amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq[i].ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1671
int inst, struct amdgpu_ring *ring, unsigned int vmid)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1747
adev->gfx.kiq[xcc_id].ring.sched.ready = false;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1801
static void gfx_v9_4_3_xcc_kiq_setting(struct amdgpu_ring *ring, int xcc_id)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1804
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1809
tmp |= (ring->me << 5) | (ring->pipe << 3) | (ring->queue);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1813
static void gfx_v9_4_3_mqd_set_priority(struct amdgpu_ring *ring, struct v9_mqd *mqd)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1815
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1817
if (ring->funcs->type == AMDGPU_RING_TYPE_COMPUTE) {
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1818
if (amdgpu_gfx_is_high_priority_compute_queue(adev, ring)) {
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1826
static int gfx_v9_4_3_xcc_mqd_init(struct amdgpu_ring *ring, int xcc_id)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1828
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1829
struct v9_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1842
lower_32_bits(ring->mqd_gpu_addr
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1845
upper_32_bits(ring->mqd_gpu_addr
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1848
eop_base_addr = ring->eop_gpu_addr >> 8;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1862
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1864
DOORBELL_OFFSET, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1882
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1889
mqd->cp_mqd_base_addr_lo = ring->mqd_gpu_addr & 0xfffffffc;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1890
mqd->cp_mqd_base_addr_hi = upper_32_bits(ring->mqd_gpu_addr);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1898
hqd_gpu_addr = ring->gpu_addr >> 8;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1905
(order_base_2(ring->ring_size / 4) - 1));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1918
wb_gpu_addr = adev->wb.gpu_addr + (ring->rptr_offs * 4);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1924
wb_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1929
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1945
gfx_v9_4_3_mqd_set_priority(ring, mqd);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1951
if (ring->funcs->type == AMDGPU_RING_TYPE_KIQ)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1957
static int gfx_v9_4_3_xcc_kiq_init_register(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1960
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1961
struct v9_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
200
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
203
uint64_t mqd_addr = amdgpu_bo_gpu_offset(ring->mqd_obj);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2031
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
204
uint64_t wptr_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
205
uint32_t eng_sel = ring->funcs->type == AMDGPU_RING_TYPE_GFX ? 4 : 0;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2065
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2071
static int gfx_v9_4_3_xcc_q_fini_register(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2074
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2089
DRM_DEBUG("%s dequeue request failed.\n", ring->name);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2111
static int gfx_v9_4_3_xcc_kiq_init_queue(struct amdgpu_ring *ring, int xcc_id)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2113
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2114
struct v9_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2117
gfx_v9_4_3_xcc_kiq_setting(ring, xcc_id);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
212
PACKET3_MAP_QUEUES_QUEUE(ring->queue) |
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
213
PACKET3_MAP_QUEUES_PIPE(ring->pipe) |
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2131
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2132
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2134
soc15_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0, GET_INST(GC, xcc_id));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2135
gfx_v9_4_3_xcc_kiq_init_register(ring, xcc_id);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
214
PACKET3_MAP_QUEUES_ME((ring->me == 1 ? 0 : 1)) |
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2144
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2145
soc15_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0, GET_INST(GC, xcc_id));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2146
gfx_v9_4_3_xcc_mqd_init(ring, xcc_id);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2147
gfx_v9_4_3_xcc_kiq_init_register(ring, xcc_id);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2158
static void gfx_v9_4_3_xcc_kcq_init_queue(struct amdgpu_ring *ring, int xcc_id,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2161
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2162
struct v9_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2163
int mqd_idx = ring - &adev->gfx.compute_ring[0];
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2177
soc15_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0, GET_INST(GC, xcc_id));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2178
gfx_v9_4_3_xcc_mqd_init(ring, xcc_id);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2189
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2190
atomic64_set((atomic64_t *)&adev->wb.wb[ring->wptr_offs], 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2191
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2197
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2201
ring = &adev->gfx.compute_ring[j + xcc_id * adev->gfx.num_compute_rings];
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2204
soc15_grbm_select(adev, ring->me,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2205
ring->pipe,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2206
ring->queue, 0, GET_INST(GC, xcc_id));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2207
gfx_v9_4_3_xcc_q_fini_register(ring, xcc_id);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2218
gfx_v9_4_3_xcc_kiq_init_queue(&adev->gfx.kiq[xcc_id].ring, xcc_id);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2224
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
223
PACKET3_MAP_QUEUES_DOORBELL_OFFSET(ring->doorbell_index));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2230
ring = &adev->gfx.compute_ring[i + xcc_id *
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2233
gfx_v9_4_3_xcc_kcq_init_queue(ring, xcc_id, false);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2241
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2265
ring = &adev->gfx.compute_ring
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2267
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
231
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2335
soc15_grbm_select(adev, adev->gfx.kiq[xcc_id].ring.me,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2336
adev->gfx.kiq[xcc_id].ring.pipe,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2337
adev->gfx.kiq[xcc_id].ring.queue, 0,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2339
gfx_v9_4_3_xcc_q_fini_register(&adev->gfx.kiq[xcc_id].ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
235
uint32_t eng_sel = ring->funcs->type == AMDGPU_RING_TYPE_GFX ? 4 : 0;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
244
PACKET3_UNMAP_QUEUES_DOORBELL_OFFSET0(ring->doorbell_index));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2484
static void gfx_v9_4_3_ring_emit_gds_switch(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2490
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2493
gfx_v9_4_3_write_data_to_reg(ring, 0, false,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2498
gfx_v9_4_3_write_data_to_reg(ring, 0, false,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2503
gfx_v9_4_3_write_data_to_reg(ring, 0, false,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2508
gfx_v9_4_3_write_data_to_reg(ring, 0, false,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
258
struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
262
uint32_t eng_sel = ring->funcs->type == AMDGPU_RING_TYPE_GFX ? 4 : 0;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
271
PACKET3_QUERY_STATUS_DOORBELL_OFFSET(ring->doorbell_index) |
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2818
static void gfx_v9_4_3_ring_emit_hdp_flush(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2820
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2828
adev->gfx.funcs->get_hdp_flush_mask(ring, &ref_and_mask, ®_mem_engine);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2829
gfx_v9_4_3_wait_reg_mem(ring, reg_mem_engine, 0, 1,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2835
static void gfx_v9_4_3_ring_emit_ib_compute(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2854
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2855
amdgpu_ring_write(ring, regGDS_COMPUTE_MAX_WAVE_ID);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2856
amdgpu_ring_write(ring, ring->adev->gds.gds_compute_max_wave_id);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2859
amdgpu_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2861
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2866
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2867
amdgpu_ring_write(ring, control);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2870
static void gfx_v9_4_3_ring_emit_fence(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2878
amdgpu_ring_write(ring, PACKET3(PACKET3_RELEASE_MEM, 6));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2879
amdgpu_ring_write(ring, ((writeback ? (EOP_TC_WB_ACTION_EN |
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2887
amdgpu_ring_write(ring, DATA_SEL(write64bit ? 2 : 1) | INT_SEL(int_sel ? 2 : 0));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2897
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2898
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2899
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2900
amdgpu_ring_write(ring, upper_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2901
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2904
static void gfx_v9_4_3_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2906
int usepfp = (ring->funcs->type == AMDGPU_RING_TYPE_GFX);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2907
uint32_t seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2908
uint64_t addr = ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2910
gfx_v9_4_3_wait_reg_mem(ring, usepfp, 1, 0,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2915
static void gfx_v9_4_3_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2918
amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2921
static u64 gfx_v9_4_3_ring_get_rptr_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2923
return ring->adev->wb.wb[ring->rptr_offs]; /* gfx9 hardware is 32bit rptr */
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2926
static u64 gfx_v9_4_3_ring_get_wptr_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2931
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2932
wptr = atomic64_read((atomic64_t *)&ring->adev->wb.wb[ring->wptr_offs]);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2938
static void gfx_v9_4_3_ring_set_wptr_compute(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2940
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2943
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2944
atomic64_set((atomic64_t *)&adev->wb.wb[ring->wptr_offs], ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2945
WDOORBELL64(ring->doorbell_index, ring->wptr);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2951
static void gfx_v9_4_3_ring_emit_fence_kiq(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2954
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2960
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2961
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2963
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2964
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2965
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2969
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2970
amdgpu_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2972
amdgpu_ring_write(ring, SOC15_REG_OFFSET(GC, GET_INST(GC, 0), regCPC_INT_STATUS));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2973
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2974
amdgpu_ring_write(ring, 0x20000000); /* src_id is 178 */
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2978
static void gfx_v9_4_3_ring_emit_rreg(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2981
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2985
amdgpu_ring_write(ring, PACKET3(PACKET3_COPY_DATA, 4));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2986
amdgpu_ring_write(ring, 0 | /* src: register*/
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2989
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2990
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2991
amdgpu_ring_write(ring, lower_32_bits(adev->wb.gpu_addr +
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2993
amdgpu_ring_write(ring, upper_32_bits(adev->wb.gpu_addr +
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
2997
static void gfx_v9_4_3_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3004
switch (ring->funcs->type) {
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3015
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3016
amdgpu_ring_write(ring, cmd);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3017
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3018
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3019
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3022
static void gfx_v9_4_3_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3025
gfx_v9_4_3_wait_reg_mem(ring, 0, 0, 0, reg, 0, val, mask, 0x20);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3028
static void gfx_v9_4_3_ring_emit_reg_write_reg_wait(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3032
amdgpu_ring_emit_reg_write_reg_wait_helper(ring, reg0, reg1,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3036
static void gfx_v9_4_3_ring_soft_recovery(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3039
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3046
amdgpu_gfx_rlc_enter_safe_mode(adev, ring->xcc_id);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3047
WREG32_SOC15(GC, GET_INST(GC, ring->xcc_id), regSQ_CMD, value);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3048
amdgpu_gfx_rlc_exit_safe_mode(adev, ring->xcc_id);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3288
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3305
ring = &adev->gfx.compute_ring
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3312
if ((ring->me == me_id) && (ring->pipe == pipe_id) && (ring->queue == queue_id))
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3313
amdgpu_fence_process(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3324
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3341
ring = &adev->gfx.compute_ring
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3344
if (ring->me == me_id && ring->pipe == pipe_id &&
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3345
ring->queue == queue_id)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3346
drm_sched_fault(&ring->sched);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3379
static void gfx_v9_4_3_emit_mem_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3389
amdgpu_ring_write(ring, PACKET3(PACKET3_ACQUIRE_MEM, 5));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3390
amdgpu_ring_write(ring, cp_coher_cntl); /* CP_COHER_CNTL */
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3391
amdgpu_ring_write(ring, 0xffffffff); /* CP_COHER_SIZE */
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3392
amdgpu_ring_write(ring, 0xffffff); /* CP_COHER_SIZE_HI */
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3393
amdgpu_ring_write(ring, 0); /* CP_COHER_BASE */
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3394
amdgpu_ring_write(ring, 0); /* CP_COHER_BASE_HI */
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3395
amdgpu_ring_write(ring, 0x0000000A); /* POLL_INTERVAL */
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3398
static void gfx_v9_4_3_emit_wave_limit_cs(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3401
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3426
amdgpu_ring_emit_wreg(ring, wcl_cs_reg, val);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3429
static void gfx_v9_4_3_emit_wave_limit(struct amdgpu_ring *ring, bool enable)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3431
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3440
amdgpu_ring_emit_wreg(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3450
if (i != ring->pipe)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3451
gfx_v9_4_3_emit_wave_limit_cs(ring, i, enable);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3492
static int gfx_v9_4_3_reset_hw_pipe(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3494
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3501
gfx_v9_4_3_xcc_set_safe_mode(adev, ring->xcc_id);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3504
reset_pipe = RREG32_SOC15(GC, GET_INST(GC, ring->xcc_id), regCP_MEC_CNTL);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3507
if (ring->me == 1) {
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3508
switch (ring->pipe) {
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3529
if (ring->pipe)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3537
WREG32_SOC15(GC, GET_INST(GC, ring->xcc_id), regCP_MEC_CNTL, reset_pipe);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3538
WREG32_SOC15(GC, GET_INST(GC, ring->xcc_id), regCP_MEC_CNTL, clean_pipe);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3540
gfx_v9_4_3_xcc_unset_safe_mode(adev, ring->xcc_id);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3542
r = gfx_v9_4_3_unmap_done(adev, ring->me, ring->pipe, ring->queue, ring->xcc_id);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3546
static int gfx_v9_4_3_reset_kcq(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3550
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3551
struct amdgpu_kiq *kiq = &adev->gfx.kiq[ring->xcc_id];
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3552
struct amdgpu_ring *kiq_ring = &kiq->ring;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3560
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3569
kiq->pmf->kiq_unmap_queues(kiq_ring, ring, RESET_QUEUES,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3578
ring->name);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3582
r = gfx_v9_4_3_unmap_done(adev, ring->me, ring->pipe, ring->queue, ring->xcc_id);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3590
r = gfx_v9_4_3_reset_hw_pipe(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3592
dev_info(adev->dev, "ring: %s pipe reset :%s\n", ring->name,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3598
gfx_v9_4_3_xcc_kcq_init_queue(ring, ring->xcc_id, true);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3606
kiq->pmf->kiq_map_queues(kiq_ring, ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3619
r = amdgpu_ring_test_ring(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
3625
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
375
static void gfx_v9_4_3_write_data_to_reg(struct amdgpu_ring *ring, int eng_sel,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
379
amdgpu_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
380
amdgpu_ring_write(ring, WRITE_DATA_ENGINE_SEL(eng_sel) |
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
383
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
384
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
385
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
388
static void gfx_v9_4_3_wait_reg_mem(struct amdgpu_ring *ring, int eng_sel,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
399
amdgpu_ring_write(ring, PACKET3(PACKET3_WAIT_REG_MEM, 5));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
400
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
409
amdgpu_ring_write(ring, addr0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
410
amdgpu_ring_write(ring, addr1);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
411
amdgpu_ring_write(ring, ref);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
412
amdgpu_ring_write(ring, mask);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
413
amdgpu_ring_write(ring, inv); /* poll interval */
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
416
static int gfx_v9_4_3_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
419
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
426
scratch_reg0_offset = SOC15_REG_OFFSET(GC, GET_INST(GC, ring->xcc_id), regSCRATCH_REG0);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
430
r = amdgpu_ring_alloc(ring, 3);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
434
amdgpu_ring_write(ring, PACKET3(PACKET3_SET_UCONFIG_REG, 1));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
435
amdgpu_ring_write(ring, xcc_offset - PACKET3_SET_UCONFIG_REG_START);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
436
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
437
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
451
static int gfx_v9_4_3_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
453
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
4543
static void gfx_v9_4_3_ring_insert_nop(struct amdgpu_ring *ring, uint32_t num_nop)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
4547
amdgpu_ring_write(ring, ring->funcs->nop);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
4552
amdgpu_ring_write(ring, PACKET3(PACKET3_NOP, min(num_nop - 2, 0x3ffe)));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
4555
amdgpu_ring_insert_nop(ring, num_nop - 1);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
4691
static void gfx_v9_4_3_ring_emit_cleaner_shader(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
4694
amdgpu_ring_write(ring, PACKET3(PACKET3_RUN_CLEANER_SHADER, 0));
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
4695
amdgpu_ring_write(ring, 0); /* RESERVED field, programmed to zero */
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
4797
adev->gfx.kiq[i].ring.funcs = &gfx_v9_4_3_ring_funcs_kiq;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
481
r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
975
struct amdgpu_ring *ring = &adev->gfx.compute_ring[ring_id];
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
979
ring = &adev->gfx.compute_ring[xcc_id * adev->gfx.num_compute_rings +
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
983
ring->xcc_id = xcc_id;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
984
ring->me = mec + 1;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
985
ring->pipe = pipe;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
986
ring->queue = queue;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
988
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
989
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
992
ring->doorbell_index = (xcc_doorbell_start + ring_id) << 1;
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
993
ring->eop_gpu_addr = adev->gfx.mec.hpd_eop_gpu_addr +
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
996
ring->vm_hub = AMDGPU_GFXHUB(xcc_id);
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
997
sprintf(ring->name, "comp_%d.%d.%d.%d",
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
998
ring->xcc_id, ring->me, ring->pipe, ring->queue);
drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c
258
if (adev->gfx.kiq[0].ring.sched.ready && !adev->enable_mes &&
drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c
359
static uint64_t gmc_v10_0_emit_flush_gpu_tlb(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c
362
bool use_semaphore = gmc_v10_0_use_invalidate_semaphore(ring->adev, ring->vm_hub);
drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c
363
struct amdgpu_vmhub *hub = &ring->adev->vmhub[ring->vm_hub];
drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c
365
unsigned int eng = ring->vm_inv_eng;
drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c
377
amdgpu_ring_emit_reg_wait(ring,
drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c
381
amdgpu_ring_emit_wreg(ring, hub->ctx0_ptb_addr_lo32 +
drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c
385
amdgpu_ring_emit_wreg(ring, hub->ctx0_ptb_addr_hi32 +
drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c
389
amdgpu_ring_emit_reg_write_reg_wait(ring, hub->vm_inv_eng0_req +
drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c
401
amdgpu_ring_emit_wreg(ring, hub->vm_inv_eng0_sem +
drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c
407
static void gmc_v10_0_emit_pasid_mapping(struct amdgpu_ring *ring, unsigned int vmid,
drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c
410
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c
413
if (ring->vm_hub == AMDGPU_GFXHUB(0))
drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c
418
amdgpu_ring_emit_wreg(ring, reg, pasid);
drivers/gpu/drm/amd/amdgpu/gmc_v11_0.c
246
if ((adev->gfx.kiq[0].ring.sched.ready || adev->mes.ring[0].sched.ready) &&
drivers/gpu/drm/amd/amdgpu/gmc_v11_0.c
351
static uint64_t gmc_v11_0_emit_flush_gpu_tlb(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gmc_v11_0.c
354
bool use_semaphore = gmc_v11_0_use_invalidate_semaphore(ring->adev, ring->vm_hub);
drivers/gpu/drm/amd/amdgpu/gmc_v11_0.c
355
struct amdgpu_vmhub *hub = &ring->adev->vmhub[ring->vm_hub];
drivers/gpu/drm/amd/amdgpu/gmc_v11_0.c
357
unsigned int eng = ring->vm_inv_eng;
drivers/gpu/drm/amd/amdgpu/gmc_v11_0.c
369
amdgpu_ring_emit_reg_wait(ring,
drivers/gpu/drm/amd/amdgpu/gmc_v11_0.c
373
amdgpu_ring_emit_wreg(ring, hub->ctx0_ptb_addr_lo32 +
drivers/gpu/drm/amd/amdgpu/gmc_v11_0.c
377
amdgpu_ring_emit_wreg(ring, hub->ctx0_ptb_addr_hi32 +
drivers/gpu/drm/amd/amdgpu/gmc_v11_0.c
381
amdgpu_ring_emit_reg_write_reg_wait(ring, hub->vm_inv_eng0_req +
drivers/gpu/drm/amd/amdgpu/gmc_v11_0.c
393
amdgpu_ring_emit_wreg(ring, hub->vm_inv_eng0_sem +
drivers/gpu/drm/amd/amdgpu/gmc_v11_0.c
399
static void gmc_v11_0_emit_pasid_mapping(struct amdgpu_ring *ring, unsigned int vmid,
drivers/gpu/drm/amd/amdgpu/gmc_v11_0.c
402
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gmc_v11_0.c
405
if (ring->vm_hub == AMDGPU_GFXHUB(0))
drivers/gpu/drm/amd/amdgpu/gmc_v11_0.c
410
amdgpu_ring_emit_wreg(ring, reg, pasid);
drivers/gpu/drm/amd/amdgpu/gmc_v12_0.c
317
if ((adev->gfx.kiq[0].ring.sched.ready || adev->mes.ring[0].sched.ready) &&
drivers/gpu/drm/amd/amdgpu/gmc_v12_0.c
352
if (adev->enable_uni_mes && adev->mes.ring[AMDGPU_MES_SCHED_PIPE].sched.ready &&
drivers/gpu/drm/amd/amdgpu/gmc_v12_0.c
388
static uint64_t gmc_v12_0_emit_flush_gpu_tlb(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gmc_v12_0.c
391
bool use_semaphore = gmc_v12_0_use_invalidate_semaphore(ring->adev, ring->vm_hub);
drivers/gpu/drm/amd/amdgpu/gmc_v12_0.c
392
struct amdgpu_vmhub *hub = &ring->adev->vmhub[ring->vm_hub];
drivers/gpu/drm/amd/amdgpu/gmc_v12_0.c
394
unsigned eng = ring->vm_inv_eng;
drivers/gpu/drm/amd/amdgpu/gmc_v12_0.c
406
amdgpu_ring_emit_reg_wait(ring,
drivers/gpu/drm/amd/amdgpu/gmc_v12_0.c
410
amdgpu_ring_emit_wreg(ring, hub->ctx0_ptb_addr_lo32 +
drivers/gpu/drm/amd/amdgpu/gmc_v12_0.c
414
amdgpu_ring_emit_wreg(ring, hub->ctx0_ptb_addr_hi32 +
drivers/gpu/drm/amd/amdgpu/gmc_v12_0.c
418
amdgpu_ring_emit_reg_write_reg_wait(ring, hub->vm_inv_eng0_req +
drivers/gpu/drm/amd/amdgpu/gmc_v12_0.c
430
amdgpu_ring_emit_wreg(ring, hub->vm_inv_eng0_sem +
drivers/gpu/drm/amd/amdgpu/gmc_v12_0.c
436
static void gmc_v12_0_emit_pasid_mapping(struct amdgpu_ring *ring, unsigned vmid,
drivers/gpu/drm/amd/amdgpu/gmc_v12_0.c
439
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gmc_v12_0.c
442
if (ring->vm_hub == AMDGPU_GFXHUB(0))
drivers/gpu/drm/amd/amdgpu/gmc_v12_0.c
447
amdgpu_ring_emit_wreg(ring, reg, pasid);
drivers/gpu/drm/amd/amdgpu/gmc_v12_1.c
334
if (((adev->gfx.kiq[inst].ring.sched.ready ||
drivers/gpu/drm/amd/amdgpu/gmc_v12_1.c
335
adev->mes.ring[MES_PIPE_INST(inst, 0)].sched.ready) &&
drivers/gpu/drm/amd/amdgpu/gmc_v12_1.c
372
if (adev->enable_uni_mes && adev->mes.ring[0].sched.ready &&
drivers/gpu/drm/amd/amdgpu/gmc_v12_1.c
420
static uint64_t gmc_v12_1_emit_flush_gpu_tlb(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gmc_v12_1.c
423
struct amdgpu_vmhub *hub = &ring->adev->vmhub[ring->vm_hub];
drivers/gpu/drm/amd/amdgpu/gmc_v12_1.c
425
unsigned eng = ring->vm_inv_eng;
drivers/gpu/drm/amd/amdgpu/gmc_v12_1.c
427
amdgpu_ring_emit_wreg(ring, hub->ctx0_ptb_addr_lo32 +
drivers/gpu/drm/amd/amdgpu/gmc_v12_1.c
431
amdgpu_ring_emit_wreg(ring, hub->ctx0_ptb_addr_hi32 +
drivers/gpu/drm/amd/amdgpu/gmc_v12_1.c
435
amdgpu_ring_emit_reg_write_reg_wait(ring, hub->vm_inv_eng0_req +
drivers/gpu/drm/amd/amdgpu/gmc_v12_1.c
444
static void gmc_v12_1_emit_pasid_mapping(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gmc_v12_1.c
447
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gmc_v12_1.c
450
if (ring->vm_hub == AMDGPU_GFXHUB(0))
drivers/gpu/drm/amd/amdgpu/gmc_v12_1.c
455
amdgpu_ring_emit_wreg(ring, reg, pasid);
drivers/gpu/drm/amd/amdgpu/gmc_v6_0.c
360
static uint64_t gmc_v6_0_emit_flush_gpu_tlb(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gmc_v6_0.c
370
amdgpu_ring_emit_wreg(ring, reg, pd_addr >> 12);
drivers/gpu/drm/amd/amdgpu/gmc_v6_0.c
373
amdgpu_ring_emit_wreg(ring, mmVM_INVALIDATE_REQUEST, 1 << vmid);
drivers/gpu/drm/amd/amdgpu/gmc_v7_0.c
477
static uint64_t gmc_v7_0_emit_flush_gpu_tlb(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gmc_v7_0.c
486
amdgpu_ring_emit_wreg(ring, reg, pd_addr >> 12);
drivers/gpu/drm/amd/amdgpu/gmc_v7_0.c
489
amdgpu_ring_emit_wreg(ring, mmVM_INVALIDATE_REQUEST, 1 << vmid);
drivers/gpu/drm/amd/amdgpu/gmc_v7_0.c
494
static void gmc_v7_0_emit_pasid_mapping(struct amdgpu_ring *ring, unsigned int vmid,
drivers/gpu/drm/amd/amdgpu/gmc_v7_0.c
497
amdgpu_ring_emit_wreg(ring, mmIH_VMID_0_LUT + vmid, pasid);
drivers/gpu/drm/amd/amdgpu/gmc_v8_0.c
668
static uint64_t gmc_v8_0_emit_flush_gpu_tlb(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gmc_v8_0.c
677
amdgpu_ring_emit_wreg(ring, reg, pd_addr >> 12);
drivers/gpu/drm/amd/amdgpu/gmc_v8_0.c
680
amdgpu_ring_emit_wreg(ring, mmVM_INVALIDATE_REQUEST, 1 << vmid);
drivers/gpu/drm/amd/amdgpu/gmc_v8_0.c
685
static void gmc_v8_0_emit_pasid_mapping(struct amdgpu_ring *ring, unsigned int vmid,
drivers/gpu/drm/amd/amdgpu/gmc_v8_0.c
688
amdgpu_ring_emit_wreg(ring, mmIH_VMID_0_LUT + vmid, pasid);
drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c
1004
static void gmc_v9_0_emit_pasid_mapping(struct amdgpu_ring *ring, unsigned int vmid,
drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c
1007
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c
1011
if (ring->vm_hub == AMDGPU_MMHUB1(0))
drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c
1014
if (ring->vm_hub == AMDGPU_GFXHUB(0))
drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c
1019
amdgpu_ring_emit_wreg(ring, reg, pasid);
drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c
835
if (adev->gfx.kiq[inst].ring.sched.ready &&
drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c
955
static uint64_t gmc_v9_0_emit_flush_gpu_tlb(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c
958
bool use_semaphore = gmc_v9_0_use_invalidate_semaphore(ring->adev, ring->vm_hub);
drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c
959
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c
960
struct amdgpu_vmhub *hub = &adev->vmhub[ring->vm_hub];
drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c
962
unsigned int eng = ring->vm_inv_eng;
drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c
974
amdgpu_ring_emit_reg_wait(ring,
drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c
978
amdgpu_ring_emit_wreg(ring, hub->ctx0_ptb_addr_lo32 +
drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c
982
amdgpu_ring_emit_wreg(ring, hub->ctx0_ptb_addr_hi32 +
drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c
986
amdgpu_ring_emit_reg_write_reg_wait(ring, hub->vm_inv_eng0_req +
drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c
998
amdgpu_ring_emit_wreg(ring, hub->vm_inv_eng0_sem +
drivers/gpu/drm/amd/amdgpu/hdp_v4_0.c
40
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/hdp_v4_0.c
47
if (!ring || !ring->funcs->emit_wreg) {
drivers/gpu/drm/amd/amdgpu/hdp_v4_0.c
51
amdgpu_ring_emit_wreg(ring, SOC15_REG_OFFSET(
drivers/gpu/drm/amd/amdgpu/hdp_v5_0.c
31
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/hdp_v5_0.c
33
if (!ring || !ring->funcs->emit_wreg) {
drivers/gpu/drm/amd/amdgpu/hdp_v5_0.c
37
amdgpu_ring_emit_wreg(ring, SOC15_REG_OFFSET(
drivers/gpu/drm/amd/amdgpu/hdp_v5_2.c
31
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/hdp_v5_2.c
33
if (!ring || !ring->funcs->emit_wreg) {
drivers/gpu/drm/amd/amdgpu/hdp_v5_2.c
48
amdgpu_ring_emit_wreg(ring,
drivers/gpu/drm/amd/amdgpu/iceland_ih.c
252
dw[0] = le32_to_cpu(ih->ring[ring_index + 0]);
drivers/gpu/drm/amd/amdgpu/iceland_ih.c
253
dw[1] = le32_to_cpu(ih->ring[ring_index + 1]);
drivers/gpu/drm/amd/amdgpu/iceland_ih.c
254
dw[2] = le32_to_cpu(ih->ring[ring_index + 2]);
drivers/gpu/drm/amd/amdgpu/iceland_ih.c
255
dw[3] = le32_to_cpu(ih->ring[ring_index + 3]);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
100
ring->ring[ptr++] = 0x01400200;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
101
ring->ring[ptr++] = PACKETJ(SOC15_REG_OFFSET(JPEG, 0, mmUVD_JRBC_RB_REF_DATA), 0, 0, PACKETJ_TYPE0);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
102
ring->ring[ptr++] = val;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
103
ring->ring[ptr++] = PACKETJ(SOC15_REG_OFFSET(JPEG, 0, mmUVD_JRBC_EXTERNAL_REG_BASE), 0, 0, PACKETJ_TYPE0);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
106
ring->ring[ptr++] = 0;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
107
ring->ring[ptr++] = PACKETJ((reg_offset >> 2), 0, 0, PACKETJ_TYPE3);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
109
ring->ring[ptr++] = reg_offset;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
110
ring->ring[ptr++] = PACKETJ(0, 0, 0, PACKETJ_TYPE3);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
112
ring->ring[ptr++] = mask;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
116
ring->ring[ptr++] = PACKETJ(0, 0, 0, PACKETJ_TYPE6);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
117
ring->ring[ptr++] = 0;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
124
jpeg_v1_0_decode_ring_patch_wreg(ring, &ptr, reg_offset, val);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
130
jpeg_v1_0_decode_ring_patch_wreg(ring, &ptr, reg_offset, val);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
140
static uint64_t jpeg_v1_0_decode_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
142
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
154
static uint64_t jpeg_v1_0_decode_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
156
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
168
static void jpeg_v1_0_decode_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
170
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
172
WREG32_SOC15(JPEG, 0, mmUVD_JRBC_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
182
static void jpeg_v1_0_decode_ring_insert_start(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
184
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
186
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
188
amdgpu_ring_write(ring, 0x68e04);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
190
amdgpu_ring_write(ring, PACKETJ(0, 0, 0, PACKETJ_TYPE0));
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
191
amdgpu_ring_write(ring, 0x80010000);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
201
static void jpeg_v1_0_decode_ring_insert_end(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
203
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
205
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
207
amdgpu_ring_write(ring, 0x68e04);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
209
amdgpu_ring_write(ring, PACKETJ(0, 0, 0, PACKETJ_TYPE0));
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
210
amdgpu_ring_write(ring, 0x00010000);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
223
static void jpeg_v1_0_decode_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
226
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
230
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
232
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
234
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
236
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
238
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
240
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
242
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
244
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
246
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
248
amdgpu_ring_write(ring, 0x8);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
250
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
252
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
254
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
256
amdgpu_ring_write(ring, 0x01400200);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
258
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
260
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
262
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
264
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
266
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
268
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
270
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
272
amdgpu_ring_write(ring, 0xffffffff);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
274
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
276
amdgpu_ring_write(ring, 0x3fbc);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
278
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
280
amdgpu_ring_write(ring, 0x1);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
283
amdgpu_ring_write(ring, PACKETJ(0, 0, 0, PACKETJ_TYPE7));
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
284
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
297
static void jpeg_v1_0_decode_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
302
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
305
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
307
if (ring->funcs->parse_cs)
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
308
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
310
amdgpu_ring_write(ring, (vmid | (vmid << 4)));
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
312
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
314
amdgpu_ring_write(ring, (vmid | (vmid << 4)));
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
316
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
318
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
320
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
322
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
324
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
326
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
328
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
330
amdgpu_ring_write(ring, lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
332
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
334
amdgpu_ring_write(ring, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
336
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
338
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
340
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
342
amdgpu_ring_write(ring, 0x01400200);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
344
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
346
amdgpu_ring_write(ring, 0x2);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
348
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
350
amdgpu_ring_write(ring, 0x2);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
353
static void jpeg_v1_0_decode_ring_emit_reg_wait(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
357
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
360
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
362
amdgpu_ring_write(ring, 0x01400200);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
364
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
366
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
368
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
37
static void jpeg_v1_0_ring_begin_use(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
372
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
373
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
376
amdgpu_ring_write(ring, reg_offset);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
377
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
380
amdgpu_ring_write(ring, mask);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
383
static void jpeg_v1_0_decode_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
386
struct amdgpu_vmhub *hub = &ring->adev->vmhub[ring->vm_hub];
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
389
pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
395
jpeg_v1_0_decode_ring_emit_reg_wait(ring, data0, data1, mask);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
398
static void jpeg_v1_0_decode_ring_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
401
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
404
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
408
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
409
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
412
amdgpu_ring_write(ring, reg_offset);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
413
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
416
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
419
static void jpeg_v1_0_decode_ring_nop(struct amdgpu_ring *ring, uint32_t count)
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
42
static void jpeg_v1_0_decode_ring_patch_wreg(struct amdgpu_ring *ring, uint32_t *ptr, uint32_t reg_offset, uint32_t val)
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
423
WARN_ON(ring->wptr % 2 || count % 2);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
426
amdgpu_ring_write(ring, PACKETJ(0, 0, 0, PACKETJ_TYPE6));
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
427
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
44
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
45
ring->ring[(*ptr)++] = PACKETJ(SOC15_REG_OFFSET(JPEG, 0, mmUVD_JRBC_EXTERNAL_REG_BASE), 0, 0, PACKETJ_TYPE0);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
48
ring->ring[(*ptr)++] = 0;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
487
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
49
ring->ring[(*ptr)++] = PACKETJ((reg_offset >> 2), 0, 0, PACKETJ_TYPE0);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
495
ring = adev->jpeg.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
496
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
497
sprintf(ring->name, "jpeg_dec");
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
498
r = amdgpu_ring_init(adev, ring, 512, &adev->jpeg.inst->irq,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
51
ring->ring[(*ptr)++] = reg_offset;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
52
ring->ring[(*ptr)++] = PACKETJ(0, 0, 0, PACKETJ_TYPE0);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
533
struct amdgpu_ring *ring = adev->jpeg.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
539
WREG32_SOC15(JPEG, 0, mmUVD_LMI_JRBC_RB_64BIT_BAR_LOW, lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
54
ring->ring[(*ptr)++] = val;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
540
WREG32_SOC15(JPEG, 0, mmUVD_LMI_JRBC_RB_64BIT_BAR_HIGH, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
547
ring->wptr = RREG32_SOC15(JPEG, 0, mmUVD_JRBC_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
550
jpeg_v1_0_decode_ring_set_patch_ring(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
551
(ring->wptr + ring->max_dw * amdgpu_sched_hw_submission));
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
57
static void jpeg_v1_0_decode_ring_set_patch_ring(struct amdgpu_ring *ring, uint32_t ptr)
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
59
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
604
static void jpeg_v1_0_ring_begin_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
606
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
620
vcn_v1_0_set_pg_for_begin_use(ring, set_clocks);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
66
val = lower_32_bits(ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
67
jpeg_v1_0_decode_ring_patch_wreg(ring, &ptr, reg_offset, val);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
72
val = upper_32_bits(ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
73
jpeg_v1_0_decode_ring_patch_wreg(ring, &ptr, reg_offset, val);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
77
ring->ring[ptr++] = PACKETJ(0, 0, 0, PACKETJ_TYPE2);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
78
ring->ring[ptr++] = 0;
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
85
jpeg_v1_0_decode_ring_patch_wreg(ring, &ptr, reg_offset, val);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
91
jpeg_v1_0_decode_ring_patch_wreg(ring, &ptr, reg_offset, val);
drivers/gpu/drm/amd/amdgpu/jpeg_v1_0.c
99
ring->ring[ptr++] = PACKETJ(SOC15_REG_OFFSET(JPEG, 0, mmUVD_JRBC_RB_COND_RD_TIMER), 0, 0, PACKETJ_TYPE0);
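
Unlike the other helpers in this list, jpeg_v1_0_decode_ring_patch_wreg() does not call amdgpu_ring_write(); the hits at lines 44-54 show it storing PACKETJ words straight into ring->ring[] at *ptr, which is how jpeg_v1_0_decode_ring_set_patch_ring() pre-builds its register-write sequence. Reassembled below for illustration: the if/else structure is inferred from the line-number gaps, and reg_offset_in_jpeg_aperture() is a hypothetical name for the range check that is not visible in the hits.

/* Illustrative reassembly of the jpeg_v1_0.c patch helper. */
static void jpeg_v1_0_decode_ring_patch_wreg(struct amdgpu_ring *ring,
		uint32_t *ptr, uint32_t reg_offset, uint32_t val)
{
	struct amdgpu_device *adev = ring->adev;

	ring->ring[(*ptr)++] = PACKETJ(SOC15_REG_OFFSET(JPEG, 0,
			mmUVD_JRBC_EXTERNAL_REG_BASE), 0, 0, PACKETJ_TYPE0);
	if (reg_offset_in_jpeg_aperture(reg_offset)) {	/* condition elided in the hits */
		ring->ring[(*ptr)++] = 0;
		ring->ring[(*ptr)++] = PACKETJ((reg_offset >> 2), 0, 0, PACKETJ_TYPE0);
	} else {
		ring->ring[(*ptr)++] = reg_offset;
		ring->ring[(*ptr)++] = PACKETJ(0, 0, 0, PACKETJ_TYPE0);
	}
	ring->ring[(*ptr)++] = val;
}
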
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
103
ring = adev->jpeg.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
104
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
105
ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 1;
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
106
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
107
sprintf(ring->name, "jpeg_dec");
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
108
r = amdgpu_ring_init(adev, ring, 512, &adev->jpeg.inst->irq,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
161
struct amdgpu_ring *ring = adev->jpeg.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
163
adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
166
return amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
337
struct amdgpu_ring *ring = adev->jpeg.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
365
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
367
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
371
WREG32_SOC15(JPEG, 0, mmUVD_JRBC_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
372
ring->wptr = RREG32_SOC15(JPEG, 0, mmUVD_JRBC_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
414
static uint64_t jpeg_v2_0_dec_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
416
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
428
static uint64_t jpeg_v2_0_dec_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
430
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
432
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
433
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
445
static void jpeg_v2_0_dec_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
447
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
449
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
450
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
451
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
453
WREG32_SOC15(JPEG, 0, mmUVD_JRBC_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
464
void jpeg_v2_0_dec_ring_insert_start(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
466
amdgpu_ring_write(ring, PACKETJ(mmUVD_JRBC_EXTERNAL_REG_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
468
amdgpu_ring_write(ring, 0x68e04);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
470
amdgpu_ring_write(ring, PACKETJ(JRBC_DEC_EXTERNAL_REG_WRITE_ADDR,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
472
amdgpu_ring_write(ring, 0x80010000);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
482
void jpeg_v2_0_dec_ring_insert_end(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
484
amdgpu_ring_write(ring, PACKETJ(mmUVD_JRBC_EXTERNAL_REG_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
486
amdgpu_ring_write(ring, 0x68e04);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
488
amdgpu_ring_write(ring, PACKETJ(JRBC_DEC_EXTERNAL_REG_WRITE_ADDR,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
490
amdgpu_ring_write(ring, 0x00010000);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
503
void jpeg_v2_0_dec_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
508
amdgpu_ring_write(ring, PACKETJ(mmUVD_JPEG_GPCOM_DATA0_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
510
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
512
amdgpu_ring_write(ring, PACKETJ(mmUVD_JPEG_GPCOM_DATA1_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
514
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
516
amdgpu_ring_write(ring, PACKETJ(mmUVD_LMI_JRBC_RB_MEM_WR_64BIT_BAR_LOW_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
518
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
520
amdgpu_ring_write(ring, PACKETJ(mmUVD_LMI_JRBC_RB_MEM_WR_64BIT_BAR_HIGH_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
522
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
524
amdgpu_ring_write(ring, PACKETJ(mmUVD_JPEG_GPCOM_CMD_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
526
amdgpu_ring_write(ring, 0x8);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
528
amdgpu_ring_write(ring, PACKETJ(mmUVD_JPEG_GPCOM_CMD_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
530
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
532
amdgpu_ring_write(ring, PACKETJ(mmUVD_JRBC_EXTERNAL_REG_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
534
amdgpu_ring_write(ring, 0x3fbc);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
536
amdgpu_ring_write(ring, PACKETJ(JRBC_DEC_EXTERNAL_REG_WRITE_ADDR,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
538
amdgpu_ring_write(ring, 0x1);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
540
amdgpu_ring_write(ring, PACKETJ(0, 0, 0, PACKETJ_TYPE7));
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
541
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
554
void jpeg_v2_0_dec_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
561
amdgpu_ring_write(ring, PACKETJ(mmUVD_JPEG_IH_CTRL_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
563
amdgpu_ring_write(ring, (vmid << JPEG_IH_CTRL__IH_VMID__SHIFT));
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
565
amdgpu_ring_write(ring, PACKETJ(mmUVD_LMI_JRBC_IB_VMID_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
568
if (ring->funcs->parse_cs)
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
569
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
571
amdgpu_ring_write(ring, (vmid | (vmid << 4) | (vmid << 8)));
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
573
amdgpu_ring_write(ring, PACKETJ(mmUVD_LMI_JPEG_VMID_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
575
amdgpu_ring_write(ring, (vmid | (vmid << 4) | (vmid << 8)));
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
577
amdgpu_ring_write(ring, PACKETJ(mmUVD_LMI_JRBC_IB_64BIT_BAR_LOW_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
579
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
581
amdgpu_ring_write(ring, PACKETJ(mmUVD_LMI_JRBC_IB_64BIT_BAR_HIGH_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
583
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
585
amdgpu_ring_write(ring, PACKETJ(mmUVD_JRBC_IB_SIZE_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
587
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
589
amdgpu_ring_write(ring, PACKETJ(mmUVD_LMI_JRBC_RB_MEM_RD_64BIT_BAR_LOW_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
591
amdgpu_ring_write(ring, lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
593
amdgpu_ring_write(ring, PACKETJ(mmUVD_LMI_JRBC_RB_MEM_RD_64BIT_BAR_HIGH_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
595
amdgpu_ring_write(ring, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
597
amdgpu_ring_write(ring, PACKETJ(0, 0, PACKETJ_CONDITION_CHECK0, PACKETJ_TYPE2));
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
598
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
600
amdgpu_ring_write(ring, PACKETJ(mmUVD_JRBC_RB_COND_RD_TIMER_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
602
amdgpu_ring_write(ring, 0x01400200);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
604
amdgpu_ring_write(ring, PACKETJ(mmUVD_JRBC_RB_REF_DATA_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
606
amdgpu_ring_write(ring, 0x2);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
608
amdgpu_ring_write(ring, PACKETJ(mmUVD_JRBC_STATUS_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
610
amdgpu_ring_write(ring, 0x2);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
613
void jpeg_v2_0_dec_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
618
amdgpu_ring_write(ring, PACKETJ(mmUVD_JRBC_RB_COND_RD_TIMER_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
620
amdgpu_ring_write(ring, 0x01400200);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
622
amdgpu_ring_write(ring, PACKETJ(mmUVD_JRBC_RB_REF_DATA_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
624
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
626
amdgpu_ring_write(ring, PACKETJ(mmUVD_JRBC_EXTERNAL_REG_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
629
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
630
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
633
amdgpu_ring_write(ring, reg_offset);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
634
amdgpu_ring_write(ring, PACKETJ(JRBC_DEC_EXTERNAL_REG_WRITE_ADDR,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
637
amdgpu_ring_write(ring, mask);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
640
void jpeg_v2_0_dec_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
643
struct amdgpu_vmhub *hub = &ring->adev->vmhub[ring->vm_hub];
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
646
pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
652
jpeg_v2_0_dec_ring_emit_reg_wait(ring, data0, data1, mask);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
655
void jpeg_v2_0_dec_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val)
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
659
amdgpu_ring_write(ring, PACKETJ(mmUVD_JRBC_EXTERNAL_REG_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
662
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
663
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
666
amdgpu_ring_write(ring, reg_offset);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
667
amdgpu_ring_write(ring, PACKETJ(JRBC_DEC_EXTERNAL_REG_WRITE_ADDR,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
670
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
673
void jpeg_v2_0_dec_ring_nop(struct amdgpu_ring *ring, uint32_t count)
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
677
WARN_ON(ring->wptr % 2 || count % 2);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
680
amdgpu_ring_write(ring, PACKETJ(0, 0, 0, PACKETJ_TYPE6));
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
681
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
769
static int jpeg_v2_0_ring_reset(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
775
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
776
r = jpeg_v2_0_stop(ring->adev);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
779
r = jpeg_v2_0_start(ring->adev);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
782
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.c
86
struct amdgpu_ring *ring;
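
The jpeg_v2_0.c hits at lines 445-453 spell out the write-pointer update pattern that repeats, nearly unchanged, in jpeg_v2_5, v3_0, v4_0 and v5_0_0 below; put back together it reads:

/* Assembled from the jpeg_v2_0.c:445-453 hits: publish the new wptr
 * through the CPU-visible copy and the doorbell when one is assigned,
 * otherwise fall back to the MMIO register. */
static void jpeg_v2_0_dec_ring_set_wptr(struct amdgpu_ring *ring)
{
	struct amdgpu_device *adev = ring->adev;

	if (ring->use_doorbell) {
		*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
		WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
	} else {
		WREG32_SOC15(JPEG, 0, mmUVD_JRBC_RB_WPTR, lower_32_bits(ring->wptr));
	}
}
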
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.h
48
void jpeg_v2_0_dec_ring_insert_start(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.h
49
void jpeg_v2_0_dec_ring_insert_end(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.h
50
void jpeg_v2_0_dec_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.h
52
void jpeg_v2_0_dec_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.h
54
void jpeg_v2_0_dec_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.h
56
void jpeg_v2_0_dec_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.h
58
void jpeg_v2_0_dec_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_0.h
59
void jpeg_v2_0_dec_ring_nop(struct amdgpu_ring *ring, uint32_t count);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
106
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
145
ring = adev->jpeg.inst[i].ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
146
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
148
ring->vm_hub = AMDGPU_MMHUB1(0);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
150
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
151
ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 1 + 8 * i;
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
152
sprintf(ring->name, "jpeg_dec_%d", i);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
153
r = amdgpu_ring_init(adev, ring, 512, &adev->jpeg.inst[i].irq,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
211
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
218
ring = adev->jpeg.inst[i].ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
219
adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
222
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
343
struct amdgpu_ring *ring = adev->jpeg.inst[i].ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
369
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
371
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
375
WREG32_SOC15(JPEG, i, mmUVD_JRBC_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
376
ring->wptr = RREG32_SOC15(JPEG, i, mmUVD_JRBC_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
442
static uint64_t jpeg_v2_5_dec_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
444
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
446
return RREG32_SOC15(JPEG, ring->me, mmUVD_JRBC_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
456
static uint64_t jpeg_v2_5_dec_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
458
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
460
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
461
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
463
return RREG32_SOC15(JPEG, ring->me, mmUVD_JRBC_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
473
static void jpeg_v2_5_dec_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
475
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
477
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
478
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
479
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
481
WREG32_SOC15(JPEG, ring->me, mmUVD_JRBC_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
492
static void jpeg_v2_6_dec_ring_insert_start(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
494
amdgpu_ring_write(ring, PACKETJ(mmUVD_JRBC_EXTERNAL_REG_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
496
amdgpu_ring_write(ring, 0x6aa04); /* PCTL0_MMHUB_DEEPSLEEP_IB */
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
498
amdgpu_ring_write(ring, PACKETJ(JRBC_DEC_EXTERNAL_REG_WRITE_ADDR,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
500
amdgpu_ring_write(ring, 0x80000000 | (1 << (ring->me * 2 + 14)));
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
510
static void jpeg_v2_6_dec_ring_insert_end(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
512
amdgpu_ring_write(ring, PACKETJ(mmUVD_JRBC_EXTERNAL_REG_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
514
amdgpu_ring_write(ring, 0x6aa04); /* PCTL0_MMHUB_DEEPSLEEP_IB */
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
516
amdgpu_ring_write(ring, PACKETJ(JRBC_DEC_EXTERNAL_REG_WRITE_ADDR,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
518
amdgpu_ring_write(ring, (1 << (ring->me * 2 + 14)));
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
649
static int jpeg_v2_5_ring_reset(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
653
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
654
jpeg_v2_5_stop_inst(ring->adev, ring->me);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
655
jpeg_v2_5_start_inst(ring->adev, ring->me);
drivers/gpu/drm/amd/amdgpu/jpeg_v2_5.c
656
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
101
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
118
ring = adev->jpeg.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
119
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
120
ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 1;
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
121
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
122
sprintf(ring->name, "jpeg_dec");
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
123
r = amdgpu_ring_init(adev, ring, 512, &adev->jpeg.inst->irq, 0,
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
176
struct amdgpu_ring *ring = adev->jpeg.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
178
adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
181
return amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
353
struct amdgpu_ring *ring = adev->jpeg.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
385
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
387
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
391
WREG32_SOC15(JPEG, 0, mmUVD_JRBC_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
392
ring->wptr = RREG32_SOC15(JPEG, 0, mmUVD_JRBC_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
433
static uint64_t jpeg_v3_0_dec_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
435
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
447
static uint64_t jpeg_v3_0_dec_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
449
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
451
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
452
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
464
static void jpeg_v3_0_dec_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
466
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
468
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
469
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
470
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
472
WREG32_SOC15(JPEG, 0, mmUVD_JRBC_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
561
static int jpeg_v3_0_ring_reset(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
567
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
568
r = jpeg_v3_0_stop(ring->adev);
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
571
r = jpeg_v3_0_start(ring->adev);
drivers/gpu/drm/amd/amdgpu/jpeg_v3_0.c
574
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
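
The *_ring_reset() hits (jpeg_v2_0, v2_5, v3_0, and v4_0/v4_0_5/v5_0_0 further down) follow one shape: begin with amdgpu_ring_reset_helper_begin(), stop and restart the block, then finish with amdgpu_ring_reset_helper_end(). The parameter lists are truncated in the hits, so the signature and the fence type below are assumptions, and jpeg_v2_5 uses per-instance stop_inst()/start_inst() calls keyed on ring->me instead of the whole-IP stop/start shown here.

/* Sketch of the reset shape shared by the jpeg_v*_ring_reset() hits;
 * jpeg_ring_reset_sketch and the exact argument list are placeholders. */
static int jpeg_ring_reset_sketch(struct amdgpu_ring *ring,
				  struct amdgpu_fence *timedout_fence)
{
	int r;

	amdgpu_ring_reset_helper_begin(ring, timedout_fence);
	r = jpeg_v3_0_stop(ring->adev);
	if (r)
		return r;
	r = jpeg_v3_0_start(ring->adev);
	if (r)
		return r;
	return amdgpu_ring_reset_helper_end(ring, timedout_fence);
}
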
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
124
ring = adev->jpeg.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
125
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
126
ring->doorbell_index = amdgpu_sriov_vf(adev) ? (((adev->doorbell_index.vcn.vcn_ring0_1) << 1) + 4) : ((adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 1);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
127
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
129
sprintf(ring->name, "jpeg_dec");
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
130
r = amdgpu_ring_init(adev, ring, 512, &adev->jpeg.inst->irq, 0,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
186
struct amdgpu_ring *ring = adev->jpeg.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
193
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
194
ring->wptr_old = 0;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
195
jpeg_v4_0_dec_ring_set_wptr(ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
196
ring->sched.ready = true;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
198
adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
202
ring->doorbell_index << VCN_JPEG_DB_CTRL__OFFSET__SHIFT |
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
205
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
390
struct amdgpu_ring *ring = adev->jpeg.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
421
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
423
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
427
WREG32_SOC15(JPEG, 0, regUVD_JRBC_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
428
ring->wptr = RREG32_SOC15(JPEG, 0, regUVD_JRBC_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
435
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
472
ring = adev->jpeg.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
476
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
479
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
481
regUVD_JRBC_RB_SIZE), ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
593
static uint64_t jpeg_v4_0_dec_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
595
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
607
static uint64_t jpeg_v4_0_dec_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
609
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
61
static void jpeg_v4_0_dec_ring_set_wptr(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
611
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
612
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
624
static void jpeg_v4_0_dec_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
626
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
628
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
629
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
630
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
632
WREG32_SOC15(JPEG, 0, regUVD_JRBC_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
726
static int jpeg_v4_0_ring_reset(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
732
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
733
r = jpeg_v4_0_stop(ring->adev);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
736
r = jpeg_v4_0_start(ring->adev);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
739
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0.c
95
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
1119
static void jpeg_v4_0_3_core_stall_reset(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
1121
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
1122
int jpeg_inst = GET_INST(JPEG, ring->me);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
1123
int reg_offset = jpeg_v4_0_3_core_reg_offset(ring->pipe);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
1134
WREG32_SOC15(JPEG, jpeg_inst, regJPEG_CORE_RST_CTRL, 1 << ring->pipe);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
1144
static int jpeg_v4_0_3_ring_reset(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
1148
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
1149
struct amdgpu_vcn_inst *vinst = &adev->vcn.inst[ring->me];
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
1154
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
1155
jpeg_v4_0_3_core_stall_reset(ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
1156
jpeg_v4_0_3_start_jrbc(ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
1157
r = amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
141
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
176
ring = &adev->jpeg.inst[i].ring_dec[j];
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
177
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
178
ring->vm_hub = AMDGPU_MMHUB0(adev->jpeg.inst[i].aid_id);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
180
ring->doorbell_index =
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
185
ring->doorbell_index =
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
189
ring->doorbell_index =
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
193
sprintf(ring->name, "jpeg_dec_%d.%d", adev->jpeg.inst[i].aid_id, j);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
194
r = amdgpu_ring_init(adev, ring, 512, &adev->jpeg.inst->irq, 0,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
253
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
288
ring = &adev->jpeg.inst[i].ring_dec[j];
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
292
MMSCH_V4_0_INSERT_DIRECT_WT(tmp, lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
294
MMSCH_V4_0_INSERT_DIRECT_WT(tmp, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
296
MMSCH_V4_0_INSERT_DIRECT_WT(tmp, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
373
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
383
ring = &adev->jpeg.inst[i].ring_dec[j];
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
384
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
385
ring->wptr_old = 0;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
386
jpeg_v4_0_3_dec_ring_set_wptr(ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
387
ring->sched.ready = true;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
399
ring = adev->jpeg.inst[i].ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
401
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
403
adev, ring->use_doorbell,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
409
ring = &adev->jpeg.inst[i].ring_dec[j];
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
410
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
414
(ring->pipe ? (ring->pipe - 0x15) : 0),
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
415
ring->doorbell_index
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
418
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
49
static void jpeg_v4_0_3_dec_ring_set_wptr(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
572
static void jpeg_v4_0_3_start_jrbc(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
574
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
575
int jpeg_inst = GET_INST(JPEG, ring->me);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
576
int reg_offset = jpeg_v4_0_3_core_reg_offset(ring->pipe);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
580
JPEG_SYS_INT_EN__DJRBC0_MASK << ring->pipe,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
581
~(JPEG_SYS_INT_EN__DJRBC0_MASK << ring->pipe));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
592
reg_offset, lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
595
reg_offset, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
607
reg_offset, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
608
ring->wptr = RREG32_SOC15_OFFSET(JPEG, jpeg_inst, regUVD_JRBC0_UVD_JRBC_RB_WPTR,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
621
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
627
ring = &adev->jpeg.inst[i].ring_dec[j];
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
628
jpeg_v4_0_3_start_jrbc(ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
676
static uint64_t jpeg_v4_0_3_dec_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
678
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
680
return RREG32_SOC15_OFFSET(JPEG, GET_INST(JPEG, ring->me), regUVD_JRBC0_UVD_JRBC_RB_RPTR,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
681
jpeg_v4_0_3_core_reg_offset(ring->pipe));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
691
static uint64_t jpeg_v4_0_3_dec_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
693
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
695
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
696
return adev->wb.wb[ring->wptr_offs];
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
698
return RREG32_SOC15_OFFSET(JPEG, GET_INST(JPEG, ring->me), regUVD_JRBC0_UVD_JRBC_RB_WPTR,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
699
jpeg_v4_0_3_core_reg_offset(ring->pipe));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
702
void jpeg_v4_0_3_ring_emit_hdp_flush(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
716
static void jpeg_v4_0_3_dec_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
718
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
720
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
721
adev->wb.wb[ring->wptr_offs] = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
722
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
724
WREG32_SOC15_OFFSET(JPEG, GET_INST(JPEG, ring->me), regUVD_JRBC0_UVD_JRBC_RB_WPTR,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
725
jpeg_v4_0_3_core_reg_offset(ring->pipe),
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
726
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
737
void jpeg_v4_0_3_dec_ring_insert_start(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
739
if (!amdgpu_sriov_vf(ring->adev)) {
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
740
amdgpu_ring_write(ring, PACKETJ(regUVD_JRBC_EXTERNAL_REG_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
742
amdgpu_ring_write(ring, 0x62a04); /* PCTL0_MMHUB_DEEPSLEEP_IB */
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
744
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
747
amdgpu_ring_write(ring, 0x80004000);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
758
void jpeg_v4_0_3_dec_ring_insert_end(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
760
if (!amdgpu_sriov_vf(ring->adev)) {
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
761
amdgpu_ring_write(ring, PACKETJ(regUVD_JRBC_EXTERNAL_REG_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
763
amdgpu_ring_write(ring, 0x62a04);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
765
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
768
amdgpu_ring_write(ring, 0x00004000);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
782
void jpeg_v4_0_3_dec_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
787
amdgpu_ring_write(ring, PACKETJ(regUVD_JPEG_GPCOM_DATA0_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
789
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
791
amdgpu_ring_write(ring, PACKETJ(regUVD_JPEG_GPCOM_DATA1_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
793
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
795
amdgpu_ring_write(ring, PACKETJ(regUVD_LMI_JRBC_RB_MEM_WR_64BIT_BAR_LOW_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
797
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
799
amdgpu_ring_write(ring, PACKETJ(regUVD_LMI_JRBC_RB_MEM_WR_64BIT_BAR_HIGH_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
801
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
803
amdgpu_ring_write(ring, PACKETJ(regUVD_JPEG_GPCOM_CMD_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
805
amdgpu_ring_write(ring, 0x8);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
807
amdgpu_ring_write(ring, PACKETJ(regUVD_JPEG_GPCOM_CMD_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
809
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
811
amdgpu_ring_write(ring, PACKETJ(0, 0, 0, PACKETJ_TYPE6));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
812
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
814
amdgpu_ring_write(ring, PACKETJ(0, 0, 0, PACKETJ_TYPE6));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
815
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
817
amdgpu_ring_write(ring, PACKETJ(0, 0, 0, PACKETJ_TYPE7));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
818
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
831
void jpeg_v4_0_3_dec_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
838
amdgpu_ring_write(ring, PACKETJ(regUVD_LMI_JRBC_IB_VMID_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
841
if (ring->funcs->parse_cs)
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
842
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
844
amdgpu_ring_write(ring, (vmid | (vmid << 4) | (vmid << 8)));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
846
amdgpu_ring_write(ring, PACKETJ(regUVD_LMI_JPEG_VMID_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
848
amdgpu_ring_write(ring, (vmid | (vmid << 4) | (vmid << 8)));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
850
amdgpu_ring_write(ring, PACKETJ(regUVD_LMI_JRBC_IB_64BIT_BAR_LOW_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
852
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
854
amdgpu_ring_write(ring, PACKETJ(regUVD_LMI_JRBC_IB_64BIT_BAR_HIGH_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
856
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
858
amdgpu_ring_write(ring, PACKETJ(regUVD_JRBC_IB_SIZE_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
860
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
862
amdgpu_ring_write(ring, PACKETJ(regUVD_LMI_JRBC_RB_MEM_RD_64BIT_BAR_LOW_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
864
amdgpu_ring_write(ring, lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
866
amdgpu_ring_write(ring, PACKETJ(regUVD_LMI_JRBC_RB_MEM_RD_64BIT_BAR_HIGH_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
868
amdgpu_ring_write(ring, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
870
amdgpu_ring_write(ring, PACKETJ(0, 0, PACKETJ_CONDITION_CHECK0, PACKETJ_TYPE2));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
871
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
873
amdgpu_ring_write(ring, PACKETJ(regUVD_JRBC_RB_COND_RD_TIMER_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
875
amdgpu_ring_write(ring, 0x01400200);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
877
amdgpu_ring_write(ring, PACKETJ(regUVD_JRBC_RB_REF_DATA_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
879
amdgpu_ring_write(ring, 0x2);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
881
amdgpu_ring_write(ring, PACKETJ(regUVD_JRBC_STATUS_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
883
amdgpu_ring_write(ring, 0x2);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
886
void jpeg_v4_0_3_dec_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
892
if (jpeg_v4_0_3_normalizn_reqd(ring->adev))
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
897
amdgpu_ring_write(ring, PACKETJ(regUVD_JRBC_RB_COND_RD_TIMER_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
899
amdgpu_ring_write(ring, 0x01400200);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
901
amdgpu_ring_write(ring, PACKETJ(regUVD_JRBC_RB_REF_DATA_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
903
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
905
amdgpu_ring_write(ring, PACKETJ(regUVD_JRBC_EXTERNAL_REG_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
908
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
909
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
912
amdgpu_ring_write(ring, reg_offset);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
913
amdgpu_ring_write(ring, PACKETJ(JRBC_DEC_EXTERNAL_REG_WRITE_ADDR,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
916
amdgpu_ring_write(ring, mask);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
919
void jpeg_v4_0_3_dec_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
922
struct amdgpu_vmhub *hub = &ring->adev->vmhub[ring->vm_hub];
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
925
pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
931
jpeg_v4_0_3_dec_ring_emit_reg_wait(ring, data0, data1, mask);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
934
void jpeg_v4_0_3_dec_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val)
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
939
if (jpeg_v4_0_3_normalizn_reqd(ring->adev))
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
944
amdgpu_ring_write(ring, PACKETJ(regUVD_JRBC_EXTERNAL_REG_INTERNAL_OFFSET,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
947
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
948
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
951
amdgpu_ring_write(ring, reg_offset);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
952
amdgpu_ring_write(ring, PACKETJ(JRBC_DEC_EXTERNAL_REG_WRITE_ADDR,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
955
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
958
void jpeg_v4_0_3_dec_ring_nop(struct amdgpu_ring *ring, uint32_t count)
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
962
WARN_ON(ring->wptr % 2 || count % 2);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
965
amdgpu_ring_write(ring, PACKETJ(0, 0, 0, PACKETJ_TYPE6));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.c
966
amdgpu_ring_write(ring, 0);
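
jpeg_v4_0_3 is the multi-instance variant: ring->me selects the JPEG instance (via GET_INST) and ring->pipe selects the JRBC core, whose register block is reached through jpeg_v4_0_3_core_reg_offset(). The read-pointer accessor, put back together from the hits at lines 676-681:

/* Assembled from jpeg_v4_0_3.c:676-681: per-instance, per-pipe read
 * of the JRBC read pointer. */
static uint64_t jpeg_v4_0_3_dec_ring_get_rptr(struct amdgpu_ring *ring)
{
	struct amdgpu_device *adev = ring->adev;

	return RREG32_SOC15_OFFSET(JPEG, GET_INST(JPEG, ring->me),
				   regUVD_JRBC0_UVD_JRBC_RB_RPTR,
				   jpeg_v4_0_3_core_reg_offset(ring->pipe));
}
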
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.h
58
void jpeg_v4_0_3_dec_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.h
62
void jpeg_v4_0_3_dec_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.h
64
void jpeg_v4_0_3_dec_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.h
66
void jpeg_v4_0_3_ring_emit_hdp_flush(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.h
67
void jpeg_v4_0_3_dec_ring_nop(struct amdgpu_ring *ring, uint32_t count);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.h
68
void jpeg_v4_0_3_dec_ring_insert_start(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.h
69
void jpeg_v4_0_3_dec_ring_insert_end(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.h
70
void jpeg_v4_0_3_dec_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_3.h
71
void jpeg_v4_0_3_dec_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
121
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
159
ring = adev->jpeg.inst[i].ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
160
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
161
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
162
ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 1 + 8 * i;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
163
sprintf(ring->name, "jpeg_dec_%d", i);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
164
r = amdgpu_ring_init(adev, ring, 512, &adev->jpeg.inst[i].irq,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
219
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
231
ring = adev->jpeg.inst[i].ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
232
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
423
struct amdgpu_ring *ring = adev->jpeg.inst[inst_idx].ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
466
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
468
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
472
WREG32_SOC15(JPEG, inst_idx, regUVD_JRBC_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
473
ring->wptr = RREG32_SOC15(JPEG, inst_idx, regUVD_JRBC_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
503
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
513
ring = adev->jpeg.inst[i].ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
515
adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
519
ring->doorbell_index << VCN_JPEG_DB_CTRL__OFFSET__SHIFT |
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
551
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
553
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
557
WREG32_SOC15(JPEG, i, regUVD_JRBC_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
558
ring->wptr = RREG32_SOC15(JPEG, i, regUVD_JRBC_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
609
static uint64_t jpeg_v4_0_5_dec_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
611
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
613
return RREG32_SOC15(JPEG, ring->me, regUVD_JRBC_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
623
static uint64_t jpeg_v4_0_5_dec_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
625
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
627
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
628
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
630
return RREG32_SOC15(JPEG, ring->me, regUVD_JRBC_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
640
static void jpeg_v4_0_5_dec_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
642
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
644
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
645
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
646
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
648
WREG32_SOC15(JPEG, ring->me, regUVD_JRBC_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
69
static void jpeg_v4_0_5_dec_ring_set_wptr(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
771
static int jpeg_v4_0_5_ring_reset(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
777
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
778
r = jpeg_v4_0_5_stop(ring->adev);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
781
r = jpeg_v4_0_5_start(ring->adev);
drivers/gpu/drm/amd/amdgpu/jpeg_v4_0_5.c
784
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
105
ring = adev->jpeg.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
106
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
107
ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 1;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
108
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
110
sprintf(ring->name, "jpeg_dec");
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
111
r = amdgpu_ring_init(adev, ring, 512, &adev->jpeg.inst->irq, 0,
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
163
struct amdgpu_ring *ring = adev->jpeg.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
166
adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
173
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
343
struct amdgpu_ring *ring = adev->jpeg.inst[inst_idx].ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
383
ring->doorbell_index << VCN_JPEG_DB_CTRL__OFFSET__SHIFT |
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
389
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
391
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
395
WREG32_SOC15(JPEG, inst_idx, regUVD_JRBC_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
396
ring->wptr = RREG32_SOC15(JPEG, inst_idx, regUVD_JRBC_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
427
struct amdgpu_ring *ring = adev->jpeg.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
460
ring->doorbell_index << VCN_JPEG_DB_CTRL__OFFSET__SHIFT |
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
466
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
468
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
472
WREG32_SOC15(JPEG, 0, regUVD_JRBC_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
473
ring->wptr = RREG32_SOC15(JPEG, 0, regUVD_JRBC_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
519
static uint64_t jpeg_v5_0_0_dec_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
521
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
533
static uint64_t jpeg_v5_0_0_dec_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
535
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
537
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
538
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
550
static void jpeg_v5_0_0_dec_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
552
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
554
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
555
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
556
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
558
WREG32_SOC15(JPEG, 0, regUVD_JRBC_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
647
static int jpeg_v5_0_0_ring_reset(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
653
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
654
r = jpeg_v5_0_0_stop(ring->adev);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
657
r = jpeg_v5_0_0_start(ring->adev);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
660
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_0.c
88
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
139
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
173
ring = &adev->jpeg.inst[i].ring_dec[j];
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
174
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
175
ring->vm_hub = AMDGPU_MMHUB0(adev->jpeg.inst[i].aid_id);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
177
ring->doorbell_index =
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
181
ring->doorbell_index =
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
185
sprintf(ring->name, "jpeg_dec_%d.%d", adev->jpeg.inst[i].aid_id, j);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
186
r = amdgpu_ring_init(adev, ring, 512, &adev->jpeg.inst->irq, 0,
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
252
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
262
ring = &adev->jpeg.inst[i].ring_dec[j];
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
263
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
264
ring->wptr_old = 0;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
265
jpeg_v5_0_1_dec_ring_set_wptr(ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
266
ring->sched.ready = true;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
276
ring = adev->jpeg.inst[i].ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
277
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
278
adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
283
ring = &adev->jpeg.inst[i].ring_dec[j];
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
284
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
286
ring->pipe,
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
287
ring->doorbell_index <<
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
290
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
401
static void jpeg_v5_0_1_init_jrbc(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
403
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
405
int jpeg_inst = GET_INST(JPEG, ring->me);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
406
int reg_offset = ring->pipe ? jpeg_v5_0_1_core_reg_offset(ring->pipe) : 0;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
410
if (ring->pipe < AMDGPU_MAX_JPEG_RINGS_4_0_3) {
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
411
data = JPEG_SYS_INT_EN__DJRBC0_MASK << ring->pipe;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
412
mask = ~(JPEG_SYS_INT_EN__DJRBC0_MASK << ring->pipe);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
415
data = JPEG_SYS_INT_EN__DJRBC0_MASK << (ring->pipe+12);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
416
mask = ~(JPEG_SYS_INT_EN__DJRBC0_MASK << (ring->pipe+12));
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
429
reg_offset, lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
43
static void jpeg_v5_0_1_dec_ring_set_wptr(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
432
reg_offset, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
444
reg_offset, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
445
ring->wptr = RREG32_SOC15_OFFSET(JPEG, jpeg_inst, regUVD_JRBC_RB_WPTR,
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
451
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
486
ring = &adev->jpeg.inst[i].ring_dec[j];
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
490
MMSCH_V5_0_INSERT_DIRECT_WT(tmp, lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
492
MMSCH_V5_0_INSERT_DIRECT_WT(tmp, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
494
MMSCH_V5_0_INSERT_DIRECT_WT(tmp, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
571
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
577
ring = &adev->jpeg.inst[i].ring_dec[j];
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
578
jpeg_v5_0_1_init_jrbc(ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
609
static uint64_t jpeg_v5_0_1_dec_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
611
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
613
return RREG32_SOC15_OFFSET(JPEG, GET_INST(JPEG, ring->me), regUVD_JRBC_RB_RPTR,
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
614
ring->pipe ? jpeg_v5_0_1_core_reg_offset(ring->pipe) : 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
624
static uint64_t jpeg_v5_0_1_dec_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
626
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
628
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
629
return adev->wb.wb[ring->wptr_offs];
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
631
return RREG32_SOC15_OFFSET(JPEG, GET_INST(JPEG, ring->me), regUVD_JRBC_RB_WPTR,
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
632
ring->pipe ? jpeg_v5_0_1_core_reg_offset(ring->pipe) : 0);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
642
static void jpeg_v5_0_1_dec_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
644
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
646
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
647
adev->wb.wb[ring->wptr_offs] = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
648
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
650
WREG32_SOC15_OFFSET(JPEG, GET_INST(JPEG, ring->me),
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
652
(ring->pipe ? jpeg_v5_0_1_core_reg_offset(ring->pipe) : 0),
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
653
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
818
static void jpeg_v5_0_1_core_stall_reset(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
820
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
821
int jpeg_inst = GET_INST(JPEG, ring->me);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
822
int reg_offset = ring->pipe ? jpeg_v5_0_1_core_reg_offset(ring->pipe) : 0;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
833
WREG32_SOC15(JPEG, jpeg_inst, regJPEG_CORE_RST_CTRL, 1 << ring->pipe);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
843
static int jpeg_v5_0_1_ring_reset(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
847
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
848
struct amdgpu_vcn_inst *vinst = &adev->vcn.inst[ring->me];
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
854
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
855
jpeg_v5_0_1_core_stall_reset(ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
856
jpeg_v5_0_1_init_jrbc(ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_0_1.c
857
r = amdgpu_ring_reset_helper_end(ring, timedout_fence);
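jpeg_v5_0_1.c extends the same idea to multiple AIDs and pipes: the logical ring->me is mapped to a physical JPEG instance with GET_INST(), and non-zero pipes add a per-core register offset. Assembled from the get_wptr matches above (only the braces and overall function layout are inferred):

static uint64_t jpeg_v5_0_1_dec_ring_get_wptr(struct amdgpu_ring *ring)
{
	struct amdgpu_device *adev = ring->adev;

	if (ring->use_doorbell)
		/* doorbell rings keep the wptr shadow in the writeback page */
		return adev->wb.wb[ring->wptr_offs];

	return RREG32_SOC15_OFFSET(JPEG, GET_INST(JPEG, ring->me), regUVD_JRBC_RB_WPTR,
				   ring->pipe ? jpeg_v5_0_1_core_reg_offset(ring->pipe) : 0);
}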
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
146
struct amdgpu_ring *ring = adev->jpeg.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
149
adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
156
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
326
struct amdgpu_ring *ring = adev->jpeg.inst[inst_idx].ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
366
ring->doorbell_index << VCN_JPEG_DB_CTRL__OFFSET__SHIFT |
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
372
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
374
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
378
WREG32_SOC15(JPEG, inst_idx, regUVD_JRBC0_UVD_JRBC_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
379
ring->wptr = RREG32_SOC15(JPEG, inst_idx, regUVD_JRBC0_UVD_JRBC_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
410
struct amdgpu_ring *ring = adev->jpeg.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
443
ring->doorbell_index << VCN_JPEG_DB_CTRL__OFFSET__SHIFT |
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
449
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
451
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
455
WREG32_SOC15(JPEG, 0, regUVD_JRBC0_UVD_JRBC_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
456
ring->wptr = RREG32_SOC15(JPEG, 0, regUVD_JRBC0_UVD_JRBC_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
502
static uint64_t jpeg_v5_3_0_dec_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
504
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
516
static uint64_t jpeg_v5_3_0_dec_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
518
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
520
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
521
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
533
static void jpeg_v5_3_0_dec_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
535
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
537
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
538
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
539
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
541
WREG32_SOC15(JPEG, 0, regUVD_JRBC0_UVD_JRBC_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
630
static int jpeg_v5_3_0_ring_reset(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
636
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
637
r = jpeg_v5_3_0_stop(ring->adev);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
640
r = jpeg_v5_3_0_start(ring->adev);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
643
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
73
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
90
ring = adev->jpeg.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
91
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
92
ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 1;
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
93
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
95
sprintf(ring->name, "jpeg_dec");
drivers/gpu/drm/amd/amdgpu/jpeg_v5_3_0.c
96
r = amdgpu_ring_init(adev, ring, 512, &adev->jpeg.inst->irq, 0,
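Both jpeg_v5_0_0.c and jpeg_v5_3_0.c show the JRBC bring-up sequence: point the doorbell control register at ring->doorbell_index, write the ring buffer base from lower_32_bits/upper_32_bits of ring->gpu_addr, program the size in dwords, and resynchronise the software write pointer with the hardware one. Only the last two steps appear with their register names in these matches; a sketch of that tail for the v5_3_0 register layout:

	/* ring size is programmed in dwords */
	WREG32_SOC15(JPEG, inst_idx, regUVD_JRBC0_UVD_JRBC_RB_SIZE, ring->ring_size / 4);
	/* pick up whatever write pointer the hardware currently reports */
	ring->wptr = RREG32_SOC15(JPEG, inst_idx, regUVD_JRBC0_UVD_JRBC_RB_WPTR);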
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1110
static int mes_v11_0_mqd_init(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1112
struct v11_compute_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1126
eop_base_addr = ring->eop_gpu_addr >> 8;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1138
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1144
mqd->cp_mqd_base_addr_lo = ring->mqd_gpu_addr & 0xfffffffc;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1145
mqd->cp_mqd_base_addr_hi = upper_32_bits(ring->mqd_gpu_addr);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1153
hqd_gpu_addr = ring->gpu_addr >> 8;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1158
wb_gpu_addr = ring->rptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1164
wb_gpu_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1171
(order_base_2(ring->ring_size / 4) - 1));
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1183
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1185
DOORBELL_OFFSET, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1210
amdgpu_device_flush_hdp(ring->adev, NULL);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1214
static void mes_v11_0_queue_init_register(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1216
struct v11_compute_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1217
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1221
soc21_grbm_select(adev, 3, ring->pipe, 0, 0);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1279
struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1291
kiq->pmf->kiq_map_queues(kiq_ring, &adev->mes.ring[0]);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1299
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1303
ring = &adev->gfx.kiq[0].ring;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1305
ring = &adev->mes.ring[0];
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1311
*(ring->wptr_cpu_addr) = 0;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1312
*(ring->rptr_cpu_addr) = 0;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1313
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1316
r = mes_v11_0_mqd_init(ring);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1325
mes_v11_0_queue_init_register(ring);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1333
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1335
ring = &adev->mes.ring[0];
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1337
ring->funcs = &mes_v11_0_ring_funcs;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1339
ring->me = 3;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1340
ring->pipe = 0;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1341
ring->queue = 0;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1343
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1344
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1345
ring->doorbell_index = adev->doorbell_index.mes_ring0 << 1;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1346
ring->eop_gpu_addr = adev->mes.eop_gpu_addr[AMDGPU_MES_SCHED_PIPE];
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1347
ring->no_scheduler = true;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1348
sprintf(ring->name, "mes_%d.%d.%d", ring->me, ring->pipe, ring->queue);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1350
return amdgpu_ring_init(adev, ring, 1024, NULL, 0,
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1356
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1360
ring = &adev->gfx.kiq[0].ring;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1362
ring->me = 3;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1363
ring->pipe = 1;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1364
ring->queue = 0;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1366
ring->adev = NULL;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1367
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1368
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1369
ring->doorbell_index = adev->doorbell_index.mes_ring1 << 1;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1370
ring->eop_gpu_addr = adev->mes.eop_gpu_addr[AMDGPU_MES_KIQ_PIPE];
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1371
ring->no_scheduler = true;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1372
sprintf(ring->name, "mes_kiq_%d.%d.%d",
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1373
ring->me, ring->pipe, ring->queue);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1375
return amdgpu_ring_init(adev, ring, 1024, NULL, 0,
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1383
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1386
ring = &adev->gfx.kiq[0].ring;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1388
ring = &adev->mes.ring[0];
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1392
if (ring->mqd_obj)
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1397
AMDGPU_GEM_DOMAIN_GTT, &ring->mqd_obj,
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1398
&ring->mqd_gpu_addr, &ring->mqd_ptr);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1404
memset(ring->mqd_ptr, 0, mqd_size);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1411
ring->name);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1493
amdgpu_bo_free_kernel(&adev->gfx.kiq[0].ring.mqd_obj,
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1494
&adev->gfx.kiq[0].ring.mqd_gpu_addr,
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1495
&adev->gfx.kiq[0].ring.mqd_ptr);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1497
amdgpu_bo_free_kernel(&adev->mes.ring[0].mqd_obj,
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1498
&adev->mes.ring[0].mqd_gpu_addr,
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1499
&adev->mes.ring[0].mqd_ptr);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1501
amdgpu_ring_fini(&adev->gfx.kiq[0].ring);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1502
amdgpu_ring_fini(&adev->mes.ring[0]);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1513
static void mes_v11_0_kiq_dequeue(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1517
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1520
soc21_grbm_select(adev, 3, ring->pipe, 0, 0);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1548
static void mes_v11_0_kiq_setting(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1551
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1556
tmp |= (ring->me << 5) | (ring->pipe << 3) | (ring->queue);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1595
mes_v11_0_kiq_setting(&adev->gfx.kiq[0].ring);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1627
if (adev->mes.ring[0].sched.ready) {
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1628
mes_v11_0_kiq_dequeue(&adev->mes.ring[0]);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1629
adev->mes.ring[0].sched.ready = false;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1633
mes_v11_0_kiq_dequeue(&adev->gfx.kiq[0].ring);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1647
if (adev->mes.ring[0].sched.ready)
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1695
adev->gfx.kiq[0].ring.sched.ready = false;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
1696
adev->mes.ring[0].sched.ready = true;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
174
struct amdgpu_ring *ring = &mes->ring[0];
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
204
r = amdgpu_ring_alloc(ring, (size + sizeof(mes_status_pkt)) / 4);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
208
seq = ++ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
209
r = amdgpu_fence_wait_polling(ring,
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
210
seq - ring->fence_drv.num_fences_mask,
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
219
amdgpu_ring_write_multiple(ring, pkt, size / 4);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
226
ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
229
amdgpu_ring_write_multiple(ring, &mes_status_pkt,
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
232
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
247
r = amdgpu_fence_wait_polling(ring, seq, timeout);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
272
amdgpu_ring_undo(ring);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
74
static void mes_v11_0_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
76
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
78
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
79
atomic64_set((atomic64_t *)ring->wptr_cpu_addr,
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
80
ring->wptr);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
81
WDOORBELL64(ring->doorbell_index, ring->wptr);
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
87
static u64 mes_v11_0_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
89
return *ring->rptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
92
static u64 mes_v11_0_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
96
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/mes_v11_0.c
97
wptr = atomic64_read((atomic64_t *)ring->wptr_cpu_addr);
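The mes_v11_0.c matches from source lines 174-272 sketch how a packet reaches the MES scheduler ring: reserve space, throttle on the oldest outstanding fence, write the request packet plus a status packet whose completion fence points at ring->fence_drv.gpu_addr, commit, and poll for that fence. The function shape below is an illustration only; the name, the status-packet type and the error codes are assumptions, while the ring and fence calls mirror the listing:

static int mes_v11_0_submit_pkt_sketch(struct amdgpu_mes *mes, void *pkt,
				       int size, int timeout)
{
	struct amdgpu_ring *ring = &mes->ring[0];
	union MESAPI__QUERY_MES_STATUS mes_status_pkt; /* type name is an assumption */
	signed long r;
	u64 seq;

	r = amdgpu_ring_alloc(ring, (size + sizeof(mes_status_pkt)) / 4);
	if (r)
		return r;

	seq = ++ring->fence_drv.sync_seq;
	/* throttle: make sure the oldest in-flight fence slot has signalled */
	r = amdgpu_fence_wait_polling(ring,
				      seq - ring->fence_drv.num_fences_mask,
				      timeout);
	if (r < 1) {
		amdgpu_ring_undo(ring);
		return -ETIMEDOUT;
	}

	amdgpu_ring_write_multiple(ring, pkt, size / 4);
	amdgpu_ring_write_multiple(ring, &mes_status_pkt,
				   sizeof(mes_status_pkt) / 4);
	amdgpu_ring_commit(ring);

	/* the MES firmware signals completion through the status packet's fence */
	r = amdgpu_fence_wait_polling(ring, seq, timeout);
	return r < 1 ? -ETIMEDOUT : 0;
}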
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1270
static int mes_v12_0_mqd_init(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1272
struct v12_compute_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1284
eop_base_addr = ring->eop_gpu_addr >> 8;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1296
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1302
mqd->cp_mqd_base_addr_lo = ring->mqd_gpu_addr & 0xfffffffc;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1303
mqd->cp_mqd_base_addr_hi = upper_32_bits(ring->mqd_gpu_addr);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1311
hqd_gpu_addr = ring->gpu_addr >> 8;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1316
wb_gpu_addr = ring->rptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1322
wb_gpu_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1329
(order_base_2(ring->ring_size / 4) - 1));
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1341
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1343
DOORBELL_OFFSET, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1379
static void mes_v12_0_queue_init_register(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1381
struct v12_compute_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1382
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1386
soc21_grbm_select(adev, 3, ring->pipe, 0, 0);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1444
struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1456
kiq->pmf->kiq_map_queues(kiq_ring, &adev->mes.ring[0]);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1469
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1473
ring = &adev->gfx.kiq[0].ring;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1475
ring = &adev->mes.ring[pipe];
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1479
*(ring->wptr_cpu_addr) = 0;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1480
*(ring->rptr_cpu_addr) = 0;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1481
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1484
r = mes_v12_0_mqd_init(ring);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1490
r = amdgpu_mes_map_legacy_queue(adev, ring, 0);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1496
mes_v12_0_queue_init_register(ring);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1519
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1521
ring = &adev->mes.ring[pipe];
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1523
ring->funcs = &mes_v12_0_ring_funcs;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1525
ring->me = 3;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1526
ring->pipe = pipe;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1527
ring->queue = 0;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1529
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1530
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1531
ring->eop_gpu_addr = adev->mes.eop_gpu_addr[pipe];
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1532
ring->no_scheduler = true;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1533
sprintf(ring->name, "mes_%d.%d.%d", ring->me, ring->pipe, ring->queue);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1536
ring->doorbell_index = adev->doorbell_index.mes_ring0 << 1;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1538
ring->doorbell_index = adev->doorbell_index.mes_ring1 << 1;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1540
return amdgpu_ring_init(adev, ring, 1024, NULL, 0,
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1546
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1550
ring = &adev->gfx.kiq[0].ring;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1552
ring->me = 3;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1553
ring->pipe = 1;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1554
ring->queue = 0;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1556
ring->adev = NULL;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1557
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1558
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1559
ring->doorbell_index = adev->doorbell_index.mes_ring1 << 1;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
156
struct amdgpu_ring *ring = &mes->ring[pipe];
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1560
ring->eop_gpu_addr = adev->mes.eop_gpu_addr[AMDGPU_MES_KIQ_PIPE];
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1561
ring->no_scheduler = true;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1562
sprintf(ring->name, "mes_kiq_%d.%d.%d",
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1563
ring->me, ring->pipe, ring->queue);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1565
return amdgpu_ring_init(adev, ring, 1024, NULL, 0,
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1573
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1576
ring = &adev->gfx.kiq[0].ring;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1578
ring = &adev->mes.ring[pipe];
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1580
if (ring->mqd_obj)
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1584
AMDGPU_GEM_DOMAIN_GTT, &ring->mqd_obj,
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1585
&ring->mqd_gpu_addr, &ring->mqd_ptr);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1591
memset(ring->mqd_ptr, 0, mqd_size);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1598
ring->name);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1669
amdgpu_bo_free_kernel(&adev->mes.ring[pipe].mqd_obj,
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1670
&adev->mes.ring[pipe].mqd_gpu_addr,
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1671
&adev->mes.ring[pipe].mqd_ptr);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1672
amdgpu_ring_fini(&adev->mes.ring[pipe]);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1677
amdgpu_bo_free_kernel(&adev->gfx.kiq[0].ring.mqd_obj,
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1678
&adev->gfx.kiq[0].ring.mqd_gpu_addr,
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1679
&adev->gfx.kiq[0].ring.mqd_ptr);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1680
amdgpu_ring_fini(&adev->gfx.kiq[0].ring);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1725
adev->mes.ring[0].sched.ready = false;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1728
static void mes_v12_0_kiq_setting(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1731
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1736
tmp |= (ring->me << 5) | (ring->pipe << 3) | (ring->queue);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1746
mes_v12_0_kiq_setting(&adev->mes.ring[AMDGPU_MES_KIQ_PIPE]);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1748
mes_v12_0_kiq_setting(&adev->gfx.kiq[0].ring);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1804
if (adev->mes.ring[0].sched.ready) {
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1807
&adev->mes.ring[AMDGPU_MES_SCHED_PIPE],
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1812
adev->mes.ring[0].sched.ready = false;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1825
if (adev->mes.ring[0].sched.ready)
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
187
r = amdgpu_ring_alloc(ring, (size + sizeof(mes_status_pkt)) / 4);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1880
adev->gfx.kiq[0].ring.sched.ready = false;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
1881
adev->mes.ring[0].sched.ready = true;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
191
seq = ++ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
192
r = amdgpu_fence_wait_polling(ring,
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
193
seq - ring->fence_drv.num_fences_mask,
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
202
amdgpu_ring_write_multiple(ring, pkt, size / 4);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
209
ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
212
amdgpu_ring_write_multiple(ring, &mes_status_pkt,
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
215
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
231
r = amdgpu_fence_wait_polling(ring, seq, timeout);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
261
amdgpu_ring_undo(ring);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
53
static void mes_v12_0_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
55
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
57
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
58
atomic64_set((atomic64_t *)ring->wptr_cpu_addr,
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
59
ring->wptr);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
60
WDOORBELL64(ring->doorbell_index, ring->wptr);
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
66
static u64 mes_v12_0_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
68
return *ring->rptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
71
static u64 mes_v12_0_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
75
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/mes_v12_0.c
76
wptr = atomic64_read((atomic64_t *)ring->wptr_cpu_addr);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1195
static int mes_v12_1_mqd_init(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1197
struct v12_1_mes_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1209
eop_base_addr = ring->eop_gpu_addr >> 8;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1221
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1227
mqd->cp_mqd_base_addr_lo = ring->mqd_gpu_addr & 0xfffffffc;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1228
mqd->cp_mqd_base_addr_hi = upper_32_bits(ring->mqd_gpu_addr);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1236
hqd_gpu_addr = ring->gpu_addr >> 8;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1241
wb_gpu_addr = ring->rptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1247
wb_gpu_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1254
(order_base_2(ring->ring_size / 4) - 1));
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1266
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1268
DOORBELL_OFFSET, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1304
static void mes_v12_1_queue_init_register(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1307
struct v12_1_mes_mqd *mqd = ring->mqd_ptr;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1308
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1312
soc_v1_0_grbm_select(adev, 3, ring->pipe, 0, 0, GET_INST(GC, xcc_id));
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1370
struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[xcc_id].ring;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1382
kiq->pmf->kiq_map_queues(kiq_ring, &adev->mes.ring[inst]);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1396
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1400
ring = &adev->gfx.kiq[xcc_id].ring;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1402
ring = &adev->mes.ring[MES_PIPE_INST(xcc_id, pipe)];
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1406
*(ring->wptr_cpu_addr) = 0;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1407
*(ring->rptr_cpu_addr) = 0;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1408
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1411
r = mes_v12_1_mqd_init(ring);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1417
r = amdgpu_mes_map_legacy_queue(adev, ring, xcc_id);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1423
mes_v12_1_queue_init_register(ring, xcc_id);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1444
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1447
ring = &adev->mes.ring[inst];
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1449
ring->funcs = &mes_v12_1_ring_funcs;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1451
ring->me = 3;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1452
ring->pipe = pipe;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1453
ring->queue = 0;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1454
ring->xcc_id = xcc_id;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1455
ring->vm_hub = AMDGPU_GFXHUB(xcc_id);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1457
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1458
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1459
ring->eop_gpu_addr = adev->mes.eop_gpu_addr[inst];
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1460
ring->no_scheduler = true;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1461
snprintf(ring->name, sizeof(ring->name), "mes_%hhu.%hhu.%hhu.%hhu",
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1462
(unsigned char)xcc_id, (unsigned char)ring->me,
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1463
(unsigned char)ring->pipe, (unsigned char)ring->queue);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1466
ring->doorbell_index =
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1471
ring->doorbell_index =
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1476
return amdgpu_ring_init(adev, ring, 1024, NULL, 0,
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1482
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1487
ring = &adev->gfx.kiq[xcc_id].ring;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1489
ring->me = 3;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1490
ring->pipe = 1;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1491
ring->queue = 0;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1492
ring->xcc_id = xcc_id;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1493
ring->vm_hub = AMDGPU_GFXHUB(xcc_id);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1495
ring->adev = NULL;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1496
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1497
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1498
ring->eop_gpu_addr = adev->mes.eop_gpu_addr[inst];
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1499
ring->no_scheduler = true;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1500
ring->doorbell_index =
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1505
snprintf(ring->name, sizeof(ring->name), "mes_kiq_%hhu.%hhu.%hhu.%hhu",
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1506
(unsigned char)xcc_id, (unsigned char)ring->me,
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1507
(unsigned char)ring->pipe, (unsigned char)ring->queue);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1509
return amdgpu_ring_init(adev, ring, 1024, NULL, 0,
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1518
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1522
ring = &adev->gfx.kiq[xcc_id].ring;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1524
ring = &adev->mes.ring[inst];
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1526
if (ring->mqd_obj)
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
153
struct amdgpu_ring *ring = &mes->ring[MES_PIPE_INST(xcc_id, pipe)];
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1530
AMDGPU_GEM_DOMAIN_GTT, &ring->mqd_obj,
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1531
&ring->mqd_gpu_addr, &ring->mqd_ptr);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1537
memset(ring->mqd_ptr, 0, mqd_size);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1544
ring->name);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1617
amdgpu_bo_free_kernel(&adev->mes.ring[inst].mqd_obj,
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1618
&adev->mes.ring[inst].mqd_gpu_addr,
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1619
&adev->mes.ring[inst].mqd_ptr);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1620
amdgpu_ring_fini(&adev->mes.ring[inst]);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1627
amdgpu_bo_free_kernel(&adev->gfx.kiq[xcc_id].ring.mqd_obj,
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1628
&adev->gfx.kiq[xcc_id].ring.mqd_gpu_addr,
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1629
&adev->gfx.kiq[xcc_id].ring.mqd_ptr);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1630
amdgpu_ring_fini(&adev->gfx.kiq[xcc_id].ring);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1680
adev->mes.ring[MES_PIPE_INST(xcc_id, 0)].sched.ready = false;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1683
static void mes_v12_1_kiq_setting(struct amdgpu_ring *ring, int xcc_id)
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1686
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1691
tmp |= (ring->me << 5) | (ring->pipe << 3) | (ring->queue);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1704
mes_v12_1_kiq_setting(&adev->mes.ring[inst], xcc_id);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1706
mes_v12_1_kiq_setting(&adev->gfx.kiq[xcc_id].ring, xcc_id);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1768
if (adev->mes.ring[inst].sched.ready) {
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1771
&adev->mes.ring[inst],
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1776
adev->mes.ring[inst].sched.ready = false;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1828
if (adev->mes.ring[MES_PIPE_INST(xcc_id, 0)].sched.ready)
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
184
r = amdgpu_ring_alloc(ring, (size + sizeof(mes_status_pkt)) / 4);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
188
seq = ++ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1885
adev->gfx.kiq[xcc_id].ring.sched.ready = false;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
1886
adev->mes.ring[MES_PIPE_INST(xcc_id, 0)].sched.ready = true;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
189
r = amdgpu_fence_wait_polling(ring,
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
190
seq - ring->fence_drv.num_fences_mask,
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
199
amdgpu_ring_write_multiple(ring, pkt, size / 4);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
206
ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
209
amdgpu_ring_write_multiple(ring, &mes_status_pkt,
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
212
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
228
r = amdgpu_fence_wait_polling(ring, seq, timeout);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
255
amdgpu_ring_undo(ring);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
50
static void mes_v12_1_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
52
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
54
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
55
atomic64_set((atomic64_t *)ring->wptr_cpu_addr,
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
56
ring->wptr);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
57
WDOORBELL64(ring->doorbell_index, ring->wptr);
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
63
static u64 mes_v12_1_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
65
return *ring->rptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
68
static u64 mes_v12_1_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
72
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/mes_v12_1.c
73
wptr = atomic64_read((atomic64_t *)ring->wptr_cpu_addr);
drivers/gpu/drm/amd/amdgpu/psp_v10_0.c
127
struct psp_ring *ring = &psp->km_ring;
drivers/gpu/drm/amd/amdgpu/psp_v10_0.c
135
&ring->ring_mem_mc_addr,
drivers/gpu/drm/amd/amdgpu/psp_v10_0.c
136
(void **)&ring->ring_mem);
drivers/gpu/drm/amd/amdgpu/psp_v10_0.c
75
struct psp_ring *ring = &psp->km_ring;
drivers/gpu/drm/amd/amdgpu/psp_v10_0.c
79
psp_ring_reg = lower_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v10_0.c
82
psp_ring_reg = upper_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v10_0.c
85
psp_ring_reg = ring->ring_size;
drivers/gpu/drm/amd/amdgpu/psp_v11_0.c
319
struct psp_ring *ring = &psp->km_ring;
drivers/gpu/drm/amd/amdgpu/psp_v11_0.c
323
ring->ring_wptr = 0;
drivers/gpu/drm/amd/amdgpu/psp_v11_0.c
331
psp_ring_reg = lower_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v11_0.c
334
psp_ring_reg = upper_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v11_0.c
360
psp_ring_reg = lower_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v11_0.c
363
psp_ring_reg = upper_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v11_0.c
366
psp_ring_reg = ring->ring_size;
drivers/gpu/drm/amd/amdgpu/psp_v11_0.c
390
struct psp_ring *ring = &psp->km_ring;
drivers/gpu/drm/amd/amdgpu/psp_v11_0.c
398
&ring->ring_mem_mc_addr,
drivers/gpu/drm/amd/amdgpu/psp_v11_0.c
399
(void **)&ring->ring_mem);
drivers/gpu/drm/amd/amdgpu/psp_v11_0_8.c
107
psp_ring_reg = lower_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v11_0_8.c
110
psp_ring_reg = upper_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v11_0_8.c
113
psp_ring_reg = ring->ring_size;
drivers/gpu/drm/amd/amdgpu/psp_v11_0_8.c
136
struct psp_ring *ring = &psp->km_ring;
drivers/gpu/drm/amd/amdgpu/psp_v11_0_8.c
144
&ring->ring_mem_mc_addr,
drivers/gpu/drm/amd/amdgpu/psp_v11_0_8.c
145
(void **)&ring->ring_mem);
drivers/gpu/drm/amd/amdgpu/psp_v11_0_8.c
67
struct psp_ring *ring = &psp->km_ring;
drivers/gpu/drm/amd/amdgpu/psp_v11_0_8.c
78
psp_ring_reg = lower_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v11_0_8.c
81
psp_ring_reg = upper_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v12_0.c
147
struct psp_ring *ring = &psp->km_ring;
drivers/gpu/drm/amd/amdgpu/psp_v12_0.c
151
psp_ring_reg = lower_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v12_0.c
154
psp_ring_reg = upper_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v12_0.c
157
psp_ring_reg = ring->ring_size;
drivers/gpu/drm/amd/amdgpu/psp_v12_0.c
202
struct psp_ring *ring = &psp->km_ring;
drivers/gpu/drm/amd/amdgpu/psp_v12_0.c
210
&ring->ring_mem_mc_addr,
drivers/gpu/drm/amd/amdgpu/psp_v12_0.c
211
(void **)&ring->ring_mem);
drivers/gpu/drm/amd/amdgpu/psp_v13_0.c
416
struct psp_ring *ring = &psp->km_ring;
drivers/gpu/drm/amd/amdgpu/psp_v13_0.c
427
psp_ring_reg = lower_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v13_0.c
430
psp_ring_reg = upper_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v13_0.c
456
psp_ring_reg = lower_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v13_0.c
459
psp_ring_reg = upper_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v13_0.c
462
psp_ring_reg = ring->ring_size;
drivers/gpu/drm/amd/amdgpu/psp_v13_0.c
485
struct psp_ring *ring = &psp->km_ring;
drivers/gpu/drm/amd/amdgpu/psp_v13_0.c
493
&ring->ring_mem_mc_addr,
drivers/gpu/drm/amd/amdgpu/psp_v13_0.c
494
(void **)&ring->ring_mem);
drivers/gpu/drm/amd/amdgpu/psp_v13_0_4.c
228
struct psp_ring *ring = &psp->km_ring;
drivers/gpu/drm/amd/amdgpu/psp_v13_0_4.c
239
psp_ring_reg = lower_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v13_0_4.c
242
psp_ring_reg = upper_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v13_0_4.c
268
psp_ring_reg = lower_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v13_0_4.c
271
psp_ring_reg = upper_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v13_0_4.c
274
psp_ring_reg = ring->ring_size;
drivers/gpu/drm/amd/amdgpu/psp_v13_0_4.c
297
struct psp_ring *ring = &psp->km_ring;
drivers/gpu/drm/amd/amdgpu/psp_v13_0_4.c
305
&ring->ring_mem_mc_addr,
drivers/gpu/drm/amd/amdgpu/psp_v13_0_4.c
306
(void **)&ring->ring_mem);
drivers/gpu/drm/amd/amdgpu/psp_v14_0.c
275
struct psp_ring *ring = &psp->km_ring;
drivers/gpu/drm/amd/amdgpu/psp_v14_0.c
286
psp_ring_reg = lower_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v14_0.c
289
psp_ring_reg = upper_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v14_0.c
315
psp_ring_reg = lower_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v14_0.c
318
psp_ring_reg = upper_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v14_0.c
321
psp_ring_reg = ring->ring_size;
drivers/gpu/drm/amd/amdgpu/psp_v14_0.c
344
struct psp_ring *ring = &psp->km_ring;
drivers/gpu/drm/amd/amdgpu/psp_v14_0.c
352
&ring->ring_mem_mc_addr,
drivers/gpu/drm/amd/amdgpu/psp_v14_0.c
353
(void **)&ring->ring_mem);
drivers/gpu/drm/amd/amdgpu/psp_v15_0.c
100
psp_ring_reg = lower_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v15_0.c
103
psp_ring_reg = upper_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v15_0.c
127
psp_ring_reg = lower_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v15_0.c
130
psp_ring_reg = upper_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v15_0.c
133
psp_ring_reg = ring->ring_size;
drivers/gpu/drm/amd/amdgpu/psp_v15_0.c
155
struct psp_ring *ring = &psp->km_ring;
drivers/gpu/drm/amd/amdgpu/psp_v15_0.c
163
&ring->ring_mem_mc_addr,
drivers/gpu/drm/amd/amdgpu/psp_v15_0.c
164
(void **)&ring->ring_mem);
drivers/gpu/drm/amd/amdgpu/psp_v15_0.c
89
struct psp_ring *ring = &psp->km_ring;
drivers/gpu/drm/amd/amdgpu/psp_v15_0_8.c
123
psp_ring_reg = lower_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v15_0_8.c
126
psp_ring_reg = upper_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v15_0_8.c
129
psp_ring_reg = ring->ring_size;
drivers/gpu/drm/amd/amdgpu/psp_v15_0_8.c
151
struct psp_ring *ring = &psp->km_ring;
drivers/gpu/drm/amd/amdgpu/psp_v15_0_8.c
159
&ring->ring_mem_mc_addr,
drivers/gpu/drm/amd/amdgpu/psp_v15_0_8.c
160
(void **)&ring->ring_mem);
drivers/gpu/drm/amd/amdgpu/psp_v15_0_8.c
85
struct psp_ring *ring = &psp->km_ring;
drivers/gpu/drm/amd/amdgpu/psp_v15_0_8.c
96
psp_ring_reg = lower_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v15_0_8.c
99
psp_ring_reg = upper_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v3_1.c
191
struct psp_ring *ring = &psp->km_ring;
drivers/gpu/drm/amd/amdgpu/psp_v3_1.c
197
ring->ring_wptr = 0;
drivers/gpu/drm/amd/amdgpu/psp_v3_1.c
205
psp_ring_reg = lower_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v3_1.c
208
psp_ring_reg = upper_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v3_1.c
226
psp_ring_reg = lower_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v3_1.c
229
psp_ring_reg = upper_32_bits(ring->ring_mem_mc_addr);
drivers/gpu/drm/amd/amdgpu/psp_v3_1.c
232
psp_ring_reg = ring->ring_size;
drivers/gpu/drm/amd/amdgpu/psp_v3_1.c
284
struct psp_ring *ring = &psp->km_ring;
drivers/gpu/drm/amd/amdgpu/psp_v3_1.c
292
&ring->ring_mem_mc_addr,
drivers/gpu/drm/amd/amdgpu/psp_v3_1.c
293
(void **)&ring->ring_mem);
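Across the psp_v*.c files the KM ring bring-up repeats the same three writes: the ring buffer's MC address split into low and high halves, plus its size, pushed to the PSP through its C2PMSG mailbox registers (typically followed by a ring-init command that is not part of these matches). The buffer itself lands in ring->ring_mem / ring->ring_mem_mc_addr, by the look of the argument lines above presumably as the tail of an amdgpu_bo_create_kernel() call. Generic shape of the pattern; the three register names below are placeholders, since every PSP version uses a different trio:

	struct psp_ring *ring = &psp->km_ring;
	uint32_t psp_ring_reg;

	psp_ring_reg = lower_32_bits(ring->ring_mem_mc_addr);
	WREG32(C2PMSG_RING_LO, psp_ring_reg);		/* placeholder register */

	psp_ring_reg = upper_32_bits(ring->ring_mem_mc_addr);
	WREG32(C2PMSG_RING_HI, psp_ring_reg);		/* placeholder register */

	psp_ring_reg = ring->ring_size;
	WREG32(C2PMSG_RING_SIZE, psp_ring_reg);		/* placeholder register */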
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
1043
amdgpu_fence_process(&adev->sdma.instance[0].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
1056
amdgpu_fence_process(&adev->sdma.instance[1].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
1081
drm_sched_fault(&adev->sdma.instance[instance_id].ring.sched);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
1147
adev->sdma.instance[i].ring.funcs = &sdma_v2_4_ring_funcs;
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
1148
adev->sdma.instance[i].ring.me = i;
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
1232
adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
1250
&adev->sdma.instance[i].ring.sched;
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
189
static uint64_t sdma_v2_4_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
192
return *ring->rptr_cpu_addr >> 2;
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
202
static uint64_t sdma_v2_4_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
204
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
205
u32 wptr = RREG32(mmSDMA0_GFX_RB_WPTR + sdma_offsets[ring->me]) >> 2;
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
217
static void sdma_v2_4_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
219
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
221
WREG32(mmSDMA0_GFX_RB_WPTR + sdma_offsets[ring->me], ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
224
static void sdma_v2_4_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
226
struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
231
amdgpu_ring_write(ring, ring->funcs->nop |
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
234
amdgpu_ring_write(ring, ring->funcs->nop);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
247
static void sdma_v2_4_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
255
sdma_v2_4_ring_insert_nop(ring, (2 - lower_32_bits(ring->wptr)) & 7);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
257
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_INDIRECT) |
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
260
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
261
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
262
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
263
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
264
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
275
static void sdma_v2_4_ring_emit_hdp_flush(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
279
if (ring->me == 0)
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
284
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_POLL_REGMEM) |
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
287
amdgpu_ring_write(ring, mmGPU_HDP_FLUSH_DONE << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
288
amdgpu_ring_write(ring, mmGPU_HDP_FLUSH_REQ << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
289
amdgpu_ring_write(ring, ref_and_mask); /* reference */
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
290
amdgpu_ring_write(ring, ref_and_mask); /* mask */
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
291
amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
307
static void sdma_v2_4_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
312
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_FENCE));
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
313
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
314
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
315
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
320
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_FENCE));
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
321
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
322
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
323
amdgpu_ring_write(ring, upper_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
327
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_TRAP));
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
328
amdgpu_ring_write(ring, SDMA_PKT_TRAP_INT_CONTEXT_INT_CONTEXT(0));
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
403
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
409
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
427
rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
445
upper_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFF);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
447
lower_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFC);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
451
WREG32(mmSDMA0_GFX_RB_BASE + sdma_offsets[i], ring->gpu_addr >> 8);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
452
WREG32(mmSDMA0_GFX_RB_BASE_HI + sdma_offsets[i], ring->gpu_addr >> 40);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
454
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
455
WREG32(mmSDMA0_GFX_RB_WPTR + sdma_offsets[i], ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
472
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
473
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
531
static int sdma_v2_4_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
533
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
548
r = amdgpu_ring_alloc(ring, 5);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
552
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
554
amdgpu_ring_write(ring, lower_32_bits(gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
555
amdgpu_ring_write(ring, upper_32_bits(gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
556
amdgpu_ring_write(ring, SDMA_PKT_WRITE_UNTILED_DW_3_COUNT(1));
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
557
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
558
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
584
static int sdma_v2_4_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
586
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
618
r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
734
static void sdma_v2_4_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib)
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
736
struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
758
static void sdma_v2_4_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
760
uint32_t seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
761
uint64_t addr = ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
764
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_POLL_REGMEM) |
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
768
amdgpu_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
769
amdgpu_ring_write(ring, upper_32_bits(addr) & 0xffffffff);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
770
amdgpu_ring_write(ring, seq); /* reference */
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
771
amdgpu_ring_write(ring, 0xffffffff); /* mask */
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
772
amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
786
static void sdma_v2_4_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
789
amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
792
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_POLL_REGMEM) |
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
795
amdgpu_ring_write(ring, mmVM_INVALIDATE_REQUEST << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
796
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
797
amdgpu_ring_write(ring, 0); /* reference */
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
798
amdgpu_ring_write(ring, 0); /* mask */
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
799
amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
803
static void sdma_v2_4_ring_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
806
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_SRBM_WRITE) |
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
808
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
809
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
833
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
856
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
857
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
858
ring->use_doorbell = false;
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
859
sprintf(ring->name, "sdma%d", i);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
860
r = amdgpu_ring_init(adev, ring, 1024, &adev->sdma.trap_irq,
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c
877
amdgpu_ring_fini(&adev->sdma.instance[i].ring);
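The sdma_v2_4.c matches from source lines 548-558 are the classic ring self-test: reserve five dwords, emit a WRITE_UNTILED packet that stores 0xDEADBEEF into a writeback slot at gpu_addr, and commit; the caller then polls that slot. Assembled from those lines (the sub-op bits OR'd into the header dword, and the allocation of the writeback slot, are elided here just as they are in the listing):

	r = amdgpu_ring_alloc(ring, 5);

	/* header dword; the untiled-write sub-op bits continue on the source line */
	amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_WRITE));
	amdgpu_ring_write(ring, lower_32_bits(gpu_addr));
	amdgpu_ring_write(ring, upper_32_bits(gpu_addr));
	amdgpu_ring_write(ring, SDMA_PKT_WRITE_UNTILED_DW_3_COUNT(1));
	amdgpu_ring_write(ring, 0xDEADBEEF);
	amdgpu_ring_commit(ring);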
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1007
static void sdma_v3_0_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib)
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1009
struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1031
static void sdma_v3_0_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1033
uint32_t seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1034
uint64_t addr = ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1037
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_POLL_REGMEM) |
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1041
amdgpu_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1042
amdgpu_ring_write(ring, upper_32_bits(addr) & 0xffffffff);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1043
amdgpu_ring_write(ring, seq); /* reference */
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1044
amdgpu_ring_write(ring, 0xffffffff); /* mask */
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1045
amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1059
static void sdma_v3_0_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1062
amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1065
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_POLL_REGMEM) |
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1068
amdgpu_ring_write(ring, mmVM_INVALIDATE_REQUEST << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1069
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1070
amdgpu_ring_write(ring, 0); /* reference */
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1071
amdgpu_ring_write(ring, 0); /* mask */
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1072
amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1076
static void sdma_v3_0_ring_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1079
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_SRBM_WRITE) |
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1081
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1082
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1113
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1136
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1137
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1139
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1140
ring->doorbell_index = adev->doorbell_index.sdma_engine[i];
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1142
ring->use_pollmem = true;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1145
sprintf(ring->name, "sdma%d", i);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1146
r = amdgpu_ring_init(adev, ring, 1024, &adev->sdma.trap_irq,
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1163
amdgpu_ring_fini(&adev->sdma.instance[i].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1381
amdgpu_fence_process(&adev->sdma.instance[0].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1394
amdgpu_fence_process(&adev->sdma.instance[1].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1419
drm_sched_fault(&adev->sdma.instance[instance_id].ring.sched);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1589
adev->sdma.instance[i].ring.funcs = &sdma_v3_0_ring_funcs;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1590
adev->sdma.instance[i].ring.me = i;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1674
adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
1692
&adev->sdma.instance[i].ring.sched;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
347
static uint64_t sdma_v3_0_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
350
return *ring->rptr_cpu_addr >> 2;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
360
static uint64_t sdma_v3_0_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
362
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
365
if (ring->use_doorbell || ring->use_pollmem) {
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
367
wptr = *ring->wptr_cpu_addr >> 2;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
369
wptr = RREG32(mmSDMA0_GFX_RB_WPTR + sdma_offsets[ring->me]) >> 2;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
382
static void sdma_v3_0_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
384
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
386
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
387
u32 *wb = (u32 *)ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
389
WRITE_ONCE(*wb, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
390
WDOORBELL32(ring->doorbell_index, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
391
} else if (ring->use_pollmem) {
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
392
u32 *wb = (u32 *)ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
394
WRITE_ONCE(*wb, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
396
WREG32(mmSDMA0_GFX_RB_WPTR + sdma_offsets[ring->me], ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
400
static void sdma_v3_0_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
402
struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
407
amdgpu_ring_write(ring, ring->funcs->nop |
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
410
amdgpu_ring_write(ring, ring->funcs->nop);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
423
static void sdma_v3_0_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
431
sdma_v3_0_ring_insert_nop(ring, (2 - lower_32_bits(ring->wptr)) & 7);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
433
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_INDIRECT) |
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
436
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
437
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
438
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
439
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
440
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
451
static void sdma_v3_0_ring_emit_hdp_flush(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
455
if (ring->me == 0)
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
460
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_POLL_REGMEM) |
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
463
amdgpu_ring_write(ring, mmGPU_HDP_FLUSH_DONE << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
464
amdgpu_ring_write(ring, mmGPU_HDP_FLUSH_REQ << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
465
amdgpu_ring_write(ring, ref_and_mask); /* reference */
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
466
amdgpu_ring_write(ring, ref_and_mask); /* mask */
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
467
amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
483
static void sdma_v3_0_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
488
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_FENCE));
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
489
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
490
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
491
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
496
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_FENCE));
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
497
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
498
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
499
amdgpu_ring_write(ring, upper_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
503
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_TRAP));
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
504
amdgpu_ring_write(ring, SDMA_PKT_TRAP_INT_CONTEXT_INT_CONTEXT(0));
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
640
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
648
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
649
amdgpu_ring_clear_ring(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
667
rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
678
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
680
sdma_v3_0_ring_set_wptr(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
686
upper_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFF);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
688
lower_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFC);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
692
WREG32(mmSDMA0_GFX_RB_BASE + sdma_offsets[i], ring->gpu_addr >> 8);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
693
WREG32(mmSDMA0_GFX_RB_BASE_HI + sdma_offsets[i], ring->gpu_addr >> 40);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
697
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
699
OFFSET, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
707
wptr_gpu_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
714
if (ring->use_pollmem) {
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
746
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
747
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
805
static int sdma_v3_0_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
807
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
822
r = amdgpu_ring_alloc(ring, 5);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
826
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
828
amdgpu_ring_write(ring, lower_32_bits(gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
829
amdgpu_ring_write(ring, upper_32_bits(gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
830
amdgpu_ring_write(ring, SDMA_PKT_WRITE_UNTILED_DW_3_COUNT(1));
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
831
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
832
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
858
static int sdma_v3_0_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
860
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c
892
r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
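
The sdma_v3_0 ring-test hits above all follow one shape: reserve ring space, emit an SDMA write packet that stores a magic value (0xDEADBEEF) to a CPU-visible writeback slot, commit the ring, then poll that slot. A minimal stand-alone sketch of that flow is below; mock_ring, ring_write and ring_commit are hypothetical stand-ins for illustration only, not the amdgpu helpers shown in the hits.

/* Hypothetical user-space mock of the ring-test pattern seen above.
 * It only models the flow: allocate space, emit a write packet,
 * commit, then check the writeback slot for the magic value.
 */
#include <stdint.h>
#include <stdio.h>

#define RING_SIZE 64

struct mock_ring {
	uint32_t buf[RING_SIZE];
	uint32_t wptr;
};

static void ring_write(struct mock_ring *r, uint32_t dw)
{
	r->buf[r->wptr++ & (RING_SIZE - 1)] = dw;
}

/* "hardware" side of the mock: consume the packet and perform the write */
static void ring_commit(struct mock_ring *r, uint32_t *wb_slot)
{
	/* packet layout used by this mock: header, addr lo/hi, count, data */
	*wb_slot = r->buf[(r->wptr - 1) & (RING_SIZE - 1)];
}

int main(void)
{
	struct mock_ring ring = { .wptr = 0 };
	uint32_t wb = 0xCAFEDEAD;		/* poisoned writeback slot */

	ring_write(&ring, 0x2);			/* pretend write-packet header */
	ring_write(&ring, 0x0);			/* dst addr lo (ignored by mock) */
	ring_write(&ring, 0x0);			/* dst addr hi */
	ring_write(&ring, 0x0);			/* dword count - 1 */
	ring_write(&ring, 0xDEADBEEF);		/* magic test value */
	ring_commit(&ring, &wb);

	/* the real test polls with a timeout; one check suffices here */
	printf("ring test %s\n", wb == 0xDEADBEEF ? "passed" : "failed");
	return 0;
}
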
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1066
static uint32_t sdma_v4_0_rb_cntl(struct amdgpu_ring *ring, uint32_t rb_cntl)
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1069
uint32_t rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1091
struct amdgpu_ring *ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1098
rb_cntl = sdma_v4_0_rb_cntl(ring, rb_cntl);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1109
upper_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFF);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1111
lower_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFC);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1116
WREG32_SDMA(i, mmSDMA0_GFX_RB_BASE, ring->gpu_addr >> 8);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1117
WREG32_SDMA(i, mmSDMA0_GFX_RB_BASE_HI, ring->gpu_addr >> 40);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1119
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1128
ring->use_doorbell);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1131
OFFSET, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1135
sdma_v4_0_ring_set_wptr(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1141
wptr_gpu_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1176
struct amdgpu_ring *ring = &adev->sdma.instance[i].page;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1183
rb_cntl = sdma_v4_0_rb_cntl(ring, rb_cntl);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1194
upper_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFF);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1196
lower_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFC);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1201
WREG32_SDMA(i, mmSDMA0_PAGE_RB_BASE, ring->gpu_addr >> 8);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1202
WREG32_SDMA(i, mmSDMA0_PAGE_RB_BASE_HI, ring->gpu_addr >> 40);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1204
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1213
ring->use_doorbell);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1216
OFFSET, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1221
sdma_v4_0_page_ring_set_wptr(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1227
wptr_gpu_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1384
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1436
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1438
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1463
static int sdma_v4_0_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1465
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1480
r = amdgpu_ring_alloc(ring, 5);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1484
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1486
amdgpu_ring_write(ring, lower_32_bits(gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1487
amdgpu_ring_write(ring, upper_32_bits(gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1488
amdgpu_ring_write(ring, SDMA_PKT_WRITE_UNTILED_DW_3_COUNT(0));
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1489
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1490
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1516
static int sdma_v4_0_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1518
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1550
r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1668
static void sdma_v4_0_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib)
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1670
struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1693
static void sdma_v4_0_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1695
uint32_t seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1696
uint64_t addr = ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1699
sdma_v4_0_wait_reg_mem(ring, 1, 0,
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1716
static void sdma_v4_0_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1719
amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1722
static void sdma_v4_0_ring_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1725
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_SRBM_WRITE) |
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1727
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1728
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1731
static void sdma_v4_0_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1734
sdma_v4_0_wait_reg_mem(ring, 0, 0, reg, 0, val, mask, 10);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1797
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1849
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1850
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1851
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1854
ring->use_doorbell?"true":"false");
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1857
ring->doorbell_index = adev->doorbell_index.sdma_engine[i] << 1;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1866
ring->vm_hub = AMDGPU_MMHUB1(0);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1868
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1870
sprintf(ring->name, "sdma%d", i);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1871
r = amdgpu_ring_init(adev, ring, 1024, &adev->sdma.trap_irq,
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1878
ring = &adev->sdma.instance[i].page;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1879
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1880
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1889
ring->doorbell_index =
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1891
ring->doorbell_index += 0x400;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1896
ring->doorbell_index =
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1903
ring->vm_hub = AMDGPU_MMHUB1(0);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1905
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1907
sprintf(ring->name, "page%d", i);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1908
r = amdgpu_ring_init(adev, ring, 1024,
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
1938
amdgpu_ring_fini(&adev->sdma.instance[i].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
2087
amdgpu_fence_process(&adev->sdma.instance[instance].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
2143
drm_sched_fault(&adev->sdma.instance[instance].ring.sched);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
2482
adev->sdma.instance[i].ring.funcs = &sdma_v4_0_ring_funcs;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
2483
adev->sdma.instance[i].ring.me = i;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
2615
adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
2636
sched = &adev->sdma.instance[i].ring.sched;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
653
static uint64_t sdma_v4_0_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
658
rptr = ((u64 *)ring->rptr_cpu_addr);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
671
static uint64_t sdma_v4_0_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
673
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
676
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
678
wptr = READ_ONCE(*((u64 *)ring->wptr_cpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
681
wptr = RREG32_SDMA(ring->me, mmSDMA0_GFX_RB_WPTR_HI);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
683
wptr |= RREG32_SDMA(ring->me, mmSDMA0_GFX_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
685
ring->me, wptr);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
698
static void sdma_v4_0_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
700
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
703
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
704
u64 *wb = (u64 *)ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
710
ring->wptr_offs,
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
711
lower_32_bits(ring->wptr << 2),
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
712
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
714
WRITE_ONCE(*wb, (ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
716
ring->doorbell_index, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
717
WDOORBELL64(ring->doorbell_index, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
722
ring->me,
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
723
lower_32_bits(ring->wptr << 2),
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
724
ring->me,
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
725
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
726
WREG32_SDMA(ring->me, mmSDMA0_GFX_RB_WPTR,
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
727
lower_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
728
WREG32_SDMA(ring->me, mmSDMA0_GFX_RB_WPTR_HI,
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
729
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
740
static uint64_t sdma_v4_0_page_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
742
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
745
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
747
wptr = READ_ONCE(*((u64 *)ring->wptr_cpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
749
wptr = RREG32_SDMA(ring->me, mmSDMA0_PAGE_RB_WPTR_HI);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
751
wptr |= RREG32_SDMA(ring->me, mmSDMA0_PAGE_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
764
static void sdma_v4_0_page_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
766
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
768
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
769
u64 *wb = (u64 *)ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
772
WRITE_ONCE(*wb, (ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
773
WDOORBELL64(ring->doorbell_index, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
775
uint64_t wptr = ring->wptr << 2;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
777
WREG32_SDMA(ring->me, mmSDMA0_PAGE_RB_WPTR,
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
779
WREG32_SDMA(ring->me, mmSDMA0_PAGE_RB_WPTR_HI,
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
784
static void sdma_v4_0_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
786
struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
791
amdgpu_ring_write(ring, ring->funcs->nop |
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
794
amdgpu_ring_write(ring, ring->funcs->nop);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
807
static void sdma_v4_0_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
815
sdma_v4_0_ring_insert_nop(ring, (2 - lower_32_bits(ring->wptr)) & 7);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
817
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_INDIRECT) |
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
820
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
821
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
822
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
823
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
824
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
828
static void sdma_v4_0_wait_reg_mem(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
834
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_POLL_REGMEM) |
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
840
amdgpu_ring_write(ring, addr0);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
841
amdgpu_ring_write(ring, addr1);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
844
amdgpu_ring_write(ring, addr0 << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
845
amdgpu_ring_write(ring, addr1 << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
847
amdgpu_ring_write(ring, ref); /* reference */
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
848
amdgpu_ring_write(ring, mask); /* mask */
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
849
amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
860
static void sdma_v4_0_ring_emit_hdp_flush(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
862
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
866
ref_and_mask = nbio_hf_reg->ref_and_mask_sdma0 << ring->me;
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
868
sdma_v4_0_wait_reg_mem(ring, 0, 1,
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
886
static void sdma_v4_0_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
891
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_FENCE));
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
894
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
895
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
896
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
901
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_FENCE));
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
904
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
905
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
906
amdgpu_ring_write(ring, upper_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
910
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_TRAP));
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
911
amdgpu_ring_write(ring, SDMA_PKT_TRAP_INT_CONTEXT_INT_CONTEXT(0));
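
The sdma_v4_0 set_wptr hits above share one ordering: publish the new write pointer to the CPU-visible writeback slot first, then ring the doorbell, with a fallback to two MMIO register writes when doorbells are not in use. The sketch below mirrors only that ordering with hypothetical names (mock_ring, mmio_write); it is not the amdgpu API.

/* Hypothetical mock of the "publish wptr, then ring doorbell" ordering. */
#include <stdint.h>
#include <stdio.h>

struct mock_ring {
	uint64_t wptr;			/* write pointer, in dwords */
	volatile uint64_t *wb;		/* CPU-visible writeback slot */
	volatile uint64_t *doorbell;	/* doorbell "aperture" (plain memory here) */
	int use_doorbell;
};

/* stand-in for an MMIO register write on the non-doorbell fallback path */
static void mmio_write(const char *reg, uint32_t val)
{
	printf("MMIO %s <= 0x%08x\n", reg, val);
}

static void ring_set_wptr(struct mock_ring *ring)
{
	uint64_t wptr_bytes = ring->wptr << 2;	/* dwords to bytes, as in the hits above */

	if (ring->use_doorbell) {
		*ring->wb = wptr_bytes;		/* publish to the polled writeback slot first */
		*ring->doorbell = wptr_bytes;	/* then notify the engine via the doorbell */
	} else {
		mmio_write("RB_WPTR",    (uint32_t)wptr_bytes);
		mmio_write("RB_WPTR_HI", (uint32_t)(wptr_bytes >> 32));
	}
}

int main(void)
{
	uint64_t wb = 0, db = 0;
	struct mock_ring ring = { .wptr = 16, .wb = &wb, .doorbell = &db, .use_doorbell = 1 };

	ring_set_wptr(&ring);
	printf("wb=0x%llx doorbell=0x%llx\n",
	       (unsigned long long)wb, (unsigned long long)db);
	return 0;
}
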
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1032
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1034
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1059
static int sdma_v4_4_2_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1061
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1076
r = amdgpu_ring_alloc(ring, 5);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1080
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1082
amdgpu_ring_write(ring, lower_32_bits(gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1083
amdgpu_ring_write(ring, upper_32_bits(gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1084
amdgpu_ring_write(ring, SDMA_PKT_WRITE_UNTILED_DW_3_COUNT(0));
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1085
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1086
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
111
static int sdma_v4_4_2_stop_queue(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1112
static int sdma_v4_4_2_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1114
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
112
static int sdma_v4_4_2_restore_queue(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1146
r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1264
static void sdma_v4_4_2_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib)
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1266
struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1289
static void sdma_v4_4_2_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1291
uint32_t seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1292
uint64_t addr = ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1295
sdma_v4_4_2_wait_reg_mem(ring, 1, 0,
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1312
static void sdma_v4_4_2_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1315
amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1318
static void sdma_v4_4_2_ring_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1321
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_SRBM_WRITE) |
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1323
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1324
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1327
static void sdma_v4_4_2_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1330
sdma_v4_4_2_wait_reg_mem(ring, 0, 0, reg, 0, val, mask, 10);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1399
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1464
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1465
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1466
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1470
ring->use_doorbell?"true":"false");
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1473
ring->doorbell_index = adev->doorbell_index.sdma_engine[i] << 1;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1474
ring->vm_hub = AMDGPU_MMHUB0(aid_id);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1476
sprintf(ring->name, "sdma%d.%d", aid_id,
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1478
r = amdgpu_ring_init(adev, ring, 1024, &adev->sdma.trap_irq,
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1485
ring = &adev->sdma.instance[i].page;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1486
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1487
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1492
ring->doorbell_index =
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1494
ring->vm_hub = AMDGPU_MMHUB0(aid_id);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1496
sprintf(ring->name, "page%d.%d", aid_id,
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1498
r = amdgpu_ring_init(adev, ring, 1024,
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1508
amdgpu_get_soft_full_reset_mask(&adev->sdma.instance[0].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1535
amdgpu_ring_fini(&adev->sdma.instance[i].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1659
static int sdma_v4_4_2_reset_queue(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1663
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1664
u32 id = ring->me;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1673
static int sdma_v4_4_2_stop_queue(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1675
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1676
u32 instance_id = ring->me;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1693
rptr = amdgpu_ring_get_rptr(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1694
ring->cached_rptr = rptr;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1703
inst_mask = 1 << ring->me;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1711
static int sdma_v4_4_2_restore_queue(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1713
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1717
inst_mask = 1 << ring->me;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1721
if (!REG_GET_FIELD(RREG32_SDMA(ring->me, regSDMA_F32_CNTL), SDMA_F32_CNTL, HALT))
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1728
ring->me);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1789
amdgpu_fence_process(&adev->sdma.instance[i].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
1839
drm_sched_fault(&adev->sdma.instance[instance].ring.sched);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
2179
adev->sdma.instance[i].ring.funcs = &sdma_v4_4_2_ring_funcs;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
2180
adev->sdma.instance[i].ring.me = i;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
220
static uint64_t sdma_v4_4_2_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
225
rptr = READ_ONCE(*((u64 *)&ring->adev->wb.wb[ring->rptr_offs]));
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
2316
adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
2337
sched = &adev->sdma.instance[i].ring.sched;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
238
static uint64_t sdma_v4_4_2_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
240
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
243
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
245
wptr = READ_ONCE(*((u64 *)&adev->wb.wb[ring->wptr_offs]));
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
248
wptr = RREG32_SDMA(ring->me, regSDMA_GFX_RB_WPTR_HI);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
250
wptr |= RREG32_SDMA(ring->me, regSDMA_GFX_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
252
ring->me, wptr);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
265
static void sdma_v4_4_2_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
267
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
270
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
271
u64 *wb = (u64 *)&adev->wb.wb[ring->wptr_offs];
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
277
ring->wptr_offs,
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
278
lower_32_bits(ring->wptr << 2),
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
279
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
281
WRITE_ONCE(*wb, (ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
283
ring->doorbell_index, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
284
WDOORBELL64(ring->doorbell_index, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
289
ring->me,
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
290
lower_32_bits(ring->wptr << 2),
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
291
ring->me,
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
292
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
293
WREG32_SDMA(ring->me, regSDMA_GFX_RB_WPTR,
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
294
lower_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
295
WREG32_SDMA(ring->me, regSDMA_GFX_RB_WPTR_HI,
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
296
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
307
static uint64_t sdma_v4_4_2_page_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
309
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
312
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
314
wptr = READ_ONCE(*((u64 *)&adev->wb.wb[ring->wptr_offs]));
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
316
wptr = RREG32_SDMA(ring->me, regSDMA_PAGE_RB_WPTR_HI);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
318
wptr |= RREG32_SDMA(ring->me, regSDMA_PAGE_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
331
static void sdma_v4_4_2_page_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
333
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
335
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
336
u64 *wb = (u64 *)&adev->wb.wb[ring->wptr_offs];
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
339
WRITE_ONCE(*wb, (ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
340
WDOORBELL64(ring->doorbell_index, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
342
uint64_t wptr = ring->wptr << 2;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
344
WREG32_SDMA(ring->me, regSDMA_PAGE_RB_WPTR,
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
346
WREG32_SDMA(ring->me, regSDMA_PAGE_RB_WPTR_HI,
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
351
static void sdma_v4_4_2_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
353
struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
358
amdgpu_ring_write(ring, ring->funcs->nop |
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
361
amdgpu_ring_write(ring, ring->funcs->nop);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
374
static void sdma_v4_4_2_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
382
sdma_v4_4_2_ring_insert_nop(ring, (2 - lower_32_bits(ring->wptr)) & 7);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
384
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_INDIRECT) |
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
387
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
388
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
389
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
390
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
391
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
395
static void sdma_v4_4_2_wait_reg_mem(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
401
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_POLL_REGMEM) |
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
407
amdgpu_ring_write(ring, addr0);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
408
amdgpu_ring_write(ring, addr1);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
411
amdgpu_ring_write(ring, addr0 << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
412
amdgpu_ring_write(ring, addr1 << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
414
amdgpu_ring_write(ring, ref); /* reference */
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
415
amdgpu_ring_write(ring, mask); /* mask */
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
416
amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
427
static void sdma_v4_4_2_ring_emit_hdp_flush(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
429
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
434
<< (ring->me % adev->sdma.num_inst_per_aid);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
436
sdma_v4_4_2_wait_reg_mem(ring, 0, 1,
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
454
static void sdma_v4_4_2_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
459
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_FENCE));
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
462
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
463
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
464
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
469
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_FENCE));
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
472
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
473
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
474
amdgpu_ring_write(ring, upper_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
478
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_TRAP));
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
479
amdgpu_ring_write(ring, SDMA_PKT_TRAP_INT_CONTEXT_INT_CONTEXT(0));
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
500
sdma[i] = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
661
static uint32_t sdma_v4_4_2_rb_cntl(struct amdgpu_ring *ring, uint32_t rb_cntl)
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
664
uint32_t rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
688
struct amdgpu_ring *ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
696
wb_offset = (ring->rptr_offs * 4);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
699
rb_cntl = sdma_v4_4_2_rb_cntl(ring, rb_cntl);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
711
WREG32_SDMA(i, regSDMA_GFX_RB_BASE, ring->gpu_addr >> 8);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
712
WREG32_SDMA(i, regSDMA_GFX_RB_BASE_HI, ring->gpu_addr >> 40);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
715
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
724
rwptr = ring->wptr;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
726
rwptr = ring->cached_rptr;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
745
ring->use_doorbell);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
748
OFFSET, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
752
sdma_v4_4_2_ring_set_wptr(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
758
wptr_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
794
struct amdgpu_ring *ring = &adev->sdma.instance[i].page;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
802
wb_offset = (ring->rptr_offs * 4);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
805
rb_cntl = sdma_v4_4_2_rb_cntl(ring, rb_cntl);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
812
rwptr = ring->wptr;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
814
rwptr = ring->cached_rptr;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
838
WREG32_SDMA(i, regSDMA_PAGE_RB_BASE, ring->gpu_addr >> 8);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
839
WREG32_SDMA(i, regSDMA_PAGE_RB_BASE_HI, ring->gpu_addr >> 40);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
842
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
851
ring->use_doorbell);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
854
OFFSET, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
859
sdma_v4_4_2_page_ring_set_wptr(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
865
wptr_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4);
drivers/gpu/drm/amd/amdgpu/sdma_v4_4_2.c
969
struct amdgpu_ring *ring;
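
The pipeline-sync and HDP-flush hits above all end in a POLL_REGMEM packet: wait until a memory or register value, masked, equals a reference (the fence sync_seq, or the flush-done bit), with a bounded retry count (0xfff). A small stand-alone sketch of that wait is below; poll_regmem is a hypothetical helper for illustration, not an amdgpu symbol.

/* Hypothetical sketch of the POLL_REGMEM semantics used above:
 * spin until (*addr & mask) == ref or the retry budget is exhausted.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static bool poll_regmem(const volatile uint32_t *addr,
			uint32_t ref, uint32_t mask, unsigned int retries)
{
	do {
		if ((*addr & mask) == ref)
			return true;	/* condition met, e.g. fence reached sync_seq */
	} while (retries--);
	return false;			/* ran out of retries, like the hardware count */
}

int main(void)
{
	uint32_t fence_slot = 41;	/* pretend the fence has not signalled yet */

	printf("wait #1: %s\n",
	       poll_regmem(&fence_slot, 42, 0xffffffff, 0xfff) ? "done" : "timeout");
	fence_slot = 42;		/* engine "writes" the sequence number */
	printf("wait #2: %s\n",
	       poll_regmem(&fence_slot, 42, 0xffffffff, 0xfff) ? "done" : "timeout");
	return 0;
}
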
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1012
static int sdma_v5_0_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1014
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1032
r = amdgpu_ring_alloc(ring, 20);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1034
drm_err(adev_to_drm(adev), "dma failed to lock ring %d (%d).\n", ring->idx, r);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1039
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1041
amdgpu_ring_write(ring, lower_32_bits(gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1042
amdgpu_ring_write(ring, upper_32_bits(gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1043
amdgpu_ring_write(ring, SDMA_PKT_WRITE_UNTILED_DW_3_COUNT(0));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1044
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1045
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1074
static int sdma_v5_0_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1076
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1114
r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
115
static int sdma_v5_0_stop_queue(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
116
static int sdma_v5_0_restore_queue(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1237
static void sdma_v5_0_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib)
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1239
struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1262
static void sdma_v5_0_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1264
uint32_t seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1265
uint64_t addr = ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1268
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_POLL_REGMEM) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1272
amdgpu_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1273
amdgpu_ring_write(ring, upper_32_bits(addr) & 0xffffffff);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1274
amdgpu_ring_write(ring, seq); /* reference */
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1275
amdgpu_ring_write(ring, 0xffffffff); /* mask */
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1276
amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1291
static void sdma_v5_0_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1294
amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1297
static void sdma_v5_0_ring_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1300
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_SRBM_WRITE) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1302
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1303
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1306
static void sdma_v5_0_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1309
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_POLL_REGMEM) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1312
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1313
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1314
amdgpu_ring_write(ring, val); /* reference */
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1315
amdgpu_ring_write(ring, mask); /* mask */
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1316
amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1320
static void sdma_v5_0_ring_emit_reg_write_reg_wait(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1324
amdgpu_ring_emit_wreg(ring, reg0, ref);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1326
amdgpu_ring_emit_reg_wait(ring, reg0, 0, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1327
amdgpu_ring_emit_reg_wait(ring, reg1, mask, mask);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1381
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1404
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1405
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1406
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1409
ring->use_doorbell?"true":"false");
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1411
ring->doorbell_index = (i == 0) ?
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1415
ring->vm_hub = AMDGPU_GFXHUB(0);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1416
sprintf(ring->name, "sdma%d", i);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1417
r = amdgpu_ring_init(adev, ring, 1024, &adev->sdma.trap_irq,
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1426
amdgpu_get_soft_full_reset_mask(&adev->sdma.instance[0].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1451
amdgpu_ring_fini(&adev->sdma.instance[i].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1535
static int sdma_v5_0_reset_queue(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1539
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1542
if (ring->me >= adev->sdma.num_instances) {
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1547
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1550
r = amdgpu_sdma_reset_engine(adev, ring->me, true);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1555
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1558
static int sdma_v5_0_stop_queue(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1561
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1567
i = ring->me;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1607
static int sdma_v5_0_restore_queue(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1609
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1610
u32 inst_id = ring->me;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1626
static int sdma_v5_0_ring_preempt_ib(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1629
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1633
amdgpu_sdma_get_index_from_ring(ring, &index);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1640
amdgpu_ring_set_preempt_cond_exec(ring, false);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1643
ring->trail_seq += 1;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1644
amdgpu_ring_alloc(ring, 10);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1645
sdma_v5_0_ring_emit_fence(ring, ring->trail_fence_gpu_addr,
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1646
ring->trail_seq, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1647
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1654
if (ring->trail_seq ==
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1655
le32_to_cpu(*(ring->trail_fence_cpu_addr)))
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1662
DRM_ERROR("ring %d failed to be preempted\n", ring->idx);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1669
amdgpu_ring_set_preempt_cond_exec(ring, true);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1709
amdgpu_fence_process(&adev->sdma.instance[0].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1725
amdgpu_fence_process(&adev->sdma.instance[1].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1963
adev->sdma.instance[i].ring.funcs = &sdma_v5_0_ring_funcs;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
1964
adev->sdma.instance[i].ring.me = i;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
2051
adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
2070
&adev->sdma.instance[i].ring.sched;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
303
static unsigned sdma_v5_0_ring_init_cond_exec(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
308
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_COND_EXE));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
309
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
310
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
311
amdgpu_ring_write(ring, 1);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
313
ret = ring->wptr & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
315
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
327
static uint64_t sdma_v5_0_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
332
rptr = (u64 *)ring->rptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
345
static uint64_t sdma_v5_0_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
347
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
350
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
352
wptr = READ_ONCE(*((u64 *)ring->wptr_cpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
355
wptr = RREG32_SOC15_IP(GC, sdma_v5_0_get_reg_offset(adev, ring->me, mmSDMA0_GFX_RB_WPTR_HI));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
357
wptr |= RREG32_SOC15_IP(GC, sdma_v5_0_get_reg_offset(adev, ring->me, mmSDMA0_GFX_RB_WPTR));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
358
DRM_DEBUG("wptr before shift [%i] wptr == 0x%016llx\n", ring->me, wptr);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
371
static void sdma_v5_0_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
373
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
376
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
381
ring->wptr_offs,
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
382
lower_32_bits(ring->wptr << 2),
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
383
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
385
atomic64_set((atomic64_t *)ring->wptr_cpu_addr,
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
386
ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
388
ring->doorbell_index, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
389
WDOORBELL64(ring->doorbell_index, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
394
ring->me,
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
395
lower_32_bits(ring->wptr << 2),
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
396
ring->me,
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
397
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
399
ring->me, mmSDMA0_GFX_RB_WPTR),
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
400
lower_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
402
ring->me, mmSDMA0_GFX_RB_WPTR_HI),
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
403
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
407
static void sdma_v5_0_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
409
struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
414
amdgpu_ring_write(ring, ring->funcs->nop |
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
417
amdgpu_ring_write(ring, ring->funcs->nop);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
430
static void sdma_v5_0_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
436
uint64_t csa_mc_addr = amdgpu_sdma_get_csa_mc_addr(ring, vmid);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
446
sdma_v5_0_ring_insert_nop(ring, (2 - lower_32_bits(ring->wptr)) & 7);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
448
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_INDIRECT) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
451
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
452
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
453
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
454
amdgpu_ring_write(ring, lower_32_bits(csa_mc_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
455
amdgpu_ring_write(ring, upper_32_bits(csa_mc_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
465
static void sdma_v5_0_ring_emit_mem_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
472
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_GCR_REQ));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
473
amdgpu_ring_write(ring, SDMA_PKT_GCR_REQ_PAYLOAD1_BASE_VA_31_7(0));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
474
amdgpu_ring_write(ring, SDMA_PKT_GCR_REQ_PAYLOAD2_GCR_CONTROL_15_0(gcr_cntl) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
476
amdgpu_ring_write(ring, SDMA_PKT_GCR_REQ_PAYLOAD3_LIMIT_VA_31_7(0) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
478
amdgpu_ring_write(ring, SDMA_PKT_GCR_REQ_PAYLOAD4_LIMIT_VA_47_32(0) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
489
static void sdma_v5_0_ring_emit_hdp_flush(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
491
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
495
if (ring->me == 0)
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
500
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_POLL_REGMEM) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
503
amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_done_offset(adev)) << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
504
amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_req_offset(adev)) << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
505
amdgpu_ring_write(ring, ref_and_mask); /* reference */
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
506
amdgpu_ring_write(ring, ref_and_mask); /* mask */
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
507
amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
523
static void sdma_v5_0_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
528
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_FENCE) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
532
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
533
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
534
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
539
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_FENCE) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
543
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
544
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
545
amdgpu_ring_write(ring, upper_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
550
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_TRAP));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
551
amdgpu_ring_write(ring, SDMA_PKT_TRAP_INT_CONTEXT_INT_CONTEXT(0));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
690
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
699
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
705
rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
717
WREG32_SOC15_IP(GC, sdma_v5_0_get_reg_offset(adev, i, mmSDMA0_GFX_RB_RPTR), lower_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
718
WREG32_SOC15_IP(GC, sdma_v5_0_get_reg_offset(adev, i, mmSDMA0_GFX_RB_RPTR_HI), upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
719
WREG32_SOC15_IP(GC, sdma_v5_0_get_reg_offset(adev, i, mmSDMA0_GFX_RB_WPTR), lower_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
720
WREG32_SOC15_IP(GC, sdma_v5_0_get_reg_offset(adev, i, mmSDMA0_GFX_RB_WPTR_HI), upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
728
wptr_gpu_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
743
upper_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFF);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
745
lower_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFC);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
750
ring->gpu_addr >> 8);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
752
ring->gpu_addr >> 40);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
755
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
762
lower_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
764
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
771
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
774
OFFSET, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
782
adev->nbio.funcs->sdma_doorbell_range(adev, i, ring->use_doorbell,
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
783
ring->doorbell_index, 20);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
786
sdma_v5_0_ring_set_wptr(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c
838
return amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1013
r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1137
static void sdma_v5_2_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib)
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1139
struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
116
static int sdma_v5_2_stop_queue(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1162
static void sdma_v5_2_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1164
uint32_t seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1165
uint64_t addr = ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1168
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_POLL_REGMEM) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
117
static int sdma_v5_2_restore_queue(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1172
amdgpu_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1173
amdgpu_ring_write(ring, upper_32_bits(addr) & 0xffffffff);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1174
amdgpu_ring_write(ring, seq); /* reference */
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1175
amdgpu_ring_write(ring, 0xffffffff); /* mask */
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1176
amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1191
static void sdma_v5_2_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1194
struct amdgpu_vmhub *hub = &ring->adev->vmhub[ring->vm_hub];
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1198
amdgpu_ring_emit_wreg(ring, hub->ctx0_ptb_addr_lo32 +
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1201
amdgpu_ring_emit_wreg(ring, hub->ctx0_ptb_addr_hi32 +
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1206
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1209
SDMA_PKT_VM_INVALIDATION_HEADER_GFX_ENG_ID(ring->vm_inv_eng) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1211
amdgpu_ring_write(ring, req);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1212
amdgpu_ring_write(ring, 0xFFFFFFFF);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1213
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1218
static void sdma_v5_2_ring_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1221
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_SRBM_WRITE) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1223
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1224
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1227
static void sdma_v5_2_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1230
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_POLL_REGMEM) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1233
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1234
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1235
amdgpu_ring_write(ring, val); /* reference */
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1236
amdgpu_ring_write(ring, mask); /* mask */
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1237
amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1241
static void sdma_v5_2_ring_emit_reg_write_reg_wait(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1245
amdgpu_ring_emit_wreg(ring, reg0, ref);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1247
amdgpu_ring_emit_reg_wait(ring, reg0, 0, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1248
amdgpu_ring_emit_reg_wait(ring, reg1, mask, mask);
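The sdma_v5_2_ring_emit_reg_wait()/sdma_v5_2_ring_emit_reg_write_reg_wait() entries above outline the write-then-poll pattern: write ref into reg0, wait on reg0 with a zero reference and mask (effectively a write confirmation), then poll reg1 until every bit in mask reads back set, which is what the POLL_REGMEM packet with a retry count asks the engine to do. A minimal standalone C sketch of that behaviour follows; the toy register file, the retry limit and the helper names are illustrative assumptions, not the driver's definitions.

    /* toy model of the register write + poll-regmem wait pattern */
    #include <stdint.h>
    #include <stdio.h>

    static uint32_t regs[256];      /* toy register file */

    static void toy_wreg(uint32_t reg, uint32_t val)
    {
        regs[reg & 255] = val;
    }

    static int toy_poll_regmem(uint32_t reg, uint32_t ref, uint32_t mask,
                               unsigned int retries)
    {
        while (retries--) {
            if ((regs[reg & 255] & mask) == (ref & mask))
                return 0;
        }
        return -1;      /* engine would give up after RETRY_COUNT polls */
    }

    static int toy_reg_write_reg_wait(uint32_t reg0, uint32_t reg1,
                                      uint32_t ref, uint32_t mask)
    {
        toy_wreg(reg0, ref);
        /* first wait on reg0 with val/mask 0 (write confirm), then poll
         * reg1 until all masked bits are set, as in the listed calls */
        if (toy_poll_regmem(reg0, 0, 0, 0xfff))
            return -1;
        return toy_poll_regmem(reg1, mask, mask, 0xfff);
    }

    int main(void)
    {
        regs[2] = 0x1;  /* pretend reg1 already reflects the ack bit */
        return toy_reg_write_reg_wait(1, 2, 0x1, 0x1) ? 1 : 0;
    }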
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1305
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1323
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1324
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1325
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1326
ring->me = i;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1329
ring->use_doorbell?"true":"false");
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1331
ring->doorbell_index =
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1334
ring->vm_hub = AMDGPU_GFXHUB(0);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1335
sprintf(ring->name, "sdma%d", i);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1336
r = amdgpu_ring_init(adev, ring, 1024, &adev->sdma.trap_irq,
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1344
amdgpu_get_soft_full_reset_mask(&adev->sdma.instance[0].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1369
amdgpu_ring_fini(&adev->sdma.instance[i].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
143
static unsigned sdma_v5_2_ring_init_cond_exec(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1443
static int sdma_v5_2_reset_queue(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1447
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1450
if (ring->me >= adev->sdma.num_instances) {
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1455
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1458
r = amdgpu_sdma_reset_engine(adev, ring->me, true);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1463
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1466
static int sdma_v5_2_stop_queue(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1469
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1475
i = ring->me;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
148
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_COND_EXE));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
149
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
150
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
151
amdgpu_ring_write(ring, 1);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1517
static int sdma_v5_2_restore_queue(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1519
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1520
u32 inst_id = ring->me;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
153
ret = ring->wptr & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1537
static int sdma_v5_2_ring_preempt_ib(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1540
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1544
amdgpu_sdma_get_index_from_ring(ring, &index);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1549
amdgpu_ring_set_preempt_cond_exec(ring, false);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
155
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1552
ring->trail_seq += 1;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1553
amdgpu_ring_alloc(ring, 10);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1554
sdma_v5_2_ring_emit_fence(ring, ring->trail_fence_gpu_addr,
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1555
ring->trail_seq, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1556
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1563
if (ring->trail_seq ==
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1564
le32_to_cpu(*(ring->trail_fence_cpu_addr)))
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1571
DRM_ERROR("ring %d failed to be preempted\n", ring->idx);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1578
amdgpu_ring_set_preempt_cond_exec(ring, true);
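The sdma_v5_2_ring_preempt_ib() entries above outline a trailing-fence handshake: gate further conditional execution, bump trail_seq, emit a fence targeting trail_fence_gpu_addr, commit the ring, and poll the CPU-visible copy of that fence until the engine has drained. The register write that actually requests the preemption happens between those steps and is not part of the listed lines; the polling loop, timeout value and toy engine below are assumptions made only to keep the sketch self-contained.

    /* toy model of the trailing-fence preemption handshake */
    #include <stdint.h>
    #include <stdio.h>

    struct toy_ring {
        int preempt_cond_exec;
        uint32_t trail_seq;
        volatile uint32_t trail_fence;  /* stands in for *trail_fence_cpu_addr */
    };

    /* toy engine: pretend the queue drains and signals the fence at once */
    static void toy_engine_run(struct toy_ring *ring)
    {
        ring->trail_fence = ring->trail_seq;
    }

    static int toy_preempt_ib(struct toy_ring *ring)
    {
        unsigned int i, timeout = 100000;

        ring->preempt_cond_exec = 0;    /* set_preempt_cond_exec(false) */
        ring->trail_seq += 1;           /* value the fence must report */
        toy_engine_run(ring);           /* emit_fence + commit stand-in */

        for (i = 0; i < timeout; i++)
            if (ring->trail_fence == ring->trail_seq)
                break;

        ring->preempt_cond_exec = 1;    /* re-arm conditional execution */

        if (i == timeout) {
            fprintf(stderr, "ring failed to be preempted\n");
            return -1;
        }
        return 0;
    }

    int main(void)
    {
        struct toy_ring ring = { .preempt_cond_exec = 1 };

        return toy_preempt_ib(&ring) ? 1 : 0;
    }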
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1615
amdgpu_fence_process(&adev->sdma.instance[0].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1631
amdgpu_fence_process(&adev->sdma.instance[1].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1647
amdgpu_fence_process(&adev->sdma.instance[2].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1663
amdgpu_fence_process(&adev->sdma.instance[3].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
167
static uint64_t sdma_v5_2_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
172
rptr = (u64 *)ring->rptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1832
static void sdma_v5_2_ring_begin_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1834
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
185
static uint64_t sdma_v5_2_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1851
static void sdma_v5_2_ring_end_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1853
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
187
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
190
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
192
wptr = READ_ONCE(*((u64 *)ring->wptr_cpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
195
wptr = RREG32(sdma_v5_2_get_reg_offset(adev, ring->me, mmSDMA0_GFX_RB_WPTR_HI));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1967
adev->sdma.instance[i].ring.funcs = &sdma_v5_2_ring_funcs;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
1968
adev->sdma.instance[i].ring.me = i;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
197
wptr |= RREG32(sdma_v5_2_get_reg_offset(adev, ring->me, mmSDMA0_GFX_RB_WPTR));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
198
DRM_DEBUG("wptr before shift [%i] wptr == 0x%016llx\n", ring->me, wptr);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
2055
adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
2074
&adev->sdma.instance[i].ring.sched;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
211
static void sdma_v5_2_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
213
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
216
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
221
ring->wptr_offs,
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
222
lower_32_bits(ring->wptr << 2),
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
223
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
225
atomic64_set((atomic64_t *)ring->wptr_cpu_addr,
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
226
ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
228
ring->doorbell_index, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
229
WDOORBELL64(ring->doorbell_index, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
235
WREG32(sdma_v5_2_get_reg_offset(adev, ring->me, mmSDMA0_GFX_RB_WPTR),
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
236
lower_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
237
WREG32(sdma_v5_2_get_reg_offset(adev, ring->me, mmSDMA0_GFX_RB_WPTR_HI),
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
238
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
244
ring->me,
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
245
lower_32_bits(ring->wptr << 2),
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
246
ring->me,
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
247
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
248
WREG32(sdma_v5_2_get_reg_offset(adev, ring->me, mmSDMA0_GFX_RB_WPTR),
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
249
lower_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
250
WREG32(sdma_v5_2_get_reg_offset(adev, ring->me, mmSDMA0_GFX_RB_WPTR_HI),
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
251
upper_32_bits(ring->wptr << 2));
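The sdma_v5_2_ring_set_wptr() entries above show the usual split between doorbell and MMIO write-pointer updates: with a doorbell, the 64-bit byte offset (wptr << 2) is published to the CPU-visible wptr slot and then written to the doorbell; without one, it is split into two 32-bit writes to the RB_WPTR/RB_WPTR_HI registers. A minimal C sketch of that pattern follows; the toy_ring struct, the stub register writers and the register names used as strings are illustrative assumptions, not the driver's real definitions.

    /* toy model of the doorbell-vs-MMIO write-pointer update */
    #include <stdint.h>
    #include <stdio.h>

    struct toy_ring {
        int use_doorbell;
        uint32_t doorbell_index;
        uint64_t wptr;          /* in dwords, as in amdgpu_ring */
        uint64_t wptr_shadow;   /* stands in for *wptr_cpu_addr */
    };

    /* stand-ins for WDOORBELL64()/WREG32() */
    static void toy_wdoorbell64(uint32_t index, uint64_t val)
    {
        printf("doorbell[%u] <- 0x%016llx\n", index, (unsigned long long)val);
    }

    static void toy_wreg32(const char *reg, uint32_t val)
    {
        printf("%s <- 0x%08x\n", reg, val);
    }

    static void toy_set_wptr(struct toy_ring *ring)
    {
        /* hardware consumes a byte offset, hence << 2 on a dword wptr */
        uint64_t wptr_bytes = ring->wptr << 2;

        if (ring->use_doorbell) {
            /* publish the 64-bit value, then ring the doorbell */
            ring->wptr_shadow = wptr_bytes;
            toy_wdoorbell64(ring->doorbell_index, wptr_bytes);
        } else {
            /* fall back to two 32-bit MMIO writes, low then high */
            toy_wreg32("RB_WPTR",    (uint32_t)(wptr_bytes & 0xffffffff));
            toy_wreg32("RB_WPTR_HI", (uint32_t)(wptr_bytes >> 32));
        }
    }

    int main(void)
    {
        struct toy_ring ring = { .use_doorbell = 1, .doorbell_index = 240, .wptr = 0x20 };

        toy_set_wptr(&ring);
        ring.use_doorbell = 0;
        toy_set_wptr(&ring);
        return 0;
    }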
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
255
static void sdma_v5_2_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
257
struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
262
amdgpu_ring_write(ring, ring->funcs->nop |
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
265
amdgpu_ring_write(ring, ring->funcs->nop);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
278
static void sdma_v5_2_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
284
uint64_t csa_mc_addr = amdgpu_sdma_get_csa_mc_addr(ring, vmid);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
294
sdma_v5_2_ring_insert_nop(ring, (2 - lower_32_bits(ring->wptr)) & 7);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
296
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_INDIRECT) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
299
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
300
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
301
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
302
amdgpu_ring_write(ring, lower_32_bits(csa_mc_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
303
amdgpu_ring_write(ring, upper_32_bits(csa_mc_addr));
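The sdma_v5_2_ring_emit_ib() entries above show the IB submission packet layout: pad with NOPs so the packet starts where the engine expects ((2 - lower_32_bits(wptr)) & 7), then emit the INDIRECT header, the 32-byte-aligned low dword of the IB address, the high dword, the length in dwords and the CSA address. The sketch below mirrors that layout into a plain dword buffer; the opcode values, buffer size and helper names are placeholders, not the real packet encoding.

    /* toy model of the INDIRECT (IB) packet emission and NOP alignment */
    #include <stdint.h>
    #include <stdio.h>

    #define TOY_OP_NOP      0x00    /* placeholder encodings */
    #define TOY_OP_INDIRECT 0x04

    static uint32_t buf[64];
    static uint64_t wptr;           /* write pointer in dwords */

    static void ring_write(uint32_t v)
    {
        buf[wptr++ & 63] = v;
    }

    static void emit_ib(uint64_t ib_gpu_addr, uint32_t length_dw, uint64_t csa_addr)
    {
        /* align so the IB base dword lands where the engine expects,
         * mirroring (2 - lower_32_bits(ring->wptr)) & 7 in the driver */
        uint32_t pad = (2 - (uint32_t)wptr) & 7;

        while (pad--)
            ring_write(TOY_OP_NOP);

        ring_write(TOY_OP_INDIRECT);
        ring_write((uint32_t)ib_gpu_addr & 0xffffffe0); /* 32-byte aligned */
        ring_write((uint32_t)(ib_gpu_addr >> 32));
        ring_write(length_dw);
        ring_write((uint32_t)csa_addr);
        ring_write((uint32_t)(csa_addr >> 32));
    }

    int main(void)
    {
        uint64_t i;

        emit_ib(0x100000ull, 16, 0x200000ull);
        for (i = 0; i < wptr; i++)
            printf("dw[%llu] = 0x%08x\n", (unsigned long long)i, buf[i]);
        return 0;
    }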
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
313
static void sdma_v5_2_ring_emit_mem_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
321
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_GCR_REQ));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
322
amdgpu_ring_write(ring, SDMA_PKT_GCR_REQ_PAYLOAD1_BASE_VA_31_7(0));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
323
amdgpu_ring_write(ring, SDMA_PKT_GCR_REQ_PAYLOAD2_GCR_CONTROL_15_0(gcr_cntl) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
325
amdgpu_ring_write(ring, SDMA_PKT_GCR_REQ_PAYLOAD3_LIMIT_VA_31_7(0) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
327
amdgpu_ring_write(ring, SDMA_PKT_GCR_REQ_PAYLOAD4_LIMIT_VA_47_32(0) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
338
static void sdma_v5_2_ring_emit_hdp_flush(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
340
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
344
if (ring->me > 1) {
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
345
amdgpu_hdp_flush(adev, ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
347
ref_and_mask = nbio_hf_reg->ref_and_mask_sdma0 << ring->me;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
349
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_POLL_REGMEM) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
352
amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_done_offset(adev)) << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
353
amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_req_offset(adev)) << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
354
amdgpu_ring_write(ring, ref_and_mask); /* reference */
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
355
amdgpu_ring_write(ring, ref_and_mask); /* mask */
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
356
amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
373
static void sdma_v5_2_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
378
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_FENCE) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
382
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
383
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
384
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
389
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_FENCE) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
393
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
394
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
395
amdgpu_ring_write(ring, upper_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
400
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_TRAP));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
401
amdgpu_ring_write(ring, SDMA_PKT_TRAP_INT_CONTEXT_INT_CONTEXT(0));
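The sdma_v5_2_ring_emit_fence() entries above show the fence plus trap sequence: a FENCE packet writes the low 32 bits of the sequence number to the fence address, an optional second FENCE writes the upper 32 bits (the driver advances the target to the next dword for that, which is not visible in the listed lines), and a TRAP packet with INT_CONTEXT(0) raises the completion interrupt. The sketch below just prints the dwords in that order; the opcode values and the emit helper are illustrative assumptions.

    /* toy model of the fence + trap dword sequence */
    #include <stdint.h>
    #include <stdio.h>

    #define TOY_OP_FENCE 0x05   /* placeholder encodings */
    #define TOY_OP_TRAP  0x06

    static void emit(const char *what, uint32_t dw)
    {
        printf("%-18s 0x%08x\n", what, dw);
    }

    static void emit_fence(uint64_t addr, uint64_t seq, int write64bit)
    {
        emit("FENCE header", TOY_OP_FENCE);
        emit("fence addr lo", (uint32_t)addr);
        emit("fence addr hi", (uint32_t)(addr >> 32));
        emit("seq lo", (uint32_t)seq);

        if (write64bit) {
            addr += 4;  /* upper half targets the next dword (assumed) */
            emit("FENCE header", TOY_OP_FENCE);
            emit("fence addr lo", (uint32_t)addr);
            emit("fence addr hi", (uint32_t)(addr >> 32));
            emit("seq hi", (uint32_t)(seq >> 32));
        }

        /* TRAP with INT_CONTEXT(0): ask the engine to raise an interrupt */
        emit("TRAP header", TOY_OP_TRAP);
        emit("TRAP int_context", 0);
    }

    int main(void)
    {
        emit_fence(0x400000ull, 0x100000001ull, 1);
        return 0;
    }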
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
539
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
548
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
554
rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
566
WREG32_SOC15_IP(GC, sdma_v5_2_get_reg_offset(adev, i, mmSDMA0_GFX_RB_RPTR), lower_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
567
WREG32_SOC15_IP(GC, sdma_v5_2_get_reg_offset(adev, i, mmSDMA0_GFX_RB_RPTR_HI), upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
568
WREG32_SOC15_IP(GC, sdma_v5_2_get_reg_offset(adev, i, mmSDMA0_GFX_RB_WPTR), lower_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
569
WREG32_SOC15_IP(GC, sdma_v5_2_get_reg_offset(adev, i, mmSDMA0_GFX_RB_WPTR_HI), upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
578
wptr_gpu_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
593
upper_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFF);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
595
lower_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFC);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
599
WREG32_SOC15_IP(GC, sdma_v5_2_get_reg_offset(adev, i, mmSDMA0_GFX_RB_BASE), ring->gpu_addr >> 8);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
600
WREG32_SOC15_IP(GC, sdma_v5_2_get_reg_offset(adev, i, mmSDMA0_GFX_RB_BASE_HI), ring->gpu_addr >> 40);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
603
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
609
WREG32(sdma_v5_2_get_reg_offset(adev, i, mmSDMA0_GFX_RB_WPTR), lower_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
610
WREG32(sdma_v5_2_get_reg_offset(adev, i, mmSDMA0_GFX_RB_WPTR_HI), upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
616
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
619
OFFSET, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
626
adev->nbio.funcs->sdma_doorbell_range(adev, i, ring->use_doorbell,
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
627
ring->doorbell_index,
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
631
sdma_v5_2_ring_set_wptr(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
685
return amdgpu_ring_test_helper(ring);
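The sdma_v5_2 gfx resume entries above rely on two small pieces of arithmetic: RB_CNTL is programmed with log2 of the ring size in dwords (rb_bufsz = order_base_2(ring_size / 4)), and the 64-bit ring base address is split across RB_BASE (gpu_addr >> 8) and RB_BASE_HI (gpu_addr >> 40). The worked example below reproduces just that arithmetic; the order_base_2() helper is a stand-in for the kernel's, and the example sizes and addresses are assumed values.

    /* worked example of rb_bufsz and RB_BASE/RB_BASE_HI computation */
    #include <stdint.h>
    #include <stdio.h>

    /* log2 rounded up, like the kernel's order_base_2() for powers of two */
    static unsigned int toy_order_base_2(uint64_t n)
    {
        unsigned int order = 0;

        while ((1ull << order) < n)
            order++;
        return order;
    }

    int main(void)
    {
        uint64_t ring_size = 1024 * 1024;           /* bytes, assumed example */
        uint64_t gpu_addr  = 0x0000018000400000ull; /* assumed ring base */

        unsigned int rb_bufsz = toy_order_base_2(ring_size / 4);

        printf("rb_bufsz (log2 dwords) = %u\n", rb_bufsz);
        printf("RB_BASE    = 0x%08x\n", (uint32_t)(gpu_addr >> 8));
        printf("RB_BASE_HI = 0x%08x\n", (uint32_t)(gpu_addr >> 40));
        return 0;
    }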
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
912
static int sdma_v5_2_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
914
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
932
r = amdgpu_ring_alloc(ring, 20);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
934
drm_err(adev_to_drm(adev), "dma failed to lock ring %d (%d).\n", ring->idx, r);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
939
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
941
amdgpu_ring_write(ring, lower_32_bits(gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
942
amdgpu_ring_write(ring, upper_32_bits(gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
943
amdgpu_ring_write(ring, SDMA_PKT_WRITE_UNTILED_DW_3_COUNT(0));
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
944
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
945
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
974
static int sdma_v5_2_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/sdma_v5_2.c
976
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1020
r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1143
static void sdma_v6_0_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib)
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1145
struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1167
static void sdma_v6_0_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1169
uint32_t seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1170
uint64_t addr = ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1173
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_POLL_REGMEM) |
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1177
amdgpu_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1178
amdgpu_ring_write(ring, upper_32_bits(addr) & 0xffffffff);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1179
amdgpu_ring_write(ring, seq); /* reference */
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1180
amdgpu_ring_write(ring, 0xffffffff); /* mask */
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1181
amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1195
static void sdma_v6_0_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1198
struct amdgpu_vmhub *hub = &ring->adev->vmhub[ring->vm_hub];
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1202
amdgpu_ring_emit_wreg(ring, hub->ctx0_ptb_addr_lo32 +
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1205
amdgpu_ring_emit_wreg(ring, hub->ctx0_ptb_addr_hi32 +
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1210
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1213
SDMA_PKT_VM_INVALIDATION_HEADER_GFX_ENG_ID(ring->vm_inv_eng) |
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1215
amdgpu_ring_write(ring, req);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1216
amdgpu_ring_write(ring, 0xFFFFFFFF);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1217
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1222
static void sdma_v6_0_ring_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1225
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_SRBM_WRITE) |
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1227
amdgpu_ring_write(ring, reg);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1228
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1231
static void sdma_v6_0_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1234
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_POLL_REGMEM) |
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1237
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1238
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1239
amdgpu_ring_write(ring, val); /* reference */
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1240
amdgpu_ring_write(ring, mask); /* mask */
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1241
amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1245
static void sdma_v6_0_ring_emit_reg_write_reg_wait(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1249
amdgpu_ring_emit_wreg(ring, reg0, ref);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1251
amdgpu_ring_emit_reg_wait(ring, reg0, 0, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1252
amdgpu_ring_emit_reg_wait(ring, reg1, mask, mask);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1322
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1343
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1344
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1345
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1346
ring->me = i;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1347
ring->no_user_submission = adev->sdma.no_user_submission;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1350
ring->use_doorbell?"true":"false");
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1352
ring->doorbell_index =
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1355
ring->vm_hub = AMDGPU_GFXHUB(0);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1356
sprintf(ring->name, "sdma%d", i);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1357
r = amdgpu_ring_init(adev, ring, 1024,
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1366
amdgpu_get_soft_full_reset_mask(&adev->sdma.instance[0].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1433
amdgpu_ring_fini(&adev->sdma.instance[i].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
145
static unsigned sdma_v6_0_ring_init_cond_exec(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
150
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_COND_EXE));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
151
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
152
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
153
amdgpu_ring_write(ring, 1);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1534
static int sdma_v6_0_ring_preempt_ib(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1537
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1541
amdgpu_sdma_get_index_from_ring(ring, &index);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1546
amdgpu_ring_set_preempt_cond_exec(ring, false);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1549
ring->trail_seq += 1;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
155
ret = ring->wptr & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1550
amdgpu_ring_alloc(ring, 10);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1551
sdma_v6_0_ring_emit_fence(ring, ring->trail_fence_gpu_addr,
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1552
ring->trail_seq, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1553
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1560
if (ring->trail_seq ==
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1561
le32_to_cpu(*(ring->trail_fence_cpu_addr)))
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1568
DRM_ERROR("ring %d failed to be preempted\n", ring->idx);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
157
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1575
amdgpu_ring_set_preempt_cond_exec(ring, true);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1579
static int sdma_v6_0_reset_queue(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1583
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1586
if (ring->me >= adev->sdma.num_instances) {
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1591
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1593
r = amdgpu_mes_reset_legacy_queue(adev, ring, vmid, true, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1597
r = sdma_v6_0_gfx_resume_instance(adev, ring->me, true);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1601
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1642
amdgpu_fence_process(&adev->sdma.instance[instances].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
169
static uint64_t sdma_v6_0_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
174
rptr = (u64 *)ring->rptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1801
adev->sdma.instance[i].ring.funcs = &sdma_v6_0_ring_funcs;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1802
adev->sdma.instance[i].ring.me = i;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
187
static uint64_t sdma_v6_0_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1893
adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
191
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
1910
&adev->sdma.instance[i].ring.sched;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
193
wptr = READ_ONCE(*((u64 *)ring->wptr_cpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
207
static void sdma_v6_0_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
209
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
211
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
216
ring->wptr_offs,
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
217
lower_32_bits(ring->wptr << 2),
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
218
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
220
atomic64_set((atomic64_t *)ring->wptr_cpu_addr,
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
221
ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
223
ring->doorbell_index, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
224
WDOORBELL64(ring->doorbell_index, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
229
ring->me,
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
230
lower_32_bits(ring->wptr << 2),
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
231
ring->me,
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
232
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
234
ring->me, regSDMA0_QUEUE0_RB_WPTR),
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
235
lower_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
237
ring->me, regSDMA0_QUEUE0_RB_WPTR_HI),
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
238
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
242
static void sdma_v6_0_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
244
struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
249
amdgpu_ring_write(ring, ring->funcs->nop |
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
252
amdgpu_ring_write(ring, ring->funcs->nop);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
265
static void sdma_v6_0_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
271
uint64_t csa_mc_addr = amdgpu_sdma_get_csa_mc_addr(ring, vmid);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
281
sdma_v6_0_ring_insert_nop(ring, (2 - lower_32_bits(ring->wptr)) & 7);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
283
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_INDIRECT) |
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
286
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
287
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
288
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
289
amdgpu_ring_write(ring, lower_32_bits(csa_mc_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
290
amdgpu_ring_write(ring, upper_32_bits(csa_mc_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
300
static void sdma_v6_0_ring_emit_mem_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
307
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_GCR_REQ));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
308
amdgpu_ring_write(ring, SDMA_PKT_GCR_REQ_PAYLOAD1_BASE_VA_31_7(0));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
309
amdgpu_ring_write(ring, SDMA_PKT_GCR_REQ_PAYLOAD2_GCR_CONTROL_15_0(gcr_cntl) |
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
311
amdgpu_ring_write(ring, SDMA_PKT_GCR_REQ_PAYLOAD3_LIMIT_VA_31_7(0) |
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
313
amdgpu_ring_write(ring, SDMA_PKT_GCR_REQ_PAYLOAD4_LIMIT_VA_47_32(0) |
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
325
static void sdma_v6_0_ring_emit_hdp_flush(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
327
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
331
ref_and_mask = nbio_hf_reg->ref_and_mask_sdma0 << ring->me;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
333
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_POLL_REGMEM) |
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
336
amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_done_offset(adev)) << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
337
amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_req_offset(adev)) << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
338
amdgpu_ring_write(ring, ref_and_mask); /* reference */
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
339
amdgpu_ring_write(ring, ref_and_mask); /* mask */
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
340
amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
356
static void sdma_v6_0_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
361
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_FENCE) |
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
365
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
366
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
367
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
372
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_FENCE) |
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
376
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
377
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
378
amdgpu_ring_write(ring, upper_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
383
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_TRAP));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
384
amdgpu_ring_write(ring, SDMA_PKT_TRAP_INT_CONTEXT_INT_CONTEXT(0));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
485
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
493
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
498
rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
511
WREG32_SOC15_IP(GC, sdma_v6_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_RPTR), lower_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
512
WREG32_SOC15_IP(GC, sdma_v6_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_RPTR_HI), upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
513
WREG32_SOC15_IP(GC, sdma_v6_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_WPTR), lower_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
514
WREG32_SOC15_IP(GC, sdma_v6_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_WPTR_HI), upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
522
wptr_gpu_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
530
upper_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFF);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
532
lower_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFC);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
538
WREG32_SOC15_IP(GC, sdma_v6_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_BASE), ring->gpu_addr >> 8);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
539
WREG32_SOC15_IP(GC, sdma_v6_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_BASE_HI), ring->gpu_addr >> 40);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
542
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
548
WREG32_SOC15_IP(GC, sdma_v6_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_WPTR), lower_32_bits(ring->wptr) << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
549
WREG32_SOC15_IP(GC, sdma_v6_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_WPTR_HI), upper_32_bits(ring->wptr) << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
555
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
558
OFFSET, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
566
adev->nbio.funcs->sdma_doorbell_range(adev, i, ring->use_doorbell,
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
567
ring->doorbell_index,
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
571
sdma_v6_0_ring_set_wptr(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
621
return amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
799
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
804
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
805
r = amdgpu_ring_test_ib(ring, tmo);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
919
static int sdma_v6_0_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
921
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
939
r = amdgpu_ring_alloc(ring, 5);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
941
drm_err(adev_to_drm(adev), "dma failed to lock ring %d (%d).\n", ring->idx, r);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
946
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_WRITE) |
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
948
amdgpu_ring_write(ring, lower_32_bits(gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
949
amdgpu_ring_write(ring, upper_32_bits(gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
950
amdgpu_ring_write(ring, SDMA_PKT_WRITE_UNTILED_DW_3_COUNT(0));
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
951
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
952
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
981
static int sdma_v6_0_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/sdma_v6_0.c
983
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1036
r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1163
static void sdma_v7_0_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib)
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1165
struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1187
static void sdma_v7_0_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1189
uint32_t seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1190
uint64_t addr = ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1193
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_POLL_REGMEM) |
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1197
amdgpu_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1198
amdgpu_ring_write(ring, upper_32_bits(addr) & 0xffffffff);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1199
amdgpu_ring_write(ring, seq); /* reference */
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1200
amdgpu_ring_write(ring, 0xffffffff); /* mask */
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1201
amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1215
static void sdma_v7_0_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1218
amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1221
static void sdma_v7_0_ring_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1227
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_SRBM_WRITE));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1228
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1229
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1232
static void sdma_v7_0_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1235
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_POLL_REGMEM) |
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1238
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1239
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1240
amdgpu_ring_write(ring, val); /* reference */
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1241
amdgpu_ring_write(ring, mask); /* mask */
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1242
amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1246
static void sdma_v7_0_ring_emit_reg_write_reg_wait(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1250
amdgpu_ring_emit_wreg(ring, reg0, ref);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1252
amdgpu_ring_emit_reg_wait(ring, reg0, 0, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1253
amdgpu_ring_emit_reg_wait(ring, reg1, mask, mask);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1307
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1328
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1329
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1330
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1331
ring->me = i;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1332
ring->no_user_submission = adev->sdma.no_user_submission;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1335
ring->use_doorbell?"true":"false");
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1337
ring->doorbell_index =
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1340
ring->vm_hub = AMDGPU_GFXHUB(0);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1341
sprintf(ring->name, "sdma%d", i);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1342
r = amdgpu_ring_init(adev, ring, 1024,
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1351
amdgpu_get_soft_full_reset_mask(&adev->sdma.instance[0].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1385
amdgpu_ring_fini(&adev->sdma.instance[i].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
144
static unsigned sdma_v7_0_ring_init_cond_exec(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1489
static int sdma_v7_0_ring_preempt_ib(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
149
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_COND_EXE));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1492
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1496
amdgpu_sdma_get_index_from_ring(ring, &index);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
150
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1501
amdgpu_ring_set_preempt_cond_exec(ring, false);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1504
ring->trail_seq += 1;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1505
r = amdgpu_ring_alloc(ring, 10);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1507
DRM_ERROR("ring %d failed to be allocated\n", ring->idx);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
151
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1510
sdma_v7_0_ring_emit_fence(ring, ring->trail_fence_gpu_addr,
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1511
ring->trail_seq, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1512
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1519
if (ring->trail_seq ==
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
152
amdgpu_ring_write(ring, 1);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1520
le32_to_cpu(*(ring->trail_fence_cpu_addr)))
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1527
DRM_ERROR("ring %d failed to be preempted\n", ring->idx);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1534
amdgpu_ring_set_preempt_cond_exec(ring, true);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
154
ret = ring->wptr & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
156
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1574
amdgpu_fence_process(&adev->sdma.instance[instances].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
168
static uint64_t sdma_v7_0_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
173
rptr = (u64 *)ring->rptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1734
adev->sdma.instance[i].ring.funcs = &sdma_v7_0_ring_funcs;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1735
adev->sdma.instance[i].ring.me = i;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1843
adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
186
static uint64_t sdma_v7_0_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
1860
&adev->sdma.instance[i].ring.sched;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
190
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
192
wptr = READ_ONCE(*((u64 *)ring->wptr_cpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
206
static void sdma_v7_0_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
208
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
212
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
217
ring->wptr_offs,
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
218
lower_32_bits(ring->wptr << 2),
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
219
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
221
atomic64_set((atomic64_t *)ring->wptr_cpu_addr,
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
222
ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
224
ring->doorbell_index, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
225
WDOORBELL64(ring->doorbell_index, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
230
ring->me,
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
231
lower_32_bits(ring->wptr << 2),
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
232
ring->me,
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
233
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
235
ring->me,
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
237
lower_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
239
ring->me,
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
241
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
245
static void sdma_v7_0_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
247
struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
252
amdgpu_ring_write(ring, ring->funcs->nop |
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
255
amdgpu_ring_write(ring, ring->funcs->nop);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
268
static void sdma_v7_0_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
274
uint64_t csa_mc_addr = amdgpu_sdma_get_csa_mc_addr(ring, vmid);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
284
sdma_v7_0_ring_insert_nop(ring, (2 - lower_32_bits(ring->wptr)) & 7);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
286
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_INDIRECT) |
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
289
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
290
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
291
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
292
amdgpu_ring_write(ring, lower_32_bits(csa_mc_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
293
amdgpu_ring_write(ring, upper_32_bits(csa_mc_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
303
static void sdma_v7_0_ring_emit_mem_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
310
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_GCR_REQ));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
311
amdgpu_ring_write(ring, SDMA_PKT_GCR_REQ_PAYLOAD1_BASE_VA_31_7(0));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
312
amdgpu_ring_write(ring, SDMA_PKT_GCR_REQ_PAYLOAD2_GCR_CONTROL_15_0(gcr_cntl) |
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
314
amdgpu_ring_write(ring, SDMA_PKT_GCR_REQ_PAYLOAD3_LIMIT_VA_31_7(0) |
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
316
amdgpu_ring_write(ring, SDMA_PKT_GCR_REQ_PAYLOAD4_LIMIT_VA_47_32(0) |
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
328
static void sdma_v7_0_ring_emit_hdp_flush(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
330
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
334
ref_and_mask = nbio_hf_reg->ref_and_mask_sdma0 << ring->me;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
336
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_POLL_REGMEM) |
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
339
amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_done_offset(adev)) << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
340
amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_req_offset(adev)) << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
341
amdgpu_ring_write(ring, ref_and_mask); /* reference */
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
342
amdgpu_ring_write(ring, ref_and_mask); /* mask */
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
343
amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
359
static void sdma_v7_0_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
364
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_FENCE) |
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
368
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
369
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
370
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
375
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_FENCE) |
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
379
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
380
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
381
amdgpu_ring_write(ring, upper_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
386
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_TRAP));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
387
amdgpu_ring_write(ring, SDMA_PKT_TRAP_INT_CONTEXT_INT_CONTEXT(0));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
477
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
486
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
489
rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
502
WREG32_SOC15_IP(GC, sdma_v7_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_RPTR), lower_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
503
WREG32_SOC15_IP(GC, sdma_v7_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_RPTR_HI), upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
504
WREG32_SOC15_IP(GC, sdma_v7_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_WPTR), lower_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
505
WREG32_SOC15_IP(GC, sdma_v7_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_WPTR_HI), upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
513
wptr_gpu_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
521
upper_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFF);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
523
lower_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFC);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
533
WREG32_SOC15_IP(GC, sdma_v7_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_BASE), ring->gpu_addr >> 8);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
534
WREG32_SOC15_IP(GC, sdma_v7_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_BASE_HI), ring->gpu_addr >> 40);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
537
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
543
WREG32_SOC15_IP(GC, sdma_v7_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_WPTR), lower_32_bits(ring->wptr) << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
544
WREG32_SOC15_IP(GC, sdma_v7_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_WPTR_HI), upper_32_bits(ring->wptr) << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
550
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
553
OFFSET, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
561
adev->nbio.funcs->sdma_doorbell_range(adev, i, ring->use_doorbell,
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
562
ring->doorbell_index,
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
566
sdma_v7_0_ring_set_wptr(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
611
ring->sched.ready = true;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
618
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
620
ring->sched.ready = false;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
791
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
796
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
797
r = amdgpu_ring_test_ib(ring, tmo);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
805
static int sdma_v7_0_reset_queue(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
809
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
812
if (ring->me >= adev->sdma.num_instances) {
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
817
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
819
r = amdgpu_mes_reset_legacy_queue(adev, ring, vmid, true, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
823
r = sdma_v7_0_gfx_resume_instance(adev, ring->me, true);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
827
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
935
static int sdma_v7_0_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
937
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
955
r = amdgpu_ring_alloc(ring, 5);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
957
drm_err(adev_to_drm(adev), "dma failed to lock ring %d (%d).\n", ring->idx, r);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
962
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_WRITE) |
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
964
amdgpu_ring_write(ring, lower_32_bits(gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
965
amdgpu_ring_write(ring, upper_32_bits(gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
966
amdgpu_ring_write(ring, SDMA_PKT_WRITE_UNTILED_DW_3_COUNT(0));
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
967
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
968
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
997
static int sdma_v7_0_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/sdma_v7_0.c
999
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1026
r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1160
static void sdma_v7_1_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib)
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1162
struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1184
static void sdma_v7_1_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1186
uint32_t seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1187
uint64_t addr = ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1190
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_POLL_REGMEM) |
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1193
amdgpu_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1194
amdgpu_ring_write(ring, upper_32_bits(addr) & 0xffffffff);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1195
amdgpu_ring_write(ring, seq); /* reference */
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1196
amdgpu_ring_write(ring, 0xffffffff); /* mask */
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1197
amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1211
static void sdma_v7_1_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1214
amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1217
static void sdma_v7_1_ring_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1223
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_SRBM_WRITE));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1224
amdgpu_ring_write(ring, soc_v1_0_normalize_xcc_reg_offset(reg) << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1225
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1228
static void sdma_v7_1_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1231
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_POLL_REGMEM) |
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1233
amdgpu_ring_write(ring, soc_v1_0_normalize_xcc_reg_offset(reg) << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1234
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1235
amdgpu_ring_write(ring, val); /* reference */
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1236
amdgpu_ring_write(ring, mask); /* mask */
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1237
amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1241
static void sdma_v7_1_ring_emit_reg_write_reg_wait(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1245
amdgpu_ring_emit_wreg(ring, reg0, ref);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1247
amdgpu_ring_emit_reg_wait(ring, reg0, 0, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1248
amdgpu_ring_emit_reg_wait(ring, reg1, mask, mask);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1273
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1288
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1289
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1290
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1291
ring->me = i;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1300
ring->use_doorbell?"true":"false");
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1302
ring->doorbell_index =
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1305
ring->vm_hub = AMDGPU_GFXHUB(xcc_id);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1306
sprintf(ring->name, "sdma%d.%d", xcc_id,
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1308
r = amdgpu_ring_init(adev, ring, 1024,
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1317
amdgpu_get_soft_full_reset_mask(&adev->sdma.instance[0].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1345
amdgpu_ring_fini(&adev->sdma.instance[i].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
138
static unsigned sdma_v7_1_ring_init_cond_exec(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1426
static int sdma_v7_1_ring_preempt_ib(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1429
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
143
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_COND_EXE));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1433
amdgpu_sdma_get_index_from_ring(ring, &index);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1438
amdgpu_ring_set_preempt_cond_exec(ring, false);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
144
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1441
ring->trail_seq += 1;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1442
r = amdgpu_ring_alloc(ring, 10);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1444
DRM_ERROR("ring %d failed to be allocated \n", ring->idx);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1447
sdma_v7_1_ring_emit_fence(ring, ring->trail_fence_gpu_addr,
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1448
ring->trail_seq, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1449
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
145
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1456
if (ring->trail_seq ==
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1457
le32_to_cpu(*(ring->trail_fence_cpu_addr)))
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
146
amdgpu_ring_write(ring, 1);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1464
DRM_ERROR("ring %d failed to be preempted\n", ring->idx);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1471
amdgpu_ring_set_preempt_cond_exec(ring, true);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
148
ret = ring->wptr & ring->buf_mask;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
150
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1525
amdgpu_fence_process(&adev->sdma.instance[instances].ring);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
162
static uint64_t sdma_v7_1_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1661
adev->sdma.instance[i].ring.funcs = &sdma_v7_1_ring_funcs;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1662
adev->sdma.instance[i].ring.me = i;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
167
rptr = (u64 *)ring->rptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1753
adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
1770
&adev->sdma.instance[i].ring.sched;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
180
static uint64_t sdma_v7_1_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
184
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
186
wptr = READ_ONCE(*((u64 *)ring->wptr_cpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
200
static void sdma_v7_1_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
202
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
206
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
211
ring->wptr_offs,
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
212
lower_32_bits(ring->wptr << 2),
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
213
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
215
atomic64_set((atomic64_t *)ring->wptr_cpu_addr,
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
216
ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
218
ring->doorbell_index, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
219
WDOORBELL64(ring->doorbell_index, ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
224
ring->me,
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
225
lower_32_bits(ring->wptr << 2),
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
226
ring->me,
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
227
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
229
ring->me,
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
231
lower_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
233
ring->me,
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
235
upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
239
static void sdma_v7_1_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
241
struct amdgpu_sdma_instance *sdma = amdgpu_sdma_get_instance_from_ring(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
246
amdgpu_ring_write(ring, ring->funcs->nop |
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
249
amdgpu_ring_write(ring, ring->funcs->nop);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
262
static void sdma_v7_1_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
268
uint64_t csa_mc_addr = amdgpu_sdma_get_csa_mc_addr(ring, vmid);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
278
sdma_v7_1_ring_insert_nop(ring, (2 - lower_32_bits(ring->wptr)) & 7);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
280
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_INDIRECT) |
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
283
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
284
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
285
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
286
amdgpu_ring_write(ring, lower_32_bits(csa_mc_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
287
amdgpu_ring_write(ring, upper_32_bits(csa_mc_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
297
static void sdma_v7_1_ring_emit_mem_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
304
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_GCR_REQ));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
305
amdgpu_ring_write(ring, SDMA_PKT_GCR_REQ_PAYLOAD1_BASE_VA_31_7(0));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
306
amdgpu_ring_write(ring, SDMA_PKT_GCR_REQ_PAYLOAD2_BASE_VA_56_32(0));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
307
amdgpu_ring_write(ring, SDMA_PKT_GCR_REQ_PAYLOAD3_GCR_CONTROL_18_0(gcr_cntl) |
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
309
amdgpu_ring_write(ring, SDMA_PKT_GCR_REQ_PAYLOAD4_LIMIT_VA_47_16(0));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
310
amdgpu_ring_write(ring, SDMA_PKT_GCR_REQ_PAYLOAD5_LIMIT_VA_56_48(0) |
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
327
static void sdma_v7_1_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
332
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_FENCE) |
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
336
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
337
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
338
amdgpu_ring_write(ring, lower_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
343
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_FENCE) |
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
347
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
348
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
349
amdgpu_ring_write(ring, upper_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
354
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_TRAP));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
355
amdgpu_ring_write(ring, SDMA_PKT_TRAP_INT_CONTEXT_INT_CONTEXT(0));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
459
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
468
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
471
rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
484
WREG32_SOC15_IP(GC, sdma_v7_1_get_reg_offset(adev, i, regSDMA0_SDMA_QUEUE0_RB_RPTR), lower_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
485
WREG32_SOC15_IP(GC, sdma_v7_1_get_reg_offset(adev, i, regSDMA0_SDMA_QUEUE0_RB_RPTR_HI), upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
486
WREG32_SOC15_IP(GC, sdma_v7_1_get_reg_offset(adev, i, regSDMA0_SDMA_QUEUE0_RB_WPTR), lower_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
487
WREG32_SOC15_IP(GC, sdma_v7_1_get_reg_offset(adev, i, regSDMA0_SDMA_QUEUE0_RB_WPTR_HI), upper_32_bits(ring->wptr << 2));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
495
wptr_gpu_addr = ring->wptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
503
upper_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFF);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
505
lower_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFC);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
515
WREG32_SOC15_IP(GC, sdma_v7_1_get_reg_offset(adev, i, regSDMA0_SDMA_QUEUE0_RB_BASE), ring->gpu_addr >> 8);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
516
WREG32_SOC15_IP(GC, sdma_v7_1_get_reg_offset(adev, i, regSDMA0_SDMA_QUEUE0_RB_BASE_HI), ring->gpu_addr >> 40);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
519
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
525
WREG32_SOC15_IP(GC, sdma_v7_1_get_reg_offset(adev, i, regSDMA0_SDMA_QUEUE0_RB_WPTR), lower_32_bits(ring->wptr) << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
526
WREG32_SOC15_IP(GC, sdma_v7_1_get_reg_offset(adev, i, regSDMA0_SDMA_QUEUE0_RB_WPTR_HI), upper_32_bits(ring->wptr) << 2);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
532
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
535
OFFSET, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
543
adev->nbio.funcs->sdma_doorbell_range(adev, i, ring->use_doorbell,
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
544
ring->doorbell_index,
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
548
sdma_v7_1_ring_set_wptr(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
593
ring->sched.ready = true;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
600
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
602
ring->sched.ready = false;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
782
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
787
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
788
r = amdgpu_ring_test_ib(ring, tmo);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
796
static int sdma_v7_1_reset_queue(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
800
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
803
if (ring->me >= adev->sdma.num_instances) {
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
808
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
810
r = amdgpu_mes_reset_legacy_queue(adev, ring, vmid, true, 0);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
814
r = sdma_v7_1_gfx_resume_instance(adev, ring->me, true);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
818
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
925
static int sdma_v7_1_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
927
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
945
r = amdgpu_ring_alloc(ring, 5);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
947
DRM_ERROR("amdgpu: dma failed to lock ring %d (%d).\n", ring->idx, r);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
952
amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_WRITE) |
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
954
amdgpu_ring_write(ring, lower_32_bits(gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
955
amdgpu_ring_write(ring, upper_32_bits(gpu_addr));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
956
amdgpu_ring_write(ring, SDMA_PKT_WRITE_UNTILED_DW_3_COUNT(0));
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
957
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
958
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
987
static int sdma_v7_1_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/sdma_v7_1.c
989
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/si.c
1491
static void si_flush_hdp(struct amdgpu_device *adev, struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/si.c
1493
if (!ring || !ring->funcs->emit_wreg) {
drivers/gpu/drm/amd/amdgpu/si.c
1497
amdgpu_ring_emit_wreg(ring, mmHDP_MEM_COHERENCY_FLUSH_CNTL, 1);
drivers/gpu/drm/amd/amdgpu/si.c
1502
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/si.c
1504
if (!ring || !ring->funcs->emit_wreg) {
drivers/gpu/drm/amd/amdgpu/si.c
1508
amdgpu_ring_emit_wreg(ring, mmHDP_DEBUG0, 1);
drivers/gpu/drm/amd/amdgpu/si_dma.c
107
static void si_dma_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/si_dma.c
113
amdgpu_ring_write(ring, DMA_PACKET(DMA_PACKET_FENCE, 0, 0, 0, 0));
drivers/gpu/drm/amd/amdgpu/si_dma.c
114
amdgpu_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/amd/amdgpu/si_dma.c
115
amdgpu_ring_write(ring, (upper_32_bits(addr) & 0xff));
drivers/gpu/drm/amd/amdgpu/si_dma.c
116
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/si_dma.c
120
amdgpu_ring_write(ring, DMA_PACKET(DMA_PACKET_FENCE, 0, 0, 0, 0));
drivers/gpu/drm/amd/amdgpu/si_dma.c
121
amdgpu_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/amd/amdgpu/si_dma.c
122
amdgpu_ring_write(ring, (upper_32_bits(addr) & 0xff));
drivers/gpu/drm/amd/amdgpu/si_dma.c
123
amdgpu_ring_write(ring, upper_32_bits(seq));
drivers/gpu/drm/amd/amdgpu/si_dma.c
126
amdgpu_ring_write(ring, DMA_PACKET(DMA_PACKET_TRAP, 0, 0, 0, 0));
drivers/gpu/drm/amd/amdgpu/si_dma.c
144
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/si_dma.c
150
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/si_dma.c
156
rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/si_dma.c
167
rptr_addr = ring->rptr_gpu_addr;
drivers/gpu/drm/amd/amdgpu/si_dma.c
174
WREG32(mmDMA_GFX_RB_BASE + sdma_offsets[i], ring->gpu_addr >> 8);
drivers/gpu/drm/amd/amdgpu/si_dma.c
187
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/si_dma.c
188
WREG32(mmDMA_GFX_RB_WPTR + sdma_offsets[i], ring->wptr << 2);
drivers/gpu/drm/amd/amdgpu/si_dma.c
191
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/si_dma.c
208
static int si_dma_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/si_dma.c
210
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/si_dma.c
225
r = amdgpu_ring_alloc(ring, 4);
drivers/gpu/drm/amd/amdgpu/si_dma.c
229
amdgpu_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, 1));
drivers/gpu/drm/amd/amdgpu/si_dma.c
230
amdgpu_ring_write(ring, lower_32_bits(gpu_addr));
drivers/gpu/drm/amd/amdgpu/si_dma.c
231
amdgpu_ring_write(ring, upper_32_bits(gpu_addr) & 0xff);
drivers/gpu/drm/amd/amdgpu/si_dma.c
232
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/si_dma.c
233
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/si_dma.c
259
static int si_dma_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/si_dma.c
261
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/si_dma.c
287
r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
drivers/gpu/drm/amd/amdgpu/si_dma.c
416
static void si_dma_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib)
drivers/gpu/drm/amd/amdgpu/si_dma.c
429
static void si_dma_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/si_dma.c
431
uint32_t seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/si_dma.c
432
uint64_t addr = ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/si_dma.c
435
amdgpu_ring_write(ring, DMA_PACKET(DMA_PACKET_POLL_REG_MEM, 0, 0, 0, 0) |
drivers/gpu/drm/amd/amdgpu/si_dma.c
437
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/si_dma.c
438
amdgpu_ring_write(ring, (0xff << 16) | upper_32_bits(addr)); /* retry, addr_hi */
drivers/gpu/drm/amd/amdgpu/si_dma.c
439
amdgpu_ring_write(ring, 0xffffffff); /* mask */
drivers/gpu/drm/amd/amdgpu/si_dma.c
440
amdgpu_ring_write(ring, seq); /* value */
drivers/gpu/drm/amd/amdgpu/si_dma.c
441
amdgpu_ring_write(ring, (3 << 28) | 0x20); /* func(equal) | poll interval */
drivers/gpu/drm/amd/amdgpu/si_dma.c
454
static void si_dma_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/si_dma.c
457
amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/si_dma.c
460
amdgpu_ring_write(ring, DMA_PACKET(DMA_PACKET_POLL_REG_MEM, 0, 0, 0, 0));
drivers/gpu/drm/amd/amdgpu/si_dma.c
461
amdgpu_ring_write(ring, VM_INVALIDATE_REQUEST);
drivers/gpu/drm/amd/amdgpu/si_dma.c
462
amdgpu_ring_write(ring, 0xff << 16); /* retry */
drivers/gpu/drm/amd/amdgpu/si_dma.c
463
amdgpu_ring_write(ring, 1 << vmid); /* mask */
drivers/gpu/drm/amd/amdgpu/si_dma.c
464
amdgpu_ring_write(ring, 0); /* value */
drivers/gpu/drm/amd/amdgpu/si_dma.c
465
amdgpu_ring_write(ring, (0 << 28) | 0x20); /* func(always) | poll interval */
drivers/gpu/drm/amd/amdgpu/si_dma.c
468
static void si_dma_ring_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/si_dma.c
471
amdgpu_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0, 0, 0));
drivers/gpu/drm/amd/amdgpu/si_dma.c
472
amdgpu_ring_write(ring, (0xf << 16) | reg);
drivers/gpu/drm/amd/amdgpu/si_dma.c
473
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/si_dma.c
492
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/si_dma.c
50
static uint64_t si_dma_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/si_dma.c
509
ring = &adev->sdma.instance[i].ring;
drivers/gpu/drm/amd/amdgpu/si_dma.c
510
ring->ring_obj = NULL;
drivers/gpu/drm/amd/amdgpu/si_dma.c
511
ring->use_doorbell = false;
drivers/gpu/drm/amd/amdgpu/si_dma.c
512
sprintf(ring->name, "sdma%d", i);
drivers/gpu/drm/amd/amdgpu/si_dma.c
513
r = amdgpu_ring_init(adev, ring, 1024,
drivers/gpu/drm/amd/amdgpu/si_dma.c
52
return *ring->rptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/si_dma.c
531
amdgpu_ring_fini(&adev->sdma.instance[i].ring);
drivers/gpu/drm/amd/amdgpu/si_dma.c
62
static uint64_t si_dma_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/si_dma.c
64
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/si_dma.c
642
amdgpu_fence_process(&adev->sdma.instance[0].ring);
drivers/gpu/drm/amd/amdgpu/si_dma.c
644
amdgpu_fence_process(&adev->sdma.instance[1].ring);
drivers/gpu/drm/amd/amdgpu/si_dma.c
65
u32 me = (ring == &adev->sdma.instance[0].ring) ? 0 : 1;
drivers/gpu/drm/amd/amdgpu/si_dma.c
70
static void si_dma_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/si_dma.c
72
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/si_dma.c
73
u32 me = (ring == &adev->sdma.instance[0].ring) ? 0 : 1;
drivers/gpu/drm/amd/amdgpu/si_dma.c
75
WREG32(mmDMA_GFX_RB_WPTR + sdma_offsets[me], (ring->wptr << 2) & 0x3fffc);
drivers/gpu/drm/amd/amdgpu/si_dma.c
753
adev->sdma.instance[i].ring.funcs = &si_dma_ring_funcs;
drivers/gpu/drm/amd/amdgpu/si_dma.c
78
static void si_dma_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/si_dma.c
830
adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring;
drivers/gpu/drm/amd/amdgpu/si_dma.c
848
&adev->sdma.instance[i].ring.sched;
drivers/gpu/drm/amd/amdgpu/si_dma.c
87
while ((lower_32_bits(ring->wptr) & 7) != 5)
drivers/gpu/drm/amd/amdgpu/si_dma.c
88
amdgpu_ring_write(ring, DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0, 0));
drivers/gpu/drm/amd/amdgpu/si_dma.c
89
amdgpu_ring_write(ring, DMA_IB_PACKET(DMA_PACKET_INDIRECT_BUFFER, vmid, 0));
drivers/gpu/drm/amd/amdgpu/si_dma.c
90
amdgpu_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
drivers/gpu/drm/amd/amdgpu/si_dma.c
91
amdgpu_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));
drivers/gpu/drm/amd/amdgpu/si_ih.c
148
dw[0] = le32_to_cpu(ih->ring[ring_index + 0]);
drivers/gpu/drm/amd/amdgpu/si_ih.c
149
dw[1] = le32_to_cpu(ih->ring[ring_index + 1]);
drivers/gpu/drm/amd/amdgpu/si_ih.c
150
dw[2] = le32_to_cpu(ih->ring[ring_index + 2]);
drivers/gpu/drm/amd/amdgpu/si_ih.c
151
dw[3] = le32_to_cpu(ih->ring[ring_index + 3]);
drivers/gpu/drm/amd/amdgpu/tonga_ih.c
256
dw[0] = le32_to_cpu(ih->ring[ring_index + 0]);
drivers/gpu/drm/amd/amdgpu/tonga_ih.c
257
dw[1] = le32_to_cpu(ih->ring[ring_index + 1]);
drivers/gpu/drm/amd/amdgpu/tonga_ih.c
258
dw[2] = le32_to_cpu(ih->ring[ring_index + 2]);
drivers/gpu/drm/amd/amdgpu/tonga_ih.c
259
dw[3] = le32_to_cpu(ih->ring[ring_index + 3]);
drivers/gpu/drm/amd/amdgpu/umsch_mm_v4_0.c
186
struct amdgpu_device *adev = umsch->ring.adev;
drivers/gpu/drm/amd/amdgpu/umsch_mm_v4_0.c
216
struct amdgpu_ring *ring = &umsch->ring;
drivers/gpu/drm/amd/amdgpu/umsch_mm_v4_0.c
217
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/umsch_mm_v4_0.c
221
data = REG_SET_FIELD(data, VCN_UMSCH_RB_DB_CTRL, OFFSET, ring->doorbell_index);
drivers/gpu/drm/amd/amdgpu/umsch_mm_v4_0.c
225
adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,
drivers/gpu/drm/amd/amdgpu/umsch_mm_v4_0.c
228
WREG32_SOC15(VCN, 0, regVCN_UMSCH_RB_BASE_LO, lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/umsch_mm_v4_0.c
229
WREG32_SOC15(VCN, 0, regVCN_UMSCH_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/umsch_mm_v4_0.c
231
WREG32_SOC15(VCN, 0, regVCN_UMSCH_RB_SIZE, ring->ring_size);
drivers/gpu/drm/amd/amdgpu/umsch_mm_v4_0.c
233
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/umsch_mm_v4_0.c
246
struct amdgpu_ring *ring = &umsch->ring;
drivers/gpu/drm/amd/amdgpu/umsch_mm_v4_0.c
247
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/umsch_mm_v4_0.c
272
struct amdgpu_device *adev = umsch->ring.adev;
drivers/gpu/drm/amd/amdgpu/umsch_mm_v4_0.c
310
set_hw_resources.api_status.api_completion_fence_addr = umsch->ring.fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/umsch_mm_v4_0.c
311
set_hw_resources.api_status.api_completion_fence_value = ++umsch->ring.fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/umsch_mm_v4_0.c
330
struct amdgpu_device *adev = umsch->ring.adev;
drivers/gpu/drm/amd/amdgpu/umsch_mm_v4_0.c
360
add_queue.api_status.api_completion_fence_addr = umsch->ring.fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/umsch_mm_v4_0.c
361
add_queue.api_status.api_completion_fence_value = ++umsch->ring.fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/umsch_mm_v4_0.c
381
struct amdgpu_device *adev = umsch->ring.adev;
drivers/gpu/drm/amd/amdgpu/umsch_mm_v4_0.c
392
remove_queue.api_status.api_completion_fence_addr = umsch->ring.fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/umsch_mm_v4_0.c
393
remove_queue.api_status.api_completion_fence_value = ++umsch->ring.fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/umsch_mm_v4_0.c
50
struct amdgpu_device *adev = umsch->ring.adev;
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
110
static void uvd_v3_1_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
115
amdgpu_ring_write(ring, PACKET0(mmUVD_CONTEXT_ID, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
116
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
117
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_DATA0, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
118
amdgpu_ring_write(ring, addr & 0xffffffff);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
119
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_DATA1, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
120
amdgpu_ring_write(ring, upper_32_bits(addr) & 0xff);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
121
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_CMD, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
122
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
124
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_DATA0, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
125
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
126
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_DATA1, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
127
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
128
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_CMD, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
129
amdgpu_ring_write(ring, 2);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
139
static int uvd_v3_1_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
141
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
147
r = amdgpu_ring_alloc(ring, 3);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
151
amdgpu_ring_write(ring, PACKET0(mmUVD_CONTEXT_ID, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
152
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
153
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
167
static void uvd_v3_1_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
171
WARN_ON(ring->wptr % 2 || count % 2);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
174
amdgpu_ring_write(ring, PACKET0(mmUVD_NO_OP, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
175
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
203
adev->uvd.inst->ring.funcs = &uvd_v3_1_ring_funcs;
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
322
struct amdgpu_ring *ring = &adev->uvd.inst->ring;
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
420
WREG32(mmUVD_LMI_EXT40_ADDR, upper_32_bits(ring->gpu_addr) |
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
426
ring->wptr = RREG32(mmUVD_RBC_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
427
WREG32(mmUVD_RBC_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
430
WREG32(mmUVD_RBC_RB_BASE, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
433
rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
44
static uint64_t uvd_v3_1_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
46
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
517
amdgpu_fence_process(&adev->uvd.inst->ring);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
547
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
562
ring = &adev->uvd.inst->ring;
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
563
sprintf(ring->name, "uvd");
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
564
r = amdgpu_ring_init(adev, ring, 512, &adev->uvd.inst->irq, 0,
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
58
static uint64_t uvd_v3_1_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
60
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
646
struct amdgpu_ring *ring = &adev->uvd.inst->ring;
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
669
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
675
r = amdgpu_ring_alloc(ring, 10);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
682
amdgpu_ring_write(ring, tmp);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
683
amdgpu_ring_write(ring, 0xFFFFF);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
686
amdgpu_ring_write(ring, tmp);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
687
amdgpu_ring_write(ring, 0xFFFFF);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
690
amdgpu_ring_write(ring, tmp);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
691
amdgpu_ring_write(ring, 0xFFFFF);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
694
amdgpu_ring_write(ring, PACKET0(mmUVD_SEMA_TIMEOUT_STATUS, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
695
amdgpu_ring_write(ring, 0x8);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
697
amdgpu_ring_write(ring, PACKET0(mmUVD_SEMA_CNTL, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
698
amdgpu_ring_write(ring, 3);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
700
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
72
static void uvd_v3_1_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
74
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
76
WREG32(mmUVD_RBC_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
89
static void uvd_v3_1_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
94
amdgpu_ring_write(ring, PACKET0(mmUVD_RBC_IB_BASE, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
95
amdgpu_ring_write(ring, ib->gpu_addr);
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
96
amdgpu_ring_write(ring, PACKET0(mmUVD_RBC_IB_SIZE, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v3_1.c
97
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
106
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
119
ring = &adev->uvd.inst->ring;
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
120
sprintf(ring->name, "uvd");
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
121
r = amdgpu_ring_init(adev, ring, 512, &adev->uvd.inst->irq, 0,
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
157
struct amdgpu_ring *ring = &adev->uvd.inst->ring;
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
164
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
168
r = amdgpu_ring_alloc(ring, 10);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
175
amdgpu_ring_write(ring, tmp);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
176
amdgpu_ring_write(ring, 0xFFFFF);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
179
amdgpu_ring_write(ring, tmp);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
180
amdgpu_ring_write(ring, 0xFFFFF);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
183
amdgpu_ring_write(ring, tmp);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
184
amdgpu_ring_write(ring, 0xFFFFF);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
187
amdgpu_ring_write(ring, PACKET0(mmUVD_SEMA_TIMEOUT_STATUS, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
188
amdgpu_ring_write(ring, 0x8);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
190
amdgpu_ring_write(ring, PACKET0(mmUVD_SEMA_CNTL, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
191
amdgpu_ring_write(ring, 3);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
193
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
284
struct amdgpu_ring *ring = &adev->uvd.inst->ring;
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
383
WREG32(mmUVD_LMI_EXT40_ADDR, upper_32_bits(ring->gpu_addr) |
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
389
ring->wptr = RREG32(mmUVD_RBC_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
390
WREG32(mmUVD_RBC_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
393
WREG32(mmUVD_RBC_RB_BASE, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
396
rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
477
static void uvd_v4_2_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
482
amdgpu_ring_write(ring, PACKET0(mmUVD_CONTEXT_ID, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
483
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
484
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_DATA0, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
485
amdgpu_ring_write(ring, addr & 0xffffffff);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
486
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_DATA1, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
487
amdgpu_ring_write(ring, upper_32_bits(addr) & 0xff);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
488
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_CMD, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
489
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
491
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_DATA0, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
492
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
493
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_DATA1, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
494
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
495
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_CMD, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
496
amdgpu_ring_write(ring, 2);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
506
static int uvd_v4_2_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
508
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
514
r = amdgpu_ring_alloc(ring, 3);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
518
amdgpu_ring_write(ring, PACKET0(mmUVD_CONTEXT_ID, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
519
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
520
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
544
static void uvd_v4_2_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
549
amdgpu_ring_write(ring, PACKET0(mmUVD_RBC_IB_BASE, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
550
amdgpu_ring_write(ring, ib->gpu_addr);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
551
amdgpu_ring_write(ring, PACKET0(mmUVD_RBC_IB_SIZE, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
552
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
555
static void uvd_v4_2_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
559
WARN_ON(ring->wptr % 2 || count % 2);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
562
amdgpu_ring_write(ring, PACKET0(mmUVD_NO_OP, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
563
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
58
static uint64_t uvd_v4_2_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
60
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
708
amdgpu_fence_process(&adev->uvd.inst->ring);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
72
static uint64_t uvd_v4_2_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
74
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
797
adev->uvd.inst->ring.funcs = &uvd_v4_2_ring_funcs;
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
86
static void uvd_v4_2_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
88
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c
90
WREG32(mmUVD_RBC_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
104
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
117
ring = &adev->uvd.inst->ring;
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
118
sprintf(ring->name, "uvd");
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
119
r = amdgpu_ring_init(adev, ring, 512, &adev->uvd.inst->irq, 0,
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
153
struct amdgpu_ring *ring = &adev->uvd.inst->ring;
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
161
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
165
r = amdgpu_ring_alloc(ring, 10);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
172
amdgpu_ring_write(ring, tmp);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
173
amdgpu_ring_write(ring, 0xFFFFF);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
176
amdgpu_ring_write(ring, tmp);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
177
amdgpu_ring_write(ring, 0xFFFFF);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
180
amdgpu_ring_write(ring, tmp);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
181
amdgpu_ring_write(ring, 0xFFFFF);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
184
amdgpu_ring_write(ring, PACKET0(mmUVD_SEMA_TIMEOUT_STATUS, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
185
amdgpu_ring_write(ring, 0x8);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
187
amdgpu_ring_write(ring, PACKET0(mmUVD_SEMA_CNTL, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
188
amdgpu_ring_write(ring, 3);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
190
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
321
struct amdgpu_ring *ring = &adev->uvd.inst->ring;
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
419
rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
434
WREG32(mmUVD_RBC_RB_RPTR_ADDR, (upper_32_bits(ring->gpu_addr) >> 2));
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
438
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
440
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
445
ring->wptr = RREG32(mmUVD_RBC_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
446
WREG32(mmUVD_RBC_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
492
static void uvd_v5_0_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
497
amdgpu_ring_write(ring, PACKET0(mmUVD_CONTEXT_ID, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
498
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
499
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_DATA0, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
500
amdgpu_ring_write(ring, addr & 0xffffffff);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
501
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_DATA1, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
502
amdgpu_ring_write(ring, upper_32_bits(addr) & 0xff);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
503
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_CMD, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
504
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
506
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_DATA0, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
507
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
508
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_DATA1, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
509
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
510
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_CMD, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
511
amdgpu_ring_write(ring, 2);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
521
static int uvd_v5_0_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
523
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
529
r = amdgpu_ring_alloc(ring, 3);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
532
amdgpu_ring_write(ring, PACKET0(mmUVD_CONTEXT_ID, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
533
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
534
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
558
static void uvd_v5_0_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
56
static uint64_t uvd_v5_0_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
563
amdgpu_ring_write(ring, PACKET0(mmUVD_LMI_RBC_IB_64BIT_BAR_LOW, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
564
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
565
amdgpu_ring_write(ring, PACKET0(mmUVD_LMI_RBC_IB_64BIT_BAR_HIGH, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
566
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
567
amdgpu_ring_write(ring, PACKET0(mmUVD_RBC_IB_SIZE, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
568
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
571
static void uvd_v5_0_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
575
WARN_ON(ring->wptr % 2 || count % 2);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
578
amdgpu_ring_write(ring, PACKET0(mmUVD_NO_OP, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
579
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
58
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
629
amdgpu_fence_process(&adev->uvd.inst->ring);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
70
static uint64_t uvd_v5_0_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
72
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
84
static void uvd_v5_0_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
86
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
88
WREG32(mmUVD_RBC_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
904
adev->uvd.inst->ring.funcs = &uvd_v5_0_ring_funcs;
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1000
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1024
static void uvd_v6_0_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1031
amdgpu_ring_write(ring, PACKET0(mmUVD_LMI_RBC_IB_VMID, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1032
amdgpu_ring_write(ring, vmid);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1034
amdgpu_ring_write(ring, PACKET0(mmUVD_LMI_RBC_IB_64BIT_BAR_LOW, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1035
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1036
amdgpu_ring_write(ring, PACKET0(mmUVD_LMI_RBC_IB_64BIT_BAR_HIGH, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1037
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1038
amdgpu_ring_write(ring, PACKET0(mmUVD_RBC_IB_SIZE, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1039
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1052
static void uvd_v6_0_enc_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1059
amdgpu_ring_write(ring, HEVC_ENC_CMD_IB_VM);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1060
amdgpu_ring_write(ring, vmid);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1061
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1062
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1063
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1066
static void uvd_v6_0_ring_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1069
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_DATA0, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
107
static uint64_t uvd_v6_0_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1070
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1071
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_DATA1, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1072
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1073
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_CMD, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1074
amdgpu_ring_write(ring, 0x8);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1077
static void uvd_v6_0_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1080
amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1082
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_DATA0, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1083
amdgpu_ring_write(ring, mmVM_INVALIDATE_REQUEST << 2);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1084
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_DATA1, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1085
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1086
amdgpu_ring_write(ring, PACKET0(mmUVD_GP_SCRATCH8, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1087
amdgpu_ring_write(ring, 1 << vmid); /* mask */
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1088
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_CMD, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1089
amdgpu_ring_write(ring, 0xC);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
109
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1092
static void uvd_v6_0_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1094
uint32_t seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1095
uint64_t addr = ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1097
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_DATA0, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1098
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1099
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_DATA1, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1100
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1101
amdgpu_ring_write(ring, PACKET0(mmUVD_GP_SCRATCH8, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1102
amdgpu_ring_write(ring, 0xffffffff); /* mask */
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1103
amdgpu_ring_write(ring, PACKET0(mmUVD_GP_SCRATCH9, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1104
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1105
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_CMD, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1106
amdgpu_ring_write(ring, 0xE);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1109
static void uvd_v6_0_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1113
WARN_ON(ring->wptr % 2 || count % 2);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1116
amdgpu_ring_write(ring, PACKET0(mmUVD_NO_OP, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1117
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1121
static void uvd_v6_0_enc_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1123
uint32_t seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1124
uint64_t addr = ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1126
amdgpu_ring_write(ring, HEVC_ENC_CMD_WAIT_GE);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1127
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1128
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1129
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1132
static void uvd_v6_0_enc_ring_insert_end(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1134
amdgpu_ring_write(ring, HEVC_ENC_CMD_END);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1137
static void uvd_v6_0_enc_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1140
amdgpu_ring_write(ring, HEVC_ENC_CMD_UPDATE_PTB);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1141
amdgpu_ring_write(ring, vmid);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1142
amdgpu_ring_write(ring, pd_addr >> 12);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1144
amdgpu_ring_write(ring, HEVC_ENC_CMD_FLUSH_TLB);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1145
amdgpu_ring_write(ring, vmid);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
121
static uint64_t uvd_v6_0_enc_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
123
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
125
if (ring == &adev->uvd.inst->ring_enc[0])
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1260
amdgpu_fence_process(&adev->uvd.inst->ring);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
138
static void uvd_v6_0_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
140
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
142
WREG32(mmUVD_RBC_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
152
static void uvd_v6_0_enc_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
154
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
156
if (ring == &adev->uvd.inst->ring_enc[0])
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
158
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
161
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1635
adev->uvd.inst->ring.funcs = &uvd_v6_0_ring_vm_funcs;
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
1638
adev->uvd.inst->ring.funcs = &uvd_v6_0_ring_phys_funcs;
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
170
static int uvd_v6_0_enc_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
172
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
177
r = amdgpu_ring_alloc(ring, 16);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
181
rptr = amdgpu_ring_get_rptr(ring);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
183
amdgpu_ring_write(ring, HEVC_ENC_CMD_END);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
184
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
187
if (amdgpu_ring_get_rptr(ring) != rptr)
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
208
static int uvd_v6_0_enc_get_create_msg(struct amdgpu_ring *ring, uint32_t handle,
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
219
r = amdgpu_job_alloc_with_ib(ring->adev, NULL, NULL, ib_size_dw * 4,
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
248
r = amdgpu_job_submit_direct(job, ring, &f);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
272
static int uvd_v6_0_enc_get_destroy_msg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
284
r = amdgpu_job_alloc_with_ib(ring->adev, NULL, NULL, ib_size_dw * 4,
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
313
r = amdgpu_job_submit_direct(job, ring, &f);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
334
static int uvd_v6_0_enc_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
337
struct amdgpu_bo *bo = ring->adev->uvd.ib_bo;
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
340
r = uvd_v6_0_enc_get_create_msg(ring, 1, bo, NULL);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
344
r = uvd_v6_0_enc_get_destroy_msg(ring, 1, bo, &fence);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
382
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
414
ring = &adev->uvd.inst->ring;
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
415
sprintf(ring->name, "uvd");
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
416
r = amdgpu_ring_init(adev, ring, 512, &adev->uvd.inst->irq, 0,
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
427
ring = &adev->uvd.inst->ring_enc[i];
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
428
sprintf(ring->name, "uvd_enc%d", i);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
429
r = amdgpu_ring_init(adev, ring, 512,
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
467
struct amdgpu_ring *ring = &adev->uvd.inst->ring;
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
475
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
479
r = amdgpu_ring_alloc(ring, 10);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
486
amdgpu_ring_write(ring, tmp);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
487
amdgpu_ring_write(ring, 0xFFFFF);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
490
amdgpu_ring_write(ring, tmp);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
491
amdgpu_ring_write(ring, 0xFFFFF);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
494
amdgpu_ring_write(ring, tmp);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
495
amdgpu_ring_write(ring, 0xFFFFF);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
498
amdgpu_ring_write(ring, PACKET0(mmUVD_SEMA_TIMEOUT_STATUS, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
499
amdgpu_ring_write(ring, 0x8);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
501
amdgpu_ring_write(ring, PACKET0(mmUVD_SEMA_CNTL, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
502
amdgpu_ring_write(ring, 3);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
504
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
508
ring = &adev->uvd.inst->ring_enc[i];
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
509
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
728
struct amdgpu_ring *ring = &adev->uvd.inst->ring;
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
77
static uint64_t uvd_v6_0_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
79
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
838
rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
851
WREG32(mmUVD_RBC_RB_RPTR_ADDR, (upper_32_bits(ring->gpu_addr) >> 2));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
855
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
857
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
862
ring->wptr = RREG32(mmUVD_RBC_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
863
WREG32(mmUVD_RBC_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
868
ring = &adev->uvd.inst->ring_enc[0];
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
869
WREG32(mmUVD_RB_RPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
870
WREG32(mmUVD_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
871
WREG32(mmUVD_RB_BASE_LO, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
872
WREG32(mmUVD_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
873
WREG32(mmUVD_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
875
ring = &adev->uvd.inst->ring_enc[1];
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
876
WREG32(mmUVD_RB_RPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
877
WREG32(mmUVD_RB_WPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
878
WREG32(mmUVD_RB_BASE_LO2, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
879
WREG32(mmUVD_RB_BASE_HI2, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
880
WREG32(mmUVD_RB_SIZE2, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
91
static uint64_t uvd_v6_0_enc_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
925
static void uvd_v6_0_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
93
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
930
amdgpu_ring_write(ring, PACKET0(mmUVD_CONTEXT_ID, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
931
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
932
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_DATA0, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
933
amdgpu_ring_write(ring, addr & 0xffffffff);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
934
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_DATA1, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
935
amdgpu_ring_write(ring, upper_32_bits(addr) & 0xff);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
936
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_CMD, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
937
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
939
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_DATA0, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
940
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
941
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_DATA1, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
942
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
943
amdgpu_ring_write(ring, PACKET0(mmUVD_GPCOM_VCPU_CMD, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
944
amdgpu_ring_write(ring, 2);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
95
if (ring == &adev->uvd.inst->ring_enc[0])
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
957
static void uvd_v6_0_enc_ring_emit_fence(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
962
amdgpu_ring_write(ring, HEVC_ENC_CMD_FENCE);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
963
amdgpu_ring_write(ring, addr);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
964
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
965
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
966
amdgpu_ring_write(ring, HEVC_ENC_CMD_TRAP);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
974
static void uvd_v6_0_ring_emit_hdp_flush(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
986
static int uvd_v6_0_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
988
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
994
r = amdgpu_ring_alloc(ring, 3);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
998
amdgpu_ring_write(ring, PACKET0(mmUVD_CONTEXT_ID, 0));
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
999
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
102
static uint64_t uvd_v7_0_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
104
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
106
return RREG32_SOC15(UVD, ring->me, mmUVD_RBC_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1085
rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1099
(upper_32_bits(ring->gpu_addr) >> 2));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1103
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1105
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1110
ring->wptr = RREG32_SOC15(UVD, k, mmUVD_RBC_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1112
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1117
ring = &adev->uvd.inst[k].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1118
WREG32_SOC15(UVD, k, mmUVD_RB_RPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1119
WREG32_SOC15(UVD, k, mmUVD_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1120
WREG32_SOC15(UVD, k, mmUVD_RB_BASE_LO, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1121
WREG32_SOC15(UVD, k, mmUVD_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1122
WREG32_SOC15(UVD, k, mmUVD_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1124
ring = &adev->uvd.inst[k].ring_enc[1];
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1125
WREG32_SOC15(UVD, k, mmUVD_RB_RPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1126
WREG32_SOC15(UVD, k, mmUVD_RB_WPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1127
WREG32_SOC15(UVD, k, mmUVD_RB_BASE_LO2, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1128
WREG32_SOC15(UVD, k, mmUVD_RB_BASE_HI2, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1129
WREG32_SOC15(UVD, k, mmUVD_RB_SIZE2, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
116
static uint64_t uvd_v7_0_enc_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
118
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1181
static void uvd_v7_0_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1184
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1188
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1189
PACKET0(SOC15_REG_OFFSET(UVD, ring->me, mmUVD_CONTEXT_ID), 0));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1190
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1191
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1192
PACKET0(SOC15_REG_OFFSET(UVD, ring->me, mmUVD_GPCOM_VCPU_DATA0), 0));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1193
amdgpu_ring_write(ring, addr & 0xffffffff);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1194
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1195
PACKET0(SOC15_REG_OFFSET(UVD, ring->me, mmUVD_GPCOM_VCPU_DATA1), 0));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1196
amdgpu_ring_write(ring, upper_32_bits(addr) & 0xff);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1197
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1198
PACKET0(SOC15_REG_OFFSET(UVD, ring->me, mmUVD_GPCOM_VCPU_CMD), 0));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1199
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
120
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1201
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1202
PACKET0(SOC15_REG_OFFSET(UVD, ring->me, mmUVD_GPCOM_VCPU_DATA0), 0));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1203
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1204
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1205
PACKET0(SOC15_REG_OFFSET(UVD, ring->me, mmUVD_GPCOM_VCPU_DATA1), 0));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1206
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1207
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1208
PACKET0(SOC15_REG_OFFSET(UVD, ring->me, mmUVD_GPCOM_VCPU_CMD), 0));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1209
amdgpu_ring_write(ring, 2);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
121
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1222
static void uvd_v7_0_enc_ring_emit_fence(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1228
amdgpu_ring_write(ring, HEVC_ENC_CMD_FENCE);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1229
amdgpu_ring_write(ring, addr);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
123
if (ring == &adev->uvd.inst[ring->me].ring_enc[0])
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1230
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1231
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1232
amdgpu_ring_write(ring, HEVC_ENC_CMD_TRAP);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
124
return RREG32_SOC15(UVD, ring->me, mmUVD_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1240
static void uvd_v7_0_ring_emit_hdp_flush(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1252
static int uvd_v7_0_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1254
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1259
WREG32_SOC15(UVD, ring->me, mmUVD_CONTEXT_ID, 0xCAFEDEAD);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
126
return RREG32_SOC15(UVD, ring->me, mmUVD_RB_WPTR2);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1260
r = amdgpu_ring_alloc(ring, 3);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1264
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1265
PACKET0(SOC15_REG_OFFSET(UVD, ring->me, mmUVD_CONTEXT_ID), 0));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1266
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1267
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1269
tmp = RREG32_SOC15(UVD, ring->me, mmUVD_CONTEXT_ID);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1293
struct amdgpu_ring *ring = amdgpu_job_ring(job);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1297
if (!ring->me)
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1321
static void uvd_v7_0_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1326
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1329
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1330
PACKET0(SOC15_REG_OFFSET(UVD, ring->me, mmUVD_LMI_RBC_IB_VMID), 0));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1331
amdgpu_ring_write(ring, vmid);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1333
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1334
PACKET0(SOC15_REG_OFFSET(UVD, ring->me, mmUVD_LMI_RBC_IB_64BIT_BAR_LOW), 0));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1335
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1336
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1337
PACKET0(SOC15_REG_OFFSET(UVD, ring->me, mmUVD_LMI_RBC_IB_64BIT_BAR_HIGH), 0));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1338
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1339
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1340
PACKET0(SOC15_REG_OFFSET(UVD, ring->me, mmUVD_RBC_IB_SIZE), 0));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1341
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1354
static void uvd_v7_0_enc_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
136
static void uvd_v7_0_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1361
amdgpu_ring_write(ring, HEVC_ENC_CMD_IB_VM);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1362
amdgpu_ring_write(ring, vmid);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1363
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1364
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1365
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1368
static void uvd_v7_0_ring_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1371
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1373
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1374
PACKET0(SOC15_REG_OFFSET(UVD, ring->me, mmUVD_GPCOM_VCPU_DATA0), 0));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1375
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1376
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1377
PACKET0(SOC15_REG_OFFSET(UVD, ring->me, mmUVD_GPCOM_VCPU_DATA1), 0));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1378
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1379
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
138
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1380
PACKET0(SOC15_REG_OFFSET(UVD, ring->me, mmUVD_GPCOM_VCPU_CMD), 0));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1381
amdgpu_ring_write(ring, 8);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1384
static void uvd_v7_0_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1387
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1389
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1390
PACKET0(SOC15_REG_OFFSET(UVD, ring->me, mmUVD_GPCOM_VCPU_DATA0), 0));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1391
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1392
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1393
PACKET0(SOC15_REG_OFFSET(UVD, ring->me, mmUVD_GPCOM_VCPU_DATA1), 0));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1394
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1395
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1396
PACKET0(SOC15_REG_OFFSET(UVD, ring->me, mmUVD_GP_SCRATCH8), 0));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1397
amdgpu_ring_write(ring, mask);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1398
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1399
PACKET0(SOC15_REG_OFFSET(UVD, ring->me, mmUVD_GPCOM_VCPU_CMD), 0));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
140
WREG32_SOC15(UVD, ring->me, mmUVD_RBC_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1400
amdgpu_ring_write(ring, 12);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1403
static void uvd_v7_0_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1406
struct amdgpu_vmhub *hub = &ring->adev->vmhub[ring->vm_hub];
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1409
pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1415
uvd_v7_0_ring_emit_reg_wait(ring, data0, data1, mask);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1418
static void uvd_v7_0_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1420
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1423
WARN_ON(ring->wptr % 2 || count % 2);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1426
amdgpu_ring_write(ring, PACKET0(SOC15_REG_OFFSET(UVD, ring->me, mmUVD_NO_OP), 0));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1427
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1431
static void uvd_v7_0_enc_ring_insert_end(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1433
amdgpu_ring_write(ring, HEVC_ENC_CMD_END);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1436
static void uvd_v7_0_enc_ring_emit_reg_wait(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1440
amdgpu_ring_write(ring, HEVC_ENC_CMD_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1441
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1442
amdgpu_ring_write(ring, mask);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1443
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1446
static void uvd_v7_0_enc_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1449
struct amdgpu_vmhub *hub = &ring->adev->vmhub[ring->vm_hub];
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1451
pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1454
uvd_v7_0_enc_ring_emit_reg_wait(ring, hub->ctx0_ptb_addr_lo32 +
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1459
static void uvd_v7_0_enc_ring_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1462
amdgpu_ring_write(ring, HEVC_ENC_CMD_REG_WRITE);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1463
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1464
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1498
amdgpu_fence_process(&adev->uvd.inst[ip_instance].ring);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
150
static void uvd_v7_0_enc_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
152
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
154
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
156
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
157
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1607
adev->uvd.inst[i].ring.funcs = &uvd_v7_0_ring_vm_funcs;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
1608
adev->uvd.inst[i].ring.me = i;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
161
if (ring == &adev->uvd.inst[ring->me].ring_enc[0])
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
162
WREG32_SOC15(UVD, ring->me, mmUVD_RB_WPTR,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
163
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
165
WREG32_SOC15(UVD, ring->me, mmUVD_RB_WPTR2,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
166
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
175
static int uvd_v7_0_enc_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
177
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
185
r = amdgpu_ring_alloc(ring, 16);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
189
rptr = amdgpu_ring_get_rptr(ring);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
191
amdgpu_ring_write(ring, HEVC_ENC_CMD_END);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
192
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
195
if (amdgpu_ring_get_rptr(ring) != rptr)
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
216
static int uvd_v7_0_enc_get_create_msg(struct amdgpu_ring *ring, u32 handle,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
227
r = amdgpu_job_alloc_with_ib(ring->adev, NULL, NULL, ib_size_dw * 4,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
256
r = amdgpu_job_submit_direct(job, ring, &f);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
280
static int uvd_v7_0_enc_get_destroy_msg(struct amdgpu_ring *ring, u32 handle,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
291
r = amdgpu_job_alloc_with_ib(ring->adev, NULL, NULL, ib_size_dw * 4,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
320
r = amdgpu_job_submit_direct(job, ring, &f);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
341
static int uvd_v7_0_enc_ring_test_ib(struct amdgpu_ring *ring, long timeout)
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
344
struct amdgpu_bo *bo = ring->adev->uvd.ib_bo;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
347
r = uvd_v7_0_enc_get_create_msg(ring, 1, bo, NULL);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
351
r = uvd_v7_0_enc_get_destroy_msg(ring, 1, bo, &fence);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
402
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
448
ring = &adev->uvd.inst[j].ring;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
449
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
450
sprintf(ring->name, "uvd_%d", ring->me);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
451
r = amdgpu_ring_init(adev, ring, 512,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
459
ring = &adev->uvd.inst[j].ring_enc[i];
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
460
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
461
sprintf(ring->name, "uvd_enc_%d.%d", ring->me, i);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
463
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
469
ring->doorbell_index = adev->doorbell_index.uvd_vce.uvd_ring0_1 * 2;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
471
ring->doorbell_index = adev->doorbell_index.uvd_vce.uvd_ring2_3 * 2 + 1;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
473
r = amdgpu_ring_init(adev, ring, 512,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
522
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
536
ring = &adev->uvd.inst[j].ring;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
539
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
543
r = amdgpu_ring_alloc(ring, 10);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
551
amdgpu_ring_write(ring, tmp);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
552
amdgpu_ring_write(ring, 0xFFFFF);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
556
amdgpu_ring_write(ring, tmp);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
557
amdgpu_ring_write(ring, 0xFFFFF);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
561
amdgpu_ring_write(ring, tmp);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
562
amdgpu_ring_write(ring, 0xFFFFF);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
565
amdgpu_ring_write(ring, PACKET0(SOC15_REG_OFFSET(UVD, j,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
567
amdgpu_ring_write(ring, 0x8);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
569
amdgpu_ring_write(ring, PACKET0(SOC15_REG_OFFSET(UVD, j,
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
571
amdgpu_ring_write(ring, 3);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
573
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
577
ring = &adev->uvd.inst[j].ring_enc[i];
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
578
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
71
static uint64_t uvd_v7_0_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
73
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
75
return RREG32_SOC15(UVD, ring->me, mmUVD_RBC_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
790
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
820
ring = &adev->uvd.inst[i].ring;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
821
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
85
static uint64_t uvd_v7_0_enc_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
87
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
89
if (ring == &adev->uvd.inst[ring->me].ring_enc[0])
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
90
return RREG32_SOC15(UVD, ring->me, mmUVD_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
918
size = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
92
return RREG32_SOC15(UVD, ring->me, mmUVD_RB_RPTR2);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
923
ring = &adev->uvd.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
924
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
925
MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(UVD, i, mmUVD_RB_BASE_LO), ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
926
MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(UVD, i, mmUVD_RB_BASE_HI), upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
927
MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(UVD, i, mmUVD_RB_SIZE), ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
956
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c
979
ring = &adev->uvd.inst[k].ring;
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
111
static void vce_v1_0_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
113
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
115
if (ring->me == 0)
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
116
WREG32(mmVCE_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
118
WREG32(mmVCE_RB_WPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
381
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
390
ring = &adev->vce.ring[0];
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
391
WREG32(mmVCE_RB_RPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
392
WREG32(mmVCE_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
393
WREG32(mmVCE_RB_BASE_LO, lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
394
WREG32(mmVCE_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
395
WREG32(mmVCE_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
397
ring = &adev->vce.ring[1];
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
398
WREG32(mmVCE_RB_RPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
399
WREG32(mmVCE_RB_WPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
400
WREG32(mmVCE_RB_BASE_LO2, lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
401
WREG32(mmVCE_RB_BASE_HI2, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
402
WREG32(mmVCE_RB_SIZE2, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
568
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
593
ring = &adev->vce.ring[i];
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
594
sprintf(ring->name, "vce%d", i);
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
595
r = amdgpu_ring_init(adev, ring, 512, &adev->vce.irq, 0,
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
634
r = amdgpu_ring_test_helper(&adev->vce.ring[i]);
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
734
amdgpu_fence_process(&adev->vce.ring[entry->src_data[0]]);
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
77
static uint64_t vce_v1_0_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
79
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
81
if (ring->me == 0)
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
817
adev->vce.ring[i].funcs = &vce_v1_0_ring_funcs;
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
818
adev->vce.ring[i].me = i;
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
94
static uint64_t vce_v1_0_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
96
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vce_v1_0.c
98
if (ring->me == 0)
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
232
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
243
ring = &adev->vce.ring[0];
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
244
WREG32(mmVCE_RB_RPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
245
WREG32(mmVCE_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
246
WREG32(mmVCE_RB_BASE_LO, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
247
WREG32(mmVCE_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
248
WREG32(mmVCE_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
250
ring = &adev->vce.ring[1];
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
251
WREG32(mmVCE_RB_RPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
252
WREG32(mmVCE_RB_WPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
253
WREG32(mmVCE_RB_BASE_LO2, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
254
WREG32(mmVCE_RB_BASE_HI2, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
255
WREG32(mmVCE_RB_SIZE2, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
426
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
447
ring = &adev->vce.ring[i];
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
448
sprintf(ring->name, "vce%d", i);
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
449
r = amdgpu_ring_init(adev, ring, 512, &adev->vce.irq, 0,
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
479
r = amdgpu_ring_test_helper(&adev->vce.ring[i]);
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
55
static uint64_t vce_v2_0_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
57
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
575
amdgpu_fence_process(&adev->vce.ring[entry->src_data[0]]);
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
59
if (ring->me == 0)
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
665
adev->vce.ring[i].funcs = &vce_v2_0_ring_funcs;
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
666
adev->vce.ring[i].me = i;
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
72
static uint64_t vce_v2_0_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
74
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
76
if (ring->me == 0)
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
89
static void vce_v2_0_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
91
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
93
if (ring->me == 0)
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
94
WREG32(mmVCE_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v2_0.c
96
WREG32(mmVCE_RB_WPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
109
static uint64_t vce_v3_0_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
111
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
121
if (ring->me == 0)
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
123
else if (ring->me == 1)
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
141
static void vce_v3_0_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
143
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
152
if (ring->me == 0)
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
153
WREG32(mmVCE_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
154
else if (ring->me == 1)
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
155
WREG32(mmVCE_RB_WPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
157
WREG32(mmVCE_RB_WPTR3, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
267
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
280
ring = &adev->vce.ring[0];
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
281
WREG32(mmVCE_RB_RPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
282
WREG32(mmVCE_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
283
WREG32(mmVCE_RB_BASE_LO, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
284
WREG32(mmVCE_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
285
WREG32(mmVCE_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
287
ring = &adev->vce.ring[1];
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
288
WREG32(mmVCE_RB_RPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
289
WREG32(mmVCE_RB_WPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
290
WREG32(mmVCE_RB_BASE_LO2, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
291
WREG32(mmVCE_RB_BASE_HI2, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
292
WREG32(mmVCE_RB_SIZE2, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
294
ring = &adev->vce.ring[2];
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
295
WREG32(mmVCE_RB_RPTR3, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
296
WREG32(mmVCE_RB_WPTR3, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
297
WREG32(mmVCE_RB_BASE_LO3, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
298
WREG32(mmVCE_RB_BASE_HI3, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
299
WREG32(mmVCE_RB_SIZE3, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
426
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
450
ring = &adev->vce.ring[i];
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
451
sprintf(ring->name, "vce%d", i);
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
452
r = amdgpu_ring_init(adev, ring, 512, &adev->vce.irq, 0,
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
483
r = amdgpu_ring_test_helper(&adev->vce.ring[i]);
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
757
amdgpu_fence_process(&adev->vce.ring[entry->src_data[0]]);
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
77
static uint64_t vce_v3_0_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
79
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
864
static void vce_v3_0_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
871
amdgpu_ring_write(ring, VCE_CMD_IB_VM);
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
872
amdgpu_ring_write(ring, vmid);
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
873
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
874
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
875
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
878
static void vce_v3_0_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
881
amdgpu_ring_write(ring, VCE_CMD_UPDATE_PTB);
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
882
amdgpu_ring_write(ring, vmid);
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
883
amdgpu_ring_write(ring, pd_addr >> 12);
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
885
amdgpu_ring_write(ring, VCE_CMD_FLUSH_TLB);
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
886
amdgpu_ring_write(ring, vmid);
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
887
amdgpu_ring_write(ring, VCE_CMD_END);
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
89
if (ring->me == 0)
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
890
static void vce_v3_0_emit_pipeline_sync(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
892
uint32_t seq = ring->fence_drv.sync_seq;
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
893
uint64_t addr = ring->fence_drv.gpu_addr;
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
895
amdgpu_ring_write(ring, VCE_CMD_WAIT_GE);
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
896
amdgpu_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
897
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
898
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
91
else if (ring->me == 1)
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
978
adev->vce.ring[i].funcs = &vce_v3_0_ring_vm_funcs;
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
979
adev->vce.ring[i].me = i;
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
984
adev->vce.ring[i].funcs = &vce_v3_0_ring_phys_funcs;
drivers/gpu/drm/amd/amdgpu/vce_v3_0.c
985
adev->vce.ring[i].me = i;
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
103
static void vce_v4_0_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
105
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
107
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
109
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
110
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
114
if (ring->me == 0)
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
116
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
117
else if (ring->me == 1)
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
119
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
122
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
179
WDOORBELL32(adev->vce.ring[0].doorbell_index, 0);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
180
*adev->vce.ring[0].wptr_cpu_addr = 0;
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
181
adev->vce.ring[0].wptr = 0;
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
182
adev->vce.ring[0].wptr_old = 0;
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
207
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
233
ring = &adev->vce.ring[0];
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
235
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
237
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
239
ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
338
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
341
ring = &adev->vce.ring[0];
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
343
WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_RPTR), lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
344
WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR), lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
345
WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_LO), ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
346
WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_HI), upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
347
WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_SIZE), ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
349
ring = &adev->vce.ring[1];
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
351
WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_RPTR2), lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
352
WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR2), lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
353
WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_LO2), ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
354
WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_HI2), upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
355
WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_SIZE2), ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
357
ring = &adev->vce.ring[2];
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
359
WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_RPTR3), lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
360
WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR3), lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
361
WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_LO3), ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
362
WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_HI3), upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
363
WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_SIZE3), ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
433
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
473
ring = &adev->vce.ring[i];
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
474
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
475
sprintf(ring->name, "vce%d", i);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
478
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
484
ring->doorbell_index = adev->doorbell_index.uvd_vce.vce_ring0_1 * 2;
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
486
ring->doorbell_index = adev->doorbell_index.uvd_vce.vce_ring2_3 * 2 + 1;
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
488
r = amdgpu_ring_init(adev, ring, 512, &adev->vce.irq, 0,
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
534
r = amdgpu_ring_test_helper(&adev->vce.ring[i]);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
62
static uint64_t vce_v4_0_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
64
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
66
if (ring->me == 0)
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
68
else if (ring->me == 1)
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
717
static void vce_v4_0_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job,
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
722
amdgpu_ring_write(ring, VCE_CMD_IB_VM);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
723
amdgpu_ring_write(ring, vmid);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
724
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
725
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
726
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
729
static void vce_v4_0_ring_emit_fence(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
734
amdgpu_ring_write(ring, VCE_CMD_FENCE);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
735
amdgpu_ring_write(ring, addr);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
736
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
737
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
738
amdgpu_ring_write(ring, VCE_CMD_TRAP);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
741
static void vce_v4_0_ring_insert_end(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
743
amdgpu_ring_write(ring, VCE_CMD_END);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
746
static void vce_v4_0_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
749
amdgpu_ring_write(ring, VCE_CMD_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
750
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
751
amdgpu_ring_write(ring, mask);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
752
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
755
static void vce_v4_0_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
758
struct amdgpu_vmhub *hub = &ring->adev->vmhub[ring->vm_hub];
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
760
pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
763
vce_v4_0_emit_reg_wait(ring, hub->ctx0_ptb_addr_lo32 +
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
768
static void vce_v4_0_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
771
amdgpu_ring_write(ring, VCE_CMD_REG_WRITE);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
772
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
773
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
803
amdgpu_fence_process(&adev->vce.ring[entry->src_data[0]]);
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
81
static uint64_t vce_v4_0_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
83
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
85
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
86
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
864
adev->vce.ring[i].funcs = &vce_v4_0_ring_vm_funcs;
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
865
adev->vce.ring[i].me = i;
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
88
if (ring->me == 0)
drivers/gpu/drm/amd/amdgpu/vce_v4_0.c
90
else if (ring->me == 1)
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
27
void vcn_dec_sw_ring_emit_fence(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
32
amdgpu_ring_write(ring, VCN_DEC_SW_CMD_FENCE);
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
33
amdgpu_ring_write(ring, addr);
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
34
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
35
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
36
amdgpu_ring_write(ring, VCN_DEC_SW_CMD_TRAP);
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
39
void vcn_dec_sw_ring_insert_end(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
41
amdgpu_ring_write(ring, VCN_DEC_SW_CMD_END);
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
44
void vcn_dec_sw_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job,
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
49
amdgpu_ring_write(ring, VCN_DEC_SW_CMD_IB);
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
50
amdgpu_ring_write(ring, vmid);
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
51
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
52
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
53
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
56
void vcn_dec_sw_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
59
amdgpu_ring_write(ring, VCN_DEC_SW_CMD_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
60
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
61
amdgpu_ring_write(ring, mask);
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
62
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
65
void vcn_dec_sw_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
68
struct amdgpu_vmhub *hub = &ring->adev->vmhub[ring->vm_hub];
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
71
pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
77
vcn_dec_sw_ring_emit_reg_wait(ring, data0, data1, mask);
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
80
void vcn_dec_sw_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
83
amdgpu_ring_write(ring, VCN_DEC_SW_CMD_REG_WRITE);
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
84
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.c
85
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.h
32
void vcn_dec_sw_ring_emit_fence(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.h
34
void vcn_dec_sw_ring_insert_end(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.h
35
void vcn_dec_sw_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job,
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.h
37
void vcn_dec_sw_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.h
39
void vcn_dec_sw_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_sw_ring.h
41
void vcn_dec_sw_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1000
WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1001
WREG32_SOC15(UVD, 0, mmUVD_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1003
ring = &adev->vcn.inst->ring_enc[1];
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1004
WREG32_SOC15(UVD, 0, mmUVD_RB_RPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1005
WREG32_SOC15(UVD, 0, mmUVD_RB_WPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1006
WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_LO2, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1007
WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_HI2, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1008
WREG32_SOC15(UVD, 0, mmUVD_RB_SIZE2, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1023
struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1127
rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1140
(upper_32_bits(ring->gpu_addr) >> 2));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1144
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1146
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1153
ring->wptr = RREG32_SOC15(UVD, 0, mmUVD_RBC_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1155
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
129
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1298
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1327
ring = &adev->vcn.inst->ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1328
WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_LO, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1329
WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1330
WREG32_SOC15(UVD, 0, mmUVD_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1331
WREG32_SOC15(UVD, 0, mmUVD_RB_RPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1332
WREG32_SOC15(UVD, 0, mmUVD_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1334
ring = &adev->vcn.inst->ring_enc[1];
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1335
WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_LO2, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1336
WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_HI2, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1337
WREG32_SOC15(UVD, 0, mmUVD_RB_SIZE2, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1338
WREG32_SOC15(UVD, 0, mmUVD_RB_RPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1339
WREG32_SOC15(UVD, 0, mmUVD_RB_WPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1387
ring = adev->jpeg.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1393
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1395
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1396
WREG32_SOC15(UVD, 0, mmUVD_JRBC_RB_RPTR, ring->wptr);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1397
WREG32_SOC15(UVD, 0, mmUVD_JRBC_RB_WPTR, ring->wptr);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1462
static uint64_t vcn_v1_0_dec_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1464
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1476
static uint64_t vcn_v1_0_dec_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1478
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1490
static void vcn_v1_0_dec_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1492
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1496
lower_32_bits(ring->wptr) | 0x80000000);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1498
WREG32_SOC15(UVD, 0, mmUVD_RBC_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1508
static void vcn_v1_0_dec_ring_insert_start(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1510
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1512
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1514
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1515
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1517
amdgpu_ring_write(ring, VCN_DEC_CMD_PACKET_START << 1);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1527
static void vcn_v1_0_dec_ring_insert_end(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1529
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1531
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1533
amdgpu_ring_write(ring, VCN_DEC_CMD_PACKET_END << 1);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1546
static void vcn_v1_0_dec_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1549
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1553
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1555
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1556
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1558
amdgpu_ring_write(ring, addr & 0xffffffff);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1559
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1561
amdgpu_ring_write(ring, upper_32_bits(addr) & 0xff);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1562
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1564
amdgpu_ring_write(ring, VCN_DEC_CMD_FENCE << 1);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1566
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1568
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1569
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1571
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1572
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1574
amdgpu_ring_write(ring, VCN_DEC_CMD_TRAP << 1);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1587
static void vcn_v1_0_dec_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1592
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1595
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1597
amdgpu_ring_write(ring, vmid);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1599
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1601
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1602
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1604
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1605
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1607
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1610
static void vcn_v1_0_dec_ring_emit_reg_wait(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1614
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1616
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1618
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1619
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
162
ring = &adev->vcn.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1621
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1622
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1624
amdgpu_ring_write(ring, mask);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1625
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1627
amdgpu_ring_write(ring, VCN_DEC_CMD_REG_READ_COND_WAIT << 1);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
163
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1630
static void vcn_v1_0_dec_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1633
struct amdgpu_vmhub *hub = &ring->adev->vmhub[ring->vm_hub];
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1636
pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
164
sprintf(ring->name, "vcn_dec");
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1642
vcn_v1_0_dec_ring_emit_reg_wait(ring, data0, data1, mask);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1645
static void vcn_v1_0_dec_ring_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1648
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
165
r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst->irq, 0,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1650
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1652
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1653
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1655
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1656
amdgpu_ring_write(ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1658
amdgpu_ring_write(ring, VCN_DEC_CMD_WRITE_REG << 1);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1668
static uint64_t vcn_v1_0_enc_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1670
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1672
if (ring == &adev->vcn.inst->ring_enc[0])
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1685
static uint64_t vcn_v1_0_enc_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1687
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1689
if (ring == &adev->vcn.inst->ring_enc[0])
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1702
static void vcn_v1_0_enc_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1704
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1706
if (ring == &adev->vcn.inst->ring_enc[0])
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1708
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1711
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1724
static void vcn_v1_0_enc_ring_emit_fence(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1729
amdgpu_ring_write(ring, VCN_ENC_CMD_FENCE);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1730
amdgpu_ring_write(ring, addr);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1731
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1732
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1733
amdgpu_ring_write(ring, VCN_ENC_CMD_TRAP);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1736
static void vcn_v1_0_enc_ring_insert_end(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1738
amdgpu_ring_write(ring, VCN_ENC_CMD_END);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1751
static void vcn_v1_0_enc_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1758
amdgpu_ring_write(ring, VCN_ENC_CMD_IB);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1759
amdgpu_ring_write(ring, vmid);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1760
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1761
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1762
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1765
static void vcn_v1_0_enc_ring_emit_reg_wait(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1769
amdgpu_ring_write(ring, VCN_ENC_CMD_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1770
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1771
amdgpu_ring_write(ring, mask);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1772
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1775
static void vcn_v1_0_enc_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1778
struct amdgpu_vmhub *hub = &ring->adev->vmhub[ring->vm_hub];
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1780
pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1783
vcn_v1_0_enc_ring_emit_reg_wait(ring, hub->ctx0_ptb_addr_lo32 +
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1788
static void vcn_v1_0_enc_ring_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1791
amdgpu_ring_write(ring, VCN_ENC_CMD_REG_WRITE);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1792
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1793
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1829
static void vcn_v1_0_dec_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1831
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1834
WARN_ON(ring->wptr % 2 || count % 2);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1837
amdgpu_ring_write(ring, PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_NO_OP), 0));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1838
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
184
ring = &adev->vcn.inst->ring_enc[i];
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
185
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
186
sprintf(ring->name, "vcn_enc%d", i);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
187
r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst->irq, 0,
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1909
static void vcn_v1_0_ring_begin_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1911
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1916
if (amdgpu_fence_wait_empty(ring->adev->jpeg.inst->ring_dec))
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1919
vcn_v1_0_set_pg_for_begin_use(ring, set_clocks);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1923
void vcn_v1_0_set_pg_for_begin_use(struct amdgpu_ring *ring, bool set_clocks)
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1925
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1953
if (ring->funcs->type == AMDGPU_RING_TYPE_VCN_ENC)
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1955
else if (ring->funcs->type == AMDGPU_RING_TYPE_VCN_JPEG)
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1962
void vcn_v1_0_ring_end_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1964
schedule_delayed_work(&ring->adev->vcn.inst[0].idle_work, VCN_IDLE_TIMEOUT);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
1965
mutex_unlock(&ring->adev->vcn.inst[0].vcn1_jpeg1_workaround);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
250
struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
253
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
258
ring = &adev->vcn.inst->ring_enc[i];
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
259
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
264
ring = adev->jpeg.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
265
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
843
struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
94
static void vcn_v1_0_ring_begin_use(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
963
rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
976
(upper_32_bits(ring->gpu_addr) >> 2));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
980
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
982
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
989
ring->wptr = RREG32_SOC15(UVD, 0, mmUVD_RBC_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
991
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
996
ring = &adev->vcn.inst->ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
997
WREG32_SOC15(UVD, 0, mmUVD_RB_RPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
998
WREG32_SOC15(UVD, 0, mmUVD_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
999
WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_LO, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.h
27
void vcn_v1_0_ring_end_use(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.h
28
void vcn_v1_0_set_pg_for_begin_use(struct amdgpu_ring *ring, bool set_clocks);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1005
struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1131
rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1142
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1144
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1149
ring->wptr = RREG32_SOC15(UVD, 0, mmUVD_RBC_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1151
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1155
ring = &adev->vcn.inst->ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1156
WREG32_SOC15(UVD, 0, mmUVD_RB_RPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1157
WREG32_SOC15(UVD, 0, mmUVD_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1158
WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_LO, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1159
WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1160
WREG32_SOC15(UVD, 0, mmUVD_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1164
ring = &adev->vcn.inst->ring_enc[1];
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1165
WREG32_SOC15(UVD, 0, mmUVD_RB_RPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1166
WREG32_SOC15(UVD, 0, mmUVD_RB_WPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1167
WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_LO2, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1168
WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_HI2, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1169
WREG32_SOC15(UVD, 0, mmUVD_RB_SIZE2, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1295
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1327
ring = &adev->vcn.inst->ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1328
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1329
WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_LO, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1330
WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1331
WREG32_SOC15(UVD, 0, mmUVD_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1332
WREG32_SOC15(UVD, 0, mmUVD_RB_RPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1333
WREG32_SOC15(UVD, 0, mmUVD_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1337
ring = &adev->vcn.inst->ring_enc[1];
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1338
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1339
WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_LO2, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1340
WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_HI2, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1341
WREG32_SOC15(UVD, 0, mmUVD_RB_SIZE2, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1342
WREG32_SOC15(UVD, 0, mmUVD_RB_RPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1343
WREG32_SOC15(UVD, 0, mmUVD_RB_WPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
137
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1425
static uint64_t vcn_v2_0_dec_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1427
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1439
static uint64_t vcn_v2_0_dec_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1441
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1443
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1444
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1456
static void vcn_v2_0_dec_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1458
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1462
lower_32_bits(ring->wptr) | 0x80000000);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1464
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1465
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1466
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1468
WREG32_SOC15(UVD, 0, mmUVD_RBC_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1479
void vcn_v2_0_dec_ring_insert_start(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1481
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1483
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.data0, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1484
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1485
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.cmd, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1486
amdgpu_ring_write(ring, VCN_DEC_KMD_CMD | (VCN_DEC_CMD_PACKET_START << 1));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1496
void vcn_v2_0_dec_ring_insert_end(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1498
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1500
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[0].internal.cmd, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1501
amdgpu_ring_write(ring, VCN_DEC_KMD_CMD | (VCN_DEC_CMD_PACKET_END << 1));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1512
void vcn_v2_0_dec_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1514
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1517
WARN_ON(ring->wptr % 2 || count % 2);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1520
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.nop, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1521
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1535
void vcn_v2_0_dec_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1538
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1541
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.context_id, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1542
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1544
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.data0, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1545
amdgpu_ring_write(ring, addr & 0xffffffff);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1547
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.data1, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1548
amdgpu_ring_write(ring, upper_32_bits(addr) & 0xff);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1550
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.cmd, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1551
amdgpu_ring_write(ring, VCN_DEC_KMD_CMD | (VCN_DEC_CMD_FENCE << 1));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1553
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.data0, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1554
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1556
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.data1, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1557
amdgpu_ring_write(ring, 0);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1559
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.cmd, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1561
amdgpu_ring_write(ring, VCN_DEC_KMD_CMD | (VCN_DEC_CMD_TRAP << 1));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1574
void vcn_v2_0_dec_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1579
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1582
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.ib_vmid, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1583
amdgpu_ring_write(ring, vmid);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1585
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.ib_bar_low, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1586
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1587
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.ib_bar_high, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1588
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1589
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.ib_size, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1590
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1593
void vcn_v2_0_dec_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1596
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1598
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.data0, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1599
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1601
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.data1, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1602
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1604
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.gp_scratch8, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1605
amdgpu_ring_write(ring, mask);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1607
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.cmd, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1609
amdgpu_ring_write(ring, VCN_DEC_KMD_CMD | (VCN_DEC_CMD_REG_READ_COND_WAIT << 1));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1612
void vcn_v2_0_dec_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1615
struct amdgpu_vmhub *hub = &ring->adev->vmhub[ring->vm_hub];
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1618
pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1624
vcn_v2_0_dec_ring_emit_reg_wait(ring, data0, data1, mask);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1627
void vcn_v2_0_dec_ring_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1630
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1632
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.data0, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1633
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1635
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.data1, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1636
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1638
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.cmd, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1640
amdgpu_ring_write(ring, VCN_DEC_KMD_CMD | (VCN_DEC_CMD_WRITE_REG << 1));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1650
static uint64_t vcn_v2_0_enc_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1652
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1654
if (ring == &adev->vcn.inst->ring_enc[0])
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1667
static uint64_t vcn_v2_0_enc_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1669
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1671
if (ring == &adev->vcn.inst->ring_enc[0]) {
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1672
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1673
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1677
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1678
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
168
ring = &adev->vcn.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1691
static void vcn_v2_0_enc_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1693
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1695
if (ring == &adev->vcn.inst->ring_enc[0]) {
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1696
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1697
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1698
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
170
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1700
WREG32_SOC15(UVD, 0, mmUVD_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1703
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1704
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1705
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1707
WREG32_SOC15(UVD, 0, mmUVD_RB_WPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
171
ring->doorbell_index = adev->doorbell_index.vcn.vcn_ring0_1 << 1;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
172
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1722
void vcn_v2_0_enc_ring_emit_fence(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1727
amdgpu_ring_write(ring, VCN_ENC_CMD_FENCE);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1728
amdgpu_ring_write(ring, addr);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1729
amdgpu_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1730
amdgpu_ring_write(ring, seq);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1731
amdgpu_ring_write(ring, VCN_ENC_CMD_TRAP);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1734
void vcn_v2_0_enc_ring_insert_end(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1736
amdgpu_ring_write(ring, VCN_ENC_CMD_END);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
174
sprintf(ring->name, "vcn_dec");
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1749
void vcn_v2_0_enc_ring_emit_ib(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
175
r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst->irq, 0,
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1756
amdgpu_ring_write(ring, VCN_ENC_CMD_IB);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1757
amdgpu_ring_write(ring, vmid);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1758
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1759
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1760
amdgpu_ring_write(ring, ib->length_dw);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1763
void vcn_v2_0_enc_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1766
amdgpu_ring_write(ring, VCN_ENC_CMD_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1767
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1768
amdgpu_ring_write(ring, mask);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1769
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1772
void vcn_v2_0_enc_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1775
struct amdgpu_vmhub *hub = &ring->adev->vmhub[ring->vm_hub];
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1777
pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1780
vcn_v2_0_enc_ring_emit_reg_wait(ring, hub->ctx0_ptb_addr_lo32 +
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1785
void vcn_v2_0_enc_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val)
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1787
amdgpu_ring_write(ring, VCN_ENC_CMD_REG_WRITE);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1788
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1789
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1825
int vcn_v2_0_dec_ring_test_ring(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1827
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1835
WREG32(adev->vcn.inst[ring->me].external.scratch9, 0xCAFEDEAD);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1836
r = amdgpu_ring_alloc(ring, 4);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1839
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.cmd, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1840
amdgpu_ring_write(ring, VCN_DEC_KMD_CMD | (VCN_DEC_CMD_PACKET_START << 1));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1841
amdgpu_ring_write(ring, PACKET0(adev->vcn.inst[ring->me].internal.scratch9, 0));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1842
amdgpu_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1843
amdgpu_ring_commit(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1845
tmp = RREG32(adev->vcn.inst[ring->me].external.scratch9);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
1959
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
201
ring = &adev->vcn.inst->ring_enc[i];
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
202
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
203
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
205
ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 2 + i;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
2053
ring = &adev->vcn.inst->ring_enc[r];
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
2054
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
2057
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
2060
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
2063
ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
2066
ring = &adev->vcn.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
2067
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
207
ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 1 + i;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
2071
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
2075
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
2077
tmp = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
208
sprintf(ring->name, "vcn_enc%d", i);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
209
r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst->irq, 0,
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
285
struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
288
adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
289
ring->doorbell_index, 0);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
294
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
300
ring->sched.ready = false;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
303
ring = &adev->vcn.inst->ring_enc[i];
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
304
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
857
struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec;
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
952
rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
971
(upper_32_bits(ring->gpu_addr) >> 2));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
975
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
977
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
984
ring->wptr = RREG32_SOC15(UVD, 0, mmUVD_RBC_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c
986
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.h
27
extern void vcn_v2_0_dec_ring_insert_start(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.h
28
extern void vcn_v2_0_dec_ring_insert_end(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.h
29
extern void vcn_v2_0_dec_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.h
30
extern void vcn_v2_0_dec_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq,
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.h
32
extern void vcn_v2_0_dec_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job,
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.h
34
extern void vcn_v2_0_dec_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.h
36
extern void vcn_v2_0_dec_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.h
38
extern void vcn_v2_0_dec_ring_emit_wreg(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.h
40
extern int vcn_v2_0_dec_ring_test_ring(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.h
42
extern void vcn_v2_0_enc_ring_insert_end(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.h
43
extern void vcn_v2_0_enc_ring_emit_fence(struct amdgpu_ring *ring, u64 addr,
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.h
45
extern void vcn_v2_0_enc_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job,
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.h
47
extern void vcn_v2_0_enc_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.h
49
extern void vcn_v2_0_enc_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.h
51
extern void vcn_v2_0_enc_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1004
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1105
ring = &adev->vcn.inst[inst_idx].ring_dec;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1107
rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1126
(upper_32_bits(ring->gpu_addr) >> 2));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1130
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1132
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1139
ring->wptr = RREG32_SOC15(VCN, inst_idx, mmUVD_RBC_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1141
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1162
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1295
ring = &adev->vcn.inst[i].ring_dec;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1297
rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1308
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1310
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1315
ring->wptr = RREG32_SOC15(VCN, i, mmUVD_RBC_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1317
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1321
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1322
WREG32_SOC15(VCN, i, mmUVD_RB_RPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1323
WREG32_SOC15(VCN, i, mmUVD_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1324
WREG32_SOC15(VCN, i, mmUVD_RB_BASE_LO, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1325
WREG32_SOC15(VCN, i, mmUVD_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1326
WREG32_SOC15(VCN, i, mmUVD_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1330
ring = &adev->vcn.inst[i].ring_enc[1];
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1331
WREG32_SOC15(VCN, i, mmUVD_RB_RPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1332
WREG32_SOC15(VCN, i, mmUVD_RB_WPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1333
WREG32_SOC15(VCN, i, mmUVD_RB_BASE_LO2, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1334
WREG32_SOC15(VCN, i, mmUVD_RB_BASE_HI2, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1335
WREG32_SOC15(VCN, i, mmUVD_RB_SIZE2, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1404
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1495
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1496
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1500
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1503
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1506
ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1508
ring = &adev->vcn.inst[i].ring_dec;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1509
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1513
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1517
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1520
rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
157
static void vcn_v2_5_ring_begin_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
159
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
160
struct amdgpu_vcn_inst *v = &adev->vcn.inst[ring->me];
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1656
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1690
ring = &adev->vcn.inst[inst_idx].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1691
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1692
WREG32_SOC15(VCN, inst_idx, mmUVD_RB_BASE_LO, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1693
WREG32_SOC15(VCN, inst_idx, mmUVD_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1694
WREG32_SOC15(VCN, inst_idx, mmUVD_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1695
WREG32_SOC15(VCN, inst_idx, mmUVD_RB_RPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1696
WREG32_SOC15(VCN, inst_idx, mmUVD_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1700
ring = &adev->vcn.inst[inst_idx].ring_enc[1];
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1701
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1702
WREG32_SOC15(VCN, inst_idx, mmUVD_RB_BASE_LO2, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1703
WREG32_SOC15(VCN, inst_idx, mmUVD_RB_BASE_HI2, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1704
WREG32_SOC15(VCN, inst_idx, mmUVD_RB_SIZE2, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1705
WREG32_SOC15(VCN, inst_idx, mmUVD_RB_RPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1706
WREG32_SOC15(VCN, inst_idx, mmUVD_RB_WPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1735
static uint64_t vcn_v2_5_dec_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1737
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1739
return RREG32_SOC15(VCN, ring->me, mmUVD_RBC_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1749
static uint64_t vcn_v2_5_dec_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1751
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1753
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1754
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1756
return RREG32_SOC15(VCN, ring->me, mmUVD_RBC_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1766
static void vcn_v2_5_dec_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1768
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1770
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1771
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1772
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1774
WREG32_SOC15(VCN, ring->me, mmUVD_RBC_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
179
if (ring->funcs->type == AMDGPU_RING_TYPE_VCN_ENC) {
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1816
static uint64_t vcn_v2_5_enc_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1818
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1820
if (ring == &adev->vcn.inst[ring->me].ring_enc[0])
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1821
return RREG32_SOC15(VCN, ring->me, mmUVD_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1823
return RREG32_SOC15(VCN, ring->me, mmUVD_RB_RPTR2);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1833
static uint64_t vcn_v2_5_enc_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1835
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1837
if (ring == &adev->vcn.inst[ring->me].ring_enc[0]) {
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1838
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1839
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1841
return RREG32_SOC15(VCN, ring->me, mmUVD_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1843
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1844
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1846
return RREG32_SOC15(VCN, ring->me, mmUVD_RB_WPTR2);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1857
static void vcn_v2_5_enc_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1859
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1861
if (ring == &adev->vcn.inst[ring->me].ring_enc[0]) {
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1862
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1863
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1864
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1866
WREG32_SOC15(VCN, ring->me, mmUVD_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1869
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1870
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1871
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
1873
WREG32_SOC15(VCN, ring->me, mmUVD_RB_WPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
200
static void vcn_v2_5_ring_end_use(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
202
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
205
if (ring->adev->pg_flags & AMD_PG_SUPPORT_VCN_DPG &&
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
206
ring->funcs->type == AMDGPU_RING_TYPE_VCN_ENC &&
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
207
!adev->vcn.inst[ring->me].using_unified_queue)
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
208
atomic_dec(&adev->vcn.inst[ring->me].dpg_enc_submission_cnt);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
275
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
335
ring = &adev->vcn.inst[j].ring_dec;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
336
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
338
ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) +
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
342
ring->vm_hub = AMDGPU_MMHUB1(0);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
344
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
346
sprintf(ring->name, "vcn_dec_%d", j);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
347
r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst[j].irq,
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
355
ring = &adev->vcn.inst[j].ring_enc[i];
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
356
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
358
ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) +
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
363
ring->vm_hub = AMDGPU_MMHUB1(0);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
365
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
367
sprintf(ring->name, "vcn_enc_%d.%d", j, i);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
368
r = amdgpu_ring_init(adev, ring, 512,
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
461
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
478
ring = &adev->vcn.inst[j].ring_dec;
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
480
adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
481
ring->doorbell_index, j);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
483
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
488
ring = &adev->vcn.inst[j].ring_enc[i];
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
489
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1033
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1136
ring = &adev->vcn.inst[inst_idx].ring_dec;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1138
rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
115
static void vcn_v3_0_dec_ring_set_wptr(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1157
(upper_32_bits(ring->gpu_addr) >> 2));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
116
static void vcn_v3_0_enc_ring_set_wptr(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1161
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1163
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1170
ring->wptr = RREG32_SOC15(VCN, inst_idx, mmUVD_RBC_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1172
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1176
fw_shared->rb.wptr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1198
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1327
ring = &adev->vcn.inst[i].ring_dec;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1329
rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1342
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1344
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1350
ring->wptr = RREG32_SOC15(VCN, i, mmUVD_RBC_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1352
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1353
fw_shared->rb.wptr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1359
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1360
WREG32_SOC15(VCN, i, mmUVD_RB_RPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1361
WREG32_SOC15(VCN, i, mmUVD_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1362
WREG32_SOC15(VCN, i, mmUVD_RB_BASE_LO, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1363
WREG32_SOC15(VCN, i, mmUVD_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1364
WREG32_SOC15(VCN, i, mmUVD_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1368
ring = &adev->vcn.inst[i].ring_enc[1];
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1369
WREG32_SOC15(VCN, i, mmUVD_RB_RPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1370
WREG32_SOC15(VCN, i, mmUVD_RB_WPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1371
WREG32_SOC15(VCN, i, mmUVD_RB_BASE_LO2, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1372
WREG32_SOC15(VCN, i, mmUVD_RB_BASE_HI2, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1373
WREG32_SOC15(VCN, i, mmUVD_RB_SIZE2, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1388
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1495
ring = &adev->vcn.inst[i].ring_enc[j];
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1496
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1497
rb_addr = ring->gpu_addr;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1506
ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1509
ring = &adev->vcn.inst[i].ring_dec;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1510
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1511
rb_addr = ring->gpu_addr;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1519
tmp = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1719
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
175
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1754
ring = &adev->vcn.inst[inst_idx].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1755
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1756
WREG32_SOC15(VCN, inst_idx, mmUVD_RB_BASE_LO, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1757
WREG32_SOC15(VCN, inst_idx, mmUVD_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1758
WREG32_SOC15(VCN, inst_idx, mmUVD_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1759
WREG32_SOC15(VCN, inst_idx, mmUVD_RB_RPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1760
WREG32_SOC15(VCN, inst_idx, mmUVD_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1764
ring = &adev->vcn.inst[inst_idx].ring_enc[1];
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1765
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1766
WREG32_SOC15(VCN, inst_idx, mmUVD_RB_BASE_LO2, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1767
WREG32_SOC15(VCN, inst_idx, mmUVD_RB_BASE_HI2, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1768
WREG32_SOC15(VCN, inst_idx, mmUVD_RB_SIZE2, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1769
WREG32_SOC15(VCN, inst_idx, mmUVD_RB_RPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1770
WREG32_SOC15(VCN, inst_idx, mmUVD_RB_WPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1803
static uint64_t vcn_v3_0_dec_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1805
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1807
return RREG32_SOC15(VCN, ring->me, mmUVD_RBC_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1817
static uint64_t vcn_v3_0_dec_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1819
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1821
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1822
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1824
return RREG32_SOC15(VCN, ring->me, mmUVD_RBC_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1834
static void vcn_v3_0_dec_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1836
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1841
fw_shared = adev->vcn.inst[ring->me].fw_shared.cpu_addr;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1842
fw_shared->rb.wptr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1843
WREG32_SOC15(VCN, ring->me, mmUVD_SCRATCH2,
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1844
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1847
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1848
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1849
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1851
WREG32_SOC15(VCN, ring->me, mmUVD_RBC_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1993
struct amdgpu_ring *ring = amdgpu_job_ring(job);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
1999
if (!ring->me)
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2006
if (reg == PACKET0(p->adev->vcn.inst[ring->me].internal.data0, 0)) {
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2008
} else if (reg == PACKET0(p->adev->vcn.inst[ring->me].internal.data1, 0)) {
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2010
} else if (reg == PACKET0(p->adev->vcn.inst[ring->me].internal.cmd, 0) &&
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2060
static uint64_t vcn_v3_0_enc_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2062
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2064
if (ring == &adev->vcn.inst[ring->me].ring_enc[0])
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2065
return RREG32_SOC15(VCN, ring->me, mmUVD_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2067
return RREG32_SOC15(VCN, ring->me, mmUVD_RB_RPTR2);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2077
static uint64_t vcn_v3_0_enc_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2079
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2081
if (ring == &adev->vcn.inst[ring->me].ring_enc[0]) {
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2082
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2083
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2085
return RREG32_SOC15(VCN, ring->me, mmUVD_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2087
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2088
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2090
return RREG32_SOC15(VCN, ring->me, mmUVD_RB_WPTR2);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2101
static void vcn_v3_0_enc_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2103
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2105
if (ring == &adev->vcn.inst[ring->me].ring_enc[0]) {
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2106
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2107
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2108
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2110
WREG32_SOC15(VCN, ring->me, mmUVD_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2113
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2114
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2115
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
2117
WREG32_SOC15(VCN, ring->me, mmUVD_RB_WPTR2, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
235
ring = &adev->vcn.inst[i].ring_dec;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
236
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
238
ring->doorbell_index = vcn_doorbell_index + i * (adev->vcn.inst[i].num_enc_rings + 1);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
240
ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 8 * i;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
242
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
243
sprintf(ring->name, "vcn_dec_%d", i);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
244
r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst[i].irq, 0,
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
259
ring = &adev->vcn.inst[i].ring_enc[j];
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
260
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
262
ring->doorbell_index = vcn_doorbell_index + i * (adev->vcn.inst[i].num_enc_rings + 1) + 1 + j;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
264
ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 2 + j + 8 * i;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
266
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
267
sprintf(ring->name, "vcn_enc_%d.%d", i, j);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
268
r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst[i].irq, 0,
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
368
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
381
ring = &adev->vcn.inst[i].ring_dec;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
383
ring->sched.ready = false;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
384
ring->no_scheduler = true;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
385
dev_info(adev->dev, "ring %s is disabled by hypervisor\n", ring->name);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
387
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
388
ring->wptr_old = 0;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
389
vcn_v3_0_dec_ring_set_wptr(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
390
ring->sched.ready = true;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
394
ring = &adev->vcn.inst[i].ring_enc[j];
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
396
ring->sched.ready = false;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
397
ring->no_scheduler = true;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
398
dev_info(adev->dev, "ring %s is disabled by hypervisor\n", ring->name);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
400
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
401
ring->wptr_old = 0;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
402
vcn_v3_0_enc_ring_set_wptr(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
403
ring->sched.ready = true;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
412
ring = &adev->vcn.inst[i].ring_dec;
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
414
adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
415
ring->doorbell_index, i);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
417
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
422
ring = &adev->vcn.inst[i].ring_enc[j];
drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c
423
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1001
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
104
static void vcn_v4_0_unified_ring_set_wptr(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1094
ring = &adev->vcn.inst[inst_idx].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1096
WREG32_SOC15(VCN, inst_idx, regUVD_RB_BASE_LO, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1097
WREG32_SOC15(VCN, inst_idx, regUVD_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1098
WREG32_SOC15(VCN, inst_idx, regUVD_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1109
ring->wptr = RREG32_SOC15(VCN, inst_idx, regUVD_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1117
ring->doorbell_index << VCN_RB1_DB_CTRL__OFFSET__SHIFT |
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1141
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1281
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1283
ring->doorbell_index << VCN_RB1_DB_CTRL__OFFSET__SHIFT |
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1286
WREG32_SOC15(VCN, i, regUVD_RB_BASE_LO, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1287
WREG32_SOC15(VCN, i, regUVD_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1288
WREG32_SOC15(VCN, i, regUVD_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1299
ring->wptr = RREG32_SOC15(VCN, i, regUVD_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1317
uint8_t *rb_ptr = (uint8_t *)ring_enc->ring;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1748
static uint64_t vcn_v4_0_unified_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1750
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1752
if (ring != &adev->vcn.inst[ring->me].ring_enc[0])
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1755
return RREG32_SOC15(VCN, ring->me, regUVD_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1765
static uint64_t vcn_v4_0_unified_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1767
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1769
if (ring != &adev->vcn.inst[ring->me].ring_enc[0])
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1772
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1773
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1775
return RREG32_SOC15(VCN, ring->me, regUVD_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1785
static void vcn_v4_0_unified_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1787
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1789
if (ring != &adev->vcn.inst[ring->me].ring_enc[0])
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1792
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1793
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1794
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1796
WREG32_SOC15(VCN, ring->me, regUVD_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
183
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1928
struct amdgpu_ring *ring = amdgpu_job_ring(job);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1935
if (!ring->me)
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1959
static int vcn_v4_0_ring_reset(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1963
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1964
struct amdgpu_vcn_inst *vinst = &adev->vcn.inst[ring->me];
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1967
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
1974
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
219
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
220
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
222
ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + i *
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
225
ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 2 + 8 * i;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
226
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
227
sprintf(ring->name, "vcn_unified_%d", i);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
229
r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst[i].irq, 0,
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
321
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
333
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
334
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
335
ring->wptr_old = 0;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
336
vcn_v4_0_unified_ring_set_wptr(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
337
ring->sched.ready = true;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
344
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
346
adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,
drivers/gpu/drm/amd/amdgpu/vcn_v4_0.c
349
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1195
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1322
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1327
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1329
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1332
ring->ring_size / sizeof(uint32_t));
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1347
ring->wptr = RREG32_SOC15(VCN, vcn_inst, regUVD_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1541
static uint64_t vcn_v4_0_3_unified_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1543
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1545
if (ring != &adev->vcn.inst[ring->me].ring_enc[0])
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1548
return RREG32_SOC15(VCN, GET_INST(VCN, ring->me), regUVD_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1558
static uint64_t vcn_v4_0_3_unified_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1560
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1562
if (ring != &adev->vcn.inst[ring->me].ring_enc[0])
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1565
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1566
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1568
return RREG32_SOC15(VCN, GET_INST(VCN, ring->me),
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1572
void vcn_v4_0_3_enc_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1576
if (vcn_v4_0_3_normalizn_reqd(ring->adev))
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1579
amdgpu_ring_write(ring, VCN_ENC_CMD_REG_WAIT);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1580
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1581
amdgpu_ring_write(ring, mask);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1582
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1585
void vcn_v4_0_3_enc_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1589
if (vcn_v4_0_3_normalizn_reqd(ring->adev))
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1592
amdgpu_ring_write(ring, VCN_ENC_CMD_REG_WRITE);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1593
amdgpu_ring_write(ring, reg << 2);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1594
amdgpu_ring_write(ring, val);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1597
void vcn_v4_0_3_enc_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1600
struct amdgpu_vmhub *hub = &ring->adev->vmhub[ring->vm_hub];
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1602
pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1605
vcn_v4_0_3_enc_ring_emit_reg_wait(ring, hub->ctx0_ptb_addr_lo32 +
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1610
void vcn_v4_0_3_ring_emit_hdp_flush(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1624
static void vcn_v4_0_3_unified_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1626
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1628
if (ring != &adev->vcn.inst[ring->me].ring_enc[0])
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1631
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1632
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1633
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1635
WREG32_SOC15(VCN, GET_INST(VCN, ring->me), regUVD_RB_WPTR,
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1636
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1642
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1647
ring = &adev->jpeg.inst[inst].ring_dec[i];
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1649
drm_sched_wqueue_stop(&ring->sched);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1651
wait_seq = atomic_read(&ring->fence_drv.last_seq);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1658
amdgpu_fence_wait_polling(ring, wait_seq, adev->video_timeout);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1666
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1670
ring = &adev->jpeg.inst[inst].ring_dec[i];
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1672
amdgpu_fence_driver_force_completion(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1674
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1678
(ring->pipe ? (ring->pipe - 0x15) : 0),
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1679
ring->doorbell_index << VCN_JPEG_DB_CTRL__OFFSET__SHIFT |
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1682
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1686
drm_sched_wqueue_start(&ring->sched);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1694
static int vcn_v4_0_3_ring_reset(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1700
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1701
struct amdgpu_vcn_inst *vinst = &adev->vcn.inst[ring->me];
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1714
vcn_v4_0_3_reset_jpeg_pre_helper(adev, ring->me);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1715
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1717
vcn_inst = GET_INST(VCN, ring->me);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1734
vcn_v4_0_3_start_dpg_mode(vinst, adev->vcn.inst[ring->me].indirect_sram);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1736
r = amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
174
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1749
r = vcn_v4_0_3_reset_jpeg_post_helper(adev, ring->me);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
201
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
202
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
205
ring->doorbell_index =
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
209
ring->doorbell_index =
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
213
ring->vm_hub = AMDGPU_MMHUB0(adev->vcn.inst[i].aid_id);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
214
sprintf(ring->name, "vcn_unified_%d", adev->vcn.inst[i].aid_id);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
219
r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst[0].irq, 0,
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
296
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
300
ring = &adev->vcn.inst[inst_idx].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
301
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
302
adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
307
ring->doorbell_index << VCN_RB1_DB_CTRL__OFFSET__SHIFT |
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
327
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
337
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
338
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
339
ring->wptr_old = 0;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
340
vcn_v4_0_3_unified_ring_set_wptr(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
341
ring->sched.ready = true;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
352
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
361
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
851
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
953
ring = &adev->vcn.inst[inst_idx].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
96
static void vcn_v4_0_3_unified_ring_set_wptr(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
960
lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
962
upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
965
ring->ring_size / sizeof(uint32_t));
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
976
ring->wptr = RREG32_SOC15(VCN, vcn_inst, regUVD_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.h
35
void vcn_v4_0_3_enc_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.h
38
void vcn_v4_0_3_enc_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg,
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.h
40
void vcn_v4_0_3_enc_ring_emit_vm_flush(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.h
42
void vcn_v4_0_3_ring_emit_hdp_flush(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1007
ring = &adev->vcn.inst[inst_idx].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1009
WREG32_SOC15(VCN, inst_idx, regUVD_RB_BASE_LO, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1010
WREG32_SOC15(VCN, inst_idx, regUVD_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1011
WREG32_SOC15(VCN, inst_idx, regUVD_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1022
ring->wptr = RREG32_SOC15(VCN, inst_idx, regUVD_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
103
static void vcn_v4_0_5_unified_ring_set_wptr(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1030
ring->doorbell_index << VCN_RB1_DB_CTRL__OFFSET__SHIFT |
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1053
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1194
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1196
ring->doorbell_index << VCN_RB1_DB_CTRL__OFFSET__SHIFT |
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1199
WREG32_SOC15(VCN, i, regUVD_RB_BASE_LO, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1200
WREG32_SOC15(VCN, i, regUVD_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1201
WREG32_SOC15(VCN, i, regUVD_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1212
ring->wptr = RREG32_SOC15(VCN, i, regUVD_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1412
static uint64_t vcn_v4_0_5_unified_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1414
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1416
if (ring != &adev->vcn.inst[ring->me].ring_enc[0])
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1419
return RREG32_SOC15(VCN, ring->me, regUVD_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1429
static uint64_t vcn_v4_0_5_unified_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1431
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1433
if (ring != &adev->vcn.inst[ring->me].ring_enc[0])
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1436
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1437
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1439
return RREG32_SOC15(VCN, ring->me, regUVD_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1449
static void vcn_v4_0_5_unified_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1451
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1453
if (ring != &adev->vcn.inst[ring->me].ring_enc[0])
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1456
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1457
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1458
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1460
WREG32_SOC15(VCN, ring->me, regUVD_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1464
static int vcn_v4_0_5_ring_reset(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1468
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1469
struct amdgpu_vcn_inst *vinst = &adev->vcn.inst[ring->me];
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
147
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1472
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
1479
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
181
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
182
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
184
ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) +
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
187
ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) +
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
189
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
190
sprintf(ring->name, "vcn_unified_%d", i);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
192
r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst[i].irq, 0,
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
291
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
298
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
300
adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
303
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_5.c
916
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
1136
static uint64_t vcn_v5_0_0_unified_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
1138
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
1140
if (ring != &adev->vcn.inst[ring->me].ring_enc[0])
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
1143
return RREG32_SOC15(VCN, ring->me, regUVD_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
1153
static uint64_t vcn_v5_0_0_unified_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
1155
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
1157
if (ring != &adev->vcn.inst[ring->me].ring_enc[0])
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
1160
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
1161
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
1163
return RREG32_SOC15(VCN, ring->me, regUVD_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
1173
static void vcn_v5_0_0_unified_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
1175
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
1177
if (ring != &adev->vcn.inst[ring->me].ring_enc[0])
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
1180
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
1181
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
1182
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
1184
WREG32_SOC15(VCN, ring->me, regUVD_RB_WPTR, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
1188
static int vcn_v5_0_0_ring_reset(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
1192
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
1193
struct amdgpu_vcn_inst *vinst = &adev->vcn.inst[ring->me];
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
1196
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
1203
return amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
127
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
161
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
162
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
163
ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 2 + 8 * i;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
165
ring->vm_hub = AMDGPU_MMHUB0(0);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
166
sprintf(ring->name, "vcn_unified_%d", i);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
168
r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst[i].irq, 0,
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
255
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
262
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
264
adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
267
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
700
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
764
ring = &adev->vcn.inst[inst_idx].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
766
WREG32_SOC15(VCN, inst_idx, regUVD_RB_BASE_LO, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
767
WREG32_SOC15(VCN, inst_idx, regUVD_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
768
WREG32_SOC15(VCN, inst_idx, regUVD_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
779
ring->wptr = RREG32_SOC15(VCN, inst_idx, regUVD_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
787
ring->doorbell_index << VCN_RB1_DB_CTRL__OFFSET__SHIFT |
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
810
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
85
static void vcn_v5_0_0_unified_ring_set_wptr(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
922
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
924
ring->doorbell_index << VCN_RB1_DB_CTRL__OFFSET__SHIFT |
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
927
WREG32_SOC15(VCN, i, regUVD_RB_BASE_LO, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
928
WREG32_SOC15(VCN, i, regUVD_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
929
WREG32_SOC15(VCN, i, regUVD_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_0.c
940
ring->wptr = RREG32_SOC15(VCN, i, regUVD_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1088
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1091
ring->doorbell_index << VCN_RB1_DB_CTRL__OFFSET__SHIFT |
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1097
WREG32_SOC15(VCN, vcn_inst, regUVD_RB_BASE_LO, ring->gpu_addr);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1098
WREG32_SOC15(VCN, vcn_inst, regUVD_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1099
WREG32_SOC15(VCN, vcn_inst, regUVD_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1110
ring->wptr = RREG32_SOC15(VCN, vcn_inst, regUVD_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1251
static uint64_t vcn_v5_0_1_unified_ring_get_rptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1253
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1255
if (ring != &adev->vcn.inst[ring->me].ring_enc[0])
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1258
return RREG32_SOC15(VCN, GET_INST(VCN, ring->me), regUVD_RB_RPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1268
static uint64_t vcn_v5_0_1_unified_ring_get_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1270
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1272
if (ring != &adev->vcn.inst[ring->me].ring_enc[0])
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1275
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1276
return *ring->wptr_cpu_addr;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1278
return RREG32_SOC15(VCN, GET_INST(VCN, ring->me), regUVD_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1288
static void vcn_v5_0_1_unified_ring_set_wptr(struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1290
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1292
if (ring != &adev->vcn.inst[ring->me].ring_enc[0])
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1295
if (ring->use_doorbell) {
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1296
*ring->wptr_cpu_addr = lower_32_bits(ring->wptr);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1297
WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1299
WREG32_SOC15(VCN, GET_INST(VCN, ring->me), regUVD_RB_WPTR,
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1300
lower_32_bits(ring->wptr));
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1306
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1311
ring = &adev->jpeg.inst[inst].ring_dec[i];
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1313
drm_sched_wqueue_stop(&ring->sched);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1314
wait_seq = atomic_read(&ring->fence_drv.last_seq);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1321
amdgpu_fence_wait_polling(ring, wait_seq, adev->video_timeout);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1329
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1333
ring = &adev->jpeg.inst[inst].ring_dec[i];
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1335
amdgpu_fence_driver_force_completion(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1337
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1341
(ring->pipe ? (ring->pipe - 0x15) : 0),
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1342
ring->doorbell_index << VCN_JPEG_DB_CTRL__OFFSET__SHIFT |
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1345
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1349
drm_sched_wqueue_start(&ring->sched);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1357
static int vcn_v5_0_1_ring_reset(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1363
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1364
struct amdgpu_vcn_inst *vinst = &adev->vcn.inst[ring->me];
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1369
vcn_v5_0_1_reset_jpeg_pre_helper(adev, ring->me);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1378
amdgpu_ring_reset_helper_begin(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1380
vcn_inst = GET_INST(VCN, ring->me);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1393
vcn_v5_0_1_hw_init_inst(adev, ring->me);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1396
r = amdgpu_ring_reset_helper_end(ring, timedout_fence);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1410
r = vcn_v5_0_1_reset_jpeg_post_helper(adev, ring->me);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
163
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
189
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
190
ring->use_doorbell = true;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
192
ring->doorbell_index =
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
196
ring->doorbell_index =
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
200
ring->vm_hub = AMDGPU_MMHUB0(adev->vcn.inst[i].aid_id);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
201
sprintf(ring->name, "vcn_unified_%d", adev->vcn.inst[i].aid_id);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
203
r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst[i].irq, 0,
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
275
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
279
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
281
if (ring->use_doorbell)
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
282
adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
300
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
309
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
310
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
311
ring->wptr_old = 0;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
312
vcn_v5_0_1_unified_ring_set_wptr(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
313
ring->sched.ready = true;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
319
ring = &adev->vcn.inst[i].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
325
r = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
673
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
750
ring = &adev->vcn.inst[inst_idx].ring_enc[0];
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
752
WREG32_SOC15(VCN, vcn_inst, regUVD_RB_BASE_LO, lower_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
753
WREG32_SOC15(VCN, vcn_inst, regUVD_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
754
WREG32_SOC15(VCN, vcn_inst, regUVD_RB_SIZE, ring->ring_size / sizeof(uint32_t));
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
765
ring->wptr = RREG32_SOC15(VCN, vcn_inst, regUVD_RB_WPTR);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
774
ring->doorbell_index << VCN_RB1_DB_CTRL__OFFSET__SHIFT |
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
82
static void vcn_v5_0_1_unified_ring_set_wptr(struct amdgpu_ring *ring);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
983
struct amdgpu_ring *ring;
drivers/gpu/drm/amd/amdgpu/vi.c
1310
static void vi_flush_hdp(struct amdgpu_device *adev, struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vi.c
1312
if (!ring || !ring->funcs->emit_wreg) {
drivers/gpu/drm/amd/amdgpu/vi.c
1316
amdgpu_ring_emit_wreg(ring, mmHDP_MEM_COHERENCY_FLUSH_CNTL, 1);
drivers/gpu/drm/amd/amdgpu/vi.c
1321
struct amdgpu_ring *ring)
drivers/gpu/drm/amd/amdgpu/vi.c
1323
if (!ring || !ring->funcs->emit_wreg) {
drivers/gpu/drm/amd/amdgpu/vi.c
1327
amdgpu_ring_emit_wreg(ring, mmHDP_DEBUG0, 1);
drivers/gpu/drm/amd/amdgpu/vpe_v6_1.c
102
struct amdgpu_device *adev = vpe->ring.adev;
drivers/gpu/drm/amd/amdgpu/vpe_v6_1.c
125
struct amdgpu_device *adev = vpe->ring.adev;
drivers/gpu/drm/amd/amdgpu/vpe_v6_1.c
208
struct amdgpu_ring *ring = &vpe->ring;
drivers/gpu/drm/amd/amdgpu/vpe_v6_1.c
209
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdgpu/vpe_v6_1.c
217
rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/vpe_v6_1.c
232
lower_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFC);
drivers/gpu/drm/amd/amdgpu/vpe_v6_1.c
234
upper_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFF);
drivers/gpu/drm/amd/amdgpu/vpe_v6_1.c
238
WREG32(vpe_get_reg_offset(vpe, i, regVPEC_QUEUE0_RB_BASE), ring->gpu_addr >> 8);
drivers/gpu/drm/amd/amdgpu/vpe_v6_1.c
239
WREG32(vpe_get_reg_offset(vpe, i, regVPEC_QUEUE0_RB_BASE_HI), ring->gpu_addr >> 40);
drivers/gpu/drm/amd/amdgpu/vpe_v6_1.c
241
ring->wptr = 0;
drivers/gpu/drm/amd/amdgpu/vpe_v6_1.c
245
WREG32(vpe_get_reg_offset(vpe, i, regVPEC_QUEUE0_RB_WPTR), lower_32_bits(ring->wptr) << 2);
drivers/gpu/drm/amd/amdgpu/vpe_v6_1.c
246
WREG32(vpe_get_reg_offset(vpe, i, regVPEC_QUEUE0_RB_WPTR_HI), upper_32_bits(ring->wptr) << 2);
drivers/gpu/drm/amd/amdgpu/vpe_v6_1.c
251
doorbell_offset = REG_SET_FIELD(doorbell_offset, VPEC_QUEUE0_DOORBELL_OFFSET, OFFSET, ring->doorbell_index + i*4);
drivers/gpu/drm/amd/amdgpu/vpe_v6_1.c
255
doorbell = REG_SET_FIELD(doorbell, VPEC_QUEUE0_DOORBELL, ENABLE, ring->use_doorbell ? 1 : 0);
drivers/gpu/drm/amd/amdgpu/vpe_v6_1.c
258
adev->nbio.funcs->vpe_doorbell_range(adev, i, ring->use_doorbell, ring->doorbell_index + i*4, 4);
drivers/gpu/drm/amd/amdgpu/vpe_v6_1.c
269
ret = amdgpu_ring_test_helper(ring);
drivers/gpu/drm/amd/amdgpu/vpe_v6_1.c
278
struct amdgpu_device *adev = vpe->ring.adev;
drivers/gpu/drm/amd/amdgpu/vpe_v6_1.c
304
vpe->ring.sched.ready = false;
drivers/gpu/drm/amd/amdgpu/vpe_v6_1.c
342
amdgpu_fence_process(&adev->vpe.ring);
drivers/gpu/drm/amd/amdgpu/vpe_v6_1.c
68
base = vpe->ring.adev->reg_offset[VPE_HWIP][inst][0];
drivers/gpu/drm/amd/amdgpu/vpe_v6_1.c
75
struct amdgpu_device *adev = vpe->ring.adev;
drivers/gpu/drm/amd/amdkfd/kfd_migrate.c
132
struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring;
drivers/gpu/drm/amd/amdkfd/kfd_migrate.c
148
r = svm_migrate_gart_map(ring, entity, size, sys, &gart_d, 0);
drivers/gpu/drm/amd/amdkfd/kfd_migrate.c
151
r = svm_migrate_gart_map(ring, entity, size, sys, &gart_s,
drivers/gpu/drm/amd/amdkfd/kfd_migrate.c
48
svm_migrate_gart_map(struct amdgpu_ring *ring,
drivers/gpu/drm/amd/amdkfd/kfd_migrate.c
53
struct amdgpu_device *adev = ring->adev;
drivers/gpu/drm/amd/amdkfd/kfd_migrate.c
84
amdgpu_ring_pad_ib(ring, &job->ibs[0]);
drivers/gpu/drm/amd/pm/amdgpu_dpm.c
581
struct amdgpu_ring *ring = adev->rings[i];
drivers/gpu/drm/amd/pm/amdgpu_dpm.c
582
if (ring && ring->sched.ready)
drivers/gpu/drm/amd/pm/amdgpu_dpm.c
583
amdgpu_fence_wait_empty(ring);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2469
struct intel_ring *ring = ce->ring;
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2477
if (intel_ring_update_space(ring) >= PAGE_SIZE)
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2488
if (rq->ring != ring)
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2492
ring->emit, ring->size) > ring->size / 2)
drivers/gpu/drm/i915/gt/gen2_engine_cs.c
167
assert_ring_tail_valid(rq->ring, rq->tail);
drivers/gpu/drm/i915/gt/gen6_engine_cs.c
173
assert_ring_tail_valid(rq->ring, rq->tail);
drivers/gpu/drm/i915/gt/gen6_engine_cs.c
370
assert_ring_tail_valid(rq->ring, rq->tail);
drivers/gpu/drm/i915/gt/gen6_engine_cs.c
387
assert_ring_tail_valid(rq->ring, rq->tail);
drivers/gpu/drm/i915/gt/gen6_engine_cs.c
419
assert_ring_tail_valid(rq->ring, rq->tail);
drivers/gpu/drm/i915/gt/gen8_engine_cs.c
604
struct intel_ring *ring __maybe_unused = rq->ring;
drivers/gpu/drm/i915/gt/gen8_engine_cs.c
607
GEM_BUG_ON(intel_ring_direction(ring, rq->wa_tail, rq->head) <= 0);
drivers/gpu/drm/i915/gt/gen8_engine_cs.c
654
assert_ring_tail_valid(rq->ring, rq->tail);
drivers/gpu/drm/i915/gt/gen8_engine_cs.c
802
assert_ring_tail_valid(rq->ring, rq->tail);
drivers/gpu/drm/i915/gt/intel_context.c
152
static int __ring_active(struct intel_ring *ring,
drivers/gpu/drm/i915/gt/intel_context.c
157
err = intel_ring_pin(ring, ww);
drivers/gpu/drm/i915/gt/intel_context.c
161
err = i915_active_acquire(&ring->vma->active);
drivers/gpu/drm/i915/gt/intel_context.c
168
intel_ring_unpin(ring);
drivers/gpu/drm/i915/gt/intel_context.c
172
static void __ring_retire(struct intel_ring *ring)
drivers/gpu/drm/i915/gt/intel_context.c
174
i915_active_release(&ring->vma->active);
drivers/gpu/drm/i915/gt/intel_context.c
175
intel_ring_unpin(ring);
drivers/gpu/drm/i915/gt/intel_context.c
185
err = __ring_active(ce->ring, ww);
drivers/gpu/drm/i915/gt/intel_context.c
206
__ring_retire(ce->ring);
drivers/gpu/drm/i915/gt/intel_context.c
216
__ring_retire(ce->ring);
drivers/gpu/drm/i915/gt/intel_context.c
240
err = i915_gem_object_lock(ce->ring->vma->obj, ww);
drivers/gpu/drm/i915/gt/intel_context.c
279
i915_ggtt_offset(ce->ring->vma),
drivers/gpu/drm/i915/gt/intel_context.c
280
ce->ring->head, ce->ring->tail);
drivers/gpu/drm/i915/gt/intel_context.c
370
GEM_WARN_ON(!i915_active_acquire_if_busy(&ce->ring->vma->active));
drivers/gpu/drm/i915/gt/intel_context.c
371
__intel_ring_pin(ce->ring);
drivers/gpu/drm/i915/gt/intel_context.c
402
ce->ring = NULL;
drivers/gpu/drm/i915/gt/intel_context_types.h
115
struct intel_ring *ring;
drivers/gpu/drm/i915/gt/intel_engine_cs.c
1302
struct intel_ring ring;
drivers/gpu/drm/i915/gt/intel_engine_cs.c
1324
frame->ring.vaddr = frame->cs;
drivers/gpu/drm/i915/gt/intel_engine_cs.c
1325
frame->ring.size = sizeof(frame->cs);
drivers/gpu/drm/i915/gt/intel_engine_cs.c
1326
frame->ring.wrap =
drivers/gpu/drm/i915/gt/intel_engine_cs.c
1327
BITS_PER_TYPE(frame->ring.size) - ilog2(frame->ring.size);
drivers/gpu/drm/i915/gt/intel_engine_cs.c
1328
frame->ring.effective_size = frame->ring.size;
drivers/gpu/drm/i915/gt/intel_engine_cs.c
1329
intel_ring_update_space(&frame->ring);
drivers/gpu/drm/i915/gt/intel_engine_cs.c
1330
frame->rq.ring = &frame->ring;
drivers/gpu/drm/i915/gt/intel_engine_cs.c
1363
ce->ring = NULL;
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2022
i915_ggtt_offset(rq->ring->vma),
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2219
void *ring;
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2230
size += rq->ring->size;
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2232
ring = kmalloc(size, GFP_ATOMIC);
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2233
if (ring) {
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2234
const void *vaddr = rq->ring->vaddr;
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2239
len = rq->ring->size - head;
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2240
memcpy(ring, vaddr + head, len);
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2243
memcpy(ring + len, vaddr + head, size - len);
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2245
hexdump(m, ring, size);
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2246
kfree(ring);
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2292
i915_ggtt_offset(rq->ring->vma));
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2294
rq->ring->head);
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2296
rq->ring->tail);
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2298
rq->ring->emit);
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2300
rq->ring->space);
drivers/gpu/drm/i915/gt/intel_engine_pm.c
86
ce->ring->emit);
drivers/gpu/drm/i915/gt/intel_engine_types.h
460
struct intel_ring *ring;
drivers/gpu/drm/i915/gt/intel_engine_user.c
162
static int legacy_ring_idx(const struct legacy_ring *ring)
drivers/gpu/drm/i915/gt/intel_engine_user.c
174
if (GEM_DEBUG_WARN_ON(ring->class >= ARRAY_SIZE(map)))
drivers/gpu/drm/i915/gt/intel_engine_user.c
177
if (GEM_DEBUG_WARN_ON(ring->instance >= map[ring->class].max))
drivers/gpu/drm/i915/gt/intel_engine_user.c
180
return map[ring->class].base + ring->instance;
drivers/gpu/drm/i915/gt/intel_engine_user.c
183
static void add_legacy_ring(struct legacy_ring *ring,
drivers/gpu/drm/i915/gt/intel_engine_user.c
186
if (engine->gt != ring->gt || engine->class != ring->class) {
drivers/gpu/drm/i915/gt/intel_engine_user.c
187
ring->gt = engine->gt;
drivers/gpu/drm/i915/gt/intel_engine_user.c
188
ring->class = engine->class;
drivers/gpu/drm/i915/gt/intel_engine_user.c
189
ring->instance = 0;
drivers/gpu/drm/i915/gt/intel_engine_user.c
192
engine->legacy_idx = legacy_ring_idx(ring);
drivers/gpu/drm/i915/gt/intel_engine_user.c
194
ring->instance++;
drivers/gpu/drm/i915/gt/intel_engine_user.c
209
struct legacy_ring ring = {};
drivers/gpu/drm/i915/gt/intel_engine_user.c
256
add_legacy_ring(&ring, engine);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
1980
i915_ggtt_offset(rq->ring->vma),
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3044
head = intel_ring_wrap(ce->ring, rq->tail);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3055
head = intel_ring_wrap(ce->ring, rq->head);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3056
GEM_BUG_ON(head == ce->ring->tail);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3096
head, ce->ring->tail);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
403
if (intel_ring_direction(rq->ring,
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
405
rq->ring->tail + 8) > 0)
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
457
head = intel_ring_wrap(ce->ring, head);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
699
GEM_BUG_ON(ce->lrc_reg_state[CTX_RING_TAIL] != rq->ring->tail);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
700
prev = rq->ring->tail;
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
701
tail = intel_ring_set_tail(rq->ring, rq->tail);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
702
if (unlikely(intel_ring_direction(rq->ring, tail, prev) <= 0))
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
887
if (!i915_vma_is_pinned(ce->ring->vma)) {
drivers/gpu/drm/i915/gt/intel_lrc.c
1131
struct intel_ring *ring;
drivers/gpu/drm/i915/gt/intel_lrc.c
1144
ring = intel_engine_create_ring(engine, ce->ring_size);
drivers/gpu/drm/i915/gt/intel_lrc.c
1145
if (IS_ERR(ring)) {
drivers/gpu/drm/i915/gt/intel_lrc.c
1146
err = PTR_ERR(ring);
drivers/gpu/drm/i915/gt/intel_lrc.c
1169
ce->ring = ring;
drivers/gpu/drm/i915/gt/intel_lrc.c
1175
intel_ring_put(ring);
drivers/gpu/drm/i915/gt/intel_lrc.c
1185
intel_ring_reset(ce->ring, ce->ring->emit);
drivers/gpu/drm/i915/gt/intel_lrc.c
1189
ce->lrc.lrca = lrc_update_regs(ce, ce->engine, ce->ring->tail);
drivers/gpu/drm/i915/gt/intel_lrc.c
1220
ce->lrc.lrca = lrc_update_regs(ce, engine, ce->ring->tail);
drivers/gpu/drm/i915/gt/intel_lrc.c
1244
intel_ring_put(fetch_and_zero(&ce->ring));
drivers/gpu/drm/i915/gt/intel_lrc.c
1541
struct intel_ring *ring = ce->ring;
drivers/gpu/drm/i915/gt/intel_lrc.c
1544
GEM_BUG_ON(!intel_ring_offset_valid(ring, head));
drivers/gpu/drm/i915/gt/intel_lrc.c
1545
GEM_BUG_ON(!intel_ring_offset_valid(ring, ring->tail));
drivers/gpu/drm/i915/gt/intel_lrc.c
1547
regs[CTX_RING_START] = i915_ggtt_offset(ring->vma);
drivers/gpu/drm/i915/gt/intel_lrc.c
1549
regs[CTX_RING_TAIL] = ring->tail;
drivers/gpu/drm/i915/gt/intel_lrc.c
1550
regs[CTX_RING_CTL] = RING_CTL_SIZE(ring->size) | RING_VALID;
drivers/gpu/drm/i915/gt/intel_lrc.c
1586
const struct intel_ring *ring = ce->ring;
drivers/gpu/drm/i915/gt/intel_lrc.c
1591
if (regs[CTX_RING_START] != i915_ggtt_offset(ring->vma)) {
drivers/gpu/drm/i915/gt/intel_lrc.c
1595
i915_ggtt_offset(ring->vma));
drivers/gpu/drm/i915/gt/intel_lrc.c
1596
regs[CTX_RING_START] = i915_ggtt_offset(ring->vma);
drivers/gpu/drm/i915/gt/intel_lrc.c
1601
(RING_CTL_SIZE(ring->size) | RING_VALID)) {
drivers/gpu/drm/i915/gt/intel_lrc.c
1605
(u32)(RING_CTL_SIZE(ring->size) | RING_VALID));
drivers/gpu/drm/i915/gt/intel_lrc.c
1606
regs[CTX_RING_CTL] = RING_CTL_SIZE(ring->size) | RING_VALID;
drivers/gpu/drm/i915/gt/intel_migrate.c
1001
GEM_BUG_ON(ce->ring->size < SZ_64K);
drivers/gpu/drm/i915/gt/intel_migrate.c
318
ce->ring = NULL;
drivers/gpu/drm/i915/gt/intel_migrate.c
352
struct intel_ring *ring = rq->ring;
drivers/gpu/drm/i915/gt/intel_migrate.c
354
pkt = min_t(int, pkt, (ring->space - rq->reserved_space) / sizeof(u32) + 5);
drivers/gpu/drm/i915/gt/intel_migrate.c
355
pkt = min_t(int, pkt, (ring->size - ring->emit) / sizeof(u32) + 5);
drivers/gpu/drm/i915/gt/intel_migrate.c
372
struct intel_ring *ring = rq->ring;
drivers/gpu/drm/i915/gt/intel_migrate.c
422
ring->emit = (void *)cs - ring->vaddr;
drivers/gpu/drm/i915/gt/intel_migrate.c
424
intel_ring_update_space(ring);
drivers/gpu/drm/i915/gt/intel_migrate.c
471
ring->emit = (void *)cs - ring->vaddr;
drivers/gpu/drm/i915/gt/intel_migrate.c
473
intel_ring_update_space(ring);
drivers/gpu/drm/i915/gt/intel_migrate.c
703
GEM_BUG_ON(ce->ring->size < SZ_64K);
drivers/gpu/drm/i915/gt/intel_ring.c
149
struct intel_ring *ring;
drivers/gpu/drm/i915/gt/intel_ring.c
155
ring = kzalloc_obj(*ring);
drivers/gpu/drm/i915/gt/intel_ring.c
156
if (!ring)
drivers/gpu/drm/i915/gt/intel_ring.c
159
kref_init(&ring->ref);
drivers/gpu/drm/i915/gt/intel_ring.c
160
ring->size = size;
drivers/gpu/drm/i915/gt/intel_ring.c
161
ring->wrap = BITS_PER_TYPE(ring->size) - ilog2(size);
drivers/gpu/drm/i915/gt/intel_ring.c
168
ring->effective_size = size;
drivers/gpu/drm/i915/gt/intel_ring.c
170
ring->effective_size -= 2 * CACHELINE_BYTES;
drivers/gpu/drm/i915/gt/intel_ring.c
172
intel_ring_update_space(ring);
drivers/gpu/drm/i915/gt/intel_ring.c
176
kfree(ring);
drivers/gpu/drm/i915/gt/intel_ring.c
179
ring->vma = vma;
drivers/gpu/drm/i915/gt/intel_ring.c
181
return ring;
drivers/gpu/drm/i915/gt/intel_ring.c
186
struct intel_ring *ring = container_of(ref, typeof(*ring), ref);
drivers/gpu/drm/i915/gt/intel_ring.c
188
i915_vma_put(ring->vma);
drivers/gpu/drm/i915/gt/intel_ring.c
189
kfree(ring);
drivers/gpu/drm/i915/gt/intel_ring.c
19
unsigned int intel_ring_update_space(struct intel_ring *ring)
drivers/gpu/drm/i915/gt/intel_ring.c
193
wait_for_space(struct intel_ring *ring,
drivers/gpu/drm/i915/gt/intel_ring.c
200
if (intel_ring_update_space(ring) >= bytes)
drivers/gpu/drm/i915/gt/intel_ring.c
205
if (target->ring != ring)
drivers/gpu/drm/i915/gt/intel_ring.c
210
ring->emit, ring->size))
drivers/gpu/drm/i915/gt/intel_ring.c
225
intel_ring_update_space(ring);
drivers/gpu/drm/i915/gt/intel_ring.c
226
GEM_BUG_ON(ring->space < bytes);
drivers/gpu/drm/i915/gt/intel_ring.c
23
space = __intel_ring_space(ring->head, ring->emit, ring->size);
drivers/gpu/drm/i915/gt/intel_ring.c
232
struct intel_ring *ring = rq->ring;
drivers/gpu/drm/i915/gt/intel_ring.c
233
const unsigned int remain_usable = ring->effective_size - ring->emit;
drivers/gpu/drm/i915/gt/intel_ring.c
243
GEM_BUG_ON(total_bytes > ring->effective_size);
drivers/gpu/drm/i915/gt/intel_ring.c
246
const int remain_actual = ring->size - ring->emit;
drivers/gpu/drm/i915/gt/intel_ring.c
25
ring->space = space;
drivers/gpu/drm/i915/gt/intel_ring.c
267
if (unlikely(total_bytes > ring->space)) {
drivers/gpu/drm/i915/gt/intel_ring.c
281
ret = wait_for_space(ring,
drivers/gpu/drm/i915/gt/intel_ring.c
29
void __intel_ring_pin(struct intel_ring *ring)
drivers/gpu/drm/i915/gt/intel_ring.c
290
GEM_BUG_ON(need_wrap > ring->space);
drivers/gpu/drm/i915/gt/intel_ring.c
291
GEM_BUG_ON(ring->emit + need_wrap > ring->size);
drivers/gpu/drm/i915/gt/intel_ring.c
295
memset64(ring->vaddr + ring->emit, 0, need_wrap / sizeof(u64));
drivers/gpu/drm/i915/gt/intel_ring.c
296
ring->space -= need_wrap;
drivers/gpu/drm/i915/gt/intel_ring.c
297
ring->emit = 0;
drivers/gpu/drm/i915/gt/intel_ring.c
300
GEM_BUG_ON(ring->emit > ring->size - bytes);
drivers/gpu/drm/i915/gt/intel_ring.c
301
GEM_BUG_ON(ring->space < bytes);
drivers/gpu/drm/i915/gt/intel_ring.c
302
cs = ring->vaddr + ring->emit;
drivers/gpu/drm/i915/gt/intel_ring.c
305
ring->emit += bytes;
drivers/gpu/drm/i915/gt/intel_ring.c
306
ring->space -= bytes;
drivers/gpu/drm/i915/gt/intel_ring.c
31
GEM_BUG_ON(!atomic_read(&ring->pin_count));
drivers/gpu/drm/i915/gt/intel_ring.c
32
atomic_inc(&ring->pin_count);
drivers/gpu/drm/i915/gt/intel_ring.c
35
int intel_ring_pin(struct intel_ring *ring, struct i915_gem_ww_ctx *ww)
drivers/gpu/drm/i915/gt/intel_ring.c
37
struct i915_vma *vma = ring->vma;
drivers/gpu/drm/i915/gt/intel_ring.c
42
if (atomic_fetch_inc(&ring->pin_count))
drivers/gpu/drm/i915/gt/intel_ring.c
73
intel_ring_reset(ring, ring->emit);
drivers/gpu/drm/i915/gt/intel_ring.c
75
ring->vaddr = addr;
drivers/gpu/drm/i915/gt/intel_ring.c
81
atomic_dec(&ring->pin_count);
drivers/gpu/drm/i915/gt/intel_ring.c
85
void intel_ring_reset(struct intel_ring *ring, u32 tail)
drivers/gpu/drm/i915/gt/intel_ring.c
87
tail = intel_ring_wrap(ring, tail);
drivers/gpu/drm/i915/gt/intel_ring.c
88
ring->tail = tail;
drivers/gpu/drm/i915/gt/intel_ring.c
89
ring->head = tail;
drivers/gpu/drm/i915/gt/intel_ring.c
90
ring->emit = tail;
drivers/gpu/drm/i915/gt/intel_ring.c
91
intel_ring_update_space(ring);
drivers/gpu/drm/i915/gt/intel_ring.c
94
void intel_ring_unpin(struct intel_ring *ring)
drivers/gpu/drm/i915/gt/intel_ring.c
96
struct i915_vma *vma = ring->vma;
drivers/gpu/drm/i915/gt/intel_ring.c
98
if (!atomic_dec_and_test(&ring->pin_count))
drivers/gpu/drm/i915/gt/intel_ring.h
116
intel_ring_set_tail(struct intel_ring *ring, unsigned int tail)
drivers/gpu/drm/i915/gt/intel_ring.h
124
assert_ring_tail_valid(ring, tail);
drivers/gpu/drm/i915/gt/intel_ring.h
125
ring->tail = tail;
drivers/gpu/drm/i915/gt/intel_ring.h
20
unsigned int intel_ring_update_space(struct intel_ring *ring);
drivers/gpu/drm/i915/gt/intel_ring.h
22
void __intel_ring_pin(struct intel_ring *ring);
drivers/gpu/drm/i915/gt/intel_ring.h
23
int intel_ring_pin(struct intel_ring *ring, struct i915_gem_ww_ctx *ww);
drivers/gpu/drm/i915/gt/intel_ring.h
24
void intel_ring_unpin(struct intel_ring *ring);
drivers/gpu/drm/i915/gt/intel_ring.h
25
void intel_ring_reset(struct intel_ring *ring, u32 tail);
drivers/gpu/drm/i915/gt/intel_ring.h
29
static inline struct intel_ring *intel_ring_get(struct intel_ring *ring)
drivers/gpu/drm/i915/gt/intel_ring.h
31
kref_get(&ring->ref);
drivers/gpu/drm/i915/gt/intel_ring.h
32
return ring;
drivers/gpu/drm/i915/gt/intel_ring.h
35
static inline void intel_ring_put(struct intel_ring *ring)
drivers/gpu/drm/i915/gt/intel_ring.h
37
kref_put(&ring->ref, intel_ring_free);
drivers/gpu/drm/i915/gt/intel_ring.h
50
GEM_BUG_ON((rq->ring->vaddr + rq->ring->emit) != cs);
drivers/gpu/drm/i915/gt/intel_ring.h
51
GEM_BUG_ON(!IS_ALIGNED(rq->ring->emit, 8)); /* RING_TAIL qword align */
drivers/gpu/drm/i915/gt/intel_ring.h
54
static inline u32 intel_ring_wrap(const struct intel_ring *ring, u32 pos)
drivers/gpu/drm/i915/gt/intel_ring.h
56
return pos & (ring->size - 1);
drivers/gpu/drm/i915/gt/intel_ring.h
59
static inline int intel_ring_direction(const struct intel_ring *ring,
drivers/gpu/drm/i915/gt/intel_ring.h
62
typecheck(typeof(ring->size), next);
drivers/gpu/drm/i915/gt/intel_ring.h
63
typecheck(typeof(ring->size), prev);
drivers/gpu/drm/i915/gt/intel_ring.h
64
return (next - prev) << ring->wrap;
drivers/gpu/drm/i915/gt/intel_ring.h
68
intel_ring_offset_valid(const struct intel_ring *ring,
drivers/gpu/drm/i915/gt/intel_ring.h
71
if (pos & -ring->size) /* must be strictly within the ring */
drivers/gpu/drm/i915/gt/intel_ring.h
83
u32 offset = addr - rq->ring->vaddr;
drivers/gpu/drm/i915/gt/intel_ring.h
85
GEM_BUG_ON(offset > rq->ring->size);
drivers/gpu/drm/i915/gt/intel_ring.h
86
return intel_ring_wrap(rq->ring, offset);
drivers/gpu/drm/i915/gt/intel_ring.h
90
assert_ring_tail_valid(const struct intel_ring *ring, unsigned int tail)
drivers/gpu/drm/i915/gt/intel_ring.h
92
unsigned int head = READ_ONCE(ring->head);
drivers/gpu/drm/i915/gt/intel_ring.h
94
GEM_BUG_ON(!intel_ring_offset_valid(ring, tail));
drivers/gpu/drm/i915/gt/intel_ring_submission.c
1106
intel_ring_unpin(engine->legacy.ring);
drivers/gpu/drm/i915/gt/intel_ring_submission.c
1107
intel_ring_put(engine->legacy.ring);
drivers/gpu/drm/i915/gt/intel_ring_submission.c
1353
struct intel_ring *ring;
drivers/gpu/drm/i915/gt/intel_ring_submission.c
1385
ring = intel_engine_create_ring(engine, SZ_16K);
drivers/gpu/drm/i915/gt/intel_ring_submission.c
1386
if (IS_ERR(ring)) {
drivers/gpu/drm/i915/gt/intel_ring_submission.c
1387
err = PTR_ERR(ring);
drivers/gpu/drm/i915/gt/intel_ring_submission.c
1391
GEM_BUG_ON(engine->legacy.ring);
drivers/gpu/drm/i915/gt/intel_ring_submission.c
1392
engine->legacy.ring = ring;
drivers/gpu/drm/i915/gt/intel_ring_submission.c
1408
err = i915_gem_object_lock(engine->legacy.ring->vma->obj, &ww);
drivers/gpu/drm/i915/gt/intel_ring_submission.c
1412
err = intel_ring_pin(ring, &ww);
drivers/gpu/drm/i915/gt/intel_ring_submission.c
1424
intel_ring_unpin(ring);
drivers/gpu/drm/i915/gt/intel_ring_submission.c
1450
intel_ring_put(ring);
drivers/gpu/drm/i915/gt/intel_ring_submission.c
196
struct intel_ring *ring = engine->legacy.ring;
drivers/gpu/drm/i915/gt/intel_ring_submission.c
200
ring->head, ring->tail);
drivers/gpu/drm/i915/gt/intel_ring_submission.c
226
ENGINE_WRITE_FW(engine, RING_START, i915_ggtt_offset(ring->vma));
drivers/gpu/drm/i915/gt/intel_ring_submission.c
229
GEM_BUG_ON(!intel_ring_offset_valid(ring, ring->head));
drivers/gpu/drm/i915/gt/intel_ring_submission.c
230
GEM_BUG_ON(!intel_ring_offset_valid(ring, ring->tail));
drivers/gpu/drm/i915/gt/intel_ring_submission.c
231
intel_ring_update_space(ring);
drivers/gpu/drm/i915/gt/intel_ring_submission.c
249
ENGINE_WRITE_FW(engine, RING_HEAD, ring->head);
drivers/gpu/drm/i915/gt/intel_ring_submission.c
250
if (ENGINE_READ_FW(engine, RING_HEAD) == ring->head)
drivers/gpu/drm/i915/gt/intel_ring_submission.c
254
ENGINE_WRITE_FW(engine, RING_TAIL, ring->head);
drivers/gpu/drm/i915/gt/intel_ring_submission.c
259
ring->head);
drivers/gpu/drm/i915/gt/intel_ring_submission.c
264
RING_CTL_SIZE(ring->size) | RING_VALID);
drivers/gpu/drm/i915/gt/intel_ring_submission.c
282
if (ring->tail != ring->head) {
drivers/gpu/drm/i915/gt/intel_ring_submission.c
283
ENGINE_WRITE_FW(engine, RING_TAIL, ring->tail);
drivers/gpu/drm/i915/gt/intel_ring_submission.c
297
ENGINE_READ(engine, RING_HEAD), ring->head,
drivers/gpu/drm/i915/gt/intel_ring_submission.c
298
ENGINE_READ(engine, RING_TAIL), ring->tail,
drivers/gpu/drm/i915/gt/intel_ring_submission.c
300
i915_ggtt_offset(ring->vma));
drivers/gpu/drm/i915/gt/intel_ring_submission.c
445
GEM_BUG_ON(rq->ring != engine->legacy.ring);
drivers/gpu/drm/i915/gt/intel_ring_submission.c
448
head = engine->legacy.ring->tail;
drivers/gpu/drm/i915/gt/intel_ring_submission.c
450
engine->legacy.ring->head = intel_ring_wrap(engine->legacy.ring, head);
drivers/gpu/drm/i915/gt/intel_ring_submission.c
482
intel_ring_set_tail(request->ring, request->tail));
drivers/gpu/drm/i915/gt/intel_ring_submission.c
612
GEM_BUG_ON(!engine->legacy.ring);
drivers/gpu/drm/i915/gt/intel_ring_submission.c
613
ce->ring = engine->legacy.ring;
drivers/gpu/drm/i915/gt/intel_ring_submission.c
638
intel_ring_reset(ce->ring, ce->ring->emit);
drivers/gpu/drm/i915/gt/mock_engine.c
145
i915_vma_unpin(ce->ring->vma);
drivers/gpu/drm/i915/gt/mock_engine.c
155
mock_ring_free(ce->ring);
drivers/gpu/drm/i915/gt/mock_engine.c
167
ce->ring = mock_ring(ce->engine);
drivers/gpu/drm/i915/gt/mock_engine.c
168
if (!ce->ring)
drivers/gpu/drm/i915/gt/mock_engine.c
190
return i915_vma_pin_ww(ce->ring->vma, ww, 0, 0, PIN_GLOBAL | PIN_HIGH);
drivers/gpu/drm/i915/gt/mock_engine.c
63
struct intel_ring *ring;
drivers/gpu/drm/i915/gt/mock_engine.c
65
ring = kzalloc(sizeof(*ring) + sz, GFP_KERNEL);
drivers/gpu/drm/i915/gt/mock_engine.c
66
if (!ring)
drivers/gpu/drm/i915/gt/mock_engine.c
69
kref_init(&ring->ref);
drivers/gpu/drm/i915/gt/mock_engine.c
70
ring->size = sz;
drivers/gpu/drm/i915/gt/mock_engine.c
71
ring->effective_size = sz;
drivers/gpu/drm/i915/gt/mock_engine.c
72
ring->vaddr = (void *)(ring + 1);
drivers/gpu/drm/i915/gt/mock_engine.c
73
atomic_set(&ring->pin_count, 1);
drivers/gpu/drm/i915/gt/mock_engine.c
75
ring->vma = create_ring_vma(engine->gt->ggtt, PAGE_SIZE);
drivers/gpu/drm/i915/gt/mock_engine.c
76
if (IS_ERR(ring->vma)) {
drivers/gpu/drm/i915/gt/mock_engine.c
77
kfree(ring);
drivers/gpu/drm/i915/gt/mock_engine.c
81
intel_ring_update_space(ring);
drivers/gpu/drm/i915/gt/mock_engine.c
83
return ring;
drivers/gpu/drm/i915/gt/mock_engine.c
86
static void mock_ring_free(struct intel_ring *ring)
drivers/gpu/drm/i915/gt/mock_engine.c
88
i915_vma_put(ring->vma);
drivers/gpu/drm/i915/gt/mock_engine.c
90
kfree(ring);
drivers/gpu/drm/i915/gt/selftest_execlists.c
221
memset(tmp->ring->vaddr,
drivers/gpu/drm/i915/gt/selftest_execlists.c
223
tmp->ring->vma->size);
drivers/gpu/drm/i915/gt/selftest_execlists.c
227
GEM_BUG_ON(!ce[1]->ring->size);
drivers/gpu/drm/i915/gt/selftest_execlists.c
228
intel_ring_reset(ce[1]->ring, ce[1]->ring->size / 2);
drivers/gpu/drm/i915/gt/selftest_execlists.c
229
lrc_update_regs(ce[1], engine, ce[1]->ring->head);
drivers/gpu/drm/i915/gt/selftest_execlists.c
239
GEM_BUG_ON(rq[0]->postfix > ce[1]->ring->emit);
drivers/gpu/drm/i915/gt/selftest_execlists.c
2596
ring_size += rq->ring->size;
drivers/gpu/drm/i915/gt/selftest_execlists.c
2597
ring_size = rq->ring->size / ring_size;
drivers/gpu/drm/i915/gt/selftest_execlists.c
2829
memset32(tmp->ring->vaddr,
drivers/gpu/drm/i915/gt/selftest_execlists.c
2831
tmp->ring->vma->size / sizeof(u32));
drivers/gpu/drm/i915/gt/selftest_execlists.c
2855
while (ce[0]->ring->tail - rq->wa_tail <= queue_sz) {
drivers/gpu/drm/i915/gt/selftest_execlists.c
2872
ce[0]->ring->size,
drivers/gpu/drm/i915/gt/selftest_execlists.c
2873
ce[0]->ring->tail,
drivers/gpu/drm/i915/gt/selftest_execlists.c
2874
ce[0]->ring->emit,
drivers/gpu/drm/i915/gt/selftest_execlists.c
2899
ce[0]->ring->tail, ce[0]->ring->emit,
drivers/gpu/drm/i915/gt/selftest_execlists.c
2900
ce[1]->ring->tail, ce[1]->ring->emit);
drivers/gpu/drm/i915/gt/selftest_execlists.c
378
memset32(tmp->ring->vaddr,
drivers/gpu/drm/i915/gt/selftest_execlists.c
380
tmp->ring->vma->size / sizeof(u32));
drivers/gpu/drm/i915/gt/selftest_execlists.c
405
while (intel_ring_direction(ce[0]->ring,
drivers/gpu/drm/i915/gt/selftest_execlists.c
407
ce[0]->ring->tail) <= 0) {
drivers/gpu/drm/i915/gt/selftest_execlists.c
424
ce[0]->ring->size,
drivers/gpu/drm/i915/gt/selftest_execlists.c
425
ce[0]->ring->tail,
drivers/gpu/drm/i915/gt/selftest_execlists.c
426
ce[0]->ring->emit,
drivers/gpu/drm/i915/gt/selftest_execlists.c
428
GEM_BUG_ON(intel_ring_direction(ce[0]->ring,
drivers/gpu/drm/i915/gt/selftest_execlists.c
430
ce[0]->ring->tail) <= 0);
drivers/gpu/drm/i915/gt/selftest_execlists.c
454
ce[0]->ring->tail, ce[0]->ring->emit,
drivers/gpu/drm/i915/gt/selftest_execlists.c
455
ce[1]->ring->tail, ce[1]->ring->emit);
drivers/gpu/drm/i915/gt/selftest_execlists.c
497
struct intel_ring *ring;
drivers/gpu/drm/i915/gt/selftest_execlists.c
524
ring = ce->ring;
drivers/gpu/drm/i915/gt/selftest_execlists.c
527
memset32(ring->vaddr, STACK_MAGIC, ring->size / sizeof(u32));
drivers/gpu/drm/i915/gt/selftest_execlists.c
528
ring->emit = ring->size / 2;
drivers/gpu/drm/i915/gt/selftest_execlists.c
529
ring->tail = ring->emit;
drivers/gpu/drm/i915/gt/selftest_execlists.c
530
GEM_BUG_ON(ring->head);
drivers/gpu/drm/i915/gt/selftest_lrc.c
449
expected[RING_START_IDX] = i915_ggtt_offset(ce->ring->vma);
drivers/gpu/drm/i915/gt/selftest_lrc.c
464
expected[RING_TAIL_IDX] = ce->ring->tail;
drivers/gpu/drm/i915/gt/selftest_migrate.c
154
GEM_BUG_ON(ce->ring->size < SZ_64K);
drivers/gpu/drm/i915/gt/selftest_migrate.c
622
sz = (rq->ring->space - rq->reserved_space) / sizeof(u32) -
drivers/gpu/drm/i915/gt/selftest_migrate.c
636
pr_info("%s emit=%u sz=%d\n", __func__, rq->ring->emit, sz);
drivers/gpu/drm/i915/gt/selftest_migrate.c
639
} while (rq->ring->space > (rq->reserved_space +
drivers/gpu/drm/i915/gt/selftest_migrate.c
649
pr_info("%s emite_pte ring space=%u\n", __func__, rq->ring->space);
drivers/gpu/drm/i915/gt/selftest_ring.c
10
ring = kzalloc(sizeof(*ring) + sz, GFP_KERNEL);
drivers/gpu/drm/i915/gt/selftest_ring.c
11
if (!ring)
drivers/gpu/drm/i915/gt/selftest_ring.c
14
kref_init(&ring->ref);
drivers/gpu/drm/i915/gt/selftest_ring.c
15
ring->size = sz;
drivers/gpu/drm/i915/gt/selftest_ring.c
16
ring->wrap = BITS_PER_TYPE(ring->size) - ilog2(sz);
drivers/gpu/drm/i915/gt/selftest_ring.c
17
ring->effective_size = sz;
drivers/gpu/drm/i915/gt/selftest_ring.c
18
ring->vaddr = (void *)(ring + 1);
drivers/gpu/drm/i915/gt/selftest_ring.c
19
atomic_set(&ring->pin_count, 1);
drivers/gpu/drm/i915/gt/selftest_ring.c
21
intel_ring_update_space(ring);
drivers/gpu/drm/i915/gt/selftest_ring.c
23
return ring;
drivers/gpu/drm/i915/gt/selftest_ring.c
26
static void mock_ring_free(struct intel_ring *ring)
drivers/gpu/drm/i915/gt/selftest_ring.c
28
kfree(ring);
drivers/gpu/drm/i915/gt/selftest_ring.c
31
static int check_ring_direction(struct intel_ring *ring,
drivers/gpu/drm/i915/gt/selftest_ring.c
37
result = intel_ring_direction(ring, next, prev);
drivers/gpu/drm/i915/gt/selftest_ring.c
52
static int check_ring_step(struct intel_ring *ring, u32 x, u32 step)
drivers/gpu/drm/i915/gt/selftest_ring.c
54
u32 prev = x, next = intel_ring_wrap(ring, x + step);
drivers/gpu/drm/i915/gt/selftest_ring.c
57
err |= check_ring_direction(ring, next, next, 0);
drivers/gpu/drm/i915/gt/selftest_ring.c
58
err |= check_ring_direction(ring, prev, prev, 0);
drivers/gpu/drm/i915/gt/selftest_ring.c
59
err |= check_ring_direction(ring, next, prev, 1);
drivers/gpu/drm/i915/gt/selftest_ring.c
60
err |= check_ring_direction(ring, prev, next, -1);
drivers/gpu/drm/i915/gt/selftest_ring.c
65
static int check_ring_offset(struct intel_ring *ring, u32 x, u32 step)
drivers/gpu/drm/i915/gt/selftest_ring.c
69
err |= check_ring_step(ring, x, step);
drivers/gpu/drm/i915/gt/selftest_ring.c
70
err |= check_ring_step(ring, intel_ring_wrap(ring, x + 1), step);
drivers/gpu/drm/i915/gt/selftest_ring.c
71
err |= check_ring_step(ring, intel_ring_wrap(ring, x - 1), step);
drivers/gpu/drm/i915/gt/selftest_ring.c
78
struct intel_ring *ring;
drivers/gpu/drm/i915/gt/selftest_ring.c
8
struct intel_ring *ring;
drivers/gpu/drm/i915/gt/selftest_ring.c
82
ring = mock_ring(2 * half);
drivers/gpu/drm/i915/gt/selftest_ring.c
83
if (!ring)
drivers/gpu/drm/i915/gt/selftest_ring.c
86
GEM_BUG_ON(ring->size != 2 * half);
drivers/gpu/drm/i915/gt/selftest_ring.c
90
err |= check_ring_offset(ring, 0, step);
drivers/gpu/drm/i915/gt/selftest_ring.c
91
err |= check_ring_offset(ring, half, step);
drivers/gpu/drm/i915/gt/selftest_ring.c
93
err |= check_ring_step(ring, 0, half - 64);
drivers/gpu/drm/i915/gt/selftest_ring.c
96
err |= check_ring_offset(ring, 0, 2 * half + 64);
drivers/gpu/drm/i915/gt/selftest_ring.c
97
err |= check_ring_offset(ring, 3 * half, 1);
drivers/gpu/drm/i915/gt/selftest_ring.c
99
mock_ring_free(ring);
drivers/gpu/drm/i915/gt/selftest_timeline.c
1148
if (8 * watcher[1].rq->ring->emit >
drivers/gpu/drm/i915/gt/selftest_timeline.c
1149
3 * watcher[1].rq->ring->size)
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
1890
head = ce->ring->tail;
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
1898
head = intel_ring_wrap(ce->ring, rq->head);
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
2810
i915_gem_object_is_lmem(ce->ring->vma->obj));
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
2877
i915_gem_object_is_lmem(ce->ring->vma->obj));
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
3224
guc_reset_state(ce, intel_ring_wrap(ce->ring, rq->head),
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
5546
ce->ring->head,
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
5549
ce->ring->tail,
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
5554
ce->ring->head);
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
5556
ce->ring->tail);
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
794
intel_ring_set_tail(rq->ring, rq->tail);
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
883
FIELD_PREP(WQ_RING_TAIL_MASK, ce->ring->tail / sizeof(u64));
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
886
*wqi++ = child->ring->tail / sizeof(u64);
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
915
intel_ring_set_tail(rq->ring, rq->tail);
drivers/gpu/drm/i915/i915_drm_client.c
217
if (ce->ring != ce->engine->legacy.ring && ce->ring->vma)
drivers/gpu/drm/i915/i915_drm_client.c
218
i915_drm_client_add_object(client, ce->ring->vma->obj);
drivers/gpu/drm/i915/i915_gpu_error.c
1631
vma = capture_vma(vma, ce->ring->vma, "ring", gfp);
drivers/gpu/drm/i915/i915_request.c
1002
rq->head = rq->ring->emit;
drivers/gpu/drm/i915/i915_request.c
1008
rq->infix = rq->ring->emit; /* end of header; start of user payload */
drivers/gpu/drm/i915/i915_request.c
1016
ce->ring->emit = rq->head;
drivers/gpu/drm/i915/i915_request.c
1790
struct intel_ring *ring = rq->ring;
drivers/gpu/drm/i915/i915_request.c
1800
GEM_BUG_ON(rq->reserved_space > ring->space);
drivers/gpu/drm/i915/i915_request.c
224
void *vaddr = rq->ring->vaddr;
drivers/gpu/drm/i915/i915_request.c
2240
u32 ring = ENGINE_READ(engine, RING_START);
drivers/gpu/drm/i915/i915_request.c
2242
return ring == i915_ggtt_offset(rq->ring->vma);
drivers/gpu/drm/i915/i915_request.c
229
memset(vaddr + head, val, rq->ring->size - head);
drivers/gpu/drm/i915/i915_request.c
385
rq->ring->head = rq->postfix;
drivers/gpu/drm/i915/i915_request.c
664
request->ring->vaddr + request->postfix);
drivers/gpu/drm/i915/i915_request.c
949
rq->ring = ce->ring;
drivers/gpu/drm/i915/i915_request.h
214
struct intel_ring *ring;
drivers/gpu/drm/i915/selftests/i915_mock_selftests.h
24
selftest(ring, intel_ring_mock_selftests)
drivers/gpu/drm/i915/selftests/i915_request.c
1712
ret = rq->ring->size - rq->reserved_space;
drivers/gpu/drm/i915/selftests/i915_request.c
1715
sz = rq->ring->emit - rq->head;
drivers/gpu/drm/i915/selftests/i915_request.c
1717
sz += rq->ring->size;
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
103
OUT_PKT3(ring, CP_SET_PROTECTED_MODE, 1);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
104
OUT_RING(ring, 1);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
107
adreno_flush(gpu, ring, REG_AXXX_CP_RB_WPTR);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
15
struct msm_ringbuffer *ring = submit->ring;
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
25
if (ring->cur_ctx_seqno == submit->queue->ctx->seqno)
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
29
OUT_PKT3(ring, CP_INDIRECT_BUFFER_PFD, 2);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
30
OUT_RING(ring, lower_32_bits(submit->cmd[i].iova));
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
31
OUT_RING(ring, submit->cmd[i].size);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
32
OUT_PKT2(ring);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
37
OUT_PKT0(ring, REG_AXXX_CP_SCRATCH_REG2, 1);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
38
OUT_RING(ring, submit->seqno);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
41
OUT_PKT3(ring, CP_WAIT_FOR_IDLE, 1);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
42
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
44
OUT_PKT3(ring, CP_EVENT_WRITE, 3);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
45
OUT_RING(ring, CACHE_FLUSH_TS);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
46
OUT_RING(ring, rbmemptr(ring, fence));
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
47
OUT_RING(ring, submit->seqno);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
48
OUT_PKT3(ring, CP_INTERRUPT, 1);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
486
static u32 a2xx_get_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
488
ring->memptrs->rptr = gpu_read(gpu, REG_AXXX_CP_RB_RPTR);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
489
return ring->memptrs->rptr;
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
49
OUT_RING(ring, 0x80000000);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
51
adreno_flush(gpu, ring, REG_AXXX_CP_RB_WPTR);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
58
struct msm_ringbuffer *ring = gpu->rb[0];
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
60
OUT_PKT3(ring, CP_ME_INIT, 18);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
63
OUT_RING(ring, 0x000003ff);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
65
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
67
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
69
OUT_RING(ring, REG_A2XX_RB_SURFACE_INFO - 0x2000);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
70
OUT_RING(ring, REG_A2XX_PA_SC_WINDOW_OFFSET - 0x2000);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
71
OUT_RING(ring, REG_A2XX_VGT_MAX_VTX_INDX - 0x2000);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
72
OUT_RING(ring, REG_A2XX_SQ_PROGRAM_CNTL - 0x2000);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
73
OUT_RING(ring, REG_A2XX_RB_DEPTHCONTROL - 0x2000);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
74
OUT_RING(ring, REG_A2XX_PA_SU_POINT_SIZE - 0x2000);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
75
OUT_RING(ring, REG_A2XX_PA_SC_LINE_CNTL - 0x2000);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
76
OUT_RING(ring, REG_A2XX_PA_SU_POLY_OFFSET_FRONT_SCALE - 0x2000);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
81
OUT_RING(ring, 0x80000300);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
83
OUT_RING(ring, 0x80000180);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
85
OUT_RING(ring, 0x00000001);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
88
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
90
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
93
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
95
OUT_RING(ring, 0x200001f2);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
97
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a2xx_gpu.c
99
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
100
OUT_RING(ring, 0x00000154);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
101
OUT_RING(ring, 0x00000001);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
102
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
103
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
104
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
105
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
106
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
108
adreno_flush(gpu, ring, REG_AXXX_CP_RB_WPTR);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
33
struct msm_ringbuffer *ring = submit->ring;
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
43
if (ring->cur_ctx_seqno == submit->queue->ctx->seqno)
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
47
OUT_PKT3(ring, CP_INDIRECT_BUFFER_PFD, 2);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
48
OUT_RING(ring, lower_32_bits(submit->cmd[i].iova));
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
49
OUT_RING(ring, submit->cmd[i].size);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
50
OUT_PKT2(ring);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
505
static u32 a3xx_get_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
507
ring->memptrs->rptr = gpu_read(gpu, REG_AXXX_CP_RB_RPTR);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
508
return ring->memptrs->rptr;
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
55
OUT_PKT0(ring, REG_AXXX_CP_SCRATCH_REG2, 1);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
56
OUT_RING(ring, submit->seqno);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
62
OUT_PKT3(ring, CP_EVENT_WRITE, 1);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
63
OUT_RING(ring, HLSQ_FLUSH);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
66
OUT_PKT3(ring, CP_WAIT_FOR_IDLE, 1);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
67
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
70
OUT_PKT3(ring, CP_EVENT_WRITE, 3);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
71
OUT_RING(ring, CACHE_FLUSH_TS | CP_EVENT_WRITE_0_IRQ);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
72
OUT_RING(ring, rbmemptr(ring, fence));
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
73
OUT_RING(ring, submit->seqno);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
77
OUT_PKT3(ring, CP_SET_CONSTANT, 2);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
78
OUT_RING(ring, CP_REG(REG_A3XX_HLSQ_CL_KERNEL_GROUP_X_REG));
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
79
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
82
adreno_flush(gpu, ring, REG_AXXX_CP_RB_WPTR);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
87
struct msm_ringbuffer *ring = gpu->rb[0];
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
89
OUT_PKT3(ring, CP_ME_INIT, 17);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
90
OUT_RING(ring, 0x000003f7);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
91
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
92
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
93
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
94
OUT_RING(ring, 0x00000080);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
95
OUT_RING(ring, 0x00000100);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
96
OUT_RING(ring, 0x00000180);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
97
OUT_RING(ring, 0x00006600);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
98
OUT_RING(ring, 0x00000150);
drivers/gpu/drm/msm/adreno/a3xx_gpu.c
99
OUT_RING(ring, 0x0000014e);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
158
struct msm_ringbuffer *ring = gpu->rb[0];
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
160
OUT_PKT3(ring, CP_ME_INIT, 17);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
161
OUT_RING(ring, 0x000003f7);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
162
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
163
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
164
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
165
OUT_RING(ring, 0x00000080);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
166
OUT_RING(ring, 0x00000100);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
167
OUT_RING(ring, 0x00000180);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
168
OUT_RING(ring, 0x00006600);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
169
OUT_RING(ring, 0x00000150);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
170
OUT_RING(ring, 0x0000014e);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
171
OUT_RING(ring, 0x00000154);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
172
OUT_RING(ring, 0x00000001);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
173
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
174
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
175
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
176
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
177
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
179
adreno_flush(gpu, ring, REG_A4XX_CP_RB_WPTR);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
27
struct msm_ringbuffer *ring = submit->ring;
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
37
if (ring->cur_ctx_seqno == submit->queue->ctx->seqno)
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
41
OUT_PKT3(ring, CP_INDIRECT_BUFFER_PFE, 2);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
42
OUT_RING(ring, lower_32_bits(submit->cmd[i].iova));
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
43
OUT_RING(ring, submit->cmd[i].size);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
44
OUT_PKT2(ring);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
49
OUT_PKT0(ring, REG_AXXX_CP_SCRATCH_REG2, 1);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
50
OUT_RING(ring, submit->seqno);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
56
OUT_PKT3(ring, CP_EVENT_WRITE, 1);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
57
OUT_RING(ring, HLSQ_FLUSH);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
60
OUT_PKT3(ring, CP_WAIT_FOR_IDLE, 1);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
61
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
624
static u32 a4xx_get_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
626
ring->memptrs->rptr = gpu_read(gpu, REG_A4XX_CP_RB_RPTR);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
627
return ring->memptrs->rptr;
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
64
OUT_PKT3(ring, CP_EVENT_WRITE, 3);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
65
OUT_RING(ring, CACHE_FLUSH_TS | CP_EVENT_WRITE_0_IRQ);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
66
OUT_RING(ring, rbmemptr(ring, fence));
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
67
OUT_RING(ring, submit->seqno);
drivers/gpu/drm/msm/adreno/a4xx_gpu.c
69
adreno_flush(gpu, ring, REG_A4XX_CP_RB_WPTR);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
104
adreno_wait_ring(ring, 1);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
105
OUT_RING(ring, ptr[i]);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
1081
bool a5xx_idle(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
1086
if (ring != a5xx_gpu->cur_ring) {
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
1092
if (!adreno_idle(gpu, ring))
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
114
a5xx_gpu->last_seqno[ring->id] = submit->seqno;
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
115
a5xx_flush(gpu, ring, true);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
122
a5xx_idle(gpu, ring);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
123
ring->memptrs->fence = submit->seqno;
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
1236
struct msm_ringbuffer *ring = gpu->funcs->active_ring(gpu);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
1248
ring ? ring->id : -1, ring ? ring->fctx->last_fence : 0,
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
131
struct msm_ringbuffer *ring = submit->ring;
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
137
ring->cur_ctx_seqno = 0;
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
142
OUT_PKT7(ring, CP_PREEMPT_ENABLE_GLOBAL, 1);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
143
OUT_RING(ring, 0x02);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
146
OUT_PKT7(ring, CP_SET_PROTECTED_MODE, 1);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
147
OUT_RING(ring, 0);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
150
OUT_PKT4(ring, REG_A5XX_CP_CONTEXT_SWITCH_SAVE_ADDR_LO, 2);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
151
OUT_RING(ring, lower_32_bits(a5xx_gpu->preempt_iova[submit->ring->id]));
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
152
OUT_RING(ring, upper_32_bits(a5xx_gpu->preempt_iova[submit->ring->id]));
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
155
OUT_PKT7(ring, CP_SET_PROTECTED_MODE, 1);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
156
OUT_RING(ring, 1);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
163
OUT_PKT7(ring, CP_PREEMPT_ENABLE_LOCAL, 1);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
164
OUT_RING(ring, 0x0);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
167
OUT_PKT7(ring, CP_YIELD_ENABLE, 1);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
168
OUT_RING(ring, 0x02);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
1682
static uint32_t a5xx_get_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
1688
return a5xx_gpu->shadow[ring->id];
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
1690
return ring->memptrs->rptr = gpu_read(gpu, REG_A5XX_CP_RB_RPTR);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
176
if (ring->cur_ctx_seqno == submit->queue->ctx->seqno)
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
180
OUT_PKT7(ring, CP_INDIRECT_BUFFER_PFE, 3);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
181
OUT_RING(ring, lower_32_bits(submit->cmd[i].iova));
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
182
OUT_RING(ring, upper_32_bits(submit->cmd[i].iova));
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
183
OUT_RING(ring, submit->cmd[i].size);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
196
update_shadow_rptr(gpu, ring);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
204
OUT_PKT7(ring, CP_SET_RENDER_MODE, 5);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
205
OUT_RING(ring, 0);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
206
OUT_RING(ring, 0);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
207
OUT_RING(ring, 0);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
208
OUT_RING(ring, 0);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
209
OUT_RING(ring, 0);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
21
static void update_shadow_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
212
OUT_PKT7(ring, CP_YIELD_ENABLE, 1);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
213
OUT_RING(ring, 0x01);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
216
OUT_PKT4(ring, REG_A5XX_CP_SCRATCH_REG(2), 1);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
217
OUT_RING(ring, submit->seqno);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
218
a5xx_gpu->last_seqno[ring->id] = submit->seqno;
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
224
OUT_PKT7(ring, CP_EVENT_WRITE, 4);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
225
OUT_RING(ring, CP_EVENT_WRITE_0_EVENT(CACHE_FLUSH_TS) |
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
227
OUT_RING(ring, lower_32_bits(rbmemptr(ring, fence)));
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
228
OUT_RING(ring, upper_32_bits(rbmemptr(ring, fence)));
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
229
OUT_RING(ring, submit->seqno);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
232
OUT_PKT7(ring, CP_CONTEXT_SWITCH_YIELD, 4);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
238
OUT_RING(ring, 0x00);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
239
OUT_RING(ring, 0x00);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
241
OUT_RING(ring, 0x01);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
243
OUT_RING(ring, 0x01);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
246
a5xx_flush(gpu, ring, false);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
27
OUT_PKT7(ring, CP_WHERE_AM_I, 2);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
28
OUT_RING(ring, lower_32_bits(shadowptr(a5xx_gpu, ring)));
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
29
OUT_RING(ring, upper_32_bits(shadowptr(a5xx_gpu, ring)));
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
33
void a5xx_flush(struct msm_gpu *gpu, struct msm_ringbuffer *ring,
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
46
update_shadow_rptr(gpu, ring);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
48
spin_lock_irqsave(&ring->preempt_lock, flags);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
480
struct msm_ringbuffer *ring = gpu->rb[0];
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
482
OUT_PKT7(ring, CP_ME_INIT, 8);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
484
OUT_RING(ring, 0x0000002F);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
487
OUT_RING(ring, 0x00000003);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
490
OUT_RING(ring, 0x20000000);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
493
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
494
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
503
OUT_RING(ring, 0x0000000B);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
506
OUT_RING(ring, 0x00000001);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
509
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
51
ring->cur = ring->next;
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
512
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
513
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
515
a5xx_flush(gpu, ring, true);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
516
return a5xx_idle(gpu, ring) ? 0 : -EINVAL;
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
523
struct msm_ringbuffer *ring = gpu->rb[0];
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
529
OUT_PKT7(ring, CP_SET_PROTECTED_MODE, 1);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
530
OUT_RING(ring, 0);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
533
OUT_PKT4(ring, REG_A5XX_CP_CONTEXT_SWITCH_SAVE_ADDR_LO, 2);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
534
OUT_RING(ring, lower_32_bits(a5xx_gpu->preempt_iova[ring->id]));
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
535
OUT_RING(ring, upper_32_bits(a5xx_gpu->preempt_iova[ring->id]));
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
538
OUT_PKT7(ring, CP_SET_PROTECTED_MODE, 1);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
539
OUT_RING(ring, 1);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
54
wptr = get_wptr(ring);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
541
OUT_PKT7(ring, CP_PREEMPT_ENABLE_GLOBAL, 1);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
542
OUT_RING(ring, 0x00);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
544
OUT_PKT7(ring, CP_PREEMPT_ENABLE_LOCAL, 1);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
545
OUT_RING(ring, 0x01);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
547
OUT_PKT7(ring, CP_YIELD_ENABLE, 1);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
548
OUT_RING(ring, 0x01);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
551
OUT_PKT7(ring, CP_CONTEXT_SWITCH_YIELD, 4);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
552
OUT_RING(ring, 0x00);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
553
OUT_RING(ring, 0x00);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
554
OUT_RING(ring, 0x01);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
555
OUT_RING(ring, 0x01);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
558
a5xx_flush(gpu, ring, false);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
56
spin_unlock_irqrestore(&ring->preempt_lock, flags);
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
560
return a5xx_idle(gpu, ring) ? 0 : -EINVAL;
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
62
if (a5xx_gpu->cur_ring == ring && !a5xx_in_preempt(a5xx_gpu))
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
70
struct msm_ringbuffer *ring = submit->ring;
drivers/gpu/drm/msm/adreno/a5xx_gpu.c
80
if (ring->cur_ctx_seqno == submit->queue->ctx->seqno)
drivers/gpu/drm/msm/adreno/a5xx_gpu.h
154
#define shadowptr(a5xx_gpu, ring) ((a5xx_gpu)->shadow_iova + \
drivers/gpu/drm/msm/adreno/a5xx_gpu.h
155
((ring)->id * sizeof(uint32_t)))
drivers/gpu/drm/msm/adreno/a5xx_gpu.h
157
bool a5xx_idle(struct msm_gpu *gpu, struct msm_ringbuffer *ring);
drivers/gpu/drm/msm/adreno/a5xx_gpu.h
166
void a5xx_flush(struct msm_gpu *gpu, struct msm_ringbuffer *ring, bool sync);
drivers/gpu/drm/msm/adreno/a5xx_power.c
224
struct msm_ringbuffer *ring = gpu->rb[0];
drivers/gpu/drm/msm/adreno/a5xx_power.c
230
OUT_PKT7(ring, CP_SET_PROTECTED_MODE, 1);
drivers/gpu/drm/msm/adreno/a5xx_power.c
231
OUT_RING(ring, 0);
drivers/gpu/drm/msm/adreno/a5xx_power.c
234
OUT_PKT7(ring, CP_INDIRECT_BUFFER_PFE, 3);
drivers/gpu/drm/msm/adreno/a5xx_power.c
235
OUT_RING(ring, lower_32_bits(a5xx_gpu->gpmu_iova));
drivers/gpu/drm/msm/adreno/a5xx_power.c
236
OUT_RING(ring, upper_32_bits(a5xx_gpu->gpmu_iova));
drivers/gpu/drm/msm/adreno/a5xx_power.c
237
OUT_RING(ring, a5xx_gpu->gpmu_dwords);
drivers/gpu/drm/msm/adreno/a5xx_power.c
240
OUT_PKT7(ring, CP_SET_PROTECTED_MODE, 1);
drivers/gpu/drm/msm/adreno/a5xx_power.c
241
OUT_RING(ring, 1);
drivers/gpu/drm/msm/adreno/a5xx_power.c
243
a5xx_flush(gpu, ring, true);
drivers/gpu/drm/msm/adreno/a5xx_power.c
245
if (!a5xx_idle(gpu, ring)) {
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
100
struct msm_ringbuffer *ring;
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
120
ring = get_next_ring(gpu);
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
126
if (!ring || (a5xx_gpu->cur_ring == ring)) {
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
148
spin_lock_irqsave(&ring->preempt_lock, flags);
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
149
a5xx_gpu->preempt[ring->id]->wptr = get_wptr(ring);
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
150
spin_unlock_irqrestore(&ring->preempt_lock, flags);
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
154
a5xx_gpu->preempt_iova[ring->id]);
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
156
a5xx_gpu->next_ring = ring;
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
247
struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
275
a5xx_gpu->preempt_bo[ring->id] = bo;
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
276
a5xx_gpu->preempt_counters_bo[ring->id] = counters_bo;
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
277
a5xx_gpu->preempt_iova[ring->id] = iova;
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
278
a5xx_gpu->preempt[ring->id] = ptr;
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
40
static inline void update_wptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
45
if (!ring)
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
48
spin_lock_irqsave(&ring->preempt_lock, flags);
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
49
wptr = get_wptr(ring);
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
50
spin_unlock_irqrestore(&ring->preempt_lock, flags);
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
65
struct msm_ringbuffer *ring = gpu->rb[i];
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
67
spin_lock_irqsave(&ring->preempt_lock, flags);
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
68
empty = (get_wptr(ring) == gpu->funcs->get_rptr(gpu, ring));
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
69
if (!empty && ring == a5xx_gpu->cur_ring)
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
70
empty = ring->memptrs->fence == a5xx_gpu->last_seqno[i];
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
71
spin_unlock_irqrestore(&ring->preempt_lock, flags);
drivers/gpu/drm/msm/adreno/a5xx_preempt.c
74
return ring;
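The a5xx_preempt.c matches above show the test the driver uses to decide whether a ring is worth preempting to: the ring is "empty" when its write pointer equals its read pointer, and, for the ring that is already current, also when the last retired fence matches the last submitted seqno. The following is a minimal userspace model of that check; the struct and the boolean parameters are simplified stand-ins, not the driver's real types.

/* Model of the emptiness test quoted from a5xx_preempt.c above
 * (get_wptr == get_rptr, plus the fence/last_seqno check applied
 * only to the currently active ring). Types are illustrative. */
#include <stdbool.h>
#include <stdint.h>

struct model_a5xx_ring {
    uint32_t wptr;      /* stands in for get_wptr(ring)              */
    uint32_t rptr;      /* stands in for gpu->funcs->get_rptr(...)   */
    uint32_t fence;     /* stands in for ring->memptrs->fence        */
};

/* A ring is skipped by get_next_ring() when nothing was written past
 * the read pointer, or, for the ring already running, when everything
 * written has also signalled its fence (last_seqno). */
static bool ring_empty(const struct model_a5xx_ring *r, bool is_cur_ring,
                       uint32_t last_seqno)
{
    bool empty = (r->wptr == r->rptr);

    if (!empty && is_cur_ring)
        empty = (r->fence == last_seqno);

    return empty;
}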
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
1002
struct msm_ringbuffer *ring = gpu->rb[0];
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
1006
OUT_PKT7(ring, CP_THREAD_CONTROL, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
1007
OUT_RING(ring, BIT(27));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
1009
OUT_PKT7(ring, CP_ME_INIT, 7);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
1026
OUT_RING(ring, mask);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
1029
OUT_RING(ring, 0x00000003);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
1032
OUT_RING(ring, 0x20000000);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
1035
OUT_RING(ring, 0x00000002);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
1039
OUT_RING(ring, lower_32_bits(a6xx_gpu->pwrup_reglist_iova));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
1041
OUT_RING(ring, upper_32_bits(a6xx_gpu->pwrup_reglist_iova));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
1043
OUT_RING(ring, BIT(31));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
1045
a6xx_flush(gpu, ring);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
1046
return a6xx_idle(gpu, ring) ? 0 : -EINVAL;
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
128
static bool a6xx_idle(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
131
if (!adreno_idle(gpu, ring))
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
147
static void update_shadow_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
154
OUT_PKT7(ring, CP_WHERE_AM_I, 2);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
155
OUT_RING(ring, lower_32_bits(shadowptr(a6xx_gpu, ring)));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
156
OUT_RING(ring, upper_32_bits(shadowptr(a6xx_gpu, ring)));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
160
void a6xx_flush(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
167
update_shadow_rptr(gpu, ring);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
169
spin_lock_irqsave(&ring->preempt_lock, flags);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
172
ring->cur = ring->next;
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
175
wptr = get_wptr(ring);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
179
if (a6xx_gpu->cur_ring == ring)
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
182
ring->restore_wptr = true;
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
184
ring->restore_wptr = true;
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
187
spin_unlock_irqrestore(&ring->preempt_lock, flags);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
1883
struct msm_ringbuffer *ring = gpu->funcs->active_ring(gpu);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
1896
ring ? ring->id : -1, ring ? ring->fctx->last_fence : 0,
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
190
static void get_stats_counter(struct msm_ringbuffer *ring, u32 counter,
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
193
OUT_PKT7(ring, CP_REG_TO_MEM, 3);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
194
OUT_RING(ring, CP_REG_TO_MEM_0_REG(counter) |
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
197
OUT_RING(ring, lower_32_bits(iova));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
198
OUT_RING(ring, upper_32_bits(iova));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
202
struct msm_ringbuffer *ring, struct msm_gem_submit *submit)
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
210
u64 memptr = rbmemptr(ring, ttbr0);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
212
if (ctx->seqno == ring->cur_ctx_seqno)
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
220
OUT_PKT7(ring, CP_WAIT_TIMESTAMP, 4);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
221
OUT_RING(ring, 0);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
222
OUT_RING(ring, lower_32_bits(rbmemptr(ring, fence)));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
223
OUT_RING(ring, upper_32_bits(rbmemptr(ring, fence)));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
224
OUT_RING(ring, submit->seqno - 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
226
OUT_PKT7(ring, CP_THREAD_CONTROL, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
227
OUT_RING(ring, CP_THREAD_CONTROL_0_SYNC_THREADS | CP_SET_THREAD_BOTH);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
230
OUT_PKT7(ring, CP_RESET_CONTEXT_STATE, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
231
OUT_RING(ring,
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
237
OUT_PKT7(ring, CP_THREAD_CONTROL, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
238
OUT_RING(ring, CP_THREAD_CONTROL_0_SYNC_THREADS | CP_SET_THREAD_BOTH);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
240
OUT_PKT7(ring, CP_EVENT_WRITE, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
241
OUT_RING(ring, LRZ_FLUSH_INVALIDATE);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
243
OUT_PKT7(ring, CP_THREAD_CONTROL, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
244
OUT_RING(ring, CP_THREAD_CONTROL_0_SYNC_THREADS | CP_SET_THREAD_BR);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
250
OUT_PKT7(ring, CP_SET_PROTECTED_MODE, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
251
OUT_RING(ring, 0);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
2529
static uint32_t a6xx_get_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
2535
return a6xx_gpu->shadow[ring->id];
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
2544
return ring->memptrs->rptr = gpu_read(gpu, REG_A6XX_CP_RB_RPTR);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
2547
static bool a6xx_progress(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
255
OUT_PKT4(ring, REG_A8XX_RBBM_PERFCTR_SRAM_INIT_CMD, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
256
OUT_RING(ring, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
257
OUT_PKT4(ring, REG_A8XX_RBBM_SLICE_PERFCTR_SRAM_INIT_CMD, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
258
OUT_RING(ring, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
2584
progress = !!memcmp(&cp_state, &ring->last_cp_state, sizeof(cp_state));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
2586
ring->last_cp_state = cp_state;
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
260
OUT_PKT4(ring, REG_A6XX_RBBM_PERFCTR_SRAM_INIT_CMD, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
261
OUT_RING(ring, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
266
OUT_PKT7(ring, CP_SMMU_TABLE_UPDATE, 4);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
267
OUT_RING(ring, CP_SMMU_TABLE_UPDATE_0_TTBR0_LO(lower_32_bits(ttbr)));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
269
OUT_RING(ring,
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
272
OUT_RING(ring, CP_SMMU_TABLE_UPDATE_2_CONTEXTIDR(0));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
273
OUT_RING(ring, CP_SMMU_TABLE_UPDATE_3_CONTEXTBANK(0));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
279
OUT_PKT7(ring, CP_MEM_WRITE, 5);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
280
OUT_RING(ring, A5XX_CP_MEM_WRITE_ADDR_LO(lower_32_bits(memptr)));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
281
OUT_RING(ring, A5XX_CP_MEM_WRITE_ADDR_HI(upper_32_bits(memptr)));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
282
OUT_RING(ring, lower_32_bits(ttbr));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
283
OUT_RING(ring, upper_32_bits(ttbr));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
284
OUT_RING(ring, ctx->seqno);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
292
OUT_PKT7(ring, CP_THREAD_CONTROL, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
293
OUT_RING(ring, CP_THREAD_CONTROL_0_SYNC_THREADS | CP_SET_THREAD_BR);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
301
OUT_PKT7(ring, CP_EVENT_WRITE, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
302
OUT_RING(ring, CACHE_INVALIDATE);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
312
OUT_PKT7(ring, CP_WAIT_REG_MEM, 6);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
313
OUT_RING(ring, CP_WAIT_REG_MEM_0_FUNCTION(WRITE_EQ));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
314
OUT_RING(ring, CP_WAIT_REG_MEM_POLL_ADDR_LO(reg_status));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
315
OUT_RING(ring, CP_WAIT_REG_MEM_POLL_ADDR_HI(0));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
316
OUT_RING(ring, CP_WAIT_REG_MEM_3_REF(0x1));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
317
OUT_RING(ring, CP_WAIT_REG_MEM_4_MASK(0x1));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
318
OUT_RING(ring, CP_WAIT_REG_MEM_5_DELAY_LOOP_CYCLES(0));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
322
OUT_PKT7(ring, CP_SET_PROTECTED_MODE, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
323
OUT_RING(ring, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
333
struct msm_ringbuffer *ring = submit->ring;
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
338
a6xx_set_pagetable(a6xx_gpu, ring, submit);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
340
get_stats_counter(ring, REG_A6XX_RBBM_PERFCTR_CP(0),
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
341
rbmemptr_stats(ring, index, cpcycles_start));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
348
get_stats_counter(ring, REG_A6XX_CP_ALWAYS_ON_COUNTER,
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
349
rbmemptr_stats(ring, index, alwayson_start));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
352
OUT_PKT7(ring, CP_EVENT_WRITE, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
353
OUT_RING(ring, CP_EVENT_WRITE_0_EVENT(PC_CCU_INVALIDATE_DEPTH));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
355
OUT_PKT7(ring, CP_EVENT_WRITE, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
356
OUT_RING(ring, CP_EVENT_WRITE_0_EVENT(PC_CCU_INVALIDATE_COLOR));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
364
if (ring->cur_ctx_seqno == submit->queue->ctx->seqno)
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
368
OUT_PKT7(ring, CP_INDIRECT_BUFFER, 3);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
369
OUT_RING(ring, lower_32_bits(submit->cmd[i].iova));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
370
OUT_RING(ring, upper_32_bits(submit->cmd[i].iova));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
371
OUT_RING(ring, A5XX_CP_INDIRECT_BUFFER_2_IB_SIZE(submit->cmd[i].size));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
384
update_shadow_rptr(gpu, ring);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
387
get_stats_counter(ring, REG_A6XX_RBBM_PERFCTR_CP(0),
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
388
rbmemptr_stats(ring, index, cpcycles_end));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
389
get_stats_counter(ring, REG_A6XX_CP_ALWAYS_ON_COUNTER,
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
390
rbmemptr_stats(ring, index, alwayson_end));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
393
OUT_PKT4(ring, REG_A6XX_CP_SCRATCH(2), 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
394
OUT_RING(ring, submit->seqno);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
400
OUT_PKT7(ring, CP_EVENT_WRITE, 4);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
401
OUT_RING(ring, CP_EVENT_WRITE_0_EVENT(CACHE_FLUSH_TS) |
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
403
OUT_RING(ring, lower_32_bits(rbmemptr(ring, fence)));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
404
OUT_RING(ring, upper_32_bits(rbmemptr(ring, fence)));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
405
OUT_RING(ring, submit->seqno);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
409
a6xx_flush(gpu, ring);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
412
static void a6xx_emit_set_pseudo_reg(struct msm_ringbuffer *ring,
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
417
OUT_PKT7(ring, CP_SET_PSEUDO_REG, 12);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
419
OUT_RING(ring, SMMU_INFO);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
421
OUT_RING(ring, 0);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
422
OUT_RING(ring, 0);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
425
OUT_RING(ring, NON_SECURE_SAVE_ADDR);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
426
OUT_RING(ring, lower_32_bits(
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
427
a6xx_gpu->preempt_iova[ring->id]));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
428
OUT_RING(ring, upper_32_bits(
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
429
a6xx_gpu->preempt_iova[ring->id]));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
432
OUT_RING(ring, NON_PRIV_SAVE_ADDR);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
433
OUT_RING(ring, 0);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
434
OUT_RING(ring, 0);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
436
OUT_RING(ring, COUNTER);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
438
OUT_RING(ring, 0);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
439
OUT_RING(ring, 0);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
444
OUT_PKT7(ring, CP_SET_AMBLE, 3);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
445
OUT_RING(ring, lower_32_bits(preempt_postamble));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
446
OUT_RING(ring, upper_32_bits(preempt_postamble));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
447
OUT_RING(ring, CP_SET_AMBLE_2_DWORDS(
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
457
struct msm_ringbuffer *ring = submit->ring;
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
467
OUT_PKT7(ring, CP_THREAD_CONTROL, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
468
OUT_RING(ring, CP_THREAD_CONTROL_0_SYNC_THREADS | CP_SET_THREAD_BR);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
470
a6xx_set_pagetable(a6xx_gpu, ring, submit);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
477
a6xx_emit_set_pseudo_reg(ring, a6xx_gpu, submit->queue);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
487
get_stats_counter(ring, rbbm_perfctr_cp0, rbmemptr_stats(ring, index, cpcycles_start));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
488
get_stats_counter(ring, cp_always_on_counter, rbmemptr_stats(ring, index, alwayson_start));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
490
OUT_PKT7(ring, CP_THREAD_CONTROL, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
491
OUT_RING(ring, CP_SET_THREAD_BOTH);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
493
OUT_PKT7(ring, CP_SET_MARKER, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
494
OUT_RING(ring, 0x101); /* IFPC disable */
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
497
OUT_PKT7(ring, CP_SET_MARKER, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
498
OUT_RING(ring, 0x00d); /* IB1LIST start */
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
507
if (ring->cur_ctx_seqno == submit->queue->ctx->seqno)
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
511
OUT_PKT7(ring, CP_INDIRECT_BUFFER, 3);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
512
OUT_RING(ring, lower_32_bits(submit->cmd[i].iova));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
513
OUT_RING(ring, upper_32_bits(submit->cmd[i].iova));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
514
OUT_RING(ring, A5XX_CP_INDIRECT_BUFFER_2_IB_SIZE(submit->cmd[i].size));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
527
update_shadow_rptr(gpu, ring);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
531
OUT_PKT7(ring, CP_SET_MARKER, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
532
OUT_RING(ring, 0x00e); /* IB1LIST end */
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
535
get_stats_counter(ring, rbbm_perfctr_cp0, rbmemptr_stats(ring, index, cpcycles_end));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
536
get_stats_counter(ring, cp_always_on_counter, rbmemptr_stats(ring, index, alwayson_end));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
540
OUT_PKT4(ring, REG_A8XX_CP_SCRATCH_GLOBAL(2), 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
541
OUT_RING(ring, submit->seqno);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
543
OUT_PKT4(ring, REG_A6XX_CP_SCRATCH(2), 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
544
OUT_RING(ring, submit->seqno);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
547
OUT_PKT7(ring, CP_THREAD_CONTROL, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
548
OUT_RING(ring, CP_SET_THREAD_BR);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
550
OUT_PKT7(ring, CP_EVENT_WRITE, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
551
OUT_RING(ring, CCU_INVALIDATE_DEPTH);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
553
OUT_PKT7(ring, CP_EVENT_WRITE, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
554
OUT_RING(ring, CCU_INVALIDATE_COLOR);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
556
OUT_PKT7(ring, CP_THREAD_CONTROL, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
557
OUT_RING(ring, CP_SET_THREAD_BV);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
563
OUT_PKT7(ring, CP_EVENT_WRITE, 4);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
564
OUT_RING(ring, CACHE_CLEAN | BIT(27));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
565
OUT_RING(ring, lower_32_bits(rbmemptr(ring, bv_fence)));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
566
OUT_RING(ring, upper_32_bits(rbmemptr(ring, bv_fence)));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
567
OUT_RING(ring, submit->seqno);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
569
OUT_PKT7(ring, CP_THREAD_CONTROL, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
570
OUT_RING(ring, CP_SET_THREAD_BR);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
577
OUT_PKT7(ring, CP_WAIT_TIMESTAMP, 4);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
578
OUT_RING(ring, 0);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
579
OUT_RING(ring, lower_32_bits(rbmemptr(ring, bv_fence)));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
580
OUT_RING(ring, upper_32_bits(rbmemptr(ring, bv_fence)));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
581
OUT_RING(ring, submit->seqno);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
583
a6xx_gpu->last_seqno[ring->id] = submit->seqno;
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
586
OUT_PKT7(ring, CP_EVENT_WRITE, 4);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
587
OUT_RING(ring, CACHE_CLEAN | CP_EVENT_WRITE_0_IRQ | BIT(27));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
588
OUT_RING(ring, lower_32_bits(rbmemptr(ring, fence)));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
589
OUT_RING(ring, upper_32_bits(rbmemptr(ring, fence)));
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
590
OUT_RING(ring, submit->seqno);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
592
OUT_PKT7(ring, CP_THREAD_CONTROL, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
593
OUT_RING(ring, CP_SET_THREAD_BOTH);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
595
OUT_PKT7(ring, CP_SET_MARKER, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
596
OUT_RING(ring, 0x100); /* IFPC enable */
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
601
OUT_PKT7(ring, CP_CONTEXT_SWITCH_YIELD, 4);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
608
OUT_RING(ring, 0x00);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
609
OUT_RING(ring, 0x00);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
611
OUT_RING(ring, 0x01);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
613
OUT_RING(ring, 0x00);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
619
a6xx_flush(gpu, ring);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
945
struct msm_ringbuffer *ring = gpu->rb[0];
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
951
OUT_PKT7(ring, CP_SET_PROTECTED_MODE, 1);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
952
OUT_RING(ring, 0);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
954
a6xx_emit_set_pseudo_reg(ring, a6xx_gpu, NULL);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
957
OUT_PKT7(ring, CP_CONTEXT_SWITCH_YIELD, 4);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
958
OUT_RING(ring, 0x00);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
959
OUT_RING(ring, 0x00);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
960
OUT_RING(ring, 0x00);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
962
OUT_RING(ring, 0x00);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
964
a6xx_flush(gpu, ring);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
966
return a6xx_idle(gpu, ring) ? 0 : -EINVAL;
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
971
struct msm_ringbuffer *ring = gpu->rb[0];
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
973
OUT_PKT7(ring, CP_ME_INIT, 8);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
975
OUT_RING(ring, 0x0000002f);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
978
OUT_RING(ring, 0x00000003);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
981
OUT_RING(ring, 0x20000000);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
984
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
985
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
988
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
991
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
992
OUT_RING(ring, 0x00000000);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
994
a6xx_flush(gpu, ring);
drivers/gpu/drm/msm/adreno/a6xx_gpu.c
995
return a6xx_idle(gpu, ring) ? 0 : -EINVAL;
drivers/gpu/drm/msm/adreno/a6xx_gpu.h
317
void a6xx_flush(struct msm_gpu *gpu, struct msm_ringbuffer *ring);
drivers/gpu/drm/msm/adreno/a6xx_gpu.h
322
void a8xx_flush(struct msm_gpu *gpu, struct msm_ringbuffer *ring);
drivers/gpu/drm/msm/adreno/a6xx_gpu.h
330
bool a8xx_progress(struct msm_gpu *gpu, struct msm_ringbuffer *ring);
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
246
struct msm_ringbuffer *ring;
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
280
ring = get_next_ring(gpu);
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
286
if (!ring || (a6xx_gpu->cur_ring == ring)) {
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
296
spin_lock_irqsave(&ring->preempt_lock, flags);
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
299
a6xx_gpu->preempt_smmu[ring->id];
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
300
struct a6xx_preempt_record *record_ptr = a6xx_gpu->preempt[ring->id];
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
301
u64 ttbr0 = ring->memptrs->ttbr0;
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
302
u32 context_idr = ring->memptrs->context_idr;
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
306
record_ptr->wptr = get_wptr(ring);
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
316
ring->restore_wptr = false;
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
318
trace_msm_gpu_preemption_trigger(a6xx_gpu->cur_ring->id, ring->id);
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
320
spin_unlock_irqrestore(&ring->preempt_lock, flags);
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
326
REG_A6XX_CP_CONTEXT_SWITCH_SMMU_INFO, a6xx_gpu->preempt_smmu_iova[ring->id],
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
331
a6xx_gpu->preempt_iova[ring->id], BIT(1), true);
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
333
a6xx_gpu->next_ring = ring;
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
355
struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
374
msm_gem_object_set_name(bo, "preempt_record ring%d", ring->id);
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
376
a6xx_gpu->preempt_bo[ring->id] = bo;
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
377
a6xx_gpu->preempt_iova[ring->id] = iova;
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
378
a6xx_gpu->preempt[ring->id] = ptr;
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
392
msm_gem_object_set_name(bo, "preempt_smmu_info ring%d", ring->id);
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
394
a6xx_gpu->preempt_smmu_bo[ring->id] = bo;
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
395
a6xx_gpu->preempt_smmu_iova[ring->id] = iova;
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
396
a6xx_gpu->preempt_smmu[ring->id] = ptr;
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
414
record_ptr->rbase = ring->iova;
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
416
record_ptr->bv_rptr_addr = rbmemptr(ring, bv_rptr);
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
44
static inline void update_wptr(struct a6xx_gpu *a6xx_gpu, struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
49
spin_lock_irqsave(&ring->preempt_lock, flags);
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
51
if (ring->restore_wptr) {
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
52
wptr = get_wptr(ring);
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
56
ring->restore_wptr = false;
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
59
spin_unlock_irqrestore(&ring->preempt_lock, flags);
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
73
struct msm_ringbuffer *ring = gpu->rb[i];
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
75
spin_lock_irqsave(&ring->preempt_lock, flags);
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
76
empty = (get_wptr(ring) == gpu->funcs->get_rptr(gpu, ring));
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
77
if (!empty && ring == a6xx_gpu->cur_ring)
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
78
empty = ring->memptrs->fence == a6xx_gpu->last_seqno[i];
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
79
spin_unlock_irqrestore(&ring->preempt_lock, flags);
drivers/gpu/drm/msm/adreno/a6xx_preempt.c
82
return ring;
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
1007
ring ? ring->id : -1, ring ? ring->fctx->last_fence : 0,
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
1212
bool a8xx_progress(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
138
static bool a8xx_idle(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
141
if (!adreno_idle(gpu, ring))
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
158
void a8xx_flush(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
165
spin_lock_irqsave(&ring->preempt_lock, flags);
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
168
ring->cur = ring->next;
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
171
wptr = get_wptr(ring);
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
175
if (a6xx_gpu->cur_ring == ring)
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
178
ring->restore_wptr = true;
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
180
ring->restore_wptr = true;
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
183
spin_unlock_irqrestore(&ring->preempt_lock, flags);
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
391
struct msm_ringbuffer *ring = gpu->rb[0];
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
395
OUT_PKT7(ring, CP_THREAD_CONTROL, 1);
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
396
OUT_RING(ring, BIT(27));
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
398
OUT_PKT7(ring, CP_ME_INIT, 4);
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
412
OUT_RING(ring, mask);
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
415
OUT_RING(ring, 0x00000003);
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
418
OUT_RING(ring, 0x20000000);
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
421
OUT_RING(ring, 0x00000002);
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
423
a6xx_flush(gpu, ring);
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
424
return a8xx_idle(gpu, ring) ? 0 : -EINVAL;
drivers/gpu/drm/msm/adreno/a8xx_gpu.c
986
struct msm_ringbuffer *ring = gpu->funcs->active_ring(gpu);
drivers/gpu/drm/msm/adreno/adreno_gpu.c
1038
struct msm_ringbuffer *ring = gpu->rb[i];
drivers/gpu/drm/msm/adreno/adreno_gpu.c
1041
ring->memptrs->fence,
drivers/gpu/drm/msm/adreno/adreno_gpu.c
1042
ring->fctx->last_fence);
drivers/gpu/drm/msm/adreno/adreno_gpu.c
1044
printk("rptr: %d\n", get_rptr(adreno_gpu, ring));
drivers/gpu/drm/msm/adreno/adreno_gpu.c
1045
printk("rb wptr: %d\n", get_wptr(ring));
drivers/gpu/drm/msm/adreno/adreno_gpu.c
1072
static uint32_t ring_freewords(struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/adreno/adreno_gpu.c
1074
struct adreno_gpu *adreno_gpu = to_adreno_gpu(ring->gpu);
drivers/gpu/drm/msm/adreno/adreno_gpu.c
1077
uint32_t wptr = ring->next - ring->start;
drivers/gpu/drm/msm/adreno/adreno_gpu.c
1078
uint32_t rptr = get_rptr(adreno_gpu, ring);
drivers/gpu/drm/msm/adreno/adreno_gpu.c
1082
void adreno_wait_ring(struct msm_ringbuffer *ring, uint32_t ndwords)
drivers/gpu/drm/msm/adreno/adreno_gpu.c
1084
if (spin_until(ring_freewords(ring) >= ndwords))
drivers/gpu/drm/msm/adreno/adreno_gpu.c
1085
DRM_DEV_ERROR(ring->gpu->dev->dev,
drivers/gpu/drm/msm/adreno/adreno_gpu.c
1087
ring->id);
drivers/gpu/drm/msm/adreno/adreno_gpu.c
662
struct msm_ringbuffer *ring = gpu->rb[i];
drivers/gpu/drm/msm/adreno/adreno_gpu.c
664
if (!ring)
drivers/gpu/drm/msm/adreno/adreno_gpu.c
667
ring->cur = ring->start;
drivers/gpu/drm/msm/adreno/adreno_gpu.c
668
ring->next = ring->start;
drivers/gpu/drm/msm/adreno/adreno_gpu.c
669
ring->memptrs->rptr = 0;
drivers/gpu/drm/msm/adreno/adreno_gpu.c
670
ring->memptrs->bv_fence = ring->fctx->completed_fence;
drivers/gpu/drm/msm/adreno/adreno_gpu.c
676
if (fence_before(ring->fctx->last_fence, ring->memptrs->fence)) {
drivers/gpu/drm/msm/adreno/adreno_gpu.c
677
ring->memptrs->fence = ring->fctx->last_fence;
drivers/gpu/drm/msm/adreno/adreno_gpu.c
686
struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/adreno/adreno_gpu.c
690
return gpu->funcs->get_rptr(gpu, ring);
drivers/gpu/drm/msm/adreno/adreno_gpu.c
716
void adreno_flush(struct msm_gpu *gpu, struct msm_ringbuffer *ring, u32 reg)
drivers/gpu/drm/msm/adreno/adreno_gpu.c
721
ring->cur = ring->next;
drivers/gpu/drm/msm/adreno/adreno_gpu.c
728
wptr = get_wptr(ring);
drivers/gpu/drm/msm/adreno/adreno_gpu.c
736
bool adreno_idle(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/adreno/adreno_gpu.c
739
uint32_t wptr = get_wptr(ring);
drivers/gpu/drm/msm/adreno/adreno_gpu.c
742
if (!spin_until(get_rptr(adreno_gpu, ring) == wptr))
drivers/gpu/drm/msm/adreno/adreno_gpu.c
747
gpu->name, ring->id, get_rptr(adreno_gpu, ring), wptr);
drivers/gpu/drm/msm/adreno/adreno_gpu.c
766
state->ring[i].fence = gpu->rb[i]->memptrs->fence;
drivers/gpu/drm/msm/adreno/adreno_gpu.c
767
state->ring[i].iova = gpu->rb[i]->iova;
drivers/gpu/drm/msm/adreno/adreno_gpu.c
768
state->ring[i].seqno = gpu->rb[i]->fctx->last_fence;
drivers/gpu/drm/msm/adreno/adreno_gpu.c
769
state->ring[i].rptr = get_rptr(adreno_gpu, gpu->rb[i]);
drivers/gpu/drm/msm/adreno/adreno_gpu.c
770
state->ring[i].wptr = get_wptr(gpu->rb[i]);
drivers/gpu/drm/msm/adreno/adreno_gpu.c
773
size = state->ring[i].wptr;
drivers/gpu/drm/msm/adreno/adreno_gpu.c
776
for (j = state->ring[i].wptr; j < MSM_GPU_RINGBUFFER_SZ >> 2; j++)
drivers/gpu/drm/msm/adreno/adreno_gpu.c
781
state->ring[i].data = kvmemdup(gpu->rb[i]->start, size << 2, GFP_KERNEL);
drivers/gpu/drm/msm/adreno/adreno_gpu.c
782
if (state->ring[i].data)
drivers/gpu/drm/msm/adreno/adreno_gpu.c
783
state->ring[i].data_size = size << 2;
drivers/gpu/drm/msm/adreno/adreno_gpu.c
821
for (i = 0; i < ARRAY_SIZE(state->ring); i++)
drivers/gpu/drm/msm/adreno/adreno_gpu.c
822
kvfree(state->ring[i].data);
drivers/gpu/drm/msm/adreno/adreno_gpu.c
984
drm_printf(p, " iova: 0x%016llx\n", state->ring[i].iova);
drivers/gpu/drm/msm/adreno/adreno_gpu.c
985
drm_printf(p, " last-fence: %u\n", state->ring[i].seqno);
drivers/gpu/drm/msm/adreno/adreno_gpu.c
986
drm_printf(p, " retired-fence: %u\n", state->ring[i].fence);
drivers/gpu/drm/msm/adreno/adreno_gpu.c
987
drm_printf(p, " rptr: %u\n", state->ring[i].rptr);
drivers/gpu/drm/msm/adreno/adreno_gpu.c
988
drm_printf(p, " wptr: %u\n", state->ring[i].wptr);
drivers/gpu/drm/msm/adreno/adreno_gpu.c
991
adreno_show_object(p, &state->ring[i].data,
drivers/gpu/drm/msm/adreno/adreno_gpu.c
992
state->ring[i].data_size, &state->ring[i].encoded);
drivers/gpu/drm/msm/adreno/adreno_gpu.h
619
void adreno_flush(struct msm_gpu *gpu, struct msm_ringbuffer *ring, u32 reg);
drivers/gpu/drm/msm/adreno/adreno_gpu.h
620
bool adreno_idle(struct msm_gpu *gpu, struct msm_ringbuffer *ring);
drivers/gpu/drm/msm/adreno/adreno_gpu.h
627
void adreno_wait_ring(struct msm_ringbuffer *ring, uint32_t ndwords);
drivers/gpu/drm/msm/adreno/adreno_gpu.h
677
OUT_PKT0(struct msm_ringbuffer *ring, uint16_t regindx, uint16_t cnt)
drivers/gpu/drm/msm/adreno/adreno_gpu.h
679
adreno_wait_ring(ring, cnt+1);
drivers/gpu/drm/msm/adreno/adreno_gpu.h
680
OUT_RING(ring, CP_TYPE0_PKT | ((cnt-1) << 16) | (regindx & 0x7FFF));
drivers/gpu/drm/msm/adreno/adreno_gpu.h
685
OUT_PKT2(struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/adreno/adreno_gpu.h
687
adreno_wait_ring(ring, 1);
drivers/gpu/drm/msm/adreno/adreno_gpu.h
688
OUT_RING(ring, CP_TYPE2_PKT);
drivers/gpu/drm/msm/adreno/adreno_gpu.h
692
OUT_PKT3(struct msm_ringbuffer *ring, uint8_t opcode, uint16_t cnt)
drivers/gpu/drm/msm/adreno/adreno_gpu.h
694
adreno_wait_ring(ring, cnt+1);
drivers/gpu/drm/msm/adreno/adreno_gpu.h
695
OUT_RING(ring, CP_TYPE3_PKT | ((cnt-1) << 16) | ((opcode & 0xFF) << 8));
drivers/gpu/drm/msm/adreno/adreno_gpu.h
714
OUT_PKT4(struct msm_ringbuffer *ring, uint16_t regindx, uint16_t cnt)
drivers/gpu/drm/msm/adreno/adreno_gpu.h
716
adreno_wait_ring(ring, cnt + 1);
drivers/gpu/drm/msm/adreno/adreno_gpu.h
717
OUT_RING(ring, PKT4(regindx, cnt));
drivers/gpu/drm/msm/adreno/adreno_gpu.h
725
OUT_PKT7(struct msm_ringbuffer *ring, uint8_t opcode, uint16_t cnt)
drivers/gpu/drm/msm/adreno/adreno_gpu.h
727
adreno_wait_ring(ring, cnt + 1);
drivers/gpu/drm/msm/adreno/adreno_gpu.h
728
OUT_RING(ring, PKT7(opcode, cnt));
drivers/gpu/drm/msm/adreno/adreno_gpu.h
731
static inline uint32_t get_wptr(struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/adreno/adreno_gpu.h
733
return (ring->cur - ring->start) % (MSM_GPU_RINGBUFFER_SZ >> 2);
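The adreno_gpu.h matches above expose two small pieces of arithmetic: the CP type-3 packet header written by OUT_PKT3() and the write pointer in dwords computed by get_wptr(). The sketch below restates both outside the driver; CP_TYPE3_PKT's bit pattern and the ring size constant are assumed values for illustration, not the header's actual definitions.

/* Sketch of OUT_PKT3()'s header encoding and get_wptr()'s dword
 * offset, as quoted from adreno_gpu.h above. The two #defines are
 * assumptions made so the example is self-contained. */
#include <stdint.h>

#define CP_TYPE3_PKT    (3u << 30)   /* assumed type bits for a type-3 packet */
#define RB_SZ_DWORDS    0x8000u      /* assumed MSM_GPU_RINGBUFFER_SZ >> 2    */

/* Header word emitted by OUT_PKT3(ring, opcode, cnt): payload length
 * (cnt - 1) in bits 16.., opcode in bits 8..15. */
static inline uint32_t pkt3_hdr(uint8_t opcode, uint16_t cnt)
{
    return CP_TYPE3_PKT | ((uint32_t)(cnt - 1) << 16) |
           ((uint32_t)(opcode & 0xFF) << 8);
}

/* get_wptr(): committed write position in dwords, modulo ring size,
 * derived from the cur/start pointers. */
static inline uint32_t wptr_dwords(const uint32_t *cur, const uint32_t *start)
{
    return (uint32_t)(cur - start) % RB_SZ_DWORDS;
}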
drivers/gpu/drm/msm/msm_gem.h
455
struct msm_ringbuffer *ring;
drivers/gpu/drm/msm/msm_gem_submit.c
558
struct msm_ringbuffer *ring;
drivers/gpu/drm/msm/msm_gem_submit.c
600
ring = gpu->rb[queue->ring_nr];
drivers/gpu/drm/msm/msm_gem_submit.c
617
trace_msm_gpu_submit(pid_nr(submit->pid), ring->id, submit->ident,
drivers/gpu/drm/msm/msm_gem_submit.c
74
submit->ring = gpu->rb[queue->ring_nr];
drivers/gpu/drm/msm/msm_gpu.c
420
find_submit(struct msm_ringbuffer *ring, uint32_t fence)
drivers/gpu/drm/msm/msm_gpu.c
425
spin_lock_irqsave(&ring->submit_lock, flags);
drivers/gpu/drm/msm/msm_gpu.c
426
list_for_each_entry(submit, &ring->submits, node) {
drivers/gpu/drm/msm/msm_gpu.c
428
spin_unlock_irqrestore(&ring->submit_lock, flags);
drivers/gpu/drm/msm/msm_gpu.c
432
spin_unlock_irqrestore(&ring->submit_lock, flags);
drivers/gpu/drm/msm/msm_gpu.c
535
struct msm_ringbuffer *ring = gpu->rb[i];
drivers/gpu/drm/msm/msm_gpu.c
537
uint32_t fence = ring->memptrs->fence;
drivers/gpu/drm/msm/msm_gpu.c
543
if (ring == cur_ring)
drivers/gpu/drm/msm/msm_gpu.c
544
ring->memptrs->fence = ++fence;
drivers/gpu/drm/msm/msm_gpu.c
546
msm_update_fence(ring->fctx, fence);
drivers/gpu/drm/msm/msm_gpu.c
560
struct msm_ringbuffer *ring = gpu->rb[i];
drivers/gpu/drm/msm/msm_gpu.c
563
spin_lock_irqsave(&ring->submit_lock, flags);
drivers/gpu/drm/msm/msm_gpu.c
564
list_for_each_entry(submit, &ring->submits, node) {
drivers/gpu/drm/msm/msm_gpu.c
573
spin_unlock_irqrestore(&ring->submit_lock, flags);
drivers/gpu/drm/msm/msm_gpu.c
626
static bool made_progress(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/msm_gpu.c
628
if (ring->hangcheck_progress_retries >= DRM_MSM_HANGCHECK_PROGRESS_RETRIES)
drivers/gpu/drm/msm/msm_gpu.c
634
if (!gpu->funcs->progress(gpu, ring))
drivers/gpu/drm/msm/msm_gpu.c
637
ring->hangcheck_progress_retries++;
drivers/gpu/drm/msm/msm_gpu.c
645
struct msm_ringbuffer *ring = gpu->funcs->active_ring(gpu);
drivers/gpu/drm/msm/msm_gpu.c
646
uint32_t fence = ring->memptrs->fence;
drivers/gpu/drm/msm/msm_gpu.c
648
if (fence != ring->hangcheck_fence) {
drivers/gpu/drm/msm/msm_gpu.c
650
ring->hangcheck_fence = fence;
drivers/gpu/drm/msm/msm_gpu.c
651
ring->hangcheck_progress_retries = 0;
drivers/gpu/drm/msm/msm_gpu.c
652
} else if (fence_before(fence, ring->fctx->last_fence) &&
drivers/gpu/drm/msm/msm_gpu.c
653
!made_progress(gpu, ring)) {
drivers/gpu/drm/msm/msm_gpu.c
655
ring->hangcheck_fence = fence;
drivers/gpu/drm/msm/msm_gpu.c
656
ring->hangcheck_progress_retries = 0;
drivers/gpu/drm/msm/msm_gpu.c
658
gpu->name, ring->id);
drivers/gpu/drm/msm/msm_gpu.c
662
gpu->name, ring->fctx->last_fence);
drivers/gpu/drm/msm/msm_gpu.c
668
if (fence_after(ring->fctx->last_fence, ring->hangcheck_fence))
drivers/gpu/drm/msm/msm_gpu.c
777
static void retire_submit(struct msm_gpu *gpu, struct msm_ringbuffer *ring,
drivers/gpu/drm/msm/msm_gpu.c
785
stats = &ring->memptrs->stats[index];
drivers/gpu/drm/msm/msm_gpu.c
808
spin_lock_irqsave(&ring->submit_lock, flags);
drivers/gpu/drm/msm/msm_gpu.c
810
spin_unlock_irqrestore(&ring->submit_lock, flags);
drivers/gpu/drm/msm/msm_gpu.c
832
struct msm_ringbuffer *ring = gpu->rb[i];
drivers/gpu/drm/msm/msm_gpu.c
838
spin_lock_irqsave(&ring->submit_lock, flags);
drivers/gpu/drm/msm/msm_gpu.c
839
submit = list_first_entry_or_null(&ring->submits,
drivers/gpu/drm/msm/msm_gpu.c
841
spin_unlock_irqrestore(&ring->submit_lock, flags);
drivers/gpu/drm/msm/msm_gpu.c
849
retire_submit(gpu, ring, submit);
drivers/gpu/drm/msm/msm_gpu.c
881
struct msm_ringbuffer *ring = submit->ring;
drivers/gpu/drm/msm/msm_gpu.c
900
spin_lock_irqsave(&ring->submit_lock, flags);
drivers/gpu/drm/msm/msm_gpu.c
901
list_add_tail(&submit->node, &ring->submits);
drivers/gpu/drm/msm/msm_gpu.c
902
spin_unlock_irqrestore(&ring->submit_lock, flags);
drivers/gpu/drm/msm/msm_gpu.c
914
submit->ring->cur_ctx_seqno = submit->queue->ctx->seqno;
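The msm_gpu.c hangcheck matches above (made_progress, hangcheck_fence, hangcheck_progress_retries) outline how a stalled ring is detected: a ring that stops retiring fences is only declared hung after a bounded number of checks in which the CP also shows no lower-level progress. The model below reconstructs that decision; the retry limit, the return values not visible in the quoted lines, and the plain `<` comparison (the driver's fence_before() handles seqno wrap) are assumptions.

/* Model of the hangcheck decision quoted from msm_gpu.c above.
 * Constants and return paths not shown in the listing are assumed. */
#include <stdbool.h>
#include <stdint.h>

#define MODEL_PROGRESS_RETRIES 3    /* assumed stand-in for DRM_MSM_HANGCHECK_PROGRESS_RETRIES */

struct model_msm_ring {
    uint32_t fence;                 /* retired fence (memptrs->fence)        */
    uint32_t last_fence;            /* last submitted fence (fctx->last_fence) */
    uint32_t hangcheck_fence;
    int      hangcheck_progress_retries;
};

/* Grant extra hangcheck periods while some CP-level progress indicator
 * (gpu->funcs->progress in the driver) still moves, up to a limit. */
static bool made_progress(struct model_msm_ring *ring, bool cp_progressed)
{
    if (ring->hangcheck_progress_retries >= MODEL_PROGRESS_RETRIES)
        return false;
    if (!cp_progressed)
        return false;
    ring->hangcheck_progress_retries++;
    return true;
}

static bool ring_hung(struct model_msm_ring *ring, bool cp_progressed)
{
    if (ring->fence != ring->hangcheck_fence) {
        /* something retired since the last check: not hung */
        ring->hangcheck_fence = ring->fence;
        ring->hangcheck_progress_retries = 0;
        return false;
    }
    /* fence stalled behind the last submission and no CP progress */
    return (ring->fence < ring->last_fence) && !made_progress(ring, cp_progressed);
}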
drivers/gpu/drm/msm/msm_gpu.h
314
struct msm_ringbuffer *ring = gpu->rb[i];
drivers/gpu/drm/msm/msm_gpu.h
316
if (fence_after(ring->fctx->last_fence, ring->memptrs->fence))
drivers/gpu/drm/msm/msm_gpu.h
576
} ring[MSM_GPU_MAX_RINGS];
drivers/gpu/drm/msm/msm_gpu.h
63
void (*flush)(struct msm_gpu *gpu, struct msm_ringbuffer *ring);
drivers/gpu/drm/msm/msm_gpu.h
85
uint32_t (*get_rptr)(struct msm_gpu *gpu, struct msm_ringbuffer *ring);
drivers/gpu/drm/msm/msm_gpu.h
94
bool (*progress)(struct msm_gpu *gpu, struct msm_ringbuffer *ring);
drivers/gpu/drm/msm/msm_gpu_trace.h
46
__entry->ringid = submit->ring->id;
drivers/gpu/drm/msm/msm_gpu_trace.h
73
__entry->ringid = submit->ring->id;
drivers/gpu/drm/msm/msm_ringbuffer.c
101
msm_gem_object_set_name(ring->bo, "ring%d", id);
drivers/gpu/drm/msm/msm_ringbuffer.c
102
args.name = to_msm_bo(ring->bo)->name;
drivers/gpu/drm/msm/msm_ringbuffer.c
104
ring->end = ring->start + (MSM_GPU_RINGBUFFER_SZ >> 2);
drivers/gpu/drm/msm/msm_ringbuffer.c
105
ring->next = ring->start;
drivers/gpu/drm/msm/msm_ringbuffer.c
106
ring->cur = ring->start;
drivers/gpu/drm/msm/msm_ringbuffer.c
108
ring->memptrs = memptrs;
drivers/gpu/drm/msm/msm_ringbuffer.c
109
ring->memptrs_iova = memptrs_iova;
drivers/gpu/drm/msm/msm_ringbuffer.c
111
ret = drm_sched_init(&ring->sched, &args);
drivers/gpu/drm/msm/msm_ringbuffer.c
116
INIT_LIST_HEAD(&ring->submits);
drivers/gpu/drm/msm/msm_ringbuffer.c
117
spin_lock_init(&ring->submit_lock);
drivers/gpu/drm/msm/msm_ringbuffer.c
118
spin_lock_init(&ring->preempt_lock);
drivers/gpu/drm/msm/msm_ringbuffer.c
120
snprintf(name, sizeof(name), "gpu-ring-%d", ring->id);
drivers/gpu/drm/msm/msm_ringbuffer.c
122
ring->fctx = msm_fence_context_alloc(gpu->dev, &ring->memptrs->fence, name);
drivers/gpu/drm/msm/msm_ringbuffer.c
124
return ring;
drivers/gpu/drm/msm/msm_ringbuffer.c
127
msm_ringbuffer_destroy(ring);
drivers/gpu/drm/msm/msm_ringbuffer.c
131
void msm_ringbuffer_destroy(struct msm_ringbuffer *ring)
drivers/gpu/drm/msm/msm_ringbuffer.c
133
if (IS_ERR_OR_NULL(ring))
drivers/gpu/drm/msm/msm_ringbuffer.c
136
drm_sched_fini(&ring->sched);
drivers/gpu/drm/msm/msm_ringbuffer.c
138
msm_fence_context_free(ring->fctx);
drivers/gpu/drm/msm/msm_ringbuffer.c
140
msm_gem_kernel_put(ring->bo, ring->gpu->vm);
drivers/gpu/drm/msm/msm_ringbuffer.c
142
kfree(ring);
drivers/gpu/drm/msm/msm_ringbuffer.c
17
struct msm_fence_context *fctx = submit->ring->fctx;
drivers/gpu/drm/msm/msm_ringbuffer.c
75
struct msm_ringbuffer *ring;
drivers/gpu/drm/msm/msm_ringbuffer.c
82
ring = kzalloc_obj(*ring);
drivers/gpu/drm/msm/msm_ringbuffer.c
83
if (!ring) {
drivers/gpu/drm/msm/msm_ringbuffer.c
88
ring->gpu = gpu;
drivers/gpu/drm/msm/msm_ringbuffer.c
89
ring->id = id;
drivers/gpu/drm/msm/msm_ringbuffer.c
91
ring->start = msm_gem_kernel_new(gpu->dev, MSM_GPU_RINGBUFFER_SZ,
drivers/gpu/drm/msm/msm_ringbuffer.c
93
gpu->vm, &ring->bo, &ring->iova);
drivers/gpu/drm/msm/msm_ringbuffer.c
95
if (IS_ERR(ring->start)) {
drivers/gpu/drm/msm/msm_ringbuffer.c
96
ret = PTR_ERR(ring->start);
drivers/gpu/drm/msm/msm_ringbuffer.c
97
ring->start = NULL;
drivers/gpu/drm/msm/msm_ringbuffer.h
124
void msm_ringbuffer_destroy(struct msm_ringbuffer *ring);
drivers/gpu/drm/msm/msm_ringbuffer.h
129
OUT_RING(struct msm_ringbuffer *ring, uint32_t data)
drivers/gpu/drm/msm/msm_ringbuffer.h
13
#define rbmemptr(ring, member) \
drivers/gpu/drm/msm/msm_ringbuffer.h
135
if (ring->next == ring->end)
drivers/gpu/drm/msm/msm_ringbuffer.h
136
ring->next = ring->start;
drivers/gpu/drm/msm/msm_ringbuffer.h
137
*(ring->next++) = data;
drivers/gpu/drm/msm/msm_ringbuffer.h
14
((ring)->memptrs_iova + offsetof(struct msm_rbmemptrs, member))
drivers/gpu/drm/msm/msm_ringbuffer.h
16
#define rbmemptr_stats(ring, index, member) \
drivers/gpu/drm/msm/msm_ringbuffer.h
17
(rbmemptr((ring), stats) + \
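The msm_ringbuffer.h matches above quote OUT_RING()'s wrap-at-end store and rbmemptr()'s GPU-address computation in full. The following userspace model transcribes both; the struct layouts are simplified stand-ins for struct msm_ringbuffer and struct msm_rbmemptrs.

/* Model of OUT_RING() and rbmemptr() as quoted from msm_ringbuffer.h
 * above. Structure contents are illustrative, not the real layouts. */
#include <stddef.h>
#include <stdint.h>

struct model_memptrs {
    uint32_t rptr;
    uint32_t fence;
};

struct model_rb {
    uint32_t *start, *end, *next;
    uint64_t  memptrs_iova;      /* GPU address of the memptrs block */
};

/* OUT_RING(): wrap back to start when next reaches end, then store. */
static inline void out_ring(struct model_rb *ring, uint32_t data)
{
    if (ring->next == ring->end)
        ring->next = ring->start;
    *(ring->next++) = data;
}

/* rbmemptr(ring, member): GPU address of one member of the memptrs block. */
#define model_rbmemptr(ring, member) \
    ((ring)->memptrs_iova + offsetof(struct model_memptrs, member))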
drivers/gpu/drm/msm/msm_submitqueue.c
135
get_sched_entity(struct msm_context *ctx, struct msm_ringbuffer *ring,
drivers/gpu/drm/msm/msm_submitqueue.c
151
struct drm_gpu_scheduler *sched = &ring->sched;
drivers/gpu/drm/qxl/qxl_cmd.c
101
spin_unlock_irqrestore(&ring->lock, flags);
drivers/gpu/drm/qxl/qxl_cmd.c
105
int qxl_ring_push(struct qxl_ring *ring,
drivers/gpu/drm/qxl/qxl_cmd.c
108
struct qxl_ring_header *header = &(ring->ring->header);
drivers/gpu/drm/qxl/qxl_cmd.c
113
spin_lock_irqsave(&ring->lock, flags);
drivers/gpu/drm/qxl/qxl_cmd.c
117
spin_unlock_irqrestore(&ring->lock, flags);
drivers/gpu/drm/qxl/qxl_cmd.c
119
while (!qxl_check_header(ring))
drivers/gpu/drm/qxl/qxl_cmd.c
123
ret = wait_event_interruptible(*ring->push_event,
drivers/gpu/drm/qxl/qxl_cmd.c
124
qxl_check_header(ring));
drivers/gpu/drm/qxl/qxl_cmd.c
128
wait_event(*ring->push_event,
drivers/gpu/drm/qxl/qxl_cmd.c
129
qxl_check_header(ring));
drivers/gpu/drm/qxl/qxl_cmd.c
133
spin_lock_irqsave(&ring->lock, flags);
drivers/gpu/drm/qxl/qxl_cmd.c
136
idx = header->prod & (ring->n_elements - 1);
drivers/gpu/drm/qxl/qxl_cmd.c
137
elt = ring->ring->elements + idx * ring->element_size;
drivers/gpu/drm/qxl/qxl_cmd.c
139
memcpy((void *)elt, new_elt, ring->element_size);
drivers/gpu/drm/qxl/qxl_cmd.c
146
outb(0, ring->prod_notify);
drivers/gpu/drm/qxl/qxl_cmd.c
148
spin_unlock_irqrestore(&ring->lock, flags);
drivers/gpu/drm/qxl/qxl_cmd.c
152
static bool qxl_ring_pop(struct qxl_ring *ring,
drivers/gpu/drm/qxl/qxl_cmd.c
155
volatile struct qxl_ring_header *header = &(ring->ring->header);
drivers/gpu/drm/qxl/qxl_cmd.c
160
spin_lock_irqsave(&ring->lock, flags);
drivers/gpu/drm/qxl/qxl_cmd.c
163
spin_unlock_irqrestore(&ring->lock, flags);
drivers/gpu/drm/qxl/qxl_cmd.c
167
idx = header->cons & (ring->n_elements - 1);
drivers/gpu/drm/qxl/qxl_cmd.c
168
ring_elt = ring->ring->elements + idx * ring->element_size;
drivers/gpu/drm/qxl/qxl_cmd.c
170
memcpy(element, (void *)ring_elt, ring->element_size);
drivers/gpu/drm/qxl/qxl_cmd.c
174
spin_unlock_irqrestore(&ring->lock, flags);
drivers/gpu/drm/qxl/qxl_cmd.c
44
struct ring *ring;
drivers/gpu/drm/qxl/qxl_cmd.c
52
void qxl_ring_free(struct qxl_ring *ring)
drivers/gpu/drm/qxl/qxl_cmd.c
54
kfree(ring);
drivers/gpu/drm/qxl/qxl_cmd.c
64
struct qxl_ring *ring;
drivers/gpu/drm/qxl/qxl_cmd.c
66
ring = kmalloc_obj(*ring);
drivers/gpu/drm/qxl/qxl_cmd.c
67
if (!ring)
drivers/gpu/drm/qxl/qxl_cmd.c
70
ring->ring = (struct ring *)header;
drivers/gpu/drm/qxl/qxl_cmd.c
71
ring->element_size = element_size;
drivers/gpu/drm/qxl/qxl_cmd.c
72
ring->n_elements = n_elements;
drivers/gpu/drm/qxl/qxl_cmd.c
73
ring->prod_notify = prod_notify;
drivers/gpu/drm/qxl/qxl_cmd.c
74
ring->push_event = push_event;
drivers/gpu/drm/qxl/qxl_cmd.c
75
spin_lock_init(&ring->lock);
drivers/gpu/drm/qxl/qxl_cmd.c
76
return ring;
drivers/gpu/drm/qxl/qxl_cmd.c
79
static int qxl_check_header(struct qxl_ring *ring)
drivers/gpu/drm/qxl/qxl_cmd.c
82
struct qxl_ring_header *header = &(ring->ring->header);
drivers/gpu/drm/qxl/qxl_cmd.c
85
spin_lock_irqsave(&ring->lock, flags);
drivers/gpu/drm/qxl/qxl_cmd.c
89
spin_unlock_irqrestore(&ring->lock, flags);
drivers/gpu/drm/qxl/qxl_cmd.c
93
int qxl_check_idle(struct qxl_ring *ring)
drivers/gpu/drm/qxl/qxl_cmd.c
96
struct qxl_ring_header *header = &(ring->ring->header);
drivers/gpu/drm/qxl/qxl_cmd.c
99
spin_lock_irqsave(&ring->lock, flags);
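The qxl_cmd.c matches above show the slot selection used by qxl_ring_push(): the producer counter is masked by (n_elements - 1), which only works because the element count is a power of two, and the element is then copied into that slot. The sketch below models the producer side; the structure and the locking that the driver performs around this are simplified away.

/* Model of the producer-side indexing quoted from qxl_cmd.c above.
 * Types are simplified stand-ins for the qxl ring structures, and the
 * shared header/doorbell handling is omitted. */
#include <stdint.h>
#include <string.h>

struct model_qxl_ring {
    uint8_t  *elements;      /* backing storage for the slots        */
    uint32_t  n_elements;    /* number of slots, must be power of two */
    uint32_t  element_size;  /* bytes per slot                        */
    uint32_t  prod;          /* free-running producer counter         */
};

static void model_ring_push(struct model_qxl_ring *ring, const void *new_elt)
{
    uint32_t idx = ring->prod & (ring->n_elements - 1);
    uint8_t *elt = ring->elements + (size_t)idx * ring->element_size;

    memcpy(elt, new_elt, ring->element_size);
    ring->prod++;   /* in the driver this is header->prod, shared with the device */
}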
drivers/gpu/drm/qxl/qxl_drv.h
276
void qxl_ring_free(struct qxl_ring *ring);
drivers/gpu/drm/qxl/qxl_drv.h
277
int qxl_check_idle(struct qxl_ring *ring);
drivers/gpu/drm/qxl/qxl_drv.h
357
int qxl_ring_push(struct qxl_ring *ring, const void *new_elt, bool interruptible);
drivers/gpu/drm/radeon/cik.c
3445
int cik_ring_test(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/cik.c
3458
r = radeon_ring_lock(rdev, ring, 3);
drivers/gpu/drm/radeon/cik.c
3460
DRM_ERROR("radeon: cp failed to lock ring %d (%d).\n", ring->idx, r);
drivers/gpu/drm/radeon/cik.c
3464
radeon_ring_write(ring, PACKET3(PACKET3_SET_UCONFIG_REG, 1));
drivers/gpu/drm/radeon/cik.c
3465
radeon_ring_write(ring, ((scratch - PACKET3_SET_UCONFIG_REG_START) >> 2));
drivers/gpu/drm/radeon/cik.c
3466
radeon_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/radeon/cik.c
3467
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/cik.c
3476
DRM_INFO("ring test on %d succeeded in %d usecs\n", ring->idx, i);
drivers/gpu/drm/radeon/cik.c
3479
ring->idx, scratch, tmp);
drivers/gpu/drm/radeon/cik.c
3497
struct radeon_ring *ring = &rdev->ring[ridx];
drivers/gpu/drm/radeon/cik.c
3500
switch (ring->idx) {
drivers/gpu/drm/radeon/cik.c
3504
switch (ring->me) {
drivers/gpu/drm/radeon/cik.c
3506
ref_and_mask = CP2 << ring->pipe;
drivers/gpu/drm/radeon/cik.c
3509
ref_and_mask = CP6 << ring->pipe;
drivers/gpu/drm/radeon/cik.c
3520
radeon_ring_write(ring, PACKET3(PACKET3_WAIT_REG_MEM, 5));
drivers/gpu/drm/radeon/cik.c
3521
radeon_ring_write(ring, (WAIT_REG_MEM_OPERATION(1) | /* write, wait, write */
drivers/gpu/drm/radeon/cik.c
3524
radeon_ring_write(ring, GPU_HDP_FLUSH_REQ >> 2);
drivers/gpu/drm/radeon/cik.c
3525
radeon_ring_write(ring, GPU_HDP_FLUSH_DONE >> 2);
drivers/gpu/drm/radeon/cik.c
3526
radeon_ring_write(ring, ref_and_mask);
drivers/gpu/drm/radeon/cik.c
3527
radeon_ring_write(ring, ref_and_mask);
drivers/gpu/drm/radeon/cik.c
3528
radeon_ring_write(ring, 0x20); /* poll interval */
drivers/gpu/drm/radeon/cik.c
3543
struct radeon_ring *ring = &rdev->ring[fence->ring];
drivers/gpu/drm/radeon/cik.c
3544
u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
drivers/gpu/drm/radeon/cik.c
3549
radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4));
drivers/gpu/drm/radeon/cik.c
3550
radeon_ring_write(ring, (EOP_TCL1_ACTION_EN |
drivers/gpu/drm/radeon/cik.c
3554
radeon_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/radeon/cik.c
3555
radeon_ring_write(ring, (upper_32_bits(addr) & 0xffff) |
drivers/gpu/drm/radeon/cik.c
3557
radeon_ring_write(ring, fence->seq - 1);
drivers/gpu/drm/radeon/cik.c
3558
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/cik.c
3561
radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4));
drivers/gpu/drm/radeon/cik.c
3562
radeon_ring_write(ring, (EOP_TCL1_ACTION_EN |
drivers/gpu/drm/radeon/cik.c
3566
radeon_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/radeon/cik.c
3567
radeon_ring_write(ring, (upper_32_bits(addr) & 0xffff) | DATA_SEL(1) | INT_SEL(2));
drivers/gpu/drm/radeon/cik.c
3568
radeon_ring_write(ring, fence->seq);
drivers/gpu/drm/radeon/cik.c
3569
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/cik.c
3584
struct radeon_ring *ring = &rdev->ring[fence->ring];
drivers/gpu/drm/radeon/cik.c
3585
u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
drivers/gpu/drm/radeon/cik.c
3588
radeon_ring_write(ring, PACKET3(PACKET3_RELEASE_MEM, 5));
drivers/gpu/drm/radeon/cik.c
3589
radeon_ring_write(ring, (EOP_TCL1_ACTION_EN |
drivers/gpu/drm/radeon/cik.c
3593
radeon_ring_write(ring, DATA_SEL(1) | INT_SEL(2));
drivers/gpu/drm/radeon/cik.c
3594
radeon_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/radeon/cik.c
3595
radeon_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/radeon/cik.c
3596
radeon_ring_write(ring, fence->seq);
drivers/gpu/drm/radeon/cik.c
3597
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/cik.c
3612
struct radeon_ring *ring,
drivers/gpu/drm/radeon/cik.c
3619
radeon_ring_write(ring, PACKET3(PACKET3_MEM_SEMAPHORE, 1));
drivers/gpu/drm/radeon/cik.c
3620
radeon_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/radeon/cik.c
3621
radeon_ring_write(ring, (upper_32_bits(addr) & 0xffff) | sel);
drivers/gpu/drm/radeon/cik.c
3623
if (emit_wait && ring->idx == RADEON_RING_TYPE_GFX_INDEX) {
drivers/gpu/drm/radeon/cik.c
3625
radeon_ring_write(ring, PACKET3(PACKET3_PFP_SYNC_ME, 0));
drivers/gpu/drm/radeon/cik.c
3626
radeon_ring_write(ring, 0x0);
drivers/gpu/drm/radeon/cik.c
3653
struct radeon_ring *ring = &rdev->ring[ring_index];
drivers/gpu/drm/radeon/cik.c
3662
r = radeon_ring_lock(rdev, ring, num_loops * 7 + 18);
drivers/gpu/drm/radeon/cik.c
3670
radeon_sync_rings(rdev, &sync, ring->idx);
drivers/gpu/drm/radeon/cik.c
3680
radeon_ring_write(ring, PACKET3(PACKET3_DMA_DATA, 5));
drivers/gpu/drm/radeon/cik.c
3681
radeon_ring_write(ring, control);
drivers/gpu/drm/radeon/cik.c
3682
radeon_ring_write(ring, lower_32_bits(src_offset));
drivers/gpu/drm/radeon/cik.c
3683
radeon_ring_write(ring, upper_32_bits(src_offset));
drivers/gpu/drm/radeon/cik.c
3684
radeon_ring_write(ring, lower_32_bits(dst_offset));
drivers/gpu/drm/radeon/cik.c
3685
radeon_ring_write(ring, upper_32_bits(dst_offset));
drivers/gpu/drm/radeon/cik.c
3686
radeon_ring_write(ring, cur_size_in_bytes);
drivers/gpu/drm/radeon/cik.c
3691
r = radeon_fence_emit(rdev, &fence, ring->idx);
drivers/gpu/drm/radeon/cik.c
3693
radeon_ring_unlock_undo(rdev, ring);
drivers/gpu/drm/radeon/cik.c
3698
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/cik.c
3721
struct radeon_ring *ring = &rdev->ring[ib->ring];
drivers/gpu/drm/radeon/cik.c
3722
unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0;
drivers/gpu/drm/radeon/cik.c
3727
radeon_ring_write(ring, PACKET3(PACKET3_SWITCH_BUFFER, 0));
drivers/gpu/drm/radeon/cik.c
3728
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/cik.c
3733
if (ring->rptr_save_reg) {
drivers/gpu/drm/radeon/cik.c
3734
next_rptr = ring->wptr + 3 + 4;
drivers/gpu/drm/radeon/cik.c
3735
radeon_ring_write(ring, PACKET3(PACKET3_SET_UCONFIG_REG, 1));
drivers/gpu/drm/radeon/cik.c
3736
radeon_ring_write(ring, ((ring->rptr_save_reg -
drivers/gpu/drm/radeon/cik.c
3738
radeon_ring_write(ring, next_rptr);
drivers/gpu/drm/radeon/cik.c
3740
next_rptr = ring->wptr + 5 + 4;
drivers/gpu/drm/radeon/cik.c
3741
radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/radeon/cik.c
3742
radeon_ring_write(ring, WRITE_DATA_DST_SEL(1));
drivers/gpu/drm/radeon/cik.c
3743
radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
drivers/gpu/drm/radeon/cik.c
3744
radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr));
drivers/gpu/drm/radeon/cik.c
3745
radeon_ring_write(ring, next_rptr);
drivers/gpu/drm/radeon/cik.c
3753
radeon_ring_write(ring, header);
drivers/gpu/drm/radeon/cik.c
3754
radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFFC));
drivers/gpu/drm/radeon/cik.c
3755
radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);
drivers/gpu/drm/radeon/cik.c
3756
radeon_ring_write(ring, control);
drivers/gpu/drm/radeon/cik.c
3769
int cik_ib_test(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/cik.c
3783
r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
drivers/gpu/drm/radeon/cik.c
3821
DRM_INFO("ib test on ring %d succeeded in %u usecs\n", ib.fence->ring, i);
drivers/gpu/drm/radeon/cik.c
3871
rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
drivers/gpu/drm/radeon/cik.c
3973
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/cik.c
3983
r = radeon_ring_lock(rdev, ring, cik_default_size + 17);
drivers/gpu/drm/radeon/cik.c
3990
radeon_ring_write(ring, PACKET3(PACKET3_SET_BASE, 2));
drivers/gpu/drm/radeon/cik.c
3991
radeon_ring_write(ring, PACKET3_BASE_INDEX(CE_PARTITION_BASE));
drivers/gpu/drm/radeon/cik.c
3992
radeon_ring_write(ring, 0x8000);
drivers/gpu/drm/radeon/cik.c
3993
radeon_ring_write(ring, 0x8000);
drivers/gpu/drm/radeon/cik.c
3996
radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
drivers/gpu/drm/radeon/cik.c
3997
radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
drivers/gpu/drm/radeon/cik.c
3999
radeon_ring_write(ring, PACKET3(PACKET3_CONTEXT_CONTROL, 1));
drivers/gpu/drm/radeon/cik.c
4000
radeon_ring_write(ring, 0x80000000);
drivers/gpu/drm/radeon/cik.c
4001
radeon_ring_write(ring, 0x80000000);
drivers/gpu/drm/radeon/cik.c
4004
radeon_ring_write(ring, cik_default_state[i]);
drivers/gpu/drm/radeon/cik.c
4006
radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
drivers/gpu/drm/radeon/cik.c
4007
radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
drivers/gpu/drm/radeon/cik.c
4010
radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
drivers/gpu/drm/radeon/cik.c
4011
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/cik.c
4013
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
drivers/gpu/drm/radeon/cik.c
4014
radeon_ring_write(ring, 0x00000316);
drivers/gpu/drm/radeon/cik.c
4015
radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
drivers/gpu/drm/radeon/cik.c
4016
radeon_ring_write(ring, 0x00000010); /* VGT_OUT_DEALLOC_CNTL */
drivers/gpu/drm/radeon/cik.c
4018
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/cik.c
4034
radeon_ring_fini(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]);
drivers/gpu/drm/radeon/cik.c
4048
struct radeon_ring *ring;
drivers/gpu/drm/radeon/cik.c
4068
ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/cik.c
4069
rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/radeon/cik.c
4078
ring->wptr = 0;
drivers/gpu/drm/radeon/cik.c
4079
WREG32(CP_RB0_WPTR, ring->wptr);
drivers/gpu/drm/radeon/cik.c
4094
rb_addr = ring->gpu_addr >> 8;
drivers/gpu/drm/radeon/cik.c
4100
rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = true;
drivers/gpu/drm/radeon/cik.c
4101
r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]);
drivers/gpu/drm/radeon/cik.c
4103
rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
drivers/gpu/drm/radeon/cik.c
4114
struct radeon_ring *ring)
drivers/gpu/drm/radeon/cik.c
4119
rptr = rdev->wb.wb[ring->rptr_offs/4];
drivers/gpu/drm/radeon/cik.c
4127
struct radeon_ring *ring)
drivers/gpu/drm/radeon/cik.c
4133
struct radeon_ring *ring)
drivers/gpu/drm/radeon/cik.c
4135
WREG32(CP_RB0_WPTR, ring->wptr);
drivers/gpu/drm/radeon/cik.c
4140
struct radeon_ring *ring)
drivers/gpu/drm/radeon/cik.c
4145
rptr = rdev->wb.wb[ring->rptr_offs/4];
drivers/gpu/drm/radeon/cik.c
4148
cik_srbm_select(rdev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/radeon/cik.c
4158
struct radeon_ring *ring)
drivers/gpu/drm/radeon/cik.c
4164
wptr = rdev->wb.wb[ring->wptr_offs/4];
drivers/gpu/drm/radeon/cik.c
4167
cik_srbm_select(rdev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/radeon/cik.c
4177
struct radeon_ring *ring)
drivers/gpu/drm/radeon/cik.c
4180
rdev->wb.wb[ring->wptr_offs/4] = ring->wptr;
drivers/gpu/drm/radeon/cik.c
4181
WDOORBELL32(ring->doorbell_index, ring->wptr);
drivers/gpu/drm/radeon/cik.c
4185
struct radeon_ring *ring)
drivers/gpu/drm/radeon/cik.c
4189
cik_srbm_select(rdev, ring->me, ring->pipe, ring->queue, 0);
drivers/gpu/drm/radeon/cik.c
4227
cik_compute_stop(rdev,&rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]);
drivers/gpu/drm/radeon/cik.c
4228
cik_compute_stop(rdev,&rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]);
drivers/gpu/drm/radeon/cik.c
4232
rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
drivers/gpu/drm/radeon/cik.c
4233
rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
drivers/gpu/drm/radeon/cik.c
4344
if (rdev->ring[idx].mqd_obj) {
drivers/gpu/drm/radeon/cik.c
4345
r = radeon_bo_reserve(rdev->ring[idx].mqd_obj, false);
drivers/gpu/drm/radeon/cik.c
4349
radeon_bo_unpin(rdev->ring[idx].mqd_obj);
drivers/gpu/drm/radeon/cik.c
4350
radeon_bo_unreserve(rdev->ring[idx].mqd_obj);
drivers/gpu/drm/radeon/cik.c
4352
radeon_bo_unref(&rdev->ring[idx].mqd_obj);
drivers/gpu/drm/radeon/cik.c
4353
rdev->ring[idx].mqd_obj = NULL;
drivers/gpu/drm/radeon/cik.c
4563
if (rdev->ring[idx].mqd_obj == NULL) {
drivers/gpu/drm/radeon/cik.c
4568
NULL, &rdev->ring[idx].mqd_obj);
drivers/gpu/drm/radeon/cik.c
4575
r = radeon_bo_reserve(rdev->ring[idx].mqd_obj, false);
drivers/gpu/drm/radeon/cik.c
4580
r = radeon_bo_pin(rdev->ring[idx].mqd_obj, RADEON_GEM_DOMAIN_GTT,
drivers/gpu/drm/radeon/cik.c
4587
r = radeon_bo_kmap(rdev->ring[idx].mqd_obj, (void **)&buf);
drivers/gpu/drm/radeon/cik.c
4605
cik_srbm_select(rdev, rdev->ring[idx].me,
drivers/gpu/drm/radeon/cik.c
4606
rdev->ring[idx].pipe,
drivers/gpu/drm/radeon/cik.c
4607
rdev->ring[idx].queue, 0);
drivers/gpu/drm/radeon/cik.c
4651
hqd_gpu_addr = rdev->ring[idx].gpu_addr >> 8;
drivers/gpu/drm/radeon/cik.c
4663
order_base_2(rdev->ring[idx].ring_size / 8);
drivers/gpu/drm/radeon/cik.c
4705
DOORBELL_OFFSET(rdev->ring[idx].doorbell_index);
drivers/gpu/drm/radeon/cik.c
4717
rdev->ring[idx].wptr = 0;
drivers/gpu/drm/radeon/cik.c
4718
mqd->queue_state.cp_hqd_pq_wptr = rdev->ring[idx].wptr;
drivers/gpu/drm/radeon/cik.c
4733
radeon_bo_kunmap(rdev->ring[idx].mqd_obj);
drivers/gpu/drm/radeon/cik.c
4734
radeon_bo_unreserve(rdev->ring[idx].mqd_obj);
drivers/gpu/drm/radeon/cik.c
4736
rdev->ring[idx].ready = true;
drivers/gpu/drm/radeon/cik.c
4737
r = radeon_ring_test(rdev, idx, &rdev->ring[idx]);
drivers/gpu/drm/radeon/cik.c
4739
rdev->ring[idx].ready = false;
drivers/gpu/drm/radeon/cik.c
5249
bool cik_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/cik.c
5256
radeon_ring_lockup_update(rdev, ring);
drivers/gpu/drm/radeon/cik.c
5259
return radeon_ring_test_lockup(rdev, ring);
drivers/gpu/drm/radeon/cik.c
5677
void cik_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
drivers/gpu/drm/radeon/cik.c
5680
int usepfp = (ring->idx == RADEON_RING_TYPE_GFX_INDEX);
drivers/gpu/drm/radeon/cik.c
5682
radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/radeon/cik.c
5683
radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(usepfp) |
drivers/gpu/drm/radeon/cik.c
5686
radeon_ring_write(ring,
drivers/gpu/drm/radeon/cik.c
5689
radeon_ring_write(ring,
drivers/gpu/drm/radeon/cik.c
5692
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/cik.c
5693
radeon_ring_write(ring, pd_addr >> 12);
drivers/gpu/drm/radeon/cik.c
5696
radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/radeon/cik.c
5697
radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(usepfp) |
drivers/gpu/drm/radeon/cik.c
5699
radeon_ring_write(ring, SRBM_GFX_CNTL >> 2);
drivers/gpu/drm/radeon/cik.c
5700
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/cik.c
5701
radeon_ring_write(ring, VMID(vm_id));
drivers/gpu/drm/radeon/cik.c
5703
radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 6));
drivers/gpu/drm/radeon/cik.c
5704
radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(usepfp) |
drivers/gpu/drm/radeon/cik.c
5706
radeon_ring_write(ring, SH_MEM_BASES >> 2);
drivers/gpu/drm/radeon/cik.c
5707
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/cik.c
5709
radeon_ring_write(ring, 0); /* SH_MEM_BASES */
drivers/gpu/drm/radeon/cik.c
5710
radeon_ring_write(ring, SH_MEM_CONFIG_GFX_DEFAULT); /* SH_MEM_CONFIG */
drivers/gpu/drm/radeon/cik.c
5711
radeon_ring_write(ring, 1); /* SH_MEM_APE1_BASE */
drivers/gpu/drm/radeon/cik.c
5712
radeon_ring_write(ring, 0); /* SH_MEM_APE1_LIMIT */
drivers/gpu/drm/radeon/cik.c
5714
radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/radeon/cik.c
5715
radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(usepfp) |
drivers/gpu/drm/radeon/cik.c
5717
radeon_ring_write(ring, SRBM_GFX_CNTL >> 2);
drivers/gpu/drm/radeon/cik.c
5718
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/cik.c
5719
radeon_ring_write(ring, VMID(0));
drivers/gpu/drm/radeon/cik.c
5722
cik_hdp_flush_cp_ring_emit(rdev, ring->idx);
drivers/gpu/drm/radeon/cik.c
5725
radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/radeon/cik.c
5726
radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(usepfp) |
drivers/gpu/drm/radeon/cik.c
5728
radeon_ring_write(ring, VM_INVALIDATE_REQUEST >> 2);
drivers/gpu/drm/radeon/cik.c
5729
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/cik.c
5730
radeon_ring_write(ring, 1 << vm_id);
drivers/gpu/drm/radeon/cik.c
5733
radeon_ring_write(ring, PACKET3(PACKET3_WAIT_REG_MEM, 5));
drivers/gpu/drm/radeon/cik.c
5734
radeon_ring_write(ring, (WAIT_REG_MEM_OPERATION(0) | /* wait */
drivers/gpu/drm/radeon/cik.c
5737
radeon_ring_write(ring, VM_INVALIDATE_REQUEST >> 2);
drivers/gpu/drm/radeon/cik.c
5738
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/cik.c
5739
radeon_ring_write(ring, 0); /* ref */
drivers/gpu/drm/radeon/cik.c
5740
radeon_ring_write(ring, 0); /* mask */
drivers/gpu/drm/radeon/cik.c
5741
radeon_ring_write(ring, 0x20); /* poll interval */
drivers/gpu/drm/radeon/cik.c
5746
radeon_ring_write(ring, PACKET3(PACKET3_PFP_SYNC_ME, 0));
drivers/gpu/drm/radeon/cik.c
5747
radeon_ring_write(ring, 0x0);
drivers/gpu/drm/radeon/cik.c
7066
struct radeon_ring *ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
drivers/gpu/drm/radeon/cik.c
7068
if (ring->me == 1) {
drivers/gpu/drm/radeon/cik.c
7069
switch (ring->pipe) {
drivers/gpu/drm/radeon/cik.c
7083
DRM_DEBUG("si_irq_set: sw int cp1 invalid pipe %d\n", ring->pipe);
drivers/gpu/drm/radeon/cik.c
7086
} else if (ring->me == 2) {
drivers/gpu/drm/radeon/cik.c
7087
switch (ring->pipe) {
drivers/gpu/drm/radeon/cik.c
7101
DRM_DEBUG("si_irq_set: sw int cp1 invalid pipe %d\n", ring->pipe);
drivers/gpu/drm/radeon/cik.c
7105
DRM_DEBUG("si_irq_set: sw int cp1 invalid me %d\n", ring->me);
drivers/gpu/drm/radeon/cik.c
7109
struct radeon_ring *ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
drivers/gpu/drm/radeon/cik.c
7111
if (ring->me == 1) {
drivers/gpu/drm/radeon/cik.c
7112
switch (ring->pipe) {
drivers/gpu/drm/radeon/cik.c
7126
DRM_DEBUG("si_irq_set: sw int cp2 invalid pipe %d\n", ring->pipe);
drivers/gpu/drm/radeon/cik.c
7129
} else if (ring->me == 2) {
drivers/gpu/drm/radeon/cik.c
7130
switch (ring->pipe) {
drivers/gpu/drm/radeon/cik.c
7144
DRM_DEBUG("si_irq_set: sw int cp2 invalid pipe %d\n", ring->pipe);
drivers/gpu/drm/radeon/cik.c
7148
DRM_DEBUG("si_irq_set: sw int cp2 invalid me %d\n", ring->me);
drivers/gpu/drm/radeon/cik.c
7540
struct radeon_ring *cp1_ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
drivers/gpu/drm/radeon/cik.c
7541
struct radeon_ring *cp2_ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
drivers/gpu/drm/radeon/cik.c
7576
src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
drivers/gpu/drm/radeon/cik.c
7577
src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
drivers/gpu/drm/radeon/cik.c
7578
ring_id = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff;
drivers/gpu/drm/radeon/cik.c
8128
rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
drivers/gpu/drm/radeon/cik.c
8129
r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
drivers/gpu/drm/radeon/cik.c
8157
rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
drivers/gpu/drm/radeon/cik.c
8162
struct radeon_ring *ring;
drivers/gpu/drm/radeon/cik.c
8165
if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
drivers/gpu/drm/radeon/cik.c
8168
ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
drivers/gpu/drm/radeon/cik.c
8169
r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0));
drivers/gpu/drm/radeon/cik.c
8200
rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_obj = NULL;
drivers/gpu/drm/radeon/cik.c
8201
r600_ring_init(rdev, &rdev->ring[TN_RING_TYPE_VCE1_INDEX], 4096);
drivers/gpu/drm/radeon/cik.c
8202
rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_obj = NULL;
drivers/gpu/drm/radeon/cik.c
8203
r600_ring_init(rdev, &rdev->ring[TN_RING_TYPE_VCE2_INDEX], 4096);
drivers/gpu/drm/radeon/cik.c
8236
rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size = 0;
drivers/gpu/drm/radeon/cik.c
8237
rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_size = 0;
drivers/gpu/drm/radeon/cik.c
8242
struct radeon_ring *ring;
drivers/gpu/drm/radeon/cik.c
8245
if (!rdev->has_vce || !rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size)
drivers/gpu/drm/radeon/cik.c
8248
ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX];
drivers/gpu/drm/radeon/cik.c
8249
r = radeon_ring_init(rdev, ring, ring->ring_size, 0, VCE_CMD_NO_OP);
drivers/gpu/drm/radeon/cik.c
8254
ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX];
drivers/gpu/drm/radeon/cik.c
8255
r = radeon_ring_init(rdev, ring, ring->ring_size, 0, VCE_CMD_NO_OP);
drivers/gpu/drm/radeon/cik.c
8278
struct radeon_ring *ring;
drivers/gpu/drm/radeon/cik.c
8397
ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/cik.c
8398
r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
drivers/gpu/drm/radeon/cik.c
8405
ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
drivers/gpu/drm/radeon/cik.c
8406
r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP1_RPTR_OFFSET,
drivers/gpu/drm/radeon/cik.c
8410
ring->me = 1; /* first MEC */
drivers/gpu/drm/radeon/cik.c
8411
ring->pipe = 0; /* first pipe */
drivers/gpu/drm/radeon/cik.c
8412
ring->queue = 0; /* first queue */
drivers/gpu/drm/radeon/cik.c
8413
ring->wptr_offs = CIK_WB_CP1_WPTR_OFFSET;
drivers/gpu/drm/radeon/cik.c
8416
ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
drivers/gpu/drm/radeon/cik.c
8417
r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP2_RPTR_OFFSET,
drivers/gpu/drm/radeon/cik.c
8422
ring->me = 1; /* first MEC */
drivers/gpu/drm/radeon/cik.c
8423
ring->pipe = 0; /* first pipe */
drivers/gpu/drm/radeon/cik.c
8424
ring->queue = 1; /* second queue */
drivers/gpu/drm/radeon/cik.c
8425
ring->wptr_offs = CIK_WB_CP2_WPTR_OFFSET;
drivers/gpu/drm/radeon/cik.c
8427
ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
drivers/gpu/drm/radeon/cik.c
8428
r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
drivers/gpu/drm/radeon/cik.c
8433
ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
drivers/gpu/drm/radeon/cik.c
8434
r = radeon_ring_init(rdev, ring, ring->ring_size, CAYMAN_WB_DMA1_RPTR_OFFSET,
drivers/gpu/drm/radeon/cik.c
8551
struct radeon_ring *ring, *ring_cp1, *ring_cp2;
drivers/gpu/drm/radeon/cik.c
8622
ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/cik.c
8623
ring->ring_obj = NULL;
drivers/gpu/drm/radeon/cik.c
8624
r600_ring_init(rdev, ring, 1024 * 1024);
drivers/gpu/drm/radeon/cik.c
8626
ring_cp1 = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
drivers/gpu/drm/radeon/cik.c
8627
ring_cp2 = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
drivers/gpu/drm/radeon/cik.c
8643
ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
drivers/gpu/drm/radeon/cik.c
8644
ring->ring_obj = NULL;
drivers/gpu/drm/radeon/cik.c
8645
r600_ring_init(rdev, ring, 256 * 1024);
drivers/gpu/drm/radeon/cik.c
8647
ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
drivers/gpu/drm/radeon/cik.c
8648
ring->ring_obj = NULL;
drivers/gpu/drm/radeon/cik.c
8649
r600_ring_init(rdev, ring, 256 * 1024);
drivers/gpu/drm/radeon/cik.c
8711
struct radeon_ring *ring;
drivers/gpu/drm/radeon/cik.c
8725
ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
drivers/gpu/drm/radeon/cik.c
8726
radeon_doorbell_free(rdev, ring->doorbell_index);
drivers/gpu/drm/radeon/cik.c
8727
ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
drivers/gpu/drm/radeon/cik.c
8728
radeon_doorbell_free(rdev, ring->doorbell_index);
drivers/gpu/drm/radeon/cik_sdma.c
111
struct radeon_ring *ring)
drivers/gpu/drm/radeon/cik_sdma.c
115
if (ring->idx == R600_RING_TYPE_DMA_INDEX)
drivers/gpu/drm/radeon/cik_sdma.c
120
WREG32(reg, (ring->wptr << 2) & 0x3fffc);
drivers/gpu/drm/radeon/cik_sdma.c
135
struct radeon_ring *ring = &rdev->ring[ib->ring];
drivers/gpu/drm/radeon/cik_sdma.c
136
u32 extra_bits = (ib->vm ? ib->vm->ids[ib->ring].id : 0) & 0xf;
drivers/gpu/drm/radeon/cik_sdma.c
139
u32 next_rptr = ring->wptr + 5;
drivers/gpu/drm/radeon/cik_sdma.c
143
radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_WRITE, SDMA_WRITE_SUB_OPCODE_LINEAR, 0));
drivers/gpu/drm/radeon/cik_sdma.c
144
radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
drivers/gpu/drm/radeon/cik_sdma.c
145
radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr));
drivers/gpu/drm/radeon/cik_sdma.c
146
radeon_ring_write(ring, 1); /* number of DWs to follow */
drivers/gpu/drm/radeon/cik_sdma.c
147
radeon_ring_write(ring, next_rptr);
drivers/gpu/drm/radeon/cik_sdma.c
151
while ((ring->wptr & 7) != 4)
drivers/gpu/drm/radeon/cik_sdma.c
152
radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_NOP, 0, 0));
drivers/gpu/drm/radeon/cik_sdma.c
153
radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_INDIRECT_BUFFER, 0, extra_bits));
drivers/gpu/drm/radeon/cik_sdma.c
154
radeon_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */
drivers/gpu/drm/radeon/cik_sdma.c
155
radeon_ring_write(ring, upper_32_bits(ib->gpu_addr));
drivers/gpu/drm/radeon/cik_sdma.c
156
radeon_ring_write(ring, ib->length_dw);
drivers/gpu/drm/radeon/cik_sdma.c
171
struct radeon_ring *ring = &rdev->ring[ridx];
drivers/gpu/drm/radeon/cik_sdma.c
181
radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_POLL_REG_MEM, 0, extra_bits));
drivers/gpu/drm/radeon/cik_sdma.c
182
radeon_ring_write(ring, GPU_HDP_FLUSH_DONE);
drivers/gpu/drm/radeon/cik_sdma.c
183
radeon_ring_write(ring, GPU_HDP_FLUSH_REQ);
drivers/gpu/drm/radeon/cik_sdma.c
184
radeon_ring_write(ring, ref_and_mask); /* reference */
drivers/gpu/drm/radeon/cik_sdma.c
185
radeon_ring_write(ring, ref_and_mask); /* mask */
drivers/gpu/drm/radeon/cik_sdma.c
186
radeon_ring_write(ring, (0xfff << 16) | 10); /* retry count, poll interval */
drivers/gpu/drm/radeon/cik_sdma.c
202
struct radeon_ring *ring = &rdev->ring[fence->ring];
drivers/gpu/drm/radeon/cik_sdma.c
203
u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
drivers/gpu/drm/radeon/cik_sdma.c
206
radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_FENCE, 0, 0));
drivers/gpu/drm/radeon/cik_sdma.c
207
radeon_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/radeon/cik_sdma.c
208
radeon_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/radeon/cik_sdma.c
209
radeon_ring_write(ring, fence->seq);
drivers/gpu/drm/radeon/cik_sdma.c
211
radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_TRAP, 0, 0));
drivers/gpu/drm/radeon/cik_sdma.c
213
cik_sdma_hdp_flush_ring_emit(rdev, fence->ring);
drivers/gpu/drm/radeon/cik_sdma.c
228
struct radeon_ring *ring,
drivers/gpu/drm/radeon/cik_sdma.c
235
radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_SEMAPHORE, 0, extra_bits));
drivers/gpu/drm/radeon/cik_sdma.c
236
radeon_ring_write(ring, addr & 0xfffffff8);
drivers/gpu/drm/radeon/cik_sdma.c
237
radeon_ring_write(ring, upper_32_bits(addr));
drivers/gpu/drm/radeon/cik_sdma.c
268
rdev->ring[R600_RING_TYPE_DMA_INDEX].ready = false;
drivers/gpu/drm/radeon/cik_sdma.c
269
rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX].ready = false;
drivers/gpu/drm/radeon/cik_sdma.c
366
struct radeon_ring *ring;
drivers/gpu/drm/radeon/cik_sdma.c
374
ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
drivers/gpu/drm/radeon/cik_sdma.c
378
ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
drivers/gpu/drm/radeon/cik_sdma.c
387
rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/radeon/cik_sdma.c
407
WREG32(SDMA0_GFX_RB_BASE + reg_offset, ring->gpu_addr >> 8);
drivers/gpu/drm/radeon/cik_sdma.c
408
WREG32(SDMA0_GFX_RB_BASE_HI + reg_offset, ring->gpu_addr >> 40);
drivers/gpu/drm/radeon/cik_sdma.c
410
ring->wptr = 0;
drivers/gpu/drm/radeon/cik_sdma.c
411
WREG32(SDMA0_GFX_RB_WPTR + reg_offset, ring->wptr << 2);
drivers/gpu/drm/radeon/cik_sdma.c
423
ring->ready = true;
drivers/gpu/drm/radeon/cik_sdma.c
425
r = radeon_ring_test(rdev, ring->idx, ring);
drivers/gpu/drm/radeon/cik_sdma.c
427
ring->ready = false;
drivers/gpu/drm/radeon/cik_sdma.c
560
radeon_ring_fini(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX]);
drivers/gpu/drm/radeon/cik_sdma.c
561
radeon_ring_fini(rdev, &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]);
drivers/gpu/drm/radeon/cik_sdma.c
586
struct radeon_ring *ring = &rdev->ring[ring_index];
drivers/gpu/drm/radeon/cik_sdma.c
595
r = radeon_ring_lock(rdev, ring, num_loops * 7 + 14);
drivers/gpu/drm/radeon/cik_sdma.c
603
radeon_sync_rings(rdev, &sync, ring->idx);
drivers/gpu/drm/radeon/cik_sdma.c
610
radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_COPY, SDMA_COPY_SUB_OPCODE_LINEAR, 0));
drivers/gpu/drm/radeon/cik_sdma.c
611
radeon_ring_write(ring, cur_size_in_bytes);
drivers/gpu/drm/radeon/cik_sdma.c
612
radeon_ring_write(ring, 0); /* src/dst endian swap */
drivers/gpu/drm/radeon/cik_sdma.c
613
radeon_ring_write(ring, lower_32_bits(src_offset));
drivers/gpu/drm/radeon/cik_sdma.c
614
radeon_ring_write(ring, upper_32_bits(src_offset));
drivers/gpu/drm/radeon/cik_sdma.c
615
radeon_ring_write(ring, lower_32_bits(dst_offset));
drivers/gpu/drm/radeon/cik_sdma.c
616
radeon_ring_write(ring, upper_32_bits(dst_offset));
drivers/gpu/drm/radeon/cik_sdma.c
621
r = radeon_fence_emit(rdev, &fence, ring->idx);
drivers/gpu/drm/radeon/cik_sdma.c
623
radeon_ring_unlock_undo(rdev, ring);
drivers/gpu/drm/radeon/cik_sdma.c
628
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/cik_sdma.c
63
struct radeon_ring *ring)
drivers/gpu/drm/radeon/cik_sdma.c
645
struct radeon_ring *ring)
drivers/gpu/drm/radeon/cik_sdma.c
653
if (ring->idx == R600_RING_TYPE_DMA_INDEX)
drivers/gpu/drm/radeon/cik_sdma.c
663
r = radeon_ring_lock(rdev, ring, 5);
drivers/gpu/drm/radeon/cik_sdma.c
665
DRM_ERROR("radeon: dma failed to lock ring %d (%d).\n", ring->idx, r);
drivers/gpu/drm/radeon/cik_sdma.c
668
radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_WRITE, SDMA_WRITE_SUB_OPCODE_LINEAR, 0));
drivers/gpu/drm/radeon/cik_sdma.c
669
radeon_ring_write(ring, lower_32_bits(gpu_addr));
drivers/gpu/drm/radeon/cik_sdma.c
670
radeon_ring_write(ring, upper_32_bits(gpu_addr));
drivers/gpu/drm/radeon/cik_sdma.c
671
radeon_ring_write(ring, 1); /* number of DWs to follow */
drivers/gpu/drm/radeon/cik_sdma.c
672
radeon_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/radeon/cik_sdma.c
673
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/cik_sdma.c
68
rptr = rdev->wb.wb[ring->rptr_offs/4];
drivers/gpu/drm/radeon/cik_sdma.c
683
DRM_INFO("ring test on %d succeeded in %d usecs\n", ring->idx, i);
drivers/gpu/drm/radeon/cik_sdma.c
686
ring->idx, tmp);
drivers/gpu/drm/radeon/cik_sdma.c
70
if (ring->idx == R600_RING_TYPE_DMA_INDEX)
drivers/gpu/drm/radeon/cik_sdma.c
701
int cik_sdma_ib_test(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/cik_sdma.c
710
if (ring->idx == R600_RING_TYPE_DMA_INDEX)
drivers/gpu/drm/radeon/cik_sdma.c
720
r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
drivers/gpu/drm/radeon/cik_sdma.c
756
DRM_INFO("ib test on ring %d succeeded in %u usecs\n", ib.fence->ring, i);
drivers/gpu/drm/radeon/cik_sdma.c
774
bool cik_sdma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/cik_sdma.c
779
if (ring->idx == R600_RING_TYPE_DMA_INDEX)
drivers/gpu/drm/radeon/cik_sdma.c
785
radeon_ring_lockup_update(rdev, ring);
drivers/gpu/drm/radeon/cik_sdma.c
788
return radeon_ring_test_lockup(rdev, ring);
drivers/gpu/drm/radeon/cik_sdma.c
90
struct radeon_ring *ring)
drivers/gpu/drm/radeon/cik_sdma.c
94
if (ring->idx == R600_RING_TYPE_DMA_INDEX)
drivers/gpu/drm/radeon/cik_sdma.c
944
void cik_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
drivers/gpu/drm/radeon/cik_sdma.c
950
radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_SRBM_WRITE, 0, 0xf000));
drivers/gpu/drm/radeon/cik_sdma.c
952
radeon_ring_write(ring, (VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (vm_id << 2)) >> 2);
drivers/gpu/drm/radeon/cik_sdma.c
954
radeon_ring_write(ring, (VM_CONTEXT8_PAGE_TABLE_BASE_ADDR + ((vm_id - 8) << 2)) >> 2);
drivers/gpu/drm/radeon/cik_sdma.c
956
radeon_ring_write(ring, pd_addr >> 12);
drivers/gpu/drm/radeon/cik_sdma.c
959
radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_SRBM_WRITE, 0, 0xf000));
drivers/gpu/drm/radeon/cik_sdma.c
960
radeon_ring_write(ring, SRBM_GFX_CNTL >> 2);
drivers/gpu/drm/radeon/cik_sdma.c
961
radeon_ring_write(ring, VMID(vm_id));
drivers/gpu/drm/radeon/cik_sdma.c
963
radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_SRBM_WRITE, 0, 0xf000));
drivers/gpu/drm/radeon/cik_sdma.c
964
radeon_ring_write(ring, SH_MEM_BASES >> 2);
drivers/gpu/drm/radeon/cik_sdma.c
965
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/cik_sdma.c
967
radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_SRBM_WRITE, 0, 0xf000));
drivers/gpu/drm/radeon/cik_sdma.c
968
radeon_ring_write(ring, SH_MEM_CONFIG >> 2);
drivers/gpu/drm/radeon/cik_sdma.c
969
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/cik_sdma.c
971
radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_SRBM_WRITE, 0, 0xf000));
drivers/gpu/drm/radeon/cik_sdma.c
972
radeon_ring_write(ring, SH_MEM_APE1_BASE >> 2);
drivers/gpu/drm/radeon/cik_sdma.c
973
radeon_ring_write(ring, 1);
drivers/gpu/drm/radeon/cik_sdma.c
975
radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_SRBM_WRITE, 0, 0xf000));
drivers/gpu/drm/radeon/cik_sdma.c
976
radeon_ring_write(ring, SH_MEM_APE1_LIMIT >> 2);
drivers/gpu/drm/radeon/cik_sdma.c
977
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/cik_sdma.c
979
radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_SRBM_WRITE, 0, 0xf000));
drivers/gpu/drm/radeon/cik_sdma.c
980
radeon_ring_write(ring, SRBM_GFX_CNTL >> 2);
drivers/gpu/drm/radeon/cik_sdma.c
981
radeon_ring_write(ring, VMID(0));
drivers/gpu/drm/radeon/cik_sdma.c
984
cik_sdma_hdp_flush_ring_emit(rdev, ring->idx);
drivers/gpu/drm/radeon/cik_sdma.c
987
radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_SRBM_WRITE, 0, 0xf000));
drivers/gpu/drm/radeon/cik_sdma.c
988
radeon_ring_write(ring, VM_INVALIDATE_REQUEST >> 2);
drivers/gpu/drm/radeon/cik_sdma.c
989
radeon_ring_write(ring, 1 << vm_id);
drivers/gpu/drm/radeon/cik_sdma.c
991
radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_POLL_REG_MEM, 0, extra_bits));
drivers/gpu/drm/radeon/cik_sdma.c
992
radeon_ring_write(ring, VM_INVALIDATE_REQUEST >> 2);
drivers/gpu/drm/radeon/cik_sdma.c
993
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/cik_sdma.c
994
radeon_ring_write(ring, 0); /* reference */
drivers/gpu/drm/radeon/cik_sdma.c
995
radeon_ring_write(ring, 0); /* mask */
drivers/gpu/drm/radeon/cik_sdma.c
996
radeon_ring_write(ring, (0xfff << 16) | 10); /* retry count, poll interval */
drivers/gpu/drm/radeon/evergreen.c
2935
struct radeon_ring *ring = &rdev->ring[ib->ring];
drivers/gpu/drm/radeon/evergreen.c
2939
radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
drivers/gpu/drm/radeon/evergreen.c
2940
radeon_ring_write(ring, 1);
drivers/gpu/drm/radeon/evergreen.c
2942
if (ring->rptr_save_reg) {
drivers/gpu/drm/radeon/evergreen.c
2943
next_rptr = ring->wptr + 3 + 4;
drivers/gpu/drm/radeon/evergreen.c
2944
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
drivers/gpu/drm/radeon/evergreen.c
2945
radeon_ring_write(ring, ((ring->rptr_save_reg -
drivers/gpu/drm/radeon/evergreen.c
2947
radeon_ring_write(ring, next_rptr);
drivers/gpu/drm/radeon/evergreen.c
2949
next_rptr = ring->wptr + 5 + 4;
drivers/gpu/drm/radeon/evergreen.c
2950
radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
drivers/gpu/drm/radeon/evergreen.c
2951
radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
drivers/gpu/drm/radeon/evergreen.c
2952
radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
drivers/gpu/drm/radeon/evergreen.c
2953
radeon_ring_write(ring, next_rptr);
drivers/gpu/drm/radeon/evergreen.c
2954
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/evergreen.c
2957
radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
drivers/gpu/drm/radeon/evergreen.c
2958
radeon_ring_write(ring,
drivers/gpu/drm/radeon/evergreen.c
2963
radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
drivers/gpu/drm/radeon/evergreen.c
2964
radeon_ring_write(ring, ib->length_dw);
drivers/gpu/drm/radeon/evergreen.c
3002
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/evergreen.c
3007
r = radeon_ring_lock(rdev, ring, 7);
drivers/gpu/drm/radeon/evergreen.c
3012
radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
drivers/gpu/drm/radeon/evergreen.c
3013
radeon_ring_write(ring, 0x1);
drivers/gpu/drm/radeon/evergreen.c
3014
radeon_ring_write(ring, 0x0);
drivers/gpu/drm/radeon/evergreen.c
3015
radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
drivers/gpu/drm/radeon/evergreen.c
3016
radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
drivers/gpu/drm/radeon/evergreen.c
3017
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/evergreen.c
3018
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/evergreen.c
3019
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/evergreen.c
3024
r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
drivers/gpu/drm/radeon/evergreen.c
3031
radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
drivers/gpu/drm/radeon/evergreen.c
3032
radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
drivers/gpu/drm/radeon/evergreen.c
3035
radeon_ring_write(ring, evergreen_default_state[i]);
drivers/gpu/drm/radeon/evergreen.c
3037
radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
drivers/gpu/drm/radeon/evergreen.c
3038
radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
drivers/gpu/drm/radeon/evergreen.c
3041
radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
drivers/gpu/drm/radeon/evergreen.c
3042
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/evergreen.c
3045
radeon_ring_write(ring, 0xc0026f00);
drivers/gpu/drm/radeon/evergreen.c
3046
radeon_ring_write(ring, 0x00000000);
drivers/gpu/drm/radeon/evergreen.c
3047
radeon_ring_write(ring, 0x00000000);
drivers/gpu/drm/radeon/evergreen.c
3048
radeon_ring_write(ring, 0x00000000);
drivers/gpu/drm/radeon/evergreen.c
3051
radeon_ring_write(ring, 0xc0036f00);
drivers/gpu/drm/radeon/evergreen.c
3052
radeon_ring_write(ring, 0x00000bc4);
drivers/gpu/drm/radeon/evergreen.c
3053
radeon_ring_write(ring, 0xffffffff);
drivers/gpu/drm/radeon/evergreen.c
3054
radeon_ring_write(ring, 0xffffffff);
drivers/gpu/drm/radeon/evergreen.c
3055
radeon_ring_write(ring, 0xffffffff);
drivers/gpu/drm/radeon/evergreen.c
3057
radeon_ring_write(ring, 0xc0026900);
drivers/gpu/drm/radeon/evergreen.c
3058
radeon_ring_write(ring, 0x00000316);
drivers/gpu/drm/radeon/evergreen.c
3059
radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
drivers/gpu/drm/radeon/evergreen.c
3060
radeon_ring_write(ring, 0x00000010); /* VGT_OUT_DEALLOC_CNTL */
drivers/gpu/drm/radeon/evergreen.c
3062
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/evergreen.c
3069
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/evergreen.c
3087
rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/radeon/evergreen.c
3102
ring->wptr = 0;
drivers/gpu/drm/radeon/evergreen.c
3103
WREG32(CP_RB_WPTR, ring->wptr);
drivers/gpu/drm/radeon/evergreen.c
3121
WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
drivers/gpu/drm/radeon/evergreen.c
3125
ring->ready = true;
drivers/gpu/drm/radeon/evergreen.c
3126
r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
drivers/gpu/drm/radeon/evergreen.c
3128
ring->ready = false;
drivers/gpu/drm/radeon/evergreen.c
4096
bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/evergreen.c
4103
radeon_ring_lockup_update(rdev, ring);
drivers/gpu/drm/radeon/evergreen.c
4106
return radeon_ring_test_lockup(rdev, ring);
drivers/gpu/drm/radeon/evergreen.c
4747
src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
drivers/gpu/drm/radeon/evergreen.c
4748
src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
drivers/gpu/drm/radeon/evergreen.c
4959
rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
drivers/gpu/drm/radeon/evergreen.c
4960
r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
drivers/gpu/drm/radeon/evergreen.c
4983
rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
drivers/gpu/drm/radeon/evergreen.c
4988
struct radeon_ring *ring;
drivers/gpu/drm/radeon/evergreen.c
4991
if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
drivers/gpu/drm/radeon/evergreen.c
4994
ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
drivers/gpu/drm/radeon/evergreen.c
4995
r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0));
drivers/gpu/drm/radeon/evergreen.c
5009
struct radeon_ring *ring;
drivers/gpu/drm/radeon/evergreen.c
5089
ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/evergreen.c
5090
r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
drivers/gpu/drm/radeon/evergreen.c
5095
ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
drivers/gpu/drm/radeon/evergreen.c
5096
r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
drivers/gpu/drm/radeon/evergreen.c
5264
rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
drivers/gpu/drm/radeon/evergreen.c
5265
r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
drivers/gpu/drm/radeon/evergreen.c
5267
rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
drivers/gpu/drm/radeon/evergreen.c
5268
r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);
drivers/gpu/drm/radeon/evergreen_dma.c
115
struct radeon_ring *ring = &rdev->ring[ring_index];
drivers/gpu/drm/radeon/evergreen_dma.c
124
r = radeon_ring_lock(rdev, ring, num_loops * 5 + 11);
drivers/gpu/drm/radeon/evergreen_dma.c
132
radeon_sync_rings(rdev, &sync, ring->idx);
drivers/gpu/drm/radeon/evergreen_dma.c
139
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_COPY, 0, cur_size_in_dw));
drivers/gpu/drm/radeon/evergreen_dma.c
140
radeon_ring_write(ring, dst_offset & 0xfffffffc);
drivers/gpu/drm/radeon/evergreen_dma.c
141
radeon_ring_write(ring, src_offset & 0xfffffffc);
drivers/gpu/drm/radeon/evergreen_dma.c
142
radeon_ring_write(ring, upper_32_bits(dst_offset) & 0xff);
drivers/gpu/drm/radeon/evergreen_dma.c
143
radeon_ring_write(ring, upper_32_bits(src_offset) & 0xff);
drivers/gpu/drm/radeon/evergreen_dma.c
148
r = radeon_fence_emit(rdev, &fence, ring->idx);
drivers/gpu/drm/radeon/evergreen_dma.c
150
radeon_ring_unlock_undo(rdev, ring);
drivers/gpu/drm/radeon/evergreen_dma.c
155
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/evergreen_dma.c
170
bool evergreen_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/evergreen_dma.c
175
radeon_ring_lockup_update(rdev, ring);
drivers/gpu/drm/radeon/evergreen_dma.c
178
return radeon_ring_test_lockup(rdev, ring);
drivers/gpu/drm/radeon/evergreen_dma.c
43
struct radeon_ring *ring = &rdev->ring[fence->ring];
drivers/gpu/drm/radeon/evergreen_dma.c
44
u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
drivers/gpu/drm/radeon/evergreen_dma.c
46
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_FENCE, 0, 0));
drivers/gpu/drm/radeon/evergreen_dma.c
47
radeon_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/radeon/evergreen_dma.c
48
radeon_ring_write(ring, (upper_32_bits(addr) & 0xff));
drivers/gpu/drm/radeon/evergreen_dma.c
49
radeon_ring_write(ring, fence->seq);
drivers/gpu/drm/radeon/evergreen_dma.c
51
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_TRAP, 0, 0));
drivers/gpu/drm/radeon/evergreen_dma.c
53
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0));
drivers/gpu/drm/radeon/evergreen_dma.c
54
radeon_ring_write(ring, (0xf << 16) | (HDP_MEM_COHERENCY_FLUSH_CNTL >> 2));
drivers/gpu/drm/radeon/evergreen_dma.c
55
radeon_ring_write(ring, 1);
drivers/gpu/drm/radeon/evergreen_dma.c
69
struct radeon_ring *ring = &rdev->ring[ib->ring];
drivers/gpu/drm/radeon/evergreen_dma.c
72
u32 next_rptr = ring->wptr + 4;
drivers/gpu/drm/radeon/evergreen_dma.c
76
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 1));
drivers/gpu/drm/radeon/evergreen_dma.c
77
radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
drivers/gpu/drm/radeon/evergreen_dma.c
78
radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xff);
drivers/gpu/drm/radeon/evergreen_dma.c
79
radeon_ring_write(ring, next_rptr);
drivers/gpu/drm/radeon/evergreen_dma.c
85
while ((ring->wptr & 7) != 5)
drivers/gpu/drm/radeon/evergreen_dma.c
86
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_NOP, 0, 0));
drivers/gpu/drm/radeon/evergreen_dma.c
87
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_INDIRECT_BUFFER, 0, 0));
drivers/gpu/drm/radeon/evergreen_dma.c
88
radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
drivers/gpu/drm/radeon/evergreen_dma.c
89
radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));
drivers/gpu/drm/radeon/ni.c
1367
int ring, u32 cp_int_cntl)
drivers/gpu/drm/radeon/ni.c
1369
WREG32(SRBM_GFX_CNTL, RINGID(ring));
drivers/gpu/drm/radeon/ni.c
1379
struct radeon_ring *ring = &rdev->ring[fence->ring];
drivers/gpu/drm/radeon/ni.c
1380
u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
drivers/gpu/drm/radeon/ni.c
1385
radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
drivers/gpu/drm/radeon/ni.c
1386
radeon_ring_write(ring, PACKET3_ENGINE_ME | cp_coher_cntl);
drivers/gpu/drm/radeon/ni.c
1387
radeon_ring_write(ring, 0xFFFFFFFF);
drivers/gpu/drm/radeon/ni.c
1388
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/ni.c
1389
radeon_ring_write(ring, 10); /* poll interval */
drivers/gpu/drm/radeon/ni.c
1391
radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4));
drivers/gpu/drm/radeon/ni.c
1392
radeon_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_EVENT_TS) | EVENT_INDEX(5));
drivers/gpu/drm/radeon/ni.c
1393
radeon_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/radeon/ni.c
1394
radeon_ring_write(ring, (upper_32_bits(addr) & 0xff) | DATA_SEL(1) | INT_SEL(2));
drivers/gpu/drm/radeon/ni.c
1395
radeon_ring_write(ring, fence->seq);
drivers/gpu/drm/radeon/ni.c
1396
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/ni.c
1401
struct radeon_ring *ring = &rdev->ring[ib->ring];
drivers/gpu/drm/radeon/ni.c
1402
unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0;
drivers/gpu/drm/radeon/ni.c
1407
radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
drivers/gpu/drm/radeon/ni.c
1408
radeon_ring_write(ring, 1);
drivers/gpu/drm/radeon/ni.c
1410
if (ring->rptr_save_reg) {
drivers/gpu/drm/radeon/ni.c
1411
uint32_t next_rptr = ring->wptr + 3 + 4 + 8;
drivers/gpu/drm/radeon/ni.c
1412
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
drivers/gpu/drm/radeon/ni.c
1413
radeon_ring_write(ring, ((ring->rptr_save_reg -
drivers/gpu/drm/radeon/ni.c
1415
radeon_ring_write(ring, next_rptr);
drivers/gpu/drm/radeon/ni.c
1418
radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
drivers/gpu/drm/radeon/ni.c
1419
radeon_ring_write(ring,
drivers/gpu/drm/radeon/ni.c
1424
radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
drivers/gpu/drm/radeon/ni.c
1425
radeon_ring_write(ring, ib->length_dw | (vm_id << 24));
drivers/gpu/drm/radeon/ni.c
1428
radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
drivers/gpu/drm/radeon/ni.c
1429
radeon_ring_write(ring, PACKET3_ENGINE_ME | cp_coher_cntl);
drivers/gpu/drm/radeon/ni.c
1430
radeon_ring_write(ring, 0xFFFFFFFF);
drivers/gpu/drm/radeon/ni.c
1431
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/ni.c
1432
radeon_ring_write(ring, (vm_id << 24) | 10); /* poll interval */
drivers/gpu/drm/radeon/ni.c
1444
rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
drivers/gpu/drm/radeon/ni.c
1449
struct radeon_ring *ring)
drivers/gpu/drm/radeon/ni.c
1454
rptr = rdev->wb.wb[ring->rptr_offs/4];
drivers/gpu/drm/radeon/ni.c
1456
if (ring->idx == RADEON_RING_TYPE_GFX_INDEX)
drivers/gpu/drm/radeon/ni.c
1458
else if (ring->idx == CAYMAN_RING_TYPE_CP1_INDEX)
drivers/gpu/drm/radeon/ni.c
1468
struct radeon_ring *ring)
drivers/gpu/drm/radeon/ni.c
1472
if (ring->idx == RADEON_RING_TYPE_GFX_INDEX)
drivers/gpu/drm/radeon/ni.c
1474
else if (ring->idx == CAYMAN_RING_TYPE_CP1_INDEX)
drivers/gpu/drm/radeon/ni.c
1483
struct radeon_ring *ring)
drivers/gpu/drm/radeon/ni.c
1485
if (ring->idx == RADEON_RING_TYPE_GFX_INDEX) {
drivers/gpu/drm/radeon/ni.c
1486
WREG32(CP_RB0_WPTR, ring->wptr);
drivers/gpu/drm/radeon/ni.c
1488
} else if (ring->idx == CAYMAN_RING_TYPE_CP1_INDEX) {
drivers/gpu/drm/radeon/ni.c
1489
WREG32(CP_RB1_WPTR, ring->wptr);
drivers/gpu/drm/radeon/ni.c
1492
WREG32(CP_RB2_WPTR, ring->wptr);
drivers/gpu/drm/radeon/ni.c
1526
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/ni.c
1529
r = radeon_ring_lock(rdev, ring, 7);
drivers/gpu/drm/radeon/ni.c
1534
radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
drivers/gpu/drm/radeon/ni.c
1535
radeon_ring_write(ring, 0x1);
drivers/gpu/drm/radeon/ni.c
1536
radeon_ring_write(ring, 0x0);
drivers/gpu/drm/radeon/ni.c
1537
radeon_ring_write(ring, rdev->config.cayman.max_hw_contexts - 1);
drivers/gpu/drm/radeon/ni.c
1538
radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
drivers/gpu/drm/radeon/ni.c
1539
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/ni.c
1540
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/ni.c
1541
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/ni.c
1545
r = radeon_ring_lock(rdev, ring, cayman_default_size + 19);
drivers/gpu/drm/radeon/ni.c
1552
radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
drivers/gpu/drm/radeon/ni.c
1553
radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
drivers/gpu/drm/radeon/ni.c
1556
radeon_ring_write(ring, cayman_default_state[i]);
drivers/gpu/drm/radeon/ni.c
1558
radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
drivers/gpu/drm/radeon/ni.c
1559
radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
drivers/gpu/drm/radeon/ni.c
1562
radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
drivers/gpu/drm/radeon/ni.c
1563
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/ni.c
1566
radeon_ring_write(ring, 0xc0026f00);
drivers/gpu/drm/radeon/ni.c
1567
radeon_ring_write(ring, 0x00000000);
drivers/gpu/drm/radeon/ni.c
1568
radeon_ring_write(ring, 0x00000000);
drivers/gpu/drm/radeon/ni.c
1569
radeon_ring_write(ring, 0x00000000);
drivers/gpu/drm/radeon/ni.c
1572
radeon_ring_write(ring, 0xc0036f00);
drivers/gpu/drm/radeon/ni.c
1573
radeon_ring_write(ring, 0x00000bc4);
drivers/gpu/drm/radeon/ni.c
1574
radeon_ring_write(ring, 0xffffffff);
drivers/gpu/drm/radeon/ni.c
1575
radeon_ring_write(ring, 0xffffffff);
drivers/gpu/drm/radeon/ni.c
1576
radeon_ring_write(ring, 0xffffffff);
drivers/gpu/drm/radeon/ni.c
1578
radeon_ring_write(ring, 0xc0026900);
drivers/gpu/drm/radeon/ni.c
1579
radeon_ring_write(ring, 0x00000316);
drivers/gpu/drm/radeon/ni.c
1580
radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
drivers/gpu/drm/radeon/ni.c
1581
radeon_ring_write(ring, 0x00000010); /* VGT_OUT_DEALLOC_CNTL */
drivers/gpu/drm/radeon/ni.c
1583
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/ni.c
1592
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/ni.c
1594
radeon_ring_fini(rdev, ring);
drivers/gpu/drm/radeon/ni.c
1595
radeon_scratch_free(rdev, ring->rptr_save_reg);
drivers/gpu/drm/radeon/ni.c
1635
struct radeon_ring *ring;
drivers/gpu/drm/radeon/ni.c
1667
ring = &rdev->ring[ridx[i]];
drivers/gpu/drm/radeon/ni.c
1668
rb_cntl = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/radeon/ni.c
1683
ring = &rdev->ring[ridx[i]];
drivers/gpu/drm/radeon/ni.c
1684
WREG32(cp_rb_base[i], ring->gpu_addr >> 8);
drivers/gpu/drm/radeon/ni.c
1689
ring = &rdev->ring[ridx[i]];
drivers/gpu/drm/radeon/ni.c
1692
ring->wptr = 0;
drivers/gpu/drm/radeon/ni.c
1694
WREG32(cp_rb_wptr[i], ring->wptr);
drivers/gpu/drm/radeon/ni.c
1702
rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = true;
drivers/gpu/drm/radeon/ni.c
1703
rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
drivers/gpu/drm/radeon/ni.c
1704
rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
drivers/gpu/drm/radeon/ni.c
1706
r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]);
drivers/gpu/drm/radeon/ni.c
1708
rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
drivers/gpu/drm/radeon/ni.c
1709
rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
drivers/gpu/drm/radeon/ni.c
1710
rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
drivers/gpu/drm/radeon/ni.c
1965
bool cayman_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/ni.c
1972
radeon_ring_lockup_update(rdev, ring);
drivers/gpu/drm/radeon/ni.c
1975
return radeon_ring_test_lockup(rdev, ring);
drivers/gpu/drm/radeon/ni.c
1997
rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
drivers/gpu/drm/radeon/ni.c
1998
r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
drivers/gpu/drm/radeon/ni.c
2021
rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
drivers/gpu/drm/radeon/ni.c
2026
struct radeon_ring *ring;
drivers/gpu/drm/radeon/ni.c
2029
if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
drivers/gpu/drm/radeon/ni.c
2032
ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
drivers/gpu/drm/radeon/ni.c
2033
r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0));
drivers/gpu/drm/radeon/ni.c
2065
rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_obj = NULL;
drivers/gpu/drm/radeon/ni.c
2066
r600_ring_init(rdev, &rdev->ring[TN_RING_TYPE_VCE1_INDEX], 4096);
drivers/gpu/drm/radeon/ni.c
2067
rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_obj = NULL;
drivers/gpu/drm/radeon/ni.c
2068
r600_ring_init(rdev, &rdev->ring[TN_RING_TYPE_VCE2_INDEX], 4096);
drivers/gpu/drm/radeon/ni.c
2101
rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size = 0;
drivers/gpu/drm/radeon/ni.c
2102
rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_size = 0;
drivers/gpu/drm/radeon/ni.c
2107
struct radeon_ring *ring;
drivers/gpu/drm/radeon/ni.c
2110
if (!rdev->has_vce || !rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size)
drivers/gpu/drm/radeon/ni.c
2113
ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX];
drivers/gpu/drm/radeon/ni.c
2114
r = radeon_ring_init(rdev, ring, ring->ring_size, 0, 0x0);
drivers/gpu/drm/radeon/ni.c
2119
ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX];
drivers/gpu/drm/radeon/ni.c
2120
r = radeon_ring_init(rdev, ring, ring->ring_size, 0, 0x0);
drivers/gpu/drm/radeon/ni.c
2134
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/ni.c
2228
r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
drivers/gpu/drm/radeon/ni.c
2233
ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
drivers/gpu/drm/radeon/ni.c
2234
r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
drivers/gpu/drm/radeon/ni.c
2239
ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
drivers/gpu/drm/radeon/ni.c
2240
r = radeon_ring_init(rdev, ring, ring->ring_size, CAYMAN_WB_DMA1_RPTR_OFFSET,
drivers/gpu/drm/radeon/ni.c
2330
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/ni.c
2396
ring->ring_obj = NULL;
drivers/gpu/drm/radeon/ni.c
2397
r600_ring_init(rdev, ring, 1024 * 1024);
drivers/gpu/drm/radeon/ni.c
2399
ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
drivers/gpu/drm/radeon/ni.c
2400
ring->ring_obj = NULL;
drivers/gpu/drm/radeon/ni.c
2401
r600_ring_init(rdev, ring, 64 * 1024);
drivers/gpu/drm/radeon/ni.c
2403
ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
drivers/gpu/drm/radeon/ni.c
2404
ring->ring_obj = NULL;
drivers/gpu/drm/radeon/ni.c
2405
r600_ring_init(rdev, ring, 64 * 1024);
drivers/gpu/drm/radeon/ni.c
2663
void cayman_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
drivers/gpu/drm/radeon/ni.c
2666
radeon_ring_write(ring, PACKET0(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (vm_id << 2), 0));
drivers/gpu/drm/radeon/ni.c
2667
radeon_ring_write(ring, pd_addr >> 12);
drivers/gpu/drm/radeon/ni.c
2670
radeon_ring_write(ring, PACKET0(HDP_MEM_COHERENCY_FLUSH_CNTL, 0));
drivers/gpu/drm/radeon/ni.c
2671
radeon_ring_write(ring, 0x1);
drivers/gpu/drm/radeon/ni.c
2674
radeon_ring_write(ring, PACKET0(VM_INVALIDATE_REQUEST, 0));
drivers/gpu/drm/radeon/ni.c
2675
radeon_ring_write(ring, 1 << vm_id);
drivers/gpu/drm/radeon/ni.c
2678
radeon_ring_write(ring, PACKET3(PACKET3_WAIT_REG_MEM, 5));
drivers/gpu/drm/radeon/ni.c
2679
radeon_ring_write(ring, (WAIT_REG_MEM_FUNCTION(0) | /* always */
drivers/gpu/drm/radeon/ni.c
2681
radeon_ring_write(ring, VM_INVALIDATE_REQUEST >> 2);
drivers/gpu/drm/radeon/ni.c
2682
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/ni.c
2683
radeon_ring_write(ring, 0); /* ref */
drivers/gpu/drm/radeon/ni.c
2684
radeon_ring_write(ring, 0); /* mask */
drivers/gpu/drm/radeon/ni.c
2685
radeon_ring_write(ring, 0x20); /* poll interval */
drivers/gpu/drm/radeon/ni.c
2688
radeon_ring_write(ring, PACKET3(PACKET3_PFP_SYNC_ME, 0));
drivers/gpu/drm/radeon/ni.c
2689
radeon_ring_write(ring, 0x0);
drivers/gpu/drm/radeon/ni.h
32
int ring, u32 cp_int_cntl);
drivers/gpu/drm/radeon/ni_dma.c
101
struct radeon_ring *ring)
drivers/gpu/drm/radeon/ni_dma.c
105
if (ring->idx == R600_RING_TYPE_DMA_INDEX)
drivers/gpu/drm/radeon/ni_dma.c
110
WREG32(reg, (ring->wptr << 2) & 0x3fffc);
drivers/gpu/drm/radeon/ni_dma.c
124
struct radeon_ring *ring = &rdev->ring[ib->ring];
drivers/gpu/drm/radeon/ni_dma.c
125
unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0;
drivers/gpu/drm/radeon/ni_dma.c
128
u32 next_rptr = ring->wptr + 4;
drivers/gpu/drm/radeon/ni_dma.c
132
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1));
drivers/gpu/drm/radeon/ni_dma.c
133
radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
drivers/gpu/drm/radeon/ni_dma.c
134
radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xff);
drivers/gpu/drm/radeon/ni_dma.c
135
radeon_ring_write(ring, next_rptr);
drivers/gpu/drm/radeon/ni_dma.c
141
while ((ring->wptr & 7) != 5)
drivers/gpu/drm/radeon/ni_dma.c
142
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0));
drivers/gpu/drm/radeon/ni_dma.c
143
radeon_ring_write(ring, DMA_IB_PACKET(DMA_PACKET_INDIRECT_BUFFER, vm_id, 0));
drivers/gpu/drm/radeon/ni_dma.c
144
radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
drivers/gpu/drm/radeon/ni_dma.c
145
radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));
drivers/gpu/drm/radeon/ni_dma.c
174
rdev->ring[R600_RING_TYPE_DMA_INDEX].ready = false;
drivers/gpu/drm/radeon/ni_dma.c
175
rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX].ready = false;
drivers/gpu/drm/radeon/ni_dma.c
188
struct radeon_ring *ring;
drivers/gpu/drm/radeon/ni_dma.c
196
ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
drivers/gpu/drm/radeon/ni_dma.c
200
ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
drivers/gpu/drm/radeon/ni_dma.c
209
rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/radeon/ni_dma.c
229
WREG32(DMA_RB_BASE + reg_offset, ring->gpu_addr >> 8);
drivers/gpu/drm/radeon/ni_dma.c
242
ring->wptr = 0;
drivers/gpu/drm/radeon/ni_dma.c
243
WREG32(DMA_RB_WPTR + reg_offset, ring->wptr << 2);
drivers/gpu/drm/radeon/ni_dma.c
247
ring->ready = true;
drivers/gpu/drm/radeon/ni_dma.c
249
r = radeon_ring_test(rdev, ring->idx, ring);
drivers/gpu/drm/radeon/ni_dma.c
251
ring->ready = false;
drivers/gpu/drm/radeon/ni_dma.c
273
radeon_ring_fini(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX]);
drivers/gpu/drm/radeon/ni_dma.c
274
radeon_ring_fini(rdev, &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]);
drivers/gpu/drm/radeon/ni_dma.c
286
bool cayman_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/ni_dma.c
291
if (ring->idx == R600_RING_TYPE_DMA_INDEX)
drivers/gpu/drm/radeon/ni_dma.c
297
radeon_ring_lockup_update(rdev, ring);
drivers/gpu/drm/radeon/ni_dma.c
300
return radeon_ring_test_lockup(rdev, ring);
drivers/gpu/drm/radeon/ni_dma.c
448
void cayman_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
drivers/gpu/drm/radeon/ni_dma.c
451
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0, 0));
drivers/gpu/drm/radeon/ni_dma.c
452
radeon_ring_write(ring, (0xf << 16) | ((VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (vm_id << 2)) >> 2));
drivers/gpu/drm/radeon/ni_dma.c
453
radeon_ring_write(ring, pd_addr >> 12);
drivers/gpu/drm/radeon/ni_dma.c
456
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0, 0));
drivers/gpu/drm/radeon/ni_dma.c
457
radeon_ring_write(ring, (0xf << 16) | (HDP_MEM_COHERENCY_FLUSH_CNTL >> 2));
drivers/gpu/drm/radeon/ni_dma.c
458
radeon_ring_write(ring, 1);
drivers/gpu/drm/radeon/ni_dma.c
461
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0, 0));
drivers/gpu/drm/radeon/ni_dma.c
462
radeon_ring_write(ring, (0xf << 16) | (VM_INVALIDATE_REQUEST >> 2));
drivers/gpu/drm/radeon/ni_dma.c
463
radeon_ring_write(ring, 1 << vm_id);
drivers/gpu/drm/radeon/ni_dma.c
466
radeon_ring_write(ring, DMA_SRBM_READ_PACKET);
drivers/gpu/drm/radeon/ni_dma.c
467
radeon_ring_write(ring, (0xff << 20) | (VM_INVALIDATE_REQUEST >> 2));
drivers/gpu/drm/radeon/ni_dma.c
468
radeon_ring_write(ring, 0); /* mask */
drivers/gpu/drm/radeon/ni_dma.c
469
radeon_ring_write(ring, 0); /* value */
drivers/gpu/drm/radeon/ni_dma.c
53
struct radeon_ring *ring)
drivers/gpu/drm/radeon/ni_dma.c
58
rptr = rdev->wb.wb[ring->rptr_offs/4];
drivers/gpu/drm/radeon/ni_dma.c
60
if (ring->idx == R600_RING_TYPE_DMA_INDEX)
drivers/gpu/drm/radeon/ni_dma.c
80
struct radeon_ring *ring)
drivers/gpu/drm/radeon/ni_dma.c
84
if (ring->idx == R600_RING_TYPE_DMA_INDEX)
drivers/gpu/drm/radeon/r100.c
1001
radeon_ring_write(ring, PACKET0(RADEON_ISYNC_CNTL, 0));
drivers/gpu/drm/radeon/r100.c
1002
radeon_ring_write(ring,
drivers/gpu/drm/radeon/r100.c
1007
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/r100.c
1094
struct radeon_ring *ring)
drivers/gpu/drm/radeon/r100.c
1099
rptr = le32_to_cpu(rdev->wb.wb[ring->rptr_offs/4]);
drivers/gpu/drm/radeon/r100.c
1107
struct radeon_ring *ring)
drivers/gpu/drm/radeon/r100.c
1113
struct radeon_ring *ring)
drivers/gpu/drm/radeon/r100.c
1115
WREG32(RADEON_CP_RB_WPTR, ring->wptr);
drivers/gpu/drm/radeon/r100.c
1143
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/r100.c
1167
r = radeon_ring_init(rdev, ring, ring_size, RADEON_WB_CP_RPTR_OFFSET,
drivers/gpu/drm/radeon/r100.c
1177
ring->align_mask = 16 - 1;
drivers/gpu/drm/radeon/r100.c
1207
DRM_INFO("radeon: ring at 0x%016lX\n", (unsigned long)ring->gpu_addr);
drivers/gpu/drm/radeon/r100.c
1208
WREG32(RADEON_CP_RB_BASE, ring->gpu_addr);
drivers/gpu/drm/radeon/r100.c
1212
ring->wptr = 0;
drivers/gpu/drm/radeon/r100.c
1213
WREG32(RADEON_CP_RB_WPTR, ring->wptr);
drivers/gpu/drm/radeon/r100.c
1240
radeon_ring_start(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]);
drivers/gpu/drm/radeon/r100.c
1241
r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
drivers/gpu/drm/radeon/r100.c
1246
ring->ready = true;
drivers/gpu/drm/radeon/r100.c
1249
if (!ring->rptr_save_reg /* not resuming from suspend */
drivers/gpu/drm/radeon/r100.c
1250
&& radeon_ring_supports_scratch_reg(rdev, ring)) {
drivers/gpu/drm/radeon/r100.c
1251
r = radeon_scratch_get(rdev, &ring->rptr_save_reg);
drivers/gpu/drm/radeon/r100.c
1254
ring->rptr_save_reg = 0;
drivers/gpu/drm/radeon/r100.c
1267
radeon_scratch_free(rdev, rdev->ring[RADEON_RING_TYPE_GFX_INDEX].rptr_save_reg);
drivers/gpu/drm/radeon/r100.c
1268
radeon_ring_fini(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]);
drivers/gpu/drm/radeon/r100.c
1276
rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
drivers/gpu/drm/radeon/r100.c
2548
bool r100_gpu_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/r100.c
2554
radeon_ring_lockup_update(rdev, ring);
drivers/gpu/drm/radeon/r100.c
2557
return radeon_ring_test_lockup(rdev, ring);
drivers/gpu/drm/radeon/r100.c
2974
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/r100.c
2978
radeon_ring_free_size(rdev, ring);
drivers/gpu/drm/radeon/r100.c
2981
count = (rdp + ring->ring_size - wdp) & ring->ptr_mask;
drivers/gpu/drm/radeon/r100.c
2985
seq_printf(m, "%u free dwords in ring\n", ring->ring_free_dw);
drivers/gpu/drm/radeon/r100.c
2987
if (ring->ready) {
drivers/gpu/drm/radeon/r100.c
2989
i = (rdp + j) & ring->ptr_mask;
drivers/gpu/drm/radeon/r100.c
2990
seq_printf(m, "r[%04d]=0x%08x\n", i, ring->ring[i]);
drivers/gpu/drm/radeon/r100.c
3670
int r100_ring_test(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/r100.c
3683
r = radeon_ring_lock(rdev, ring, 2);
drivers/gpu/drm/radeon/r100.c
3689
radeon_ring_write(ring, PACKET0(scratch, 0));
drivers/gpu/drm/radeon/r100.c
3690
radeon_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/radeon/r100.c
3691
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/r100.c
3712
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/r100.c
3714
if (ring->rptr_save_reg) {
drivers/gpu/drm/radeon/r100.c
3715
u32 next_rptr = ring->wptr + 2 + 3;
drivers/gpu/drm/radeon/r100.c
3716
radeon_ring_write(ring, PACKET0(ring->rptr_save_reg, 0));
drivers/gpu/drm/radeon/r100.c
3717
radeon_ring_write(ring, next_rptr);
drivers/gpu/drm/radeon/r100.c
3720
radeon_ring_write(ring, PACKET0(RADEON_CP_IB_BASE, 1));
drivers/gpu/drm/radeon/r100.c
3721
radeon_ring_write(ring, ib->gpu_addr);
drivers/gpu/drm/radeon/r100.c
3722
radeon_ring_write(ring, ib->length_dw);
drivers/gpu/drm/radeon/r100.c
3725
int r100_ib_test(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/r100.c
3795
rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
drivers/gpu/drm/radeon/r100.c
858
static void r100_ring_hdp_flush(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/r100.c
860
radeon_ring_write(ring, PACKET0(RADEON_HOST_PATH_CNTL, 0));
drivers/gpu/drm/radeon/r100.c
861
radeon_ring_write(ring, rdev->config.r100.hdp_cntl |
drivers/gpu/drm/radeon/r100.c
863
radeon_ring_write(ring, PACKET0(RADEON_HOST_PATH_CNTL, 0));
drivers/gpu/drm/radeon/r100.c
864
radeon_ring_write(ring, rdev->config.r100.hdp_cntl);
drivers/gpu/drm/radeon/r100.c
872
struct radeon_ring *ring = &rdev->ring[fence->ring];
drivers/gpu/drm/radeon/r100.c
876
radeon_ring_write(ring, PACKET0(RADEON_RB3D_DSTCACHE_CTLSTAT, 0));
drivers/gpu/drm/radeon/r100.c
877
radeon_ring_write(ring, RADEON_RB3D_DC_FLUSH_ALL);
drivers/gpu/drm/radeon/r100.c
878
radeon_ring_write(ring, PACKET0(RADEON_RB3D_ZCACHE_CTLSTAT, 0));
drivers/gpu/drm/radeon/r100.c
879
radeon_ring_write(ring, RADEON_RB3D_ZC_FLUSH_ALL);
drivers/gpu/drm/radeon/r100.c
881
radeon_ring_write(ring, PACKET0(RADEON_WAIT_UNTIL, 0));
drivers/gpu/drm/radeon/r100.c
882
radeon_ring_write(ring, RADEON_WAIT_2D_IDLECLEAN | RADEON_WAIT_3D_IDLECLEAN);
drivers/gpu/drm/radeon/r100.c
883
r100_ring_hdp_flush(rdev, ring);
drivers/gpu/drm/radeon/r100.c
885
radeon_ring_write(ring, PACKET0(rdev->fence_drv[fence->ring].scratch_reg, 0));
drivers/gpu/drm/radeon/r100.c
886
radeon_ring_write(ring, fence->seq);
drivers/gpu/drm/radeon/r100.c
887
radeon_ring_write(ring, PACKET0(RADEON_GEN_INT_STATUS, 0));
drivers/gpu/drm/radeon/r100.c
888
radeon_ring_write(ring, RADEON_SW_INT_FIRE);
drivers/gpu/drm/radeon/r100.c
892
struct radeon_ring *ring,
drivers/gpu/drm/radeon/r100.c
907
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/r100.c
926
r = radeon_ring_lock(rdev, ring, ndw);
drivers/gpu/drm/radeon/r100.c
940
radeon_ring_write(ring, PACKET3(PACKET3_BITBLT_MULTI, 8));
drivers/gpu/drm/radeon/r100.c
941
radeon_ring_write(ring,
drivers/gpu/drm/radeon/r100.c
953
radeon_ring_write(ring, (pitch << 22) | (src_offset >> 10));
drivers/gpu/drm/radeon/r100.c
954
radeon_ring_write(ring, (pitch << 22) | (dst_offset >> 10));
drivers/gpu/drm/radeon/r100.c
955
radeon_ring_write(ring, (0x1fff) | (0x1fff << 16));
drivers/gpu/drm/radeon/r100.c
956
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/r100.c
957
radeon_ring_write(ring, (0x1fff) | (0x1fff << 16));
drivers/gpu/drm/radeon/r100.c
958
radeon_ring_write(ring, num_gpu_pages);
drivers/gpu/drm/radeon/r100.c
959
radeon_ring_write(ring, num_gpu_pages);
drivers/gpu/drm/radeon/r100.c
960
radeon_ring_write(ring, cur_pages | (stride_pixels << 16));
drivers/gpu/drm/radeon/r100.c
962
radeon_ring_write(ring, PACKET0(RADEON_DSTCACHE_CTLSTAT, 0));
drivers/gpu/drm/radeon/r100.c
963
radeon_ring_write(ring, RADEON_RB2D_DC_FLUSH_ALL);
drivers/gpu/drm/radeon/r100.c
964
radeon_ring_write(ring, PACKET0(RADEON_WAIT_UNTIL, 0));
drivers/gpu/drm/radeon/r100.c
965
radeon_ring_write(ring,
drivers/gpu/drm/radeon/r100.c
971
radeon_ring_unlock_undo(rdev, ring);
drivers/gpu/drm/radeon/r100.c
974
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/r100.c
993
void r100_ring_start(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/r100.c
997
r = radeon_ring_lock(rdev, ring, 2);
drivers/gpu/drm/radeon/r200.c
105
radeon_ring_write(ring, PACKET0(RADEON_WAIT_UNTIL, 0));
drivers/gpu/drm/radeon/r200.c
106
radeon_ring_write(ring, (1 << 16));
drivers/gpu/drm/radeon/r200.c
113
radeon_ring_write(ring, PACKET0(0x720, 2));
drivers/gpu/drm/radeon/r200.c
114
radeon_ring_write(ring, src_offset);
drivers/gpu/drm/radeon/r200.c
115
radeon_ring_write(ring, dst_offset);
drivers/gpu/drm/radeon/r200.c
116
radeon_ring_write(ring, cur_size | (1 << 31) | (1 << 30));
drivers/gpu/drm/radeon/r200.c
120
radeon_ring_write(ring, PACKET0(RADEON_WAIT_UNTIL, 0));
drivers/gpu/drm/radeon/r200.c
121
radeon_ring_write(ring, RADEON_WAIT_DMA_GUI_IDLE);
drivers/gpu/drm/radeon/r200.c
124
radeon_ring_unlock_undo(rdev, ring);
drivers/gpu/drm/radeon/r200.c
127
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/r200.c
89
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/r200.c
99
r = radeon_ring_lock(rdev, ring, num_loops * 4 + 64);
drivers/gpu/drm/radeon/r300.c
215
struct radeon_ring *ring = &rdev->ring[fence->ring];
drivers/gpu/drm/radeon/r300.c
220
radeon_ring_write(ring, PACKET0(R300_RE_SCISSORS_TL, 0));
drivers/gpu/drm/radeon/r300.c
221
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/r300.c
222
radeon_ring_write(ring, PACKET0(R300_RE_SCISSORS_BR, 0));
drivers/gpu/drm/radeon/r300.c
223
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/r300.c
225
radeon_ring_write(ring, PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0));
drivers/gpu/drm/radeon/r300.c
226
radeon_ring_write(ring, R300_RB3D_DC_FLUSH);
drivers/gpu/drm/radeon/r300.c
227
radeon_ring_write(ring, PACKET0(R300_RB3D_ZCACHE_CTLSTAT, 0));
drivers/gpu/drm/radeon/r300.c
228
radeon_ring_write(ring, R300_ZC_FLUSH);
drivers/gpu/drm/radeon/r300.c
230
radeon_ring_write(ring, PACKET0(RADEON_WAIT_UNTIL, 0));
drivers/gpu/drm/radeon/r300.c
231
radeon_ring_write(ring, (RADEON_WAIT_3D_IDLECLEAN |
drivers/gpu/drm/radeon/r300.c
234
radeon_ring_write(ring, PACKET0(RADEON_HOST_PATH_CNTL, 0));
drivers/gpu/drm/radeon/r300.c
235
radeon_ring_write(ring, rdev->config.r300.hdp_cntl |
drivers/gpu/drm/radeon/r300.c
237
radeon_ring_write(ring, PACKET0(RADEON_HOST_PATH_CNTL, 0));
drivers/gpu/drm/radeon/r300.c
238
radeon_ring_write(ring, rdev->config.r300.hdp_cntl);
drivers/gpu/drm/radeon/r300.c
240
radeon_ring_write(ring, PACKET0(rdev->fence_drv[fence->ring].scratch_reg, 0));
drivers/gpu/drm/radeon/r300.c
241
radeon_ring_write(ring, fence->seq);
drivers/gpu/drm/radeon/r300.c
242
radeon_ring_write(ring, PACKET0(RADEON_GEN_INT_STATUS, 0));
drivers/gpu/drm/radeon/r300.c
243
radeon_ring_write(ring, RADEON_SW_INT_FIRE);
drivers/gpu/drm/radeon/r300.c
246
void r300_ring_start(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/r300.c
269
r = radeon_ring_lock(rdev, ring, 64);
drivers/gpu/drm/radeon/r300.c
273
radeon_ring_write(ring, PACKET0(RADEON_ISYNC_CNTL, 0));
drivers/gpu/drm/radeon/r300.c
274
radeon_ring_write(ring,
drivers/gpu/drm/radeon/r300.c
279
radeon_ring_write(ring, PACKET0(R300_GB_TILE_CONFIG, 0));
drivers/gpu/drm/radeon/r300.c
280
radeon_ring_write(ring, gb_tile_config);
drivers/gpu/drm/radeon/r300.c
281
radeon_ring_write(ring, PACKET0(RADEON_WAIT_UNTIL, 0));
drivers/gpu/drm/radeon/r300.c
282
radeon_ring_write(ring,
drivers/gpu/drm/radeon/r300.c
285
radeon_ring_write(ring, PACKET0(R300_DST_PIPE_CONFIG, 0));
drivers/gpu/drm/radeon/r300.c
286
radeon_ring_write(ring, R300_PIPE_AUTO_CONFIG);
drivers/gpu/drm/radeon/r300.c
287
radeon_ring_write(ring, PACKET0(R300_GB_SELECT, 0));
drivers/gpu/drm/radeon/r300.c
288
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/r300.c
289
radeon_ring_write(ring, PACKET0(R300_GB_ENABLE, 0));
drivers/gpu/drm/radeon/r300.c
290
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/r300.c
291
radeon_ring_write(ring, PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0));
drivers/gpu/drm/radeon/r300.c
292
radeon_ring_write(ring, R300_RB3D_DC_FLUSH | R300_RB3D_DC_FREE);
drivers/gpu/drm/radeon/r300.c
293
radeon_ring_write(ring, PACKET0(R300_RB3D_ZCACHE_CTLSTAT, 0));
drivers/gpu/drm/radeon/r300.c
294
radeon_ring_write(ring, R300_ZC_FLUSH | R300_ZC_FREE);
drivers/gpu/drm/radeon/r300.c
295
radeon_ring_write(ring, PACKET0(RADEON_WAIT_UNTIL, 0));
drivers/gpu/drm/radeon/r300.c
296
radeon_ring_write(ring,
drivers/gpu/drm/radeon/r300.c
299
radeon_ring_write(ring, PACKET0(R300_GB_AA_CONFIG, 0));
drivers/gpu/drm/radeon/r300.c
300
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/r300.c
301
radeon_ring_write(ring, PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0));
drivers/gpu/drm/radeon/r300.c
302
radeon_ring_write(ring, R300_RB3D_DC_FLUSH | R300_RB3D_DC_FREE);
drivers/gpu/drm/radeon/r300.c
303
radeon_ring_write(ring, PACKET0(R300_RB3D_ZCACHE_CTLSTAT, 0));
drivers/gpu/drm/radeon/r300.c
304
radeon_ring_write(ring, R300_ZC_FLUSH | R300_ZC_FREE);
drivers/gpu/drm/radeon/r300.c
305
radeon_ring_write(ring, PACKET0(R300_GB_MSPOS0, 0));
drivers/gpu/drm/radeon/r300.c
306
radeon_ring_write(ring,
drivers/gpu/drm/radeon/r300.c
315
radeon_ring_write(ring, PACKET0(R300_GB_MSPOS1, 0));
drivers/gpu/drm/radeon/r300.c
316
radeon_ring_write(ring,
drivers/gpu/drm/radeon/r300.c
324
radeon_ring_write(ring, PACKET0(R300_GA_ENHANCE, 0));
drivers/gpu/drm/radeon/r300.c
325
radeon_ring_write(ring, R300_GA_DEADLOCK_CNTL | R300_GA_FASTSYNC_CNTL);
drivers/gpu/drm/radeon/r300.c
326
radeon_ring_write(ring, PACKET0(R300_GA_POLY_MODE, 0));
drivers/gpu/drm/radeon/r300.c
327
radeon_ring_write(ring,
drivers/gpu/drm/radeon/r300.c
329
radeon_ring_write(ring, PACKET0(R300_GA_ROUND_MODE, 0));
drivers/gpu/drm/radeon/r300.c
330
radeon_ring_write(ring,
drivers/gpu/drm/radeon/r300.c
333
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/r420.c
210
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/r420.c
219
r = radeon_ring_lock(rdev, ring, 8);
drivers/gpu/drm/radeon/r420.c
221
radeon_ring_write(ring, PACKET0(R300_CP_RESYNC_ADDR, 1));
drivers/gpu/drm/radeon/r420.c
222
radeon_ring_write(ring, rdev->config.r300.resync_scratch);
drivers/gpu/drm/radeon/r420.c
223
radeon_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/radeon/r420.c
224
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/r420.c
230
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/r420.c
235
r = radeon_ring_lock(rdev, ring, 8);
drivers/gpu/drm/radeon/r420.c
237
radeon_ring_write(ring, PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0));
drivers/gpu/drm/radeon/r420.c
238
radeon_ring_write(ring, R300_RB3D_DC_FINISH);
drivers/gpu/drm/radeon/r420.c
239
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/r600.c
1921
bool r600_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/r600.c
1928
radeon_ring_lockup_update(rdev, ring);
drivers/gpu/drm/radeon/r600.c
1931
return radeon_ring_test_lockup(rdev, ring);
drivers/gpu/drm/radeon/r600.c
2429
rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
drivers/gpu/drm/radeon/r600.c
2619
struct radeon_ring *ring)
drivers/gpu/drm/radeon/r600.c
2624
rptr = rdev->wb.wb[ring->rptr_offs/4];
drivers/gpu/drm/radeon/r600.c
2632
struct radeon_ring *ring)
drivers/gpu/drm/radeon/r600.c
2638
struct radeon_ring *ring)
drivers/gpu/drm/radeon/r600.c
2640
WREG32(R600_CP_RB_WPTR, ring->wptr);
drivers/gpu/drm/radeon/r600.c
2688
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/r600.c
2692
r = radeon_ring_lock(rdev, ring, 7);
drivers/gpu/drm/radeon/r600.c
2697
radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
drivers/gpu/drm/radeon/r600.c
2698
radeon_ring_write(ring, 0x1);
drivers/gpu/drm/radeon/r600.c
2700
radeon_ring_write(ring, 0x0);
drivers/gpu/drm/radeon/r600.c
2701
radeon_ring_write(ring, rdev->config.rv770.max_hw_contexts - 1);
drivers/gpu/drm/radeon/r600.c
2703
radeon_ring_write(ring, 0x3);
drivers/gpu/drm/radeon/r600.c
2704
radeon_ring_write(ring, rdev->config.r600.max_hw_contexts - 1);
drivers/gpu/drm/radeon/r600.c
2706
radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
drivers/gpu/drm/radeon/r600.c
2707
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/r600.c
2708
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/r600.c
2709
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/r600.c
2718
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/r600.c
2730
rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/radeon/r600.c
2744
ring->wptr = 0;
drivers/gpu/drm/radeon/r600.c
2745
WREG32(CP_RB_WPTR, ring->wptr);
drivers/gpu/drm/radeon/r600.c
2763
WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
drivers/gpu/drm/radeon/r600.c
2767
ring->ready = true;
drivers/gpu/drm/radeon/r600.c
2768
r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
drivers/gpu/drm/radeon/r600.c
2770
ring->ready = false;
drivers/gpu/drm/radeon/r600.c
2780
void r600_ring_init(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ring_size)
drivers/gpu/drm/radeon/r600.c
2788
ring->ring_size = ring_size;
drivers/gpu/drm/radeon/r600.c
2789
ring->align_mask = 16 - 1;
drivers/gpu/drm/radeon/r600.c
2791
if (radeon_ring_supports_scratch_reg(rdev, ring)) {
drivers/gpu/drm/radeon/r600.c
2792
r = radeon_scratch_get(rdev, &ring->rptr_save_reg);
drivers/gpu/drm/radeon/r600.c
2795
ring->rptr_save_reg = 0;
drivers/gpu/drm/radeon/r600.c
2802
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/r600.c
2804
radeon_ring_fini(rdev, ring);
drivers/gpu/drm/radeon/r600.c
2805
radeon_scratch_free(rdev, ring->rptr_save_reg);
drivers/gpu/drm/radeon/r600.c
2823
int r600_ring_test(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/r600.c
2836
r = radeon_ring_lock(rdev, ring, 3);
drivers/gpu/drm/radeon/r600.c
2838
DRM_ERROR("radeon: cp failed to lock ring %d (%d).\n", ring->idx, r);
drivers/gpu/drm/radeon/r600.c
2842
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
drivers/gpu/drm/radeon/r600.c
2843
radeon_ring_write(ring, ((scratch - PACKET3_SET_CONFIG_REG_OFFSET) >> 2));
drivers/gpu/drm/radeon/r600.c
2844
radeon_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/radeon/r600.c
2845
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/r600.c
2853
DRM_INFO("ring test on %d succeeded in %d usecs\n", ring->idx, i);
drivers/gpu/drm/radeon/r600.c
2856
ring->idx, scratch, tmp);
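The r600_ring_test hits above follow a simple write-then-poll pattern: reserve a few dwords, emit a packet that writes 0xDEADBEEF to a scratch register, commit the ring, then poll the register until the value shows up or a timeout expires. A user-space stand-in for that pattern (the "register" is simulated; nothing here is the actual kernel API):

    #include <stdint.h>
    #include <stdio.h>

    static volatile uint32_t scratch;          /* stands in for the scratch register */

    static void emit_through_ring(uint32_t v)  /* stands in for packet emit + commit */
    {
            scratch = v;
    }

    int main(void)
    {
            const uint32_t magic = 0xDEADBEEF;
            unsigned i, timeout = 100000;

            emit_through_ring(magic);
            for (i = 0; i < timeout; i++) {
                    if (scratch == magic)
                            break;
            }
            if (i < timeout)
                    printf("ring test succeeded after %u polls\n", i);
            else
                    printf("ring test failed, scratch=0x%08x\n", scratch);
            return 0;
    }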
drivers/gpu/drm/radeon/r600.c
2870
struct radeon_ring *ring = &rdev->ring[fence->ring];
drivers/gpu/drm/radeon/r600.c
2878
u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
drivers/gpu/drm/radeon/r600.c
2880
radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
drivers/gpu/drm/radeon/r600.c
2881
radeon_ring_write(ring, cp_coher_cntl);
drivers/gpu/drm/radeon/r600.c
2882
radeon_ring_write(ring, 0xFFFFFFFF);
drivers/gpu/drm/radeon/r600.c
2883
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/r600.c
2884
radeon_ring_write(ring, 10); /* poll interval */
drivers/gpu/drm/radeon/r600.c
2886
radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4));
drivers/gpu/drm/radeon/r600.c
2887
radeon_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_EVENT_TS) | EVENT_INDEX(5));
drivers/gpu/drm/radeon/r600.c
2888
radeon_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/radeon/r600.c
2889
radeon_ring_write(ring, (upper_32_bits(addr) & 0xff) | DATA_SEL(1) | INT_SEL(2));
drivers/gpu/drm/radeon/r600.c
2890
radeon_ring_write(ring, fence->seq);
drivers/gpu/drm/radeon/r600.c
2891
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/r600.c
2894
radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
drivers/gpu/drm/radeon/r600.c
2895
radeon_ring_write(ring, cp_coher_cntl);
drivers/gpu/drm/radeon/r600.c
2896
radeon_ring_write(ring, 0xFFFFFFFF);
drivers/gpu/drm/radeon/r600.c
2897
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/r600.c
2898
radeon_ring_write(ring, 10); /* poll interval */
drivers/gpu/drm/radeon/r600.c
2899
radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE, 0));
drivers/gpu/drm/radeon/r600.c
2900
radeon_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_EVENT) | EVENT_INDEX(0));
drivers/gpu/drm/radeon/r600.c
2902
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
drivers/gpu/drm/radeon/r600.c
2903
radeon_ring_write(ring, (WAIT_UNTIL - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
drivers/gpu/drm/radeon/r600.c
2904
radeon_ring_write(ring, WAIT_3D_IDLE_bit | WAIT_3D_IDLECLEAN_bit);
drivers/gpu/drm/radeon/r600.c
2906
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
drivers/gpu/drm/radeon/r600.c
2907
radeon_ring_write(ring, ((rdev->fence_drv[fence->ring].scratch_reg - PACKET3_SET_CONFIG_REG_OFFSET) >> 2));
drivers/gpu/drm/radeon/r600.c
2908
radeon_ring_write(ring, fence->seq);
drivers/gpu/drm/radeon/r600.c
2910
radeon_ring_write(ring, PACKET0(CP_INT_STATUS, 0));
drivers/gpu/drm/radeon/r600.c
2911
radeon_ring_write(ring, RB_INT_STAT);
drivers/gpu/drm/radeon/r600.c
2927
struct radeon_ring *ring,
drivers/gpu/drm/radeon/r600.c
2937
radeon_ring_write(ring, PACKET3(PACKET3_MEM_SEMAPHORE, 1));
drivers/gpu/drm/radeon/r600.c
2938
radeon_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/radeon/r600.c
2939
radeon_ring_write(ring, (upper_32_bits(addr) & 0xff) | sel);
drivers/gpu/drm/radeon/r600.c
2944
radeon_ring_write(ring, PACKET3(PACKET3_PFP_SYNC_ME, 0));
drivers/gpu/drm/radeon/r600.c
2945
radeon_ring_write(ring, 0x0);
drivers/gpu/drm/radeon/r600.c
2972
struct radeon_ring *ring = &rdev->ring[ring_index];
drivers/gpu/drm/radeon/r600.c
2981
r = radeon_ring_lock(rdev, ring, num_loops * 6 + 24);
drivers/gpu/drm/radeon/r600.c
2989
radeon_sync_rings(rdev, &sync, ring->idx);
drivers/gpu/drm/radeon/r600.c
2991
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
drivers/gpu/drm/radeon/r600.c
2992
radeon_ring_write(ring, (WAIT_UNTIL - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
drivers/gpu/drm/radeon/r600.c
2993
radeon_ring_write(ring, WAIT_3D_IDLE_bit);
drivers/gpu/drm/radeon/r600.c
3002
radeon_ring_write(ring, PACKET3(PACKET3_CP_DMA, 4));
drivers/gpu/drm/radeon/r600.c
3003
radeon_ring_write(ring, lower_32_bits(src_offset));
drivers/gpu/drm/radeon/r600.c
3004
radeon_ring_write(ring, tmp);
drivers/gpu/drm/radeon/r600.c
3005
radeon_ring_write(ring, lower_32_bits(dst_offset));
drivers/gpu/drm/radeon/r600.c
3006
radeon_ring_write(ring, upper_32_bits(dst_offset) & 0xff);
drivers/gpu/drm/radeon/r600.c
3007
radeon_ring_write(ring, cur_size_in_bytes);
drivers/gpu/drm/radeon/r600.c
3011
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
drivers/gpu/drm/radeon/r600.c
3012
radeon_ring_write(ring, (WAIT_UNTIL - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
drivers/gpu/drm/radeon/r600.c
3013
radeon_ring_write(ring, WAIT_CP_DMA_IDLE_bit);
drivers/gpu/drm/radeon/r600.c
3015
r = radeon_fence_emit(rdev, &fence, ring->idx);
drivers/gpu/drm/radeon/r600.c
3017
radeon_ring_unlock_undo(rdev, ring);
drivers/gpu/drm/radeon/r600.c
3022
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/r600.c
3060
rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
drivers/gpu/drm/radeon/r600.c
3061
r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
drivers/gpu/drm/radeon/r600.c
3084
rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
drivers/gpu/drm/radeon/r600.c
3089
struct radeon_ring *ring;
drivers/gpu/drm/radeon/r600.c
3092
if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
drivers/gpu/drm/radeon/r600.c
3095
ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
drivers/gpu/drm/radeon/r600.c
3096
r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0));
drivers/gpu/drm/radeon/r600.c
3110
struct radeon_ring *ring;
drivers/gpu/drm/radeon/r600.c
3160
ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/r600.c
3161
r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
drivers/gpu/drm/radeon/r600.c
3310
rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
drivers/gpu/drm/radeon/r600.c
3311
r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
drivers/gpu/drm/radeon/r600.c
3368
struct radeon_ring *ring = &rdev->ring[ib->ring];
drivers/gpu/drm/radeon/r600.c
3371
if (ring->rptr_save_reg) {
drivers/gpu/drm/radeon/r600.c
3372
next_rptr = ring->wptr + 3 + 4;
drivers/gpu/drm/radeon/r600.c
3373
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
drivers/gpu/drm/radeon/r600.c
3374
radeon_ring_write(ring, ((ring->rptr_save_reg -
drivers/gpu/drm/radeon/r600.c
3376
radeon_ring_write(ring, next_rptr);
drivers/gpu/drm/radeon/r600.c
3378
next_rptr = ring->wptr + 5 + 4;
drivers/gpu/drm/radeon/r600.c
3379
radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
drivers/gpu/drm/radeon/r600.c
3380
radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
drivers/gpu/drm/radeon/r600.c
3381
radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
drivers/gpu/drm/radeon/r600.c
3382
radeon_ring_write(ring, next_rptr);
drivers/gpu/drm/radeon/r600.c
3383
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/r600.c
3386
radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
drivers/gpu/drm/radeon/r600.c
3387
radeon_ring_write(ring,
drivers/gpu/drm/radeon/r600.c
3392
radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
drivers/gpu/drm/radeon/r600.c
3393
radeon_ring_write(ring, ib->length_dw);
drivers/gpu/drm/radeon/r600.c
3396
int r600_ib_test(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/r600.c
3410
r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
drivers/gpu/drm/radeon/r600.c
3442
DRM_INFO("ib test on ring %d succeeded in %u usecs\n", ib.fence->ring, i);
drivers/gpu/drm/radeon/r600.c
3504
(void **)&rdev->ih.ring);
drivers/gpu/drm/radeon/r600.c
3525
rdev->ih.ring = NULL;
drivers/gpu/drm/radeon/r600.c
4128
src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
drivers/gpu/drm/radeon/r600.c
4129
src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
drivers/gpu/drm/radeon/r600_dma.c
108
rdev->ring[R600_RING_TYPE_DMA_INDEX].ready = false;
drivers/gpu/drm/radeon/r600_dma.c
121
struct radeon_ring *ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
drivers/gpu/drm/radeon/r600_dma.c
130
rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/radeon/r600_dma.c
150
WREG32(DMA_RB_BASE, ring->gpu_addr >> 8);
drivers/gpu/drm/radeon/r600_dma.c
166
ring->wptr = 0;
drivers/gpu/drm/radeon/r600_dma.c
167
WREG32(DMA_RB_WPTR, ring->wptr << 2);
drivers/gpu/drm/radeon/r600_dma.c
171
ring->ready = true;
drivers/gpu/drm/radeon/r600_dma.c
173
r = radeon_ring_test(rdev, R600_RING_TYPE_DMA_INDEX, ring);
drivers/gpu/drm/radeon/r600_dma.c
175
ring->ready = false;
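The r600_dma_resume hits above derive rb_bufsz from order_base_2(ring->ring_size / 4): the ring size in bytes is converted to dwords and then encoded as a power-of-two exponent for the hardware's ring-buffer control register. A stand-alone sketch of that computation (ceil_log2 here plays the role of the kernel's order_base_2):

    #include <stdint.h>
    #include <stdio.h>

    static unsigned ceil_log2(uint32_t n)
    {
            unsigned order = 0;

            while ((1u << order) < n)
                    order++;
            return order;
    }

    int main(void)
    {
            uint32_t ring_size_bytes = 64 * 1024;   /* the DMA ring size used in ni.c above */
            printf("rb_bufsz = %u\n", ceil_log2(ring_size_bytes / 4));   /* 14 */
            return 0;
    }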
drivers/gpu/drm/radeon/r600_dma.c
195
radeon_ring_fini(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX]);
drivers/gpu/drm/radeon/r600_dma.c
207
bool r600_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/r600_dma.c
212
radeon_ring_lockup_update(rdev, ring);
drivers/gpu/drm/radeon/r600_dma.c
215
return radeon_ring_test_lockup(rdev, ring);
drivers/gpu/drm/radeon/r600_dma.c
230
struct radeon_ring *ring)
drivers/gpu/drm/radeon/r600_dma.c
238
if (ring->idx == R600_RING_TYPE_DMA_INDEX)
drivers/gpu/drm/radeon/r600_dma.c
248
r = radeon_ring_lock(rdev, ring, 4);
drivers/gpu/drm/radeon/r600_dma.c
250
DRM_ERROR("radeon: dma failed to lock ring %d (%d).\n", ring->idx, r);
drivers/gpu/drm/radeon/r600_dma.c
253
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1));
drivers/gpu/drm/radeon/r600_dma.c
254
radeon_ring_write(ring, lower_32_bits(gpu_addr));
drivers/gpu/drm/radeon/r600_dma.c
255
radeon_ring_write(ring, upper_32_bits(gpu_addr) & 0xff);
drivers/gpu/drm/radeon/r600_dma.c
256
radeon_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/radeon/r600_dma.c
257
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/r600_dma.c
267
DRM_INFO("ring test on %d succeeded in %d usecs\n", ring->idx, i);
drivers/gpu/drm/radeon/r600_dma.c
270
ring->idx, tmp);
drivers/gpu/drm/radeon/r600_dma.c
289
struct radeon_ring *ring = &rdev->ring[fence->ring];
drivers/gpu/drm/radeon/r600_dma.c
290
u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
drivers/gpu/drm/radeon/r600_dma.c
293
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_FENCE, 0, 0, 0));
drivers/gpu/drm/radeon/r600_dma.c
294
radeon_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/radeon/r600_dma.c
295
radeon_ring_write(ring, (upper_32_bits(addr) & 0xff));
drivers/gpu/drm/radeon/r600_dma.c
296
radeon_ring_write(ring, lower_32_bits(fence->seq));
drivers/gpu/drm/radeon/r600_dma.c
298
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_TRAP, 0, 0, 0));
drivers/gpu/drm/radeon/r600_dma.c
313
struct radeon_ring *ring,
drivers/gpu/drm/radeon/r600_dma.c
320
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SEMAPHORE, 0, s, 0));
drivers/gpu/drm/radeon/r600_dma.c
321
radeon_ring_write(ring, addr & 0xfffffffc);
drivers/gpu/drm/radeon/r600_dma.c
322
radeon_ring_write(ring, upper_32_bits(addr) & 0xff);
drivers/gpu/drm/radeon/r600_dma.c
336
int r600_dma_ib_test(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/r600_dma.c
345
if (ring->idx == R600_RING_TYPE_DMA_INDEX)
drivers/gpu/drm/radeon/r600_dma.c
352
r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
drivers/gpu/drm/radeon/r600_dma.c
387
DRM_INFO("ib test on ring %d succeeded in %u usecs\n", ib.fence->ring, i);
drivers/gpu/drm/radeon/r600_dma.c
406
struct radeon_ring *ring = &rdev->ring[ib->ring];
drivers/gpu/drm/radeon/r600_dma.c
409
u32 next_rptr = ring->wptr + 4;
drivers/gpu/drm/radeon/r600_dma.c
413
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1));
drivers/gpu/drm/radeon/r600_dma.c
414
radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
drivers/gpu/drm/radeon/r600_dma.c
415
radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xff);
drivers/gpu/drm/radeon/r600_dma.c
416
radeon_ring_write(ring, next_rptr);
drivers/gpu/drm/radeon/r600_dma.c
422
while ((ring->wptr & 7) != 5)
drivers/gpu/drm/radeon/r600_dma.c
423
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0));
drivers/gpu/drm/radeon/r600_dma.c
424
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_INDIRECT_BUFFER, 0, 0, 0));
drivers/gpu/drm/radeon/r600_dma.c
425
radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
drivers/gpu/drm/radeon/r600_dma.c
426
radeon_ring_write(ring, (ib->length_dw << 16) | (upper_32_bits(ib->gpu_addr) & 0xFF));
drivers/gpu/drm/radeon/r600_dma.c
451
struct radeon_ring *ring = &rdev->ring[ring_index];
drivers/gpu/drm/radeon/r600_dma.c
460
r = radeon_ring_lock(rdev, ring, num_loops * 4 + 8);
drivers/gpu/drm/radeon/r600_dma.c
468
radeon_sync_rings(rdev, &sync, ring->idx);
drivers/gpu/drm/radeon/r600_dma.c
475
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_COPY, 0, 0, cur_size_in_dw));
drivers/gpu/drm/radeon/r600_dma.c
476
radeon_ring_write(ring, dst_offset & 0xfffffffc);
drivers/gpu/drm/radeon/r600_dma.c
477
radeon_ring_write(ring, src_offset & 0xfffffffc);
drivers/gpu/drm/radeon/r600_dma.c
478
radeon_ring_write(ring, (((upper_32_bits(dst_offset) & 0xff) << 16) |
drivers/gpu/drm/radeon/r600_dma.c
484
r = radeon_fence_emit(rdev, &fence, ring->idx);
drivers/gpu/drm/radeon/r600_dma.c
486
radeon_ring_unlock_undo(rdev, ring);
drivers/gpu/drm/radeon/r600_dma.c
491
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/r600_dma.c
51
struct radeon_ring *ring)
drivers/gpu/drm/radeon/r600_dma.c
56
rptr = rdev->wb.wb[ring->rptr_offs/4];
drivers/gpu/drm/radeon/r600_dma.c
72
struct radeon_ring *ring)
drivers/gpu/drm/radeon/r600_dma.c
86
struct radeon_ring *ring)
drivers/gpu/drm/radeon/r600_dma.c
88
WREG32(DMA_RB_WPTR, (ring->wptr << 2) & 0x3fffc);
drivers/gpu/drm/radeon/radeon.h
1044
u32 ring;
drivers/gpu/drm/radeon/radeon.h
1679
int radeon_uvd_get_create_msg(struct radeon_device *rdev, int ring,
drivers/gpu/drm/radeon/radeon.h
1681
int radeon_uvd_get_destroy_msg(struct radeon_device *rdev, int ring,
drivers/gpu/drm/radeon/radeon.h
1722
int radeon_vce_get_create_msg(struct radeon_device *rdev, int ring,
drivers/gpu/drm/radeon/radeon.h
1724
int radeon_vce_get_destroy_msg(struct radeon_device *rdev, int ring,
drivers/gpu/drm/radeon/radeon.h
1731
struct radeon_ring *ring,
drivers/gpu/drm/radeon/radeon.h
1737
int radeon_vce_ring_test(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon.h
1738
int radeon_vce_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon.h
1803
u32 (*get_rptr)(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon.h
1804
u32 (*get_wptr)(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon.h
1805
void (*set_wptr)(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon.h
1814
void (*hdp_flush)(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon.h
1817
void (*vm_flush)(struct radeon_device *rdev, struct radeon_ring *ring,
drivers/gpu/drm/radeon/radeon.h
1878
const struct radeon_asic_ring *ring[RADEON_NUM_RINGS];
drivers/gpu/drm/radeon/radeon.h
2369
struct radeon_ring ring[RADEON_NUM_RINGS];
drivers/gpu/drm/radeon/radeon.h
2678
static inline void radeon_ring_write(struct radeon_ring *ring, uint32_t v)
drivers/gpu/drm/radeon/radeon.h
2680
if (ring->count_dw <= 0)
drivers/gpu/drm/radeon/radeon.h
2683
ring->ring[ring->wptr++] = v;
drivers/gpu/drm/radeon/radeon.h
2684
ring->wptr &= ring->ptr_mask;
drivers/gpu/drm/radeon/radeon.h
2685
ring->count_dw--;
drivers/gpu/drm/radeon/radeon.h
2686
ring->ring_free_dw--;
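The radeon.h hits above reproduce the inline ring-write helper almost in full: it stores one dword at the write pointer, wraps the pointer with ptr_mask, and decrements both the reserved and free dword counters. The same bookkeeping as a self-contained user-space sketch (struct and names are stand-ins, not the kernel types):

    #include <stdint.h>
    #include <stdio.h>

    struct toy_ring {
            uint32_t *ring;
            uint32_t wptr;
            uint32_t ptr_mask;      /* dword count - 1, count is a power of two */
            int count_dw;           /* dwords reserved by the current lock */
            int ring_free_dw;       /* dwords free in the whole ring */
    };

    static void toy_ring_write(struct toy_ring *r, uint32_t v)
    {
            if (r->count_dw <= 0)
                    fprintf(stderr, "writing more dwords than reserved\n");
            r->ring[r->wptr++] = v;
            r->wptr &= r->ptr_mask;
            r->count_dw--;
            r->ring_free_dw--;
    }

    int main(void)
    {
            uint32_t buf[8] = { 0 };
            struct toy_ring r = { buf, 7, 7, 4, 8 };

            toy_ring_write(&r, 0x11);   /* lands in buf[7], wptr wraps to 0 */
            toy_ring_write(&r, 0x22);   /* lands in buf[0] */
            printf("wptr=%u buf[7]=0x%x buf[0]=0x%x\n", r.wptr, buf[7], buf[0]);
            return 0;
    }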
drivers/gpu/drm/radeon/radeon.h
2696
#define radeon_cs_parse(rdev, r, p) (rdev)->asic->ring[(r)]->cs_parse((p))
drivers/gpu/drm/radeon/radeon.h
2708
#define radeon_ring_start(rdev, r, cp) (rdev)->asic->ring[(r)]->ring_start((rdev), (cp))
drivers/gpu/drm/radeon/radeon.h
2709
#define radeon_ring_test(rdev, r, cp) (rdev)->asic->ring[(r)]->ring_test((rdev), (cp))
drivers/gpu/drm/radeon/radeon.h
2710
#define radeon_ib_test(rdev, r, cp) (rdev)->asic->ring[(r)]->ib_test((rdev), (cp))
drivers/gpu/drm/radeon/radeon.h
2711
#define radeon_ring_ib_execute(rdev, r, ib) (rdev)->asic->ring[(r)]->ib_execute((rdev), (ib))
drivers/gpu/drm/radeon/radeon.h
2712
#define radeon_ring_ib_parse(rdev, r, ib) (rdev)->asic->ring[(r)]->ib_parse((rdev), (ib))
drivers/gpu/drm/radeon/radeon.h
2713
#define radeon_ring_is_lockup(rdev, r, cp) (rdev)->asic->ring[(r)]->is_lockup((rdev), (cp))
drivers/gpu/drm/radeon/radeon.h
2714
#define radeon_ring_vm_flush(rdev, r, vm_id, pd_addr) (rdev)->asic->ring[(r)->idx]->vm_flush((rdev), (r), (vm_id), (pd_addr))
drivers/gpu/drm/radeon/radeon.h
2715
#define radeon_ring_get_rptr(rdev, r) (rdev)->asic->ring[(r)->idx]->get_rptr((rdev), (r))
drivers/gpu/drm/radeon/radeon.h
2716
#define radeon_ring_get_wptr(rdev, r) (rdev)->asic->ring[(r)->idx]->get_wptr((rdev), (r))
drivers/gpu/drm/radeon/radeon.h
2717
#define radeon_ring_set_wptr(rdev, r) (rdev)->asic->ring[(r)->idx]->set_wptr((rdev), (r))
drivers/gpu/drm/radeon/radeon.h
2725
#define radeon_fence_ring_emit(rdev, r, fence) (rdev)->asic->ring[(r)]->emit_fence((rdev), (fence))
drivers/gpu/drm/radeon/radeon.h
2726
#define radeon_semaphore_ring_emit(rdev, r, cp, semaphore, emit_wait) (rdev)->asic->ring[(r)]->emit_semaphore((rdev), (cp), (semaphore), (emit_wait))
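The macros above dispatch every ring operation through a per-ring ops table hung off the ASIC structure (asic->ring[idx]->get_rptr and friends), so the same call sites serve the GFX, DMA, UVD and VCE rings. A compact illustration of that indirection with made-up names:

    #include <stdint.h>
    #include <stdio.h>

    struct toy_ring { int idx; uint32_t rptr; };

    struct toy_ring_ops {
            uint32_t (*get_rptr)(struct toy_ring *ring);
    };

    static uint32_t gfx_get_rptr(struct toy_ring *ring) { return ring->rptr; }
    static uint32_t dma_get_rptr(struct toy_ring *ring) { return ring->rptr & 0x3fffc; }

    static const struct toy_ring_ops gfx_ops = { .get_rptr = gfx_get_rptr };
    static const struct toy_ring_ops dma_ops = { .get_rptr = dma_get_rptr };

    struct toy_asic { const struct toy_ring_ops *ring[2]; };

    /* mirrors the shape of radeon_ring_get_rptr() above */
    #define toy_ring_get_rptr(asic, r) ((asic)->ring[(r)->idx]->get_rptr((r)))

    int main(void)
    {
            struct toy_asic asic = { .ring = { &gfx_ops, &dma_ops } };
            struct toy_ring dma = { .idx = 1, .rptr = 0x40007 };

            printf("rptr=0x%x\n", toy_ring_get_rptr(&asic, &dma));   /* masked by the dma ops */
            return 0;
    }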
drivers/gpu/drm/radeon/radeon.h
2840
struct radeon_vm *vm, int ring);
drivers/gpu/drm/radeon/radeon.h
2843
int ring, struct radeon_fence *fence);
drivers/gpu/drm/radeon/radeon.h
381
unsigned ring;
drivers/gpu/drm/radeon/radeon.h
387
int radeon_fence_driver_start_ring(struct radeon_device *rdev, int ring);
drivers/gpu/drm/radeon/radeon.h
390
void radeon_fence_driver_force_completion(struct radeon_device *rdev, int ring);
drivers/gpu/drm/radeon/radeon.h
391
int radeon_fence_emit(struct radeon_device *rdev, struct radeon_fence **fence, int ring);
drivers/gpu/drm/radeon/radeon.h
392
void radeon_fence_process(struct radeon_device *rdev, int ring);
drivers/gpu/drm/radeon/radeon.h
396
int radeon_fence_wait_next(struct radeon_device *rdev, int ring);
drivers/gpu/drm/radeon/radeon.h
397
int radeon_fence_wait_empty(struct radeon_device *rdev, int ring);
drivers/gpu/drm/radeon/radeon.h
400
unsigned radeon_fence_count_emitted(struct radeon_device *rdev, int ring);
drivers/gpu/drm/radeon/radeon.h
401
bool radeon_fence_need_sync(struct radeon_fence *fence, int ring);
drivers/gpu/drm/radeon/radeon.h
402
void radeon_fence_note_sync(struct radeon_fence *fence, int ring);
drivers/gpu/drm/radeon/radeon.h
414
BUG_ON(a->ring != b->ring);
drivers/gpu/drm/radeon/radeon.h
434
BUG_ON(a->ring != b->ring);
drivers/gpu/drm/radeon/radeon.h
558
bool radeon_semaphore_emit_signal(struct radeon_device *rdev, int ring,
drivers/gpu/drm/radeon/radeon.h
560
bool radeon_semaphore_emit_wait(struct radeon_device *rdev, int ring,
drivers/gpu/drm/radeon/radeon.h
765
void radeon_irq_kms_sw_irq_get(struct radeon_device *rdev, int ring);
drivers/gpu/drm/radeon/radeon.h
766
bool radeon_irq_kms_sw_irq_get_delayed(struct radeon_device *rdev, int ring);
drivers/gpu/drm/radeon/radeon.h
767
void radeon_irq_kms_sw_irq_put(struct radeon_device *rdev, int ring);
drivers/gpu/drm/radeon/radeon.h
784
int ring;
drivers/gpu/drm/radeon/radeon.h
794
volatile uint32_t *ring;
drivers/gpu/drm/radeon/radeon.h
930
volatile uint32_t *ring;
drivers/gpu/drm/radeon/radeon.h
964
int radeon_ib_get(struct radeon_device *rdev, int ring,
drivers/gpu/drm/radeon/radeon.h
975
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon.h
983
void radeon_ring_undo(struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon.h
987
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon.h
988
bool radeon_ring_test_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon.h
989
unsigned radeon_ring_backup(struct radeon_device *rdev, struct radeon_ring *ring,
drivers/gpu/drm/radeon/radeon.h
991
int radeon_ring_restore(struct radeon_device *rdev, struct radeon_ring *ring,
drivers/gpu/drm/radeon/radeon_asic.c
1034
.ring = {
drivers/gpu/drm/radeon/radeon_asic.c
1127
.ring = {
drivers/gpu/drm/radeon/radeon_asic.c
1233
.ring = {
drivers/gpu/drm/radeon/radeon_asic.c
1353
.ring = {
drivers/gpu/drm/radeon/radeon_asic.c
1447
.ring = {
drivers/gpu/drm/radeon/radeon_asic.c
1541
.ring = {
drivers/gpu/drm/radeon/radeon_asic.c
1686
.ring = {
drivers/gpu/drm/radeon/radeon_asic.c
1804
.ring = {
drivers/gpu/drm/radeon/radeon_asic.c
1942
.ring = {
drivers/gpu/drm/radeon/radeon_asic.c
2112
.ring = {
drivers/gpu/drm/radeon/radeon_asic.c
213
.ring = {
drivers/gpu/drm/radeon/radeon_asic.c
2225
.ring = {
drivers/gpu/drm/radeon/radeon_asic.c
281
.ring = {
drivers/gpu/drm/radeon/radeon_asic.c
377
.ring = {
drivers/gpu/drm/radeon/radeon_asic.c
445
.ring = {
drivers/gpu/drm/radeon/radeon_asic.c
513
.ring = {
drivers/gpu/drm/radeon/radeon_asic.c
581
.ring = {
drivers/gpu/drm/radeon/radeon_asic.c
649
.ring = {
drivers/gpu/drm/radeon/radeon_asic.c
717
.ring = {
drivers/gpu/drm/radeon/radeon_asic.c
785
.ring = {
drivers/gpu/drm/radeon/radeon_asic.c
853
.ring = {
drivers/gpu/drm/radeon/radeon_asic.c
949
.ring = {
drivers/gpu/drm/radeon/radeon_asic.h
113
int r100_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
147
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
149
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
151
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
172
extern void r300_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
287
void rv515_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
331
struct radeon_ring *ring,
drivers/gpu/drm/radeon/radeon_asic.h
335
bool r600_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
342
int r600_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
343
int r600_dma_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
382
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
384
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
386
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
415
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
417
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
419
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
613
void cayman_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
drivers/gpu/drm/radeon/radeon_asic.h
620
bool cayman_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
621
bool cayman_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
639
void cayman_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
drivers/gpu/drm/radeon/radeon_asic.h
643
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
645
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
647
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
649
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
651
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
653
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
721
void si_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
drivers/gpu/drm/radeon/radeon_asic.h
73
void r100_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
744
void si_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
drivers/gpu/drm/radeon/radeon_asic.h
791
struct radeon_ring *ring,
drivers/gpu/drm/radeon/radeon_asic.h
803
int cik_sdma_ring_test(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
804
int cik_sdma_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
805
bool cik_sdma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
822
int cik_ring_test(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
823
int cik_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
828
void cik_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
drivers/gpu/drm/radeon/radeon_asic.h
847
void cik_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
drivers/gpu/drm/radeon/radeon_asic.h
851
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
853
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
855
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
857
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
859
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
861
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
863
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
865
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
867
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
928
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
930
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
932
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
940
int uvd_v1_0_ring_test(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
943
int uvd_v1_0_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
945
struct radeon_ring *ring,
drivers/gpu/drm/radeon/radeon_asic.h
955
struct radeon_ring *ring,
drivers/gpu/drm/radeon/radeon_asic.h
961
struct radeon_ring *ring,
drivers/gpu/drm/radeon/radeon_asic.h
970
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
972
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_asic.h
974
struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_cs.c
131
if (p->ring == R600_RING_TYPE_UVD_INDEX &&
drivers/gpu/drm/radeon/radeon_cs.c
196
r = radeon_bo_list_validate(p->rdev, &p->exec, &p->validated, p->ring);
drivers/gpu/drm/radeon/radeon_cs.c
204
static int radeon_cs_get_ring(struct radeon_cs_parser *p, u32 ring, s32 priority)
drivers/gpu/drm/radeon/radeon_cs.c
208
switch (ring) {
drivers/gpu/drm/radeon/radeon_cs.c
210
DRM_ERROR("unknown ring id: %d\n", ring);
drivers/gpu/drm/radeon/radeon_cs.c
213
p->ring = RADEON_RING_TYPE_GFX_INDEX;
drivers/gpu/drm/radeon/radeon_cs.c
218
p->ring = CAYMAN_RING_TYPE_CP1_INDEX;
drivers/gpu/drm/radeon/radeon_cs.c
220
p->ring = CAYMAN_RING_TYPE_CP2_INDEX;
drivers/gpu/drm/radeon/radeon_cs.c
222
p->ring = RADEON_RING_TYPE_GFX_INDEX;
drivers/gpu/drm/radeon/radeon_cs.c
227
p->ring = R600_RING_TYPE_DMA_INDEX;
drivers/gpu/drm/radeon/radeon_cs.c
229
p->ring = CAYMAN_RING_TYPE_DMA1_INDEX;
drivers/gpu/drm/radeon/radeon_cs.c
231
p->ring = R600_RING_TYPE_DMA_INDEX;
drivers/gpu/drm/radeon/radeon_cs.c
237
p->ring = R600_RING_TYPE_UVD_INDEX;
drivers/gpu/drm/radeon/radeon_cs.c
241
p->ring = TN_RING_TYPE_VCE1_INDEX;
drivers/gpu/drm/radeon/radeon_cs.c
270
u32 ring = RADEON_CS_RING_GFX;
drivers/gpu/drm/radeon/radeon_cs.c
358
ring = p->chunks[i].kdata[1];
drivers/gpu/drm/radeon/radeon_cs.c
372
if (radeon_cs_get_ring(p, ring, priority))
drivers/gpu/drm/radeon/radeon_cs.c
377
if (p->rdev->asic->ring[p->ring]->cs_parse == NULL) {
drivers/gpu/drm/radeon/radeon_cs.c
378
DRM_ERROR("Ring %d requires VM!\n", p->ring);
drivers/gpu/drm/radeon/radeon_cs.c
382
if (p->rdev->asic->ring[p->ring]->ib_parse == NULL) {
drivers/gpu/drm/radeon/radeon_cs.c
384
p->ring);
drivers/gpu/drm/radeon/radeon_cs.c
475
r = radeon_cs_parse(rdev, parser->ring, parser);
drivers/gpu/drm/radeon/radeon_cs.c
488
if (parser->ring == R600_RING_TYPE_UVD_INDEX)
drivers/gpu/drm/radeon/radeon_cs.c
490
else if ((parser->ring == TN_RING_TYPE_VCE1_INDEX) ||
drivers/gpu/drm/radeon/radeon_cs.c
491
(parser->ring == TN_RING_TYPE_VCE2_INDEX))
drivers/gpu/drm/radeon/radeon_cs.c
563
r = radeon_ring_ib_parse(rdev, parser->ring, &parser->const_ib);
drivers/gpu/drm/radeon/radeon_cs.c
569
r = radeon_ring_ib_parse(rdev, parser->ring, &parser->ib);
drivers/gpu/drm/radeon/radeon_cs.c
574
if (parser->ring == R600_RING_TYPE_UVD_INDEX)
drivers/gpu/drm/radeon/radeon_cs.c
632
r = radeon_ib_get(rdev, parser->ring, &parser->const_ib,
drivers/gpu/drm/radeon/radeon_cs.c
654
r = radeon_ib_get(rdev, parser->ring, &parser->ib,
drivers/gpu/drm/radeon/radeon_device.c
1296
rdev->ring[i].idx = i;
drivers/gpu/drm/radeon/radeon_device.c
1779
ring_sizes[i] = radeon_ring_backup(rdev, &rdev->ring[i],
drivers/gpu/drm/radeon/radeon_device.c
1798
radeon_ring_restore(rdev, &rdev->ring[i],
drivers/gpu/drm/radeon/radeon_fence.c
112
static void radeon_fence_schedule_check(struct radeon_device *rdev, int ring)
drivers/gpu/drm/radeon/radeon_fence.c
119
&rdev->fence_drv[ring].lockup_work,
drivers/gpu/drm/radeon/radeon_fence.c
135
int ring)
drivers/gpu/drm/radeon/radeon_fence.c
145
(*fence)->seq = seq = ++rdev->fence_drv[ring].sync_seq[ring];
drivers/gpu/drm/radeon/radeon_fence.c
146
(*fence)->ring = ring;
drivers/gpu/drm/radeon/radeon_fence.c
150
rdev->fence_context + ring,
drivers/gpu/drm/radeon/radeon_fence.c
152
radeon_fence_ring_emit(rdev, ring, *fence);
drivers/gpu/drm/radeon/radeon_fence.c
153
trace_radeon_fence_emit(rdev_to_drm(rdev), ring, (*fence)->seq);
drivers/gpu/drm/radeon/radeon_fence.c
154
radeon_fence_schedule_check(rdev, ring);
drivers/gpu/drm/radeon/radeon_fence.c
177
seq = atomic64_read(&fence->rdev->fence_drv[fence->ring].last_seq);
drivers/gpu/drm/radeon/radeon_fence.c
180
radeon_irq_kms_sw_irq_put(fence->rdev, fence->ring);
drivers/gpu/drm/radeon/radeon_fence.c
197
static bool radeon_fence_activity(struct radeon_device *rdev, int ring)
drivers/gpu/drm/radeon/radeon_fence.c
224
last_seq = atomic64_read(&rdev->fence_drv[ring].last_seq);
drivers/gpu/drm/radeon/radeon_fence.c
226
last_emitted = rdev->fence_drv[ring].sync_seq[ring];
drivers/gpu/drm/radeon/radeon_fence.c
227
seq = radeon_fence_read(rdev, ring);
drivers/gpu/drm/radeon/radeon_fence.c
251
} while (atomic64_xchg(&rdev->fence_drv[ring].last_seq, seq) > seq);
drivers/gpu/drm/radeon/radeon_fence.c
254
radeon_fence_schedule_check(rdev, ring);
drivers/gpu/drm/radeon/radeon_fence.c
271
int ring;
drivers/gpu/drm/radeon/radeon_fence.c
276
ring = fence_drv - &rdev->fence_drv[0];
drivers/gpu/drm/radeon/radeon_fence.c
280
radeon_fence_schedule_check(rdev, ring);
drivers/gpu/drm/radeon/radeon_fence.c
293
if (radeon_fence_activity(rdev, ring))
drivers/gpu/drm/radeon/radeon_fence.c
296
else if (radeon_ring_is_lockup(rdev, ring, &rdev->ring[ring])) {
drivers/gpu/drm/radeon/radeon_fence.c
301
fence_drv->sync_seq[ring], ring);
drivers/gpu/drm/radeon/radeon_fence.c
319
void radeon_fence_process(struct radeon_device *rdev, int ring)
drivers/gpu/drm/radeon/radeon_fence.c
321
if (radeon_fence_activity(rdev, ring))
drivers/gpu/drm/radeon/radeon_fence.c
340
u64 seq, unsigned int ring)
drivers/gpu/drm/radeon/radeon_fence.c
342
if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq)
drivers/gpu/drm/radeon/radeon_fence.c
346
radeon_fence_process(rdev, ring);
drivers/gpu/drm/radeon/radeon_fence.c
347
if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq)
drivers/gpu/drm/radeon/radeon_fence.c
357
unsigned int ring = fence->ring;
drivers/gpu/drm/radeon/radeon_fence.c
360
if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq)
drivers/gpu/drm/radeon/radeon_fence.c
379
if (atomic64_read(&rdev->fence_drv[fence->ring].last_seq) >= fence->seq)
drivers/gpu/drm/radeon/radeon_fence.c
383
radeon_irq_kms_sw_irq_get(rdev, fence->ring);
drivers/gpu/drm/radeon/radeon_fence.c
385
if (radeon_fence_activity(rdev, fence->ring))
drivers/gpu/drm/radeon/radeon_fence.c
389
if (atomic64_read(&rdev->fence_drv[fence->ring].last_seq) >= fence->seq) {
drivers/gpu/drm/radeon/radeon_fence.c
390
radeon_irq_kms_sw_irq_put(rdev, fence->ring);
drivers/gpu/drm/radeon/radeon_fence.c
398
if (radeon_irq_kms_sw_irq_get_delayed(rdev, fence->ring))
drivers/gpu/drm/radeon/radeon_fence.c
399
rdev->fence_drv[fence->ring].delayed_irq = true;
drivers/gpu/drm/radeon/radeon_fence.c
400
radeon_fence_schedule_check(rdev, fence->ring);
drivers/gpu/drm/radeon/radeon_fence.c
424
if (radeon_fence_seq_signaled(fence->rdev, fence->seq, fence->ring)) {
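The radeon_fence hits above show the per-ring sequence scheme: each emitted fence takes the next value of sync_seq, the hardware writes back the sequence it has retired, and a fence counts as signaled once the ring's last_seq has caught up with it. A reduced sketch of that comparison (illustrative structs, not the kernel's fence driver):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    struct toy_fence_drv {
            uint64_t sync_seq;      /* last sequence handed out on this ring */
            uint64_t last_seq;      /* last sequence the hardware reported back */
    };

    static uint64_t toy_fence_emit(struct toy_fence_drv *drv)
    {
            return ++drv->sync_seq;
    }

    static bool toy_fence_signaled(const struct toy_fence_drv *drv, uint64_t seq)
    {
            return drv->last_seq >= seq;
    }

    int main(void)
    {
            struct toy_fence_drv drv = { 0, 0 };
            uint64_t a = toy_fence_emit(&drv);
            uint64_t b = toy_fence_emit(&drv);

            drv.last_seq = 1;       /* hardware has retired the first fence only */
            printf("a signaled: %d, b signaled: %d\n",
                   toy_fence_signaled(&drv, a), toy_fence_signaled(&drv, b));
            return 0;
    }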
drivers/gpu/drm/radeon/radeon_fence.c
540
seq[fence->ring] = fence->seq;
drivers/gpu/drm/radeon/radeon_fence.c
580
int radeon_fence_wait_next(struct radeon_device *rdev, int ring)
drivers/gpu/drm/radeon/radeon_fence.c
585
seq[ring] = atomic64_read(&rdev->fence_drv[ring].last_seq) + 1ULL;
drivers/gpu/drm/radeon/radeon_fence.c
586
if (seq[ring] >= rdev->fence_drv[ring].sync_seq[ring]) {
drivers/gpu/drm/radeon/radeon_fence.c
610
int radeon_fence_wait_empty(struct radeon_device *rdev, int ring)
drivers/gpu/drm/radeon/radeon_fence.c
615
seq[ring] = rdev->fence_drv[ring].sync_seq[ring];
drivers/gpu/drm/radeon/radeon_fence.c
616
if (!seq[ring])
drivers/gpu/drm/radeon/radeon_fence.c
625
ring, r);
drivers/gpu/drm/radeon/radeon_fence.c
67
static void radeon_fence_write(struct radeon_device *rdev, u32 seq, int ring)
drivers/gpu/drm/radeon/radeon_fence.c
670
unsigned int radeon_fence_count_emitted(struct radeon_device *rdev, int ring)
drivers/gpu/drm/radeon/radeon_fence.c
677
radeon_fence_process(rdev, ring);
drivers/gpu/drm/radeon/radeon_fence.c
678
emitted = rdev->fence_drv[ring].sync_seq[ring]
drivers/gpu/drm/radeon/radeon_fence.c
679
- atomic64_read(&rdev->fence_drv[ring].last_seq);
drivers/gpu/drm/radeon/radeon_fence.c
69
struct radeon_fence_driver *drv = &rdev->fence_drv[ring];
drivers/gpu/drm/radeon/radeon_fence.c
705
if (fence->ring == dst_ring)
drivers/gpu/drm/radeon/radeon_fence.c
710
if (fence->seq <= fdrv->sync_seq[fence->ring])
drivers/gpu/drm/radeon/radeon_fence.c
733
if (fence->ring == dst_ring)
drivers/gpu/drm/radeon/radeon_fence.c
737
src = &fence->rdev->fence_drv[fence->ring];
drivers/gpu/drm/radeon/radeon_fence.c
759
int radeon_fence_driver_start_ring(struct radeon_device *rdev, int ring)
drivers/gpu/drm/radeon/radeon_fence.c
764
radeon_scratch_free(rdev, rdev->fence_drv[ring].scratch_reg);
drivers/gpu/drm/radeon/radeon_fence.c
765
if (rdev->wb.use_event || !radeon_ring_supports_scratch_reg(rdev, &rdev->ring[ring])) {
drivers/gpu/drm/radeon/radeon_fence.c
766
rdev->fence_drv[ring].scratch_reg = 0;
drivers/gpu/drm/radeon/radeon_fence.c
767
if (ring != R600_RING_TYPE_UVD_INDEX) {
drivers/gpu/drm/radeon/radeon_fence.c
768
index = R600_WB_EVENT_OFFSET + ring * 4;
drivers/gpu/drm/radeon/radeon_fence.c
769
rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4];
drivers/gpu/drm/radeon/radeon_fence.c
770
rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr +
drivers/gpu/drm/radeon/radeon_fence.c
776
rdev->fence_drv[ring].cpu_addr = rdev->uvd.cpu_addr + index;
drivers/gpu/drm/radeon/radeon_fence.c
777
rdev->fence_drv[ring].gpu_addr = rdev->uvd.gpu_addr + index;
drivers/gpu/drm/radeon/radeon_fence.c
781
r = radeon_scratch_get(rdev, &rdev->fence_drv[ring].scratch_reg);
drivers/gpu/drm/radeon/radeon_fence.c
787
rdev->fence_drv[ring].scratch_reg -
drivers/gpu/drm/radeon/radeon_fence.c
789
rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4];
drivers/gpu/drm/radeon/radeon_fence.c
790
rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + index;
drivers/gpu/drm/radeon/radeon_fence.c
792
radeon_fence_write(rdev, atomic64_read(&rdev->fence_drv[ring].last_seq), ring);
drivers/gpu/drm/radeon/radeon_fence.c
793
rdev->fence_drv[ring].initialized = true;
drivers/gpu/drm/radeon/radeon_fence.c
795
ring, rdev->fence_drv[ring].gpu_addr);
drivers/gpu/drm/radeon/radeon_fence.c
809
static void radeon_fence_driver_init_ring(struct radeon_device *rdev, int ring)
drivers/gpu/drm/radeon/radeon_fence.c
813
rdev->fence_drv[ring].scratch_reg = -1;
drivers/gpu/drm/radeon/radeon_fence.c
814
rdev->fence_drv[ring].cpu_addr = NULL;
drivers/gpu/drm/radeon/radeon_fence.c
815
rdev->fence_drv[ring].gpu_addr = 0;
drivers/gpu/drm/radeon/radeon_fence.c
817
rdev->fence_drv[ring].sync_seq[i] = 0;
drivers/gpu/drm/radeon/radeon_fence.c
818
atomic64_set(&rdev->fence_drv[ring].last_seq, 0);
drivers/gpu/drm/radeon/radeon_fence.c
819
rdev->fence_drv[ring].initialized = false;
drivers/gpu/drm/radeon/radeon_fence.c
820
INIT_DELAYED_WORK(&rdev->fence_drv[ring].lockup_work,
drivers/gpu/drm/radeon/radeon_fence.c
822
rdev->fence_drv[ring].rdev = rdev;
drivers/gpu/drm/radeon/radeon_fence.c
838
int ring;
drivers/gpu/drm/radeon/radeon_fence.c
841
for (ring = 0; ring < RADEON_NUM_RINGS; ring++)
drivers/gpu/drm/radeon/radeon_fence.c
842
radeon_fence_driver_init_ring(rdev, ring);
drivers/gpu/drm/radeon/radeon_fence.c
857
int ring, r;
drivers/gpu/drm/radeon/radeon_fence.c
860
for (ring = 0; ring < RADEON_NUM_RINGS; ring++) {
drivers/gpu/drm/radeon/radeon_fence.c
861
if (!rdev->fence_drv[ring].initialized)
drivers/gpu/drm/radeon/radeon_fence.c
863
r = radeon_fence_wait_empty(rdev, ring);
drivers/gpu/drm/radeon/radeon_fence.c
866
radeon_fence_driver_force_completion(rdev, ring);
drivers/gpu/drm/radeon/radeon_fence.c
868
cancel_delayed_work_sync(&rdev->fence_drv[ring].lockup_work);
drivers/gpu/drm/radeon/radeon_fence.c
870
radeon_scratch_free(rdev, rdev->fence_drv[ring].scratch_reg);
drivers/gpu/drm/radeon/radeon_fence.c
871
rdev->fence_drv[ring].initialized = false;
drivers/gpu/drm/radeon/radeon_fence.c
88
static u32 radeon_fence_read(struct radeon_device *rdev, int ring)
drivers/gpu/drm/radeon/radeon_fence.c
885
void radeon_fence_driver_force_completion(struct radeon_device *rdev, int ring)
drivers/gpu/drm/radeon/radeon_fence.c
887
if (rdev->fence_drv[ring].initialized) {
drivers/gpu/drm/radeon/radeon_fence.c
888
radeon_fence_write(rdev, rdev->fence_drv[ring].sync_seq[ring], ring);
drivers/gpu/drm/radeon/radeon_fence.c
889
cancel_delayed_work_sync(&rdev->fence_drv[ring].lockup_work);
drivers/gpu/drm/radeon/radeon_fence.c
90
struct radeon_fence_driver *drv = &rdev->fence_drv[ring];
drivers/gpu/drm/radeon/radeon_fence.c
969
switch (fence->ring) {
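The radeon_fence.c hits above include the outstanding-fence computation (the ring's sync_seq minus the last signalled sequence number read atomically). A minimal userspace sketch of that arithmetic, using hypothetical toy types rather than the driver's radeon_fence_driver structure:
/* Hedged sketch only: toy types, not the radeon driver's structures. */
#include <stdint.h>

struct toy_fence_drv {
	uint64_t sync_seq;	/* last sequence number emitted on this ring */
	uint64_t last_seq;	/* last sequence number seen as signalled */
};

static unsigned int toy_fence_count_emitted(const struct toy_fence_drv *drv)
{
	/* Fences still outstanding: emitted minus signalled. */
	return (unsigned int)(drv->sync_seq - drv->last_seq);
}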
drivers/gpu/drm/radeon/radeon_ib.c
130
struct radeon_ring *ring = &rdev->ring[ib->ring];
drivers/gpu/drm/radeon/radeon_ib.c
133
if (!ib->length_dw || !ring->ready) {
drivers/gpu/drm/radeon/radeon_ib.c
140
r = radeon_ring_lock(rdev, ring, 64 + RADEON_NUM_SYNCS * 8);
drivers/gpu/drm/radeon/radeon_ib.c
149
vm_id_fence = radeon_vm_grab_id(rdev, ib->vm, ib->ring);
drivers/gpu/drm/radeon/radeon_ib.c
154
r = radeon_sync_rings(rdev, &ib->sync, ib->ring);
drivers/gpu/drm/radeon/radeon_ib.c
157
radeon_ring_unlock_undo(rdev, ring);
drivers/gpu/drm/radeon/radeon_ib.c
162
radeon_vm_flush(rdev, ib->vm, ib->ring,
drivers/gpu/drm/radeon/radeon_ib.c
166
radeon_ring_ib_execute(rdev, const_ib->ring, const_ib);
drivers/gpu/drm/radeon/radeon_ib.c
169
radeon_ring_ib_execute(rdev, ib->ring, ib);
drivers/gpu/drm/radeon/radeon_ib.c
170
r = radeon_fence_emit(rdev, &ib->fence, ib->ring);
drivers/gpu/drm/radeon/radeon_ib.c
173
radeon_ring_unlock_undo(rdev, ring);
drivers/gpu/drm/radeon/radeon_ib.c
183
radeon_ring_unlock_commit(rdev, ring, hdp_flush);
drivers/gpu/drm/radeon/radeon_ib.c
264
struct radeon_ring *ring = &rdev->ring[i];
drivers/gpu/drm/radeon/radeon_ib.c
266
if (!ring->ready)
drivers/gpu/drm/radeon/radeon_ib.c
269
r = radeon_ib_test(rdev, i, ring);
drivers/gpu/drm/radeon/radeon_ib.c
272
ring->ready = false;
drivers/gpu/drm/radeon/radeon_ib.c
60
int radeon_ib_get(struct radeon_device *rdev, int ring,
drivers/gpu/drm/radeon/radeon_ib.c
74
ib->ring = ring;
drivers/gpu/drm/radeon/radeon_irq_kms.c
377
void radeon_irq_kms_sw_irq_get(struct radeon_device *rdev, int ring)
drivers/gpu/drm/radeon/radeon_irq_kms.c
384
if (atomic_inc_return(&rdev->irq.ring_int[ring]) == 1) {
drivers/gpu/drm/radeon/radeon_irq_kms.c
401
bool radeon_irq_kms_sw_irq_get_delayed(struct radeon_device *rdev, int ring)
drivers/gpu/drm/radeon/radeon_irq_kms.c
403
return atomic_inc_return(&rdev->irq.ring_int[ring]) == 1;
drivers/gpu/drm/radeon/radeon_irq_kms.c
416
void radeon_irq_kms_sw_irq_put(struct radeon_device *rdev, int ring)
drivers/gpu/drm/radeon/radeon_irq_kms.c
423
if (atomic_dec_and_test(&rdev->irq.ring_int[ring])) {
drivers/gpu/drm/radeon/radeon_kms.c
474
*value = rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready;
drivers/gpu/drm/radeon/radeon_kms.c
477
*value = rdev->ring[R600_RING_TYPE_DMA_INDEX].ready;
drivers/gpu/drm/radeon/radeon_kms.c
478
*value |= rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX].ready;
drivers/gpu/drm/radeon/radeon_kms.c
481
*value = rdev->ring[R600_RING_TYPE_UVD_INDEX].ready;
drivers/gpu/drm/radeon/radeon_kms.c
484
*value = rdev->ring[TN_RING_TYPE_VCE1_INDEX].ready;
drivers/gpu/drm/radeon/radeon_object.c
468
struct list_head *head, int ring)
drivers/gpu/drm/radeon/radeon_object.c
511
if (ring == R600_RING_TYPE_UVD_INDEX)
drivers/gpu/drm/radeon/radeon_object.h
156
struct list_head *head, int ring);
drivers/gpu/drm/radeon/radeon_pm.c
1149
struct radeon_ring *ring = &rdev->ring[i];
drivers/gpu/drm/radeon/radeon_pm.c
1150
if (ring->ready)
drivers/gpu/drm/radeon/radeon_pm.c
1866
struct radeon_ring *ring = &rdev->ring[i];
drivers/gpu/drm/radeon/radeon_pm.c
1868
if (ring->ready) {
drivers/gpu/drm/radeon/radeon_pm.c
268
struct radeon_ring *ring = &rdev->ring[i];
drivers/gpu/drm/radeon/radeon_pm.c
269
if (!ring->ready) {
drivers/gpu/drm/radeon/radeon_ring.c
109
int radeon_ring_alloc(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ndw)
drivers/gpu/drm/radeon/radeon_ring.c
114
if (ndw > (ring->ring_size / 4))
drivers/gpu/drm/radeon/radeon_ring.c
118
radeon_ring_free_size(rdev, ring);
drivers/gpu/drm/radeon/radeon_ring.c
119
ndw = (ndw + ring->align_mask) & ~ring->align_mask;
drivers/gpu/drm/radeon/radeon_ring.c
120
while (ndw > (ring->ring_free_dw - 1)) {
drivers/gpu/drm/radeon/radeon_ring.c
121
radeon_ring_free_size(rdev, ring);
drivers/gpu/drm/radeon/radeon_ring.c
122
if (ndw < ring->ring_free_dw) {
drivers/gpu/drm/radeon/radeon_ring.c
125
r = radeon_fence_wait_next(rdev, ring->idx);
drivers/gpu/drm/radeon/radeon_ring.c
129
ring->count_dw = ndw;
drivers/gpu/drm/radeon/radeon_ring.c
130
ring->wptr_old = ring->wptr;
drivers/gpu/drm/radeon/radeon_ring.c
145
int radeon_ring_lock(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ndw)
drivers/gpu/drm/radeon/radeon_ring.c
150
r = radeon_ring_alloc(rdev, ring, ndw);
drivers/gpu/drm/radeon/radeon_ring.c
169
void radeon_ring_commit(struct radeon_device *rdev, struct radeon_ring *ring,
drivers/gpu/drm/radeon/radeon_ring.c
175
if (hdp_flush && rdev->asic->ring[ring->idx]->hdp_flush)
drivers/gpu/drm/radeon/radeon_ring.c
176
rdev->asic->ring[ring->idx]->hdp_flush(rdev, ring);
drivers/gpu/drm/radeon/radeon_ring.c
178
while (ring->wptr & ring->align_mask) {
drivers/gpu/drm/radeon/radeon_ring.c
179
radeon_ring_write(ring, ring->nop);
drivers/gpu/drm/radeon/radeon_ring.c
187
radeon_ring_set_wptr(rdev, ring);
drivers/gpu/drm/radeon/radeon_ring.c
200
void radeon_ring_unlock_commit(struct radeon_device *rdev, struct radeon_ring *ring,
drivers/gpu/drm/radeon/radeon_ring.c
203
radeon_ring_commit(rdev, ring, hdp_flush);
drivers/gpu/drm/radeon/radeon_ring.c
214
void radeon_ring_undo(struct radeon_ring *ring)
drivers/gpu/drm/radeon/radeon_ring.c
216
ring->wptr = ring->wptr_old;
drivers/gpu/drm/radeon/radeon_ring.c
227
void radeon_ring_unlock_undo(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/radeon_ring.c
229
radeon_ring_undo(ring);
drivers/gpu/drm/radeon/radeon_ring.c
242
struct radeon_ring *ring)
drivers/gpu/drm/radeon/radeon_ring.c
244
atomic_set(&ring->last_rptr, radeon_ring_get_rptr(rdev, ring));
drivers/gpu/drm/radeon/radeon_ring.c
245
atomic64_set(&ring->last_activity, jiffies_64);
drivers/gpu/drm/radeon/radeon_ring.c
254
bool radeon_ring_test_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/radeon_ring.c
256
uint32_t rptr = radeon_ring_get_rptr(rdev, ring);
drivers/gpu/drm/radeon/radeon_ring.c
257
uint64_t last = atomic64_read(&ring->last_activity);
drivers/gpu/drm/radeon/radeon_ring.c
260
if (rptr != atomic_read(&ring->last_rptr)) {
drivers/gpu/drm/radeon/radeon_ring.c
262
radeon_ring_lockup_update(rdev, ring);
drivers/gpu/drm/radeon/radeon_ring.c
269
ring->idx, elapsed);
drivers/gpu/drm/radeon/radeon_ring.c
285
unsigned radeon_ring_backup(struct radeon_device *rdev, struct radeon_ring *ring,
drivers/gpu/drm/radeon/radeon_ring.c
294
if (ring->ring_obj == NULL) {
drivers/gpu/drm/radeon/radeon_ring.c
300
if (!radeon_fence_count_emitted(rdev, ring->idx)) {
drivers/gpu/drm/radeon/radeon_ring.c
306
if (ring->rptr_save_reg)
drivers/gpu/drm/radeon/radeon_ring.c
307
ptr = RREG32(ring->rptr_save_reg);
drivers/gpu/drm/radeon/radeon_ring.c
309
ptr = le32_to_cpu(*ring->next_rptr_cpu_addr);
drivers/gpu/drm/radeon/radeon_ring.c
316
size = ring->wptr + (ring->ring_size / 4);
drivers/gpu/drm/radeon/radeon_ring.c
318
size &= ring->ptr_mask;
drivers/gpu/drm/radeon/radeon_ring.c
331
(*data)[i] = ring->ring[ptr++];
drivers/gpu/drm/radeon/radeon_ring.c
332
ptr &= ring->ptr_mask;
drivers/gpu/drm/radeon/radeon_ring.c
349
int radeon_ring_restore(struct radeon_device *rdev, struct radeon_ring *ring,
drivers/gpu/drm/radeon/radeon_ring.c
358
r = radeon_ring_lock(rdev, ring, size);
drivers/gpu/drm/radeon/radeon_ring.c
363
radeon_ring_write(ring, data[i]);
drivers/gpu/drm/radeon/radeon_ring.c
366
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/radeon_ring.c
383
int radeon_ring_init(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ring_size,
drivers/gpu/drm/radeon/radeon_ring.c
388
ring->ring_size = ring_size;
drivers/gpu/drm/radeon/radeon_ring.c
389
ring->rptr_offs = rptr_offs;
drivers/gpu/drm/radeon/radeon_ring.c
390
ring->nop = nop;
drivers/gpu/drm/radeon/radeon_ring.c
391
ring->rdev = rdev;
drivers/gpu/drm/radeon/radeon_ring.c
393
if (ring->ring_obj == NULL) {
drivers/gpu/drm/radeon/radeon_ring.c
394
r = radeon_bo_create(rdev, ring->ring_size, PAGE_SIZE, true,
drivers/gpu/drm/radeon/radeon_ring.c
396
NULL, &ring->ring_obj);
drivers/gpu/drm/radeon/radeon_ring.c
401
r = radeon_bo_reserve(ring->ring_obj, false);
drivers/gpu/drm/radeon/radeon_ring.c
404
r = radeon_bo_pin(ring->ring_obj, RADEON_GEM_DOMAIN_GTT,
drivers/gpu/drm/radeon/radeon_ring.c
405
&ring->gpu_addr);
drivers/gpu/drm/radeon/radeon_ring.c
407
radeon_bo_unreserve(ring->ring_obj);
drivers/gpu/drm/radeon/radeon_ring.c
411
r = radeon_bo_kmap(ring->ring_obj,
drivers/gpu/drm/radeon/radeon_ring.c
412
(void **)&ring->ring);
drivers/gpu/drm/radeon/radeon_ring.c
413
radeon_bo_unreserve(ring->ring_obj);
drivers/gpu/drm/radeon/radeon_ring.c
418
radeon_debugfs_ring_init(rdev, ring);
drivers/gpu/drm/radeon/radeon_ring.c
420
ring->ptr_mask = (ring->ring_size / 4) - 1;
drivers/gpu/drm/radeon/radeon_ring.c
421
ring->ring_free_dw = ring->ring_size / 4;
drivers/gpu/drm/radeon/radeon_ring.c
423
u32 index = RADEON_WB_RING0_NEXT_RPTR + (ring->idx * 4);
drivers/gpu/drm/radeon/radeon_ring.c
424
ring->next_rptr_gpu_addr = rdev->wb.gpu_addr + index;
drivers/gpu/drm/radeon/radeon_ring.c
425
ring->next_rptr_cpu_addr = &rdev->wb.wb[index/4];
drivers/gpu/drm/radeon/radeon_ring.c
427
radeon_ring_lockup_update(rdev, ring);
drivers/gpu/drm/radeon/radeon_ring.c
439
void radeon_ring_fini(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/radeon_ring.c
445
ring_obj = ring->ring_obj;
drivers/gpu/drm/radeon/radeon_ring.c
446
ring->ready = false;
drivers/gpu/drm/radeon/radeon_ring.c
447
ring->ring = NULL;
drivers/gpu/drm/radeon/radeon_ring.c
448
ring->ring_obj = NULL;
drivers/gpu/drm/radeon/radeon_ring.c
469
struct radeon_ring *ring = m->private;
drivers/gpu/drm/radeon/radeon_ring.c
470
struct radeon_device *rdev = ring->rdev;
drivers/gpu/drm/radeon/radeon_ring.c
475
radeon_ring_free_size(rdev, ring);
drivers/gpu/drm/radeon/radeon_ring.c
476
count = (ring->ring_size / 4) - ring->ring_free_dw;
drivers/gpu/drm/radeon/radeon_ring.c
478
wptr = radeon_ring_get_wptr(rdev, ring);
drivers/gpu/drm/radeon/radeon_ring.c
482
rptr = radeon_ring_get_rptr(rdev, ring);
drivers/gpu/drm/radeon/radeon_ring.c
486
if (ring->rptr_save_reg) {
drivers/gpu/drm/radeon/radeon_ring.c
487
rptr_next = RREG32(ring->rptr_save_reg);
drivers/gpu/drm/radeon/radeon_ring.c
489
ring->rptr_save_reg, rptr_next, rptr_next);
drivers/gpu/drm/radeon/radeon_ring.c
494
ring->wptr, ring->wptr);
drivers/gpu/drm/radeon/radeon_ring.c
496
ring->last_semaphore_signal_addr);
drivers/gpu/drm/radeon/radeon_ring.c
498
ring->last_semaphore_wait_addr);
drivers/gpu/drm/radeon/radeon_ring.c
499
seq_printf(m, "%u free dwords in ring\n", ring->ring_free_dw);
drivers/gpu/drm/radeon/radeon_ring.c
50
static void radeon_debugfs_ring_init(struct radeon_device *rdev, struct radeon_ring *ring);
drivers/gpu/drm/radeon/radeon_ring.c
502
if (!ring->ring)
drivers/gpu/drm/radeon/radeon_ring.c
508
i = (rptr + ring->ptr_mask + 1 - 32) & ring->ptr_mask;
drivers/gpu/drm/radeon/radeon_ring.c
510
seq_printf(m, "r[%5d]=0x%08x", i, ring->ring[i]);
drivers/gpu/drm/radeon/radeon_ring.c
516
i = (i + 1) & ring->ptr_mask;
drivers/gpu/drm/radeon/radeon_ring.c
549
static void radeon_debugfs_ring_init(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/radeon_ring.c
552
const char *ring_name = radeon_debugfs_ring_idx_to_name(ring->idx);
drivers/gpu/drm/radeon/radeon_ring.c
556
debugfs_create_file(ring_name, 0444, root, ring,
drivers/gpu/drm/radeon/radeon_ring.c
63
struct radeon_ring *ring)
drivers/gpu/drm/radeon/radeon_ring.c
65
switch (ring->idx) {
drivers/gpu/drm/radeon/radeon_ring.c
83
void radeon_ring_free_size(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/radeon_ring.c
85
uint32_t rptr = radeon_ring_get_rptr(rdev, ring);
drivers/gpu/drm/radeon/radeon_ring.c
88
ring->ring_free_dw = rptr + (ring->ring_size / 4);
drivers/gpu/drm/radeon/radeon_ring.c
89
ring->ring_free_dw -= ring->wptr;
drivers/gpu/drm/radeon/radeon_ring.c
90
ring->ring_free_dw &= ring->ptr_mask;
drivers/gpu/drm/radeon/radeon_ring.c
91
if (!ring->ring_free_dw) {
drivers/gpu/drm/radeon/radeon_ring.c
93
ring->ring_free_dw = ring->ring_size / 4;
drivers/gpu/drm/radeon/radeon_ring.c
95
radeon_ring_lockup_update(rdev, ring);
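The radeon_ring.c hits above show the ring free-space bookkeeping (radeon_ring_free_size) as modular arithmetic on dword pointers. A minimal standalone sketch of the same computation, assuming a power-of-two ring size in bytes and toy field names rather than struct radeon_ring itself:
/* Hedged sketch only: mirrors the excerpted arithmetic, not the driver code. */
#include <stdint.h>

struct toy_ring {
	uint32_t ring_size;	/* in bytes, assumed to be a power of two */
	uint32_t ptr_mask;	/* (ring_size / 4) - 1, as in the excerpts */
	uint32_t rptr;		/* read pointer, in dwords */
	uint32_t wptr;		/* write pointer, in dwords */
};

static uint32_t toy_ring_free_dw(const struct toy_ring *ring)
{
	/* Distance from wptr back around to rptr, modulo the ring size. */
	uint32_t free_dw = ring->rptr + (ring->ring_size / 4);

	free_dw -= ring->wptr;
	free_dw &= ring->ptr_mask;
	/* A result of zero means rptr == wptr, i.e. the ring is empty. */
	if (!free_dw)
		free_dw = ring->ring_size / 4;
	return free_dw;
}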
drivers/gpu/drm/radeon/radeon_semaphore.c
61
struct radeon_ring *ring = &rdev->ring[ridx];
drivers/gpu/drm/radeon/radeon_semaphore.c
65
if (radeon_semaphore_ring_emit(rdev, ridx, ring, semaphore, false)) {
drivers/gpu/drm/radeon/radeon_semaphore.c
69
ring->last_semaphore_signal_addr = semaphore->gpu_addr;
drivers/gpu/drm/radeon/radeon_semaphore.c
78
struct radeon_ring *ring = &rdev->ring[ridx];
drivers/gpu/drm/radeon/radeon_semaphore.c
82
if (radeon_semaphore_ring_emit(rdev, ridx, ring, semaphore, true)) {
drivers/gpu/drm/radeon/radeon_semaphore.c
86
ring->last_semaphore_wait_addr = semaphore->gpu_addr;
drivers/gpu/drm/radeon/radeon_sync.c
123
int ring)
drivers/gpu/drm/radeon/radeon_sync.c
133
if (!radeon_fence_need_sync(fence, ring))
drivers/gpu/drm/radeon/radeon_sync.c
137
if (!rdev->ring[i].ready) {
drivers/gpu/drm/radeon/radeon_sync.c
156
r = radeon_ring_alloc(rdev, &rdev->ring[i], 16);
drivers/gpu/drm/radeon/radeon_sync.c
163
radeon_ring_undo(&rdev->ring[i]);
drivers/gpu/drm/radeon/radeon_sync.c
171
if (!radeon_semaphore_emit_wait(rdev, ring, semaphore)) {
drivers/gpu/drm/radeon/radeon_sync.c
173
radeon_ring_undo(&rdev->ring[i]);
drivers/gpu/drm/radeon/radeon_sync.c
180
radeon_ring_commit(rdev, &rdev->ring[i], false);
drivers/gpu/drm/radeon/radeon_sync.c
181
radeon_fence_note_sync(fence, ring);
drivers/gpu/drm/radeon/radeon_sync.c
70
other = sync->sync_to[fence->ring];
drivers/gpu/drm/radeon/radeon_sync.c
71
sync->sync_to[fence->ring] = radeon_fence_later(fence, other);
drivers/gpu/drm/radeon/radeon_test.c
120
if (ring == R600_RING_TYPE_DMA_INDEX)
drivers/gpu/drm/radeon/radeon_test.c
171
if (ring == R600_RING_TYPE_DMA_INDEX)
drivers/gpu/drm/radeon/radeon_test.c
263
struct radeon_ring *ring,
drivers/gpu/drm/radeon/radeon_test.c
266
uint32_t handle = ring->idx ^ 0xdeafbeef;
drivers/gpu/drm/radeon/radeon_test.c
269
if (ring->idx == R600_RING_TYPE_UVD_INDEX) {
drivers/gpu/drm/radeon/radeon_test.c
270
r = radeon_uvd_get_create_msg(rdev, ring->idx, handle, NULL);
drivers/gpu/drm/radeon/radeon_test.c
276
r = radeon_uvd_get_destroy_msg(rdev, ring->idx, handle, fence);
drivers/gpu/drm/radeon/radeon_test.c
282
} else if (ring->idx == TN_RING_TYPE_VCE1_INDEX ||
drivers/gpu/drm/radeon/radeon_test.c
283
ring->idx == TN_RING_TYPE_VCE2_INDEX) {
drivers/gpu/drm/radeon/radeon_test.c
284
r = radeon_vce_get_create_msg(rdev, ring->idx, handle, NULL);
drivers/gpu/drm/radeon/radeon_test.c
290
r = radeon_vce_get_destroy_msg(rdev, ring->idx, handle, fence);
drivers/gpu/drm/radeon/radeon_test.c
297
r = radeon_ring_lock(rdev, ring, 64);
drivers/gpu/drm/radeon/radeon_test.c
299
DRM_ERROR("Failed to lock ring A %d\n", ring->idx);
drivers/gpu/drm/radeon/radeon_test.c
302
r = radeon_fence_emit(rdev, fence, ring->idx);
drivers/gpu/drm/radeon/radeon_test.c
305
radeon_ring_unlock_undo(rdev, ring);
drivers/gpu/drm/radeon/radeon_test.c
308
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/radeon_test.c
41
int i, r, ring;
drivers/gpu/drm/radeon/radeon_test.c
45
ring = radeon_copy_dma_ring_index(rdev);
drivers/gpu/drm/radeon/radeon_test.c
48
ring = radeon_copy_blit_ring_index(rdev);
drivers/gpu/drm/radeon/radeon_test.c
531
struct radeon_ring *ringA = &rdev->ring[i];
drivers/gpu/drm/radeon/radeon_test.c
536
struct radeon_ring *ringB = &rdev->ring[j];
drivers/gpu/drm/radeon/radeon_test.c
550
struct radeon_ring *ringC = &rdev->ring[k];
drivers/gpu/drm/radeon/radeon_trace.h
108
TP_PROTO(uint64_t pd_addr, unsigned ring, unsigned id),
drivers/gpu/drm/radeon/radeon_trace.h
109
TP_ARGS(pd_addr, ring, id),
drivers/gpu/drm/radeon/radeon_trace.h
112
__field(u32, ring)
drivers/gpu/drm/radeon/radeon_trace.h
118
__entry->ring = ring;
drivers/gpu/drm/radeon/radeon_trace.h
122
__entry->pd_addr, __entry->ring, __entry->id)
drivers/gpu/drm/radeon/radeon_trace.h
127
TP_PROTO(struct drm_device *dev, int ring, u32 seqno),
drivers/gpu/drm/radeon/radeon_trace.h
129
TP_ARGS(dev, ring, seqno),
drivers/gpu/drm/radeon/radeon_trace.h
133
__field(int, ring)
drivers/gpu/drm/radeon/radeon_trace.h
139
__entry->ring = ring;
drivers/gpu/drm/radeon/radeon_trace.h
144
__entry->dev, __entry->ring, __entry->seqno)
drivers/gpu/drm/radeon/radeon_trace.h
149
TP_PROTO(struct drm_device *dev, int ring, u32 seqno),
drivers/gpu/drm/radeon/radeon_trace.h
151
TP_ARGS(dev, ring, seqno)
drivers/gpu/drm/radeon/radeon_trace.h
156
TP_PROTO(struct drm_device *dev, int ring, u32 seqno),
drivers/gpu/drm/radeon/radeon_trace.h
158
TP_ARGS(dev, ring, seqno)
drivers/gpu/drm/radeon/radeon_trace.h
163
TP_PROTO(struct drm_device *dev, int ring, u32 seqno),
drivers/gpu/drm/radeon/radeon_trace.h
165
TP_ARGS(dev, ring, seqno)
drivers/gpu/drm/radeon/radeon_trace.h
170
TP_PROTO(int ring, struct radeon_semaphore *sem),
drivers/gpu/drm/radeon/radeon_trace.h
172
TP_ARGS(ring, sem),
drivers/gpu/drm/radeon/radeon_trace.h
175
__field(int, ring)
drivers/gpu/drm/radeon/radeon_trace.h
181
__entry->ring = ring;
drivers/gpu/drm/radeon/radeon_trace.h
186
TP_printk("ring=%u, waiters=%d, addr=%010Lx", __entry->ring,
drivers/gpu/drm/radeon/radeon_trace.h
192
TP_PROTO(int ring, struct radeon_semaphore *sem),
drivers/gpu/drm/radeon/radeon_trace.h
194
TP_ARGS(ring, sem)
drivers/gpu/drm/radeon/radeon_trace.h
199
TP_PROTO(int ring, struct radeon_semaphore *sem),
drivers/gpu/drm/radeon/radeon_trace.h
201
TP_ARGS(ring, sem)
drivers/gpu/drm/radeon/radeon_trace.h
34
__field(u32, ring)
drivers/gpu/drm/radeon/radeon_trace.h
40
__entry->ring = p->ring;
drivers/gpu/drm/radeon/radeon_trace.h
43
p->rdev, p->ring);
drivers/gpu/drm/radeon/radeon_trace.h
46
__entry->ring, __entry->dw,
drivers/gpu/drm/radeon/radeon_trace.h
51
TP_PROTO(unsigned vmid, int ring),
drivers/gpu/drm/radeon/radeon_trace.h
52
TP_ARGS(vmid, ring),
drivers/gpu/drm/radeon/radeon_trace.h
55
__field(u32, ring)
drivers/gpu/drm/radeon/radeon_trace.h
60
__entry->ring = ring;
drivers/gpu/drm/radeon/radeon_trace.h
62
TP_printk("vmid=%u, ring=%u", __entry->vmid, __entry->ring)
drivers/gpu/drm/radeon/radeon_ttm.c
102
if (rbo->rdev->ring[radeon_copy_ring_index(rbo->rdev)].ready == false)
drivers/gpu/drm/radeon/radeon_ttm.c
171
if (!rdev->ring[ridx].ready) {
drivers/gpu/drm/radeon/radeon_ttm.c
225
if (rdev->ring[radeon_copy_ring_index(rdev)].ready &&
drivers/gpu/drm/radeon/radeon_uvd.c
246
radeon_ring_fini(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX]);
drivers/gpu/drm/radeon/radeon_uvd.c
730
int ring, uint64_t addr,
drivers/gpu/drm/radeon/radeon_uvd.c
736
r = radeon_ib_get(rdev, ring, &ib, NULL, 64);
drivers/gpu/drm/radeon/radeon_uvd.c
766
int radeon_uvd_get_create_msg(struct radeon_device *rdev, int ring,
drivers/gpu/drm/radeon/radeon_uvd.c
797
r = radeon_uvd_send_msg(rdev, ring, addr, fence);
drivers/gpu/drm/radeon/radeon_uvd.c
802
int radeon_uvd_get_destroy_msg(struct radeon_device *rdev, int ring,
drivers/gpu/drm/radeon/radeon_uvd.c
826
r = radeon_uvd_send_msg(rdev, ring, addr, fence);
drivers/gpu/drm/radeon/radeon_vce.c
346
int radeon_vce_get_create_msg(struct radeon_device *rdev, int ring,
drivers/gpu/drm/radeon/radeon_vce.c
354
r = radeon_ib_get(rdev, ring, &ib, NULL, ib_size_dw * 4);
drivers/gpu/drm/radeon/radeon_vce.c
413
int radeon_vce_get_destroy_msg(struct radeon_device *rdev, int ring,
drivers/gpu/drm/radeon/radeon_vce.c
421
r = radeon_ib_get(rdev, ring, &ib, NULL, ib_size_dw * 4);
drivers/gpu/drm/radeon/radeon_vce.c
696
struct radeon_ring *ring,
drivers/gpu/drm/radeon/radeon_vce.c
702
radeon_ring_write(ring, cpu_to_le32(VCE_CMD_SEMAPHORE));
drivers/gpu/drm/radeon/radeon_vce.c
703
radeon_ring_write(ring, cpu_to_le32((addr >> 3) & 0x000FFFFF));
drivers/gpu/drm/radeon/radeon_vce.c
704
radeon_ring_write(ring, cpu_to_le32((addr >> 23) & 0x000FFFFF));
drivers/gpu/drm/radeon/radeon_vce.c
705
radeon_ring_write(ring, cpu_to_le32(0x01003000 | (emit_wait ? 1 : 0)));
drivers/gpu/drm/radeon/radeon_vce.c
707
radeon_ring_write(ring, cpu_to_le32(VCE_CMD_END));
drivers/gpu/drm/radeon/radeon_vce.c
721
struct radeon_ring *ring = &rdev->ring[ib->ring];
drivers/gpu/drm/radeon/radeon_vce.c
722
radeon_ring_write(ring, cpu_to_le32(VCE_CMD_IB));
drivers/gpu/drm/radeon/radeon_vce.c
723
radeon_ring_write(ring, cpu_to_le32(ib->gpu_addr));
drivers/gpu/drm/radeon/radeon_vce.c
724
radeon_ring_write(ring, cpu_to_le32(upper_32_bits(ib->gpu_addr)));
drivers/gpu/drm/radeon/radeon_vce.c
725
radeon_ring_write(ring, cpu_to_le32(ib->length_dw));
drivers/gpu/drm/radeon/radeon_vce.c
738
struct radeon_ring *ring = &rdev->ring[fence->ring];
drivers/gpu/drm/radeon/radeon_vce.c
739
uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr;
drivers/gpu/drm/radeon/radeon_vce.c
741
radeon_ring_write(ring, cpu_to_le32(VCE_CMD_FENCE));
drivers/gpu/drm/radeon/radeon_vce.c
742
radeon_ring_write(ring, cpu_to_le32(addr));
drivers/gpu/drm/radeon/radeon_vce.c
743
radeon_ring_write(ring, cpu_to_le32(upper_32_bits(addr)));
drivers/gpu/drm/radeon/radeon_vce.c
744
radeon_ring_write(ring, cpu_to_le32(fence->seq));
drivers/gpu/drm/radeon/radeon_vce.c
745
radeon_ring_write(ring, cpu_to_le32(VCE_CMD_TRAP));
drivers/gpu/drm/radeon/radeon_vce.c
746
radeon_ring_write(ring, cpu_to_le32(VCE_CMD_END));
drivers/gpu/drm/radeon/radeon_vce.c
756
int radeon_vce_ring_test(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/radeon_vce.c
758
uint32_t rptr = vce_v1_0_get_rptr(rdev, ring);
drivers/gpu/drm/radeon/radeon_vce.c
762
r = radeon_ring_lock(rdev, ring, 16);
drivers/gpu/drm/radeon/radeon_vce.c
765
ring->idx, r);
drivers/gpu/drm/radeon/radeon_vce.c
768
radeon_ring_write(ring, cpu_to_le32(VCE_CMD_END));
drivers/gpu/drm/radeon/radeon_vce.c
769
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/radeon_vce.c
772
if (vce_v1_0_get_rptr(rdev, ring) != rptr)
drivers/gpu/drm/radeon/radeon_vce.c
779
ring->idx, i);
drivers/gpu/drm/radeon/radeon_vce.c
782
ring->idx);
drivers/gpu/drm/radeon/radeon_vce.c
796
int radeon_vce_ib_test(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/radeon_vce.c
801
r = radeon_vce_get_create_msg(rdev, ring->idx, 1, NULL);
drivers/gpu/drm/radeon/radeon_vce.c
807
r = radeon_vce_get_destroy_msg(rdev, ring->idx, 1, &fence);
drivers/gpu/drm/radeon/radeon_vce.c
821
DRM_INFO("ib test on ring %d succeeded\n", ring->idx);
drivers/gpu/drm/radeon/radeon_vm.c
176
struct radeon_vm *vm, int ring)
drivers/gpu/drm/radeon/radeon_vm.c
179
struct radeon_vm_id *vm_id = &vm->ids[ring];
drivers/gpu/drm/radeon/radeon_vm.c
199
trace_radeon_vm_grab_id(i, ring);
drivers/gpu/drm/radeon/radeon_vm.c
203
if (radeon_fence_is_earlier(fence, best[fence->ring])) {
drivers/gpu/drm/radeon/radeon_vm.c
204
best[fence->ring] = fence;
drivers/gpu/drm/radeon/radeon_vm.c
205
choices[fence->ring == ring ? 0 : 1] = i;
drivers/gpu/drm/radeon/radeon_vm.c
212
trace_radeon_vm_grab_id(choices[i], ring);
drivers/gpu/drm/radeon/radeon_vm.c
236
int ring, struct radeon_fence *updates)
drivers/gpu/drm/radeon/radeon_vm.c
239
struct radeon_vm_id *vm_id = &vm->ids[ring];
drivers/gpu/drm/radeon/radeon_vm.c
244
trace_radeon_vm_flush(pd_addr, ring, vm->ids[ring].id);
drivers/gpu/drm/radeon/radeon_vm.c
248
radeon_ring_vm_flush(rdev, &rdev->ring[ring],
drivers/gpu/drm/radeon/radeon_vm.c
270
unsigned vm_id = vm->ids[fence->ring].id;
drivers/gpu/drm/radeon/radeon_vm.c
275
radeon_fence_unref(&vm->ids[fence->ring].last_id_use);
drivers/gpu/drm/radeon/radeon_vm.c
276
vm->ids[fence->ring].last_id_use = radeon_fence_ref(fence);
drivers/gpu/drm/radeon/rv515.c
100
radeon_ring_write(ring,
drivers/gpu/drm/radeon/rv515.c
108
radeon_ring_write(ring, PACKET0(GA_ENHANCE, 0));
drivers/gpu/drm/radeon/rv515.c
109
radeon_ring_write(ring, GA_DEADLOCK_CNTL | GA_FASTSYNC_CNTL);
drivers/gpu/drm/radeon/rv515.c
110
radeon_ring_write(ring, PACKET0(GA_POLY_MODE, 0));
drivers/gpu/drm/radeon/rv515.c
111
radeon_ring_write(ring, FRONT_PTYPE_TRIANGE | BACK_PTYPE_TRIANGE);
drivers/gpu/drm/radeon/rv515.c
112
radeon_ring_write(ring, PACKET0(GA_ROUND_MODE, 0));
drivers/gpu/drm/radeon/rv515.c
113
radeon_ring_write(ring, GEOMETRY_ROUND_NEAREST | COLOR_ROUND_NEAREST);
drivers/gpu/drm/radeon/rv515.c
114
radeon_ring_write(ring, PACKET0(0x20C8, 0));
drivers/gpu/drm/radeon/rv515.c
115
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/rv515.c
116
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/rv515.c
51
void rv515_ring_start(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/rv515.c
55
r = radeon_ring_lock(rdev, ring, 64);
drivers/gpu/drm/radeon/rv515.c
59
radeon_ring_write(ring, PACKET0(ISYNC_CNTL, 0));
drivers/gpu/drm/radeon/rv515.c
60
radeon_ring_write(ring,
drivers/gpu/drm/radeon/rv515.c
65
radeon_ring_write(ring, PACKET0(WAIT_UNTIL, 0));
drivers/gpu/drm/radeon/rv515.c
66
radeon_ring_write(ring, WAIT_2D_IDLECLEAN | WAIT_3D_IDLECLEAN);
drivers/gpu/drm/radeon/rv515.c
67
radeon_ring_write(ring, PACKET0(R300_DST_PIPE_CONFIG, 0));
drivers/gpu/drm/radeon/rv515.c
68
radeon_ring_write(ring, R300_PIPE_AUTO_CONFIG);
drivers/gpu/drm/radeon/rv515.c
69
radeon_ring_write(ring, PACKET0(GB_SELECT, 0));
drivers/gpu/drm/radeon/rv515.c
70
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/rv515.c
71
radeon_ring_write(ring, PACKET0(GB_ENABLE, 0));
drivers/gpu/drm/radeon/rv515.c
72
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/rv515.c
73
radeon_ring_write(ring, PACKET0(R500_SU_REG_DEST, 0));
drivers/gpu/drm/radeon/rv515.c
74
radeon_ring_write(ring, (1 << rdev->num_gb_pipes) - 1);
drivers/gpu/drm/radeon/rv515.c
75
radeon_ring_write(ring, PACKET0(VAP_INDEX_OFFSET, 0));
drivers/gpu/drm/radeon/rv515.c
76
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/rv515.c
77
radeon_ring_write(ring, PACKET0(RB3D_DSTCACHE_CTLSTAT, 0));
drivers/gpu/drm/radeon/rv515.c
78
radeon_ring_write(ring, RB3D_DC_FLUSH | RB3D_DC_FREE);
drivers/gpu/drm/radeon/rv515.c
79
radeon_ring_write(ring, PACKET0(ZB_ZCACHE_CTLSTAT, 0));
drivers/gpu/drm/radeon/rv515.c
80
radeon_ring_write(ring, ZC_FLUSH | ZC_FREE);
drivers/gpu/drm/radeon/rv515.c
81
radeon_ring_write(ring, PACKET0(WAIT_UNTIL, 0));
drivers/gpu/drm/radeon/rv515.c
82
radeon_ring_write(ring, WAIT_2D_IDLECLEAN | WAIT_3D_IDLECLEAN);
drivers/gpu/drm/radeon/rv515.c
83
radeon_ring_write(ring, PACKET0(GB_AA_CONFIG, 0));
drivers/gpu/drm/radeon/rv515.c
84
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/rv515.c
85
radeon_ring_write(ring, PACKET0(RB3D_DSTCACHE_CTLSTAT, 0));
drivers/gpu/drm/radeon/rv515.c
86
radeon_ring_write(ring, RB3D_DC_FLUSH | RB3D_DC_FREE);
drivers/gpu/drm/radeon/rv515.c
87
radeon_ring_write(ring, PACKET0(ZB_ZCACHE_CTLSTAT, 0));
drivers/gpu/drm/radeon/rv515.c
88
radeon_ring_write(ring, ZC_FLUSH | ZC_FREE);
drivers/gpu/drm/radeon/rv515.c
89
radeon_ring_write(ring, PACKET0(GB_MSPOS0, 0));
drivers/gpu/drm/radeon/rv515.c
90
radeon_ring_write(ring,
drivers/gpu/drm/radeon/rv515.c
99
radeon_ring_write(ring, PACKET0(GB_MSPOS1, 0));
drivers/gpu/drm/radeon/rv770.c
1086
rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
drivers/gpu/drm/radeon/rv770.c
1129
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/rv770.c
1131
radeon_ring_fini(rdev, ring);
drivers/gpu/drm/radeon/rv770.c
1132
radeon_scratch_free(rdev, ring->rptr_save_reg);
drivers/gpu/drm/radeon/rv770.c
1706
rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
drivers/gpu/drm/radeon/rv770.c
1707
r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
drivers/gpu/drm/radeon/rv770.c
1730
rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
drivers/gpu/drm/radeon/rv770.c
1735
struct radeon_ring *ring;
drivers/gpu/drm/radeon/rv770.c
1738
if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
drivers/gpu/drm/radeon/rv770.c
1741
ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
drivers/gpu/drm/radeon/rv770.c
1742
r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0));
drivers/gpu/drm/radeon/rv770.c
1756
struct radeon_ring *ring;
drivers/gpu/drm/radeon/rv770.c
1813
ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/rv770.c
1814
r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
drivers/gpu/drm/radeon/rv770.c
1819
ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
drivers/gpu/drm/radeon/rv770.c
1820
r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
drivers/gpu/drm/radeon/rv770.c
1966
rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
drivers/gpu/drm/radeon/rv770.c
1967
r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
drivers/gpu/drm/radeon/rv770.c
1969
rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
drivers/gpu/drm/radeon/rv770.c
1970
r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);
drivers/gpu/drm/radeon/rv770_dma.c
50
struct radeon_ring *ring = &rdev->ring[ring_index];
drivers/gpu/drm/radeon/rv770_dma.c
59
r = radeon_ring_lock(rdev, ring, num_loops * 5 + 8);
drivers/gpu/drm/radeon/rv770_dma.c
67
radeon_sync_rings(rdev, &sync, ring->idx);
drivers/gpu/drm/radeon/rv770_dma.c
74
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_COPY, 0, 0, cur_size_in_dw));
drivers/gpu/drm/radeon/rv770_dma.c
75
radeon_ring_write(ring, dst_offset & 0xfffffffc);
drivers/gpu/drm/radeon/rv770_dma.c
76
radeon_ring_write(ring, src_offset & 0xfffffffc);
drivers/gpu/drm/radeon/rv770_dma.c
77
radeon_ring_write(ring, upper_32_bits(dst_offset) & 0xff);
drivers/gpu/drm/radeon/rv770_dma.c
78
radeon_ring_write(ring, upper_32_bits(src_offset) & 0xff);
drivers/gpu/drm/radeon/rv770_dma.c
83
r = radeon_fence_emit(rdev, &fence, ring->idx);
drivers/gpu/drm/radeon/rv770_dma.c
85
radeon_ring_unlock_undo(rdev, ring);
drivers/gpu/drm/radeon/rv770_dma.c
90
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/si.c
3353
struct radeon_ring *ring = &rdev->ring[fence->ring];
drivers/gpu/drm/radeon/si.c
3354
u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
drivers/gpu/drm/radeon/si.c
3357
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
drivers/gpu/drm/radeon/si.c
3358
radeon_ring_write(ring, (CP_COHER_CNTL2 - PACKET3_SET_CONFIG_REG_START) >> 2);
drivers/gpu/drm/radeon/si.c
3359
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/si.c
3360
radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
drivers/gpu/drm/radeon/si.c
3361
radeon_ring_write(ring, PACKET3_TCL1_ACTION_ENA |
drivers/gpu/drm/radeon/si.c
3365
radeon_ring_write(ring, 0xFFFFFFFF);
drivers/gpu/drm/radeon/si.c
3366
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/si.c
3367
radeon_ring_write(ring, 10); /* poll interval */
drivers/gpu/drm/radeon/si.c
3369
radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4));
drivers/gpu/drm/radeon/si.c
3370
radeon_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_TS_EVENT) | EVENT_INDEX(5));
drivers/gpu/drm/radeon/si.c
3371
radeon_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/radeon/si.c
3372
radeon_ring_write(ring, (upper_32_bits(addr) & 0xff) | DATA_SEL(1) | INT_SEL(2));
drivers/gpu/drm/radeon/si.c
3373
radeon_ring_write(ring, fence->seq);
drivers/gpu/drm/radeon/si.c
3374
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/si.c
3382
struct radeon_ring *ring = &rdev->ring[ib->ring];
drivers/gpu/drm/radeon/si.c
3383
unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0;
drivers/gpu/drm/radeon/si.c
3388
radeon_ring_write(ring, PACKET3(PACKET3_SWITCH_BUFFER, 0));
drivers/gpu/drm/radeon/si.c
3389
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/si.c
3394
if (ring->rptr_save_reg) {
drivers/gpu/drm/radeon/si.c
3395
next_rptr = ring->wptr + 3 + 4 + 8;
drivers/gpu/drm/radeon/si.c
3396
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
drivers/gpu/drm/radeon/si.c
3397
radeon_ring_write(ring, ((ring->rptr_save_reg -
drivers/gpu/drm/radeon/si.c
3399
radeon_ring_write(ring, next_rptr);
drivers/gpu/drm/radeon/si.c
3401
next_rptr = ring->wptr + 5 + 4 + 8;
drivers/gpu/drm/radeon/si.c
3402
radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/radeon/si.c
3403
radeon_ring_write(ring, (1 << 8));
drivers/gpu/drm/radeon/si.c
3404
radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
drivers/gpu/drm/radeon/si.c
3405
radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr));
drivers/gpu/drm/radeon/si.c
3406
radeon_ring_write(ring, next_rptr);
drivers/gpu/drm/radeon/si.c
3412
radeon_ring_write(ring, header);
drivers/gpu/drm/radeon/si.c
3413
radeon_ring_write(ring,
drivers/gpu/drm/radeon/si.c
3418
radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);
drivers/gpu/drm/radeon/si.c
3419
radeon_ring_write(ring, ib->length_dw | (vm_id << 24));
drivers/gpu/drm/radeon/si.c
3423
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
drivers/gpu/drm/radeon/si.c
3424
radeon_ring_write(ring, (CP_COHER_CNTL2 - PACKET3_SET_CONFIG_REG_START) >> 2);
drivers/gpu/drm/radeon/si.c
3425
radeon_ring_write(ring, vm_id);
drivers/gpu/drm/radeon/si.c
3426
radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
drivers/gpu/drm/radeon/si.c
3427
radeon_ring_write(ring, PACKET3_TCL1_ACTION_ENA |
drivers/gpu/drm/radeon/si.c
3431
radeon_ring_write(ring, 0xFFFFFFFF);
drivers/gpu/drm/radeon/si.c
3432
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/si.c
3433
radeon_ring_write(ring, 10); /* poll interval */
drivers/gpu/drm/radeon/si.c
3449
rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
drivers/gpu/drm/radeon/si.c
3450
rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
drivers/gpu/drm/radeon/si.c
3451
rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
drivers/gpu/drm/radeon/si.c
3539
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/si.c
3542
r = radeon_ring_lock(rdev, ring, 7 + 4);
drivers/gpu/drm/radeon/si.c
3548
radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
drivers/gpu/drm/radeon/si.c
3549
radeon_ring_write(ring, 0x1);
drivers/gpu/drm/radeon/si.c
3550
radeon_ring_write(ring, 0x0);
drivers/gpu/drm/radeon/si.c
3551
radeon_ring_write(ring, rdev->config.si.max_hw_contexts - 1);
drivers/gpu/drm/radeon/si.c
3552
radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
drivers/gpu/drm/radeon/si.c
3553
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/si.c
3554
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/si.c
3557
radeon_ring_write(ring, PACKET3(PACKET3_SET_BASE, 2));
drivers/gpu/drm/radeon/si.c
3558
radeon_ring_write(ring, PACKET3_BASE_INDEX(CE_PARTITION_BASE));
drivers/gpu/drm/radeon/si.c
3559
radeon_ring_write(ring, 0xc000);
drivers/gpu/drm/radeon/si.c
3560
radeon_ring_write(ring, 0xe000);
drivers/gpu/drm/radeon/si.c
3561
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/si.c
3565
r = radeon_ring_lock(rdev, ring, si_default_size + 10);
drivers/gpu/drm/radeon/si.c
3572
radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
drivers/gpu/drm/radeon/si.c
3573
radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
drivers/gpu/drm/radeon/si.c
3576
radeon_ring_write(ring, si_default_state[i]);
drivers/gpu/drm/radeon/si.c
3578
radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
drivers/gpu/drm/radeon/si.c
3579
radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
drivers/gpu/drm/radeon/si.c
3582
radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
drivers/gpu/drm/radeon/si.c
3583
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/si.c
3585
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
drivers/gpu/drm/radeon/si.c
3586
radeon_ring_write(ring, 0x00000316);
drivers/gpu/drm/radeon/si.c
3587
radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
drivers/gpu/drm/radeon/si.c
3588
radeon_ring_write(ring, 0x00000010); /* VGT_OUT_DEALLOC_CNTL */
drivers/gpu/drm/radeon/si.c
3590
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/si.c
3593
ring = &rdev->ring[i];
drivers/gpu/drm/radeon/si.c
3594
r = radeon_ring_lock(rdev, ring, 2);
drivers/gpu/drm/radeon/si.c
3601
radeon_ring_write(ring, PACKET3_COMPUTE(PACKET3_CLEAR_STATE, 0));
drivers/gpu/drm/radeon/si.c
3602
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/si.c
3604
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/si.c
3612
struct radeon_ring *ring;
drivers/gpu/drm/radeon/si.c
3615
ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/si.c
3616
radeon_ring_fini(rdev, ring);
drivers/gpu/drm/radeon/si.c
3617
radeon_scratch_free(rdev, ring->rptr_save_reg);
drivers/gpu/drm/radeon/si.c
3619
ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
drivers/gpu/drm/radeon/si.c
3620
radeon_ring_fini(rdev, ring);
drivers/gpu/drm/radeon/si.c
3621
radeon_scratch_free(rdev, ring->rptr_save_reg);
drivers/gpu/drm/radeon/si.c
3623
ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
drivers/gpu/drm/radeon/si.c
3624
radeon_ring_fini(rdev, ring);
drivers/gpu/drm/radeon/si.c
3625
radeon_scratch_free(rdev, ring->rptr_save_reg);
drivers/gpu/drm/radeon/si.c
3630
struct radeon_ring *ring;
drivers/gpu/drm/radeon/si.c
3648
ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/si.c
3649
rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/radeon/si.c
3658
ring->wptr = 0;
drivers/gpu/drm/radeon/si.c
3659
WREG32(CP_RB0_WPTR, ring->wptr);
drivers/gpu/drm/radeon/si.c
3675
WREG32(CP_RB0_BASE, ring->gpu_addr >> 8);
drivers/gpu/drm/radeon/si.c
3679
ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
drivers/gpu/drm/radeon/si.c
3680
rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/radeon/si.c
3689
ring->wptr = 0;
drivers/gpu/drm/radeon/si.c
3690
WREG32(CP_RB1_WPTR, ring->wptr);
drivers/gpu/drm/radeon/si.c
3699
WREG32(CP_RB1_BASE, ring->gpu_addr >> 8);
drivers/gpu/drm/radeon/si.c
3703
ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
drivers/gpu/drm/radeon/si.c
3704
rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/radeon/si.c
3713
ring->wptr = 0;
drivers/gpu/drm/radeon/si.c
3714
WREG32(CP_RB2_WPTR, ring->wptr);
drivers/gpu/drm/radeon/si.c
3723
WREG32(CP_RB2_BASE, ring->gpu_addr >> 8);
drivers/gpu/drm/radeon/si.c
3727
rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = true;
drivers/gpu/drm/radeon/si.c
3728
rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = true;
drivers/gpu/drm/radeon/si.c
3729
rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = true;
drivers/gpu/drm/radeon/si.c
3730
r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]);
drivers/gpu/drm/radeon/si.c
3732
rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
drivers/gpu/drm/radeon/si.c
3733
rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
drivers/gpu/drm/radeon/si.c
3734
rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
drivers/gpu/drm/radeon/si.c
3737
r = radeon_ring_test(rdev, CAYMAN_RING_TYPE_CP1_INDEX, &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]);
drivers/gpu/drm/radeon/si.c
3739
rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
drivers/gpu/drm/radeon/si.c
3741
r = radeon_ring_test(rdev, CAYMAN_RING_TYPE_CP2_INDEX, &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]);
drivers/gpu/drm/radeon/si.c
3743
rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
drivers/gpu/drm/radeon/si.c
4107
bool si_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/si.c
4114
radeon_ring_lockup_update(rdev, ring);
drivers/gpu/drm/radeon/si.c
4117
return radeon_ring_test_lockup(rdev, ring);
drivers/gpu/drm/radeon/si.c
4740
switch (ib->ring) {
drivers/gpu/drm/radeon/si.c
4749
dev_err(rdev->dev, "Non-PM4 ring %d !\n", ib->ring);
drivers/gpu/drm/radeon/si.c
5056
void si_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
drivers/gpu/drm/radeon/si.c
5060
radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/radeon/si.c
5061
radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(1) |
drivers/gpu/drm/radeon/si.c
5065
radeon_ring_write(ring,
drivers/gpu/drm/radeon/si.c
5068
radeon_ring_write(ring,
drivers/gpu/drm/radeon/si.c
5071
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/si.c
5072
radeon_ring_write(ring, pd_addr >> 12);
drivers/gpu/drm/radeon/si.c
5075
radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/radeon/si.c
5076
radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(1) |
drivers/gpu/drm/radeon/si.c
5078
radeon_ring_write(ring, HDP_MEM_COHERENCY_FLUSH_CNTL >> 2);
drivers/gpu/drm/radeon/si.c
5079
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/si.c
5080
radeon_ring_write(ring, 0x1);
drivers/gpu/drm/radeon/si.c
5083
radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
drivers/gpu/drm/radeon/si.c
5084
radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(1) |
drivers/gpu/drm/radeon/si.c
5086
radeon_ring_write(ring, VM_INVALIDATE_REQUEST >> 2);
drivers/gpu/drm/radeon/si.c
5087
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/si.c
5088
radeon_ring_write(ring, 1 << vm_id);
drivers/gpu/drm/radeon/si.c
5091
radeon_ring_write(ring, PACKET3(PACKET3_WAIT_REG_MEM, 5));
drivers/gpu/drm/radeon/si.c
5092
radeon_ring_write(ring, (WAIT_REG_MEM_FUNCTION(0) | /* always */
drivers/gpu/drm/radeon/si.c
5094
radeon_ring_write(ring, VM_INVALIDATE_REQUEST >> 2);
drivers/gpu/drm/radeon/si.c
5095
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/si.c
5096
radeon_ring_write(ring, 0); /* ref */
drivers/gpu/drm/radeon/si.c
5097
radeon_ring_write(ring, 0); /* mask */
drivers/gpu/drm/radeon/si.c
5098
radeon_ring_write(ring, 0x20); /* poll interval */
drivers/gpu/drm/radeon/si.c
5101
radeon_ring_write(ring, PACKET3(PACKET3_PFP_SYNC_ME, 0));
drivers/gpu/drm/radeon/si.c
5102
radeon_ring_write(ring, 0x0);
drivers/gpu/drm/radeon/si.c
6262
src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
drivers/gpu/drm/radeon/si.c
6263
src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
drivers/gpu/drm/radeon/si.c
6264
ring_id = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff;
drivers/gpu/drm/radeon/si.c
6458
rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
drivers/gpu/drm/radeon/si.c
6459
r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
drivers/gpu/drm/radeon/si.c
6482
rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
drivers/gpu/drm/radeon/si.c
6487
struct radeon_ring *ring;
drivers/gpu/drm/radeon/si.c
6490
if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
drivers/gpu/drm/radeon/si.c
6493
ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
drivers/gpu/drm/radeon/si.c
6494
r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0));
drivers/gpu/drm/radeon/si.c
6525
rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_obj = NULL;
drivers/gpu/drm/radeon/si.c
6526
r600_ring_init(rdev, &rdev->ring[TN_RING_TYPE_VCE1_INDEX], 4096);
drivers/gpu/drm/radeon/si.c
6527
rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_obj = NULL;
drivers/gpu/drm/radeon/si.c
6528
r600_ring_init(rdev, &rdev->ring[TN_RING_TYPE_VCE2_INDEX], 4096);
drivers/gpu/drm/radeon/si.c
6561
rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size = 0;
drivers/gpu/drm/radeon/si.c
6562
rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_size = 0;
drivers/gpu/drm/radeon/si.c
6567
struct radeon_ring *ring;
drivers/gpu/drm/radeon/si.c
6570
if (!rdev->has_vce || !rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size)
drivers/gpu/drm/radeon/si.c
6573
ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX];
drivers/gpu/drm/radeon/si.c
6574
r = radeon_ring_init(rdev, ring, ring->ring_size, 0, VCE_CMD_NO_OP);
drivers/gpu/drm/radeon/si.c
6579
ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX];
drivers/gpu/drm/radeon/si.c
6580
r = radeon_ring_init(rdev, ring, ring->ring_size, 0, VCE_CMD_NO_OP);
drivers/gpu/drm/radeon/si.c
6594
struct radeon_ring *ring;
drivers/gpu/drm/radeon/si.c
6688
ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/si.c
6689
r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
drivers/gpu/drm/radeon/si.c
6694
ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
drivers/gpu/drm/radeon/si.c
6695
r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP1_RPTR_OFFSET,
drivers/gpu/drm/radeon/si.c
6700
ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
drivers/gpu/drm/radeon/si.c
6701
r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP2_RPTR_OFFSET,
drivers/gpu/drm/radeon/si.c
6706
ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
drivers/gpu/drm/radeon/si.c
6707
r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
drivers/gpu/drm/radeon/si.c
6712
ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
drivers/gpu/drm/radeon/si.c
6713
r = radeon_ring_init(rdev, ring, ring->ring_size, CAYMAN_WB_DMA1_RPTR_OFFSET,
drivers/gpu/drm/radeon/si.c
6809
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/si.c
6868
ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
drivers/gpu/drm/radeon/si.c
6869
ring->ring_obj = NULL;
drivers/gpu/drm/radeon/si.c
6870
r600_ring_init(rdev, ring, 1024 * 1024);
drivers/gpu/drm/radeon/si.c
6872
ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
drivers/gpu/drm/radeon/si.c
6873
ring->ring_obj = NULL;
drivers/gpu/drm/radeon/si.c
6874
r600_ring_init(rdev, ring, 1024 * 1024);
drivers/gpu/drm/radeon/si.c
6876
ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
drivers/gpu/drm/radeon/si.c
6877
ring->ring_obj = NULL;
drivers/gpu/drm/radeon/si.c
6878
r600_ring_init(rdev, ring, 1024 * 1024);
drivers/gpu/drm/radeon/si.c
6880
ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
drivers/gpu/drm/radeon/si.c
6881
ring->ring_obj = NULL;
drivers/gpu/drm/radeon/si.c
6882
r600_ring_init(rdev, ring, 64 * 1024);
drivers/gpu/drm/radeon/si.c
6884
ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
drivers/gpu/drm/radeon/si.c
6885
ring->ring_obj = NULL;
drivers/gpu/drm/radeon/si.c
6886
r600_ring_init(rdev, ring, 64 * 1024);
drivers/gpu/drm/radeon/si_dma.c
186
void si_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
drivers/gpu/drm/radeon/si_dma.c
190
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0, 0, 0));
drivers/gpu/drm/radeon/si_dma.c
192
radeon_ring_write(ring, (0xf << 16) | ((VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (vm_id << 2)) >> 2));
drivers/gpu/drm/radeon/si_dma.c
194
radeon_ring_write(ring, (0xf << 16) | ((VM_CONTEXT8_PAGE_TABLE_BASE_ADDR + ((vm_id - 8) << 2)) >> 2));
drivers/gpu/drm/radeon/si_dma.c
196
radeon_ring_write(ring, pd_addr >> 12);
drivers/gpu/drm/radeon/si_dma.c
199
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0, 0, 0));
drivers/gpu/drm/radeon/si_dma.c
200
radeon_ring_write(ring, (0xf << 16) | (HDP_MEM_COHERENCY_FLUSH_CNTL >> 2));
drivers/gpu/drm/radeon/si_dma.c
201
radeon_ring_write(ring, 1);
drivers/gpu/drm/radeon/si_dma.c
204
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0, 0, 0));
drivers/gpu/drm/radeon/si_dma.c
205
radeon_ring_write(ring, (0xf << 16) | (VM_INVALIDATE_REQUEST >> 2));
drivers/gpu/drm/radeon/si_dma.c
206
radeon_ring_write(ring, 1 << vm_id);
drivers/gpu/drm/radeon/si_dma.c
209
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_POLL_REG_MEM, 0, 0, 0, 0));
drivers/gpu/drm/radeon/si_dma.c
210
radeon_ring_write(ring, VM_INVALIDATE_REQUEST);
drivers/gpu/drm/radeon/si_dma.c
211
radeon_ring_write(ring, 0xff << 16); /* retry */
drivers/gpu/drm/radeon/si_dma.c
212
radeon_ring_write(ring, 1 << vm_id); /* mask */
drivers/gpu/drm/radeon/si_dma.c
213
radeon_ring_write(ring, 0); /* value */
drivers/gpu/drm/radeon/si_dma.c
214
radeon_ring_write(ring, (0 << 28) | 0x20); /* func(always) | poll interval */
drivers/gpu/drm/radeon/si_dma.c
238
struct radeon_ring *ring = &rdev->ring[ring_index];
drivers/gpu/drm/radeon/si_dma.c
247
r = radeon_ring_lock(rdev, ring, num_loops * 5 + 11);
drivers/gpu/drm/radeon/si_dma.c
255
radeon_sync_rings(rdev, &sync, ring->idx);
drivers/gpu/drm/radeon/si_dma.c
262
radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_COPY, 1, 0, 0, cur_size_in_bytes));
drivers/gpu/drm/radeon/si_dma.c
263
radeon_ring_write(ring, lower_32_bits(dst_offset));
drivers/gpu/drm/radeon/si_dma.c
264
radeon_ring_write(ring, lower_32_bits(src_offset));
drivers/gpu/drm/radeon/si_dma.c
265
radeon_ring_write(ring, upper_32_bits(dst_offset) & 0xff);
drivers/gpu/drm/radeon/si_dma.c
266
radeon_ring_write(ring, upper_32_bits(src_offset) & 0xff);
drivers/gpu/drm/radeon/si_dma.c
271
r = radeon_fence_emit(rdev, &fence, ring->idx);
drivers/gpu/drm/radeon/si_dma.c
273
radeon_ring_unlock_undo(rdev, ring);
drivers/gpu/drm/radeon/si_dma.c
278
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/si_dma.c
40
bool si_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/si_dma.c
45
if (ring->idx == R600_RING_TYPE_DMA_INDEX)
drivers/gpu/drm/radeon/si_dma.c
51
radeon_ring_lockup_update(rdev, ring);
drivers/gpu/drm/radeon/si_dma.c
54
return radeon_ring_test_lockup(rdev, ring);
drivers/gpu/drm/radeon/uvd_v1_0.c
159
struct radeon_ring *ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
drivers/gpu/drm/radeon/uvd_v1_0.c
173
ring->ready = true;
drivers/gpu/drm/radeon/uvd_v1_0.c
174
r = radeon_ring_test(rdev, R600_RING_TYPE_UVD_INDEX, ring);
drivers/gpu/drm/radeon/uvd_v1_0.c
176
ring->ready = false;
drivers/gpu/drm/radeon/uvd_v1_0.c
180
r = radeon_ring_lock(rdev, ring, 10);
drivers/gpu/drm/radeon/uvd_v1_0.c
187
radeon_ring_write(ring, tmp);
drivers/gpu/drm/radeon/uvd_v1_0.c
188
radeon_ring_write(ring, 0xFFFFF);
drivers/gpu/drm/radeon/uvd_v1_0.c
191
radeon_ring_write(ring, tmp);
drivers/gpu/drm/radeon/uvd_v1_0.c
192
radeon_ring_write(ring, 0xFFFFF);
drivers/gpu/drm/radeon/uvd_v1_0.c
195
radeon_ring_write(ring, tmp);
drivers/gpu/drm/radeon/uvd_v1_0.c
196
radeon_ring_write(ring, 0xFFFFF);
drivers/gpu/drm/radeon/uvd_v1_0.c
199
radeon_ring_write(ring, PACKET0(UVD_SEMA_TIMEOUT_STATUS, 0));
drivers/gpu/drm/radeon/uvd_v1_0.c
200
radeon_ring_write(ring, 0x8);
drivers/gpu/drm/radeon/uvd_v1_0.c
202
radeon_ring_write(ring, PACKET0(UVD_SEMA_CNTL, 0));
drivers/gpu/drm/radeon/uvd_v1_0.c
203
radeon_ring_write(ring, 3);
drivers/gpu/drm/radeon/uvd_v1_0.c
205
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/uvd_v1_0.c
250
struct radeon_ring *ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
drivers/gpu/drm/radeon/uvd_v1_0.c
253
ring->ready = false;
drivers/gpu/drm/radeon/uvd_v1_0.c
265
struct radeon_ring *ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
drivers/gpu/drm/radeon/uvd_v1_0.c
364
WREG32(UVD_LMI_EXT40_ADDR, upper_32_bits(ring->gpu_addr) |
drivers/gpu/drm/radeon/uvd_v1_0.c
370
ring->wptr = RREG32(UVD_RBC_RB_RPTR);
drivers/gpu/drm/radeon/uvd_v1_0.c
371
WREG32(UVD_RBC_RB_WPTR, ring->wptr);
drivers/gpu/drm/radeon/uvd_v1_0.c
374
WREG32(UVD_RBC_RB_BASE, ring->gpu_addr);
drivers/gpu/drm/radeon/uvd_v1_0.c
377
rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/radeon/uvd_v1_0.c
40
struct radeon_ring *ring)
drivers/gpu/drm/radeon/uvd_v1_0.c
421
int uvd_v1_0_ring_test(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/uvd_v1_0.c
428
r = radeon_ring_lock(rdev, ring, 3);
drivers/gpu/drm/radeon/uvd_v1_0.c
431
ring->idx, r);
drivers/gpu/drm/radeon/uvd_v1_0.c
434
radeon_ring_write(ring, PACKET0(UVD_CONTEXT_ID, 0));
drivers/gpu/drm/radeon/uvd_v1_0.c
435
radeon_ring_write(ring, 0xDEADBEEF);
drivers/gpu/drm/radeon/uvd_v1_0.c
436
radeon_ring_unlock_commit(rdev, ring, false);
drivers/gpu/drm/radeon/uvd_v1_0.c
446
ring->idx, i);
drivers/gpu/drm/radeon/uvd_v1_0.c
449
ring->idx, tmp);
drivers/gpu/drm/radeon/uvd_v1_0.c
466
struct radeon_ring *ring,
drivers/gpu/drm/radeon/uvd_v1_0.c
484
struct radeon_ring *ring = &rdev->ring[ib->ring];
drivers/gpu/drm/radeon/uvd_v1_0.c
486
radeon_ring_write(ring, PACKET0(UVD_RBC_IB_BASE, 0));
drivers/gpu/drm/radeon/uvd_v1_0.c
487
radeon_ring_write(ring, ib->gpu_addr);
drivers/gpu/drm/radeon/uvd_v1_0.c
488
radeon_ring_write(ring, PACKET0(UVD_RBC_IB_SIZE, 0));
drivers/gpu/drm/radeon/uvd_v1_0.c
489
radeon_ring_write(ring, ib->length_dw);
drivers/gpu/drm/radeon/uvd_v1_0.c
500
int uvd_v1_0_ib_test(struct radeon_device *rdev, struct radeon_ring *ring)
drivers/gpu/drm/radeon/uvd_v1_0.c
514
r = radeon_uvd_get_create_msg(rdev, ring->idx, 1, NULL);
drivers/gpu/drm/radeon/uvd_v1_0.c
520
r = radeon_uvd_get_destroy_msg(rdev, ring->idx, 1, &fence);
drivers/gpu/drm/radeon/uvd_v1_0.c
537
drm_info(&rdev->ddev, "ib test on ring %d succeeded\n", ring->idx);
drivers/gpu/drm/radeon/uvd_v1_0.c
54
struct radeon_ring *ring)
drivers/gpu/drm/radeon/uvd_v1_0.c
68
struct radeon_ring *ring)
drivers/gpu/drm/radeon/uvd_v1_0.c
70
WREG32(UVD_RBC_RB_WPTR, ring->wptr);
drivers/gpu/drm/radeon/uvd_v1_0.c
84
struct radeon_ring *ring = &rdev->ring[fence->ring];
drivers/gpu/drm/radeon/uvd_v1_0.c
85
uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr;
drivers/gpu/drm/radeon/uvd_v1_0.c
87
radeon_ring_write(ring, PACKET0(UVD_GPCOM_VCPU_DATA0, 0));
drivers/gpu/drm/radeon/uvd_v1_0.c
88
radeon_ring_write(ring, addr & 0xffffffff);
drivers/gpu/drm/radeon/uvd_v1_0.c
89
radeon_ring_write(ring, PACKET0(UVD_GPCOM_VCPU_DATA1, 0));
drivers/gpu/drm/radeon/uvd_v1_0.c
90
radeon_ring_write(ring, fence->seq);
drivers/gpu/drm/radeon/uvd_v1_0.c
91
radeon_ring_write(ring, PACKET0(UVD_GPCOM_VCPU_CMD, 0));
drivers/gpu/drm/radeon/uvd_v1_0.c
92
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/uvd_v1_0.c
94
radeon_ring_write(ring, PACKET0(UVD_GPCOM_VCPU_DATA0, 0));
drivers/gpu/drm/radeon/uvd_v1_0.c
95
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/uvd_v1_0.c
96
radeon_ring_write(ring, PACKET0(UVD_GPCOM_VCPU_DATA1, 0));
drivers/gpu/drm/radeon/uvd_v1_0.c
97
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/uvd_v1_0.c
98
radeon_ring_write(ring, PACKET0(UVD_GPCOM_VCPU_CMD, 0));
drivers/gpu/drm/radeon/uvd_v1_0.c
99
radeon_ring_write(ring, 2);
drivers/gpu/drm/radeon/uvd_v2_2.c
42
struct radeon_ring *ring = &rdev->ring[fence->ring];
drivers/gpu/drm/radeon/uvd_v2_2.c
43
uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr;
drivers/gpu/drm/radeon/uvd_v2_2.c
45
radeon_ring_write(ring, PACKET0(UVD_CONTEXT_ID, 0));
drivers/gpu/drm/radeon/uvd_v2_2.c
46
radeon_ring_write(ring, fence->seq);
drivers/gpu/drm/radeon/uvd_v2_2.c
47
radeon_ring_write(ring, PACKET0(UVD_GPCOM_VCPU_DATA0, 0));
drivers/gpu/drm/radeon/uvd_v2_2.c
48
radeon_ring_write(ring, lower_32_bits(addr));
drivers/gpu/drm/radeon/uvd_v2_2.c
49
radeon_ring_write(ring, PACKET0(UVD_GPCOM_VCPU_DATA1, 0));
drivers/gpu/drm/radeon/uvd_v2_2.c
50
radeon_ring_write(ring, upper_32_bits(addr) & 0xff);
drivers/gpu/drm/radeon/uvd_v2_2.c
51
radeon_ring_write(ring, PACKET0(UVD_GPCOM_VCPU_CMD, 0));
drivers/gpu/drm/radeon/uvd_v2_2.c
52
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/uvd_v2_2.c
54
radeon_ring_write(ring, PACKET0(UVD_GPCOM_VCPU_DATA0, 0));
drivers/gpu/drm/radeon/uvd_v2_2.c
55
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/uvd_v2_2.c
56
radeon_ring_write(ring, PACKET0(UVD_GPCOM_VCPU_DATA1, 0));
drivers/gpu/drm/radeon/uvd_v2_2.c
57
radeon_ring_write(ring, 0);
drivers/gpu/drm/radeon/uvd_v2_2.c
58
radeon_ring_write(ring, PACKET0(UVD_GPCOM_VCPU_CMD, 0));
drivers/gpu/drm/radeon/uvd_v2_2.c
59
radeon_ring_write(ring, 2);
drivers/gpu/drm/radeon/uvd_v2_2.c
73
struct radeon_ring *ring,
drivers/gpu/drm/radeon/uvd_v2_2.c
79
radeon_ring_write(ring, PACKET0(UVD_SEMA_ADDR_LOW, 0));
drivers/gpu/drm/radeon/uvd_v2_2.c
80
radeon_ring_write(ring, (addr >> 3) & 0x000FFFFF);
drivers/gpu/drm/radeon/uvd_v2_2.c
82
radeon_ring_write(ring, PACKET0(UVD_SEMA_ADDR_HIGH, 0));
drivers/gpu/drm/radeon/uvd_v2_2.c
83
radeon_ring_write(ring, (addr >> 23) & 0x000FFFFF);
drivers/gpu/drm/radeon/uvd_v2_2.c
85
radeon_ring_write(ring, PACKET0(UVD_SEMA_CMD, 0));
drivers/gpu/drm/radeon/uvd_v2_2.c
86
radeon_ring_write(ring, emit_wait ? 1 : 0);
drivers/gpu/drm/radeon/uvd_v3_1.c
40
struct radeon_ring *ring,
drivers/gpu/drm/radeon/uvd_v3_1.c
46
radeon_ring_write(ring, PACKET0(UVD_SEMA_ADDR_LOW, 0));
drivers/gpu/drm/radeon/uvd_v3_1.c
47
radeon_ring_write(ring, (addr >> 3) & 0x000FFFFF);
drivers/gpu/drm/radeon/uvd_v3_1.c
49
radeon_ring_write(ring, PACKET0(UVD_SEMA_ADDR_HIGH, 0));
drivers/gpu/drm/radeon/uvd_v3_1.c
50
radeon_ring_write(ring, (addr >> 23) & 0x000FFFFF);
drivers/gpu/drm/radeon/uvd_v3_1.c
52
radeon_ring_write(ring, PACKET0(UVD_SEMA_CMD, 0));
drivers/gpu/drm/radeon/uvd_v3_1.c
53
radeon_ring_write(ring, 0x80 | (emit_wait ? 1 : 0));
drivers/gpu/drm/radeon/vce_v1_0.c
100
WREG32(VCE_RB_WPTR2, ring->wptr);
drivers/gpu/drm/radeon/vce_v1_0.c
291
struct radeon_ring *ring;
drivers/gpu/drm/radeon/vce_v1_0.c
297
ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX];
drivers/gpu/drm/radeon/vce_v1_0.c
298
WREG32(VCE_RB_RPTR, ring->wptr);
drivers/gpu/drm/radeon/vce_v1_0.c
299
WREG32(VCE_RB_WPTR, ring->wptr);
drivers/gpu/drm/radeon/vce_v1_0.c
300
WREG32(VCE_RB_BASE_LO, ring->gpu_addr);
drivers/gpu/drm/radeon/vce_v1_0.c
301
WREG32(VCE_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/radeon/vce_v1_0.c
302
WREG32(VCE_RB_SIZE, ring->ring_size / 4);
drivers/gpu/drm/radeon/vce_v1_0.c
304
ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX];
drivers/gpu/drm/radeon/vce_v1_0.c
305
WREG32(VCE_RB_RPTR2, ring->wptr);
drivers/gpu/drm/radeon/vce_v1_0.c
306
WREG32(VCE_RB_WPTR2, ring->wptr);
drivers/gpu/drm/radeon/vce_v1_0.c
307
WREG32(VCE_RB_BASE_LO2, ring->gpu_addr);
drivers/gpu/drm/radeon/vce_v1_0.c
308
WREG32(VCE_RB_BASE_HI2, upper_32_bits(ring->gpu_addr));
drivers/gpu/drm/radeon/vce_v1_0.c
309
WREG32(VCE_RB_SIZE2, ring->ring_size / 4);
drivers/gpu/drm/radeon/vce_v1_0.c
358
struct radeon_ring *ring;
drivers/gpu/drm/radeon/vce_v1_0.c
365
ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX];
drivers/gpu/drm/radeon/vce_v1_0.c
366
ring->ready = true;
drivers/gpu/drm/radeon/vce_v1_0.c
367
r = radeon_ring_test(rdev, TN_RING_TYPE_VCE1_INDEX, ring);
drivers/gpu/drm/radeon/vce_v1_0.c
369
ring->ready = false;
drivers/gpu/drm/radeon/vce_v1_0.c
373
ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX];
drivers/gpu/drm/radeon/vce_v1_0.c
374
ring->ready = true;
drivers/gpu/drm/radeon/vce_v1_0.c
375
r = radeon_ring_test(rdev, TN_RING_TYPE_VCE2_INDEX, ring);
drivers/gpu/drm/radeon/vce_v1_0.c
377
ring->ready = false;
drivers/gpu/drm/radeon/vce_v1_0.c
61
struct radeon_ring *ring)
drivers/gpu/drm/radeon/vce_v1_0.c
63
if (ring->idx == TN_RING_TYPE_VCE1_INDEX)
drivers/gpu/drm/radeon/vce_v1_0.c
78
struct radeon_ring *ring)
drivers/gpu/drm/radeon/vce_v1_0.c
80
if (ring->idx == TN_RING_TYPE_VCE1_INDEX)
drivers/gpu/drm/radeon/vce_v1_0.c
95
struct radeon_ring *ring)
drivers/gpu/drm/radeon/vce_v1_0.c
97
if (ring->idx == TN_RING_TYPE_VCE1_INDEX)
drivers/gpu/drm/radeon/vce_v1_0.c
98
WREG32(VCE_RB_WPTR, ring->wptr);
drivers/gpu/drm/xe/xe_execlist.c
139
return lrc->ring.tail == lrc->ring.old_tail;
drivers/gpu/drm/xe/xe_execlist.c
66
xe_lrc_write_ctx_reg(lrc, CTX_RING_TAIL, lrc->ring.tail);
drivers/gpu/drm/xe/xe_execlist.c
67
lrc->ring.old_tail = lrc->ring.tail;
drivers/gpu/drm/xe/xe_gt_sriov_pf_migration.c
1058
err = ptr_ring_init(&migration->ring,
drivers/gpu/drm/xe/xe_gt_sriov_pf_migration.c
1063
err = devm_add_action_or_reset(xe->drm.dev, action_ring_cleanup, &migration->ring);
drivers/gpu/drm/xe/xe_gt_sriov_pf_migration.c
779
return ptr_ring_empty(&pf_pick_gt_migration(gt, vfid)->ring);
drivers/gpu/drm/xe/xe_gt_sriov_pf_migration.c
791
return ptr_ring_full(&pf_pick_gt_migration(gt, vfid)->ring);
drivers/gpu/drm/xe/xe_gt_sriov_pf_migration.c
804
if (ptr_ring_empty(&migration->ring))
drivers/gpu/drm/xe/xe_gt_sriov_pf_migration.c
809
while ((data = ptr_ring_consume(&migration->ring)))
drivers/gpu/drm/xe/xe_gt_sriov_pf_migration.c
888
ret = ptr_ring_produce(&pf_pick_gt_migration(gt, vfid)->ring, data);
drivers/gpu/drm/xe/xe_gt_sriov_pf_migration.c
917
data = ptr_ring_consume(&migration->ring);
drivers/gpu/drm/xe/xe_gt_sriov_pf_migration.c
927
!ptr_ring_full(&pf_pick_gt_migration(gt, vfid)->ring))
drivers/gpu/drm/xe/xe_gt_sriov_pf_migration.c
959
ret = ptr_ring_produce(&pf_pick_gt_migration(gt, vfid)->ring, data);
drivers/gpu/drm/xe/xe_gt_sriov_pf_migration.c
993
data = ptr_ring_consume(&migration->ring);
drivers/gpu/drm/xe/xe_gt_sriov_pf_migration_types.h
18
struct ptr_ring ring;
drivers/gpu/drm/xe/xe_guc_submit.c
1074
FIELD_PREP(WQ_RING_TAIL_MASK, q->lrc[0]->ring.tail / sizeof(u64));
drivers/gpu/drm/xe/xe_guc_submit.c
1079
wqi[i++] = lrc->ring.tail / sizeof(u64);
drivers/gpu/drm/xe/xe_guc_submit.c
1113
xe_lrc_set_ring_tail(lrc, lrc->ring.tail);
drivers/gpu/drm/xe/xe_guc_submit.c
1822
xe_lrc_set_ring_tail(lrc, lrc->ring.tail);
drivers/gpu/drm/xe/xe_guc_submit.c
1837
xe_lrc_set_ring_tail(lrc, lrc->ring.tail);
drivers/gpu/drm/xe/xe_guc_submit.c
2480
q->lrc[i]->ring.tail = job->ptrs[i].head;
drivers/gpu/drm/xe/xe_guc_submit.c
2541
q->lrc[i]->ring.tail = job->ptrs[i].head;
drivers/gpu/drm/xe/xe_lrc.c
1454
lrc->ring.size = ring_size;
drivers/gpu/drm/xe/xe_lrc.c
1455
lrc->ring.tail = 0;
drivers/gpu/drm/xe/xe_lrc.c
1531
xe_lrc_write_indirect_ctx_reg(lrc, INDIRECT_CTX_RING_TAIL, lrc->ring.tail);
drivers/gpu/drm/xe/xe_lrc.c
1533
RING_CTL_SIZE(lrc->ring.size) | RING_VALID);
drivers/gpu/drm/xe/xe_lrc.c
1537
xe_lrc_write_ctx_reg(lrc, CTX_RING_TAIL, lrc->ring.tail);
drivers/gpu/drm/xe/xe_lrc.c
1539
RING_CTL_SIZE(lrc->ring.size) | RING_VALID);
drivers/gpu/drm/xe/xe_lrc.c
1707
const u32 tail = lrc->ring.tail;
drivers/gpu/drm/xe/xe_lrc.c
1708
const u32 size = lrc->ring.size;
drivers/gpu/drm/xe/xe_lrc.c
1713
static void __xe_lrc_write_ring(struct xe_lrc *lrc, struct iosys_map ring,
drivers/gpu/drm/xe/xe_lrc.c
1718
iosys_map_incr(&ring, lrc->ring.tail);
drivers/gpu/drm/xe/xe_lrc.c
1719
xe_map_memcpy_to(xe, &ring, 0, data, size);
drivers/gpu/drm/xe/xe_lrc.c
1720
lrc->ring.tail = (lrc->ring.tail + size) & (lrc->ring.size - 1);
drivers/gpu/drm/xe/xe_lrc.c
1726
struct iosys_map ring;
drivers/gpu/drm/xe/xe_lrc.c
1733
ring = __xe_lrc_ring_map(lrc);
drivers/gpu/drm/xe/xe_lrc.c
1735
xe_assert(xe, lrc->ring.tail < lrc->ring.size);
drivers/gpu/drm/xe/xe_lrc.c
1736
rhs = lrc->ring.size - lrc->ring.tail;
drivers/gpu/drm/xe/xe_lrc.c
1738
__xe_lrc_write_ring(lrc, ring, data, rhs);
drivers/gpu/drm/xe/xe_lrc.c
1739
__xe_lrc_write_ring(lrc, ring, data + rhs, size - rhs);
drivers/gpu/drm/xe/xe_lrc.c
1741
__xe_lrc_write_ring(lrc, ring, data, size);
drivers/gpu/drm/xe/xe_lrc.c
1747
__xe_lrc_write_ring(lrc, ring, &noop, sizeof(noop));
drivers/gpu/drm/xe/xe_lrc.c
2281
snapshot->tail.internal = lrc->ring.tail;
drivers/gpu/drm/xe/xe_lrc.c
707
return lrc->ring.size;
drivers/gpu/drm/xe/xe_lrc.c
818
DECL_MAP_ADDR_HELPERS(ring)
drivers/gpu/drm/xe/xe_lrc_types.h
50
} ring;
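The xe_lrc.c entries above (the rhs = size - tail computation followed by two __xe_lrc_write_ring() calls) show the usual two-part copy when a write would run past the end of the ring buffer. A minimal user-space sketch of the same idea, assuming a power-of-two ring size; the function and parameter names are illustrative, not the driver's:

#include <stdint.h>
#include <string.h>

/* Illustrative only: copy 'len' bytes into a circular byte buffer, splitting
 * the copy when it would cross the end of the ring. Assumes 'size' is a
 * power of two, as the masking in the listed code implies. */
static void ring_write(uint8_t *ring, uint32_t size, uint32_t *tail,
                       const uint8_t *data, uint32_t len)
{
        uint32_t rhs = size - *tail;            /* room up to the end of the buffer */

        if (len > rhs) {
                memcpy(ring + *tail, data, rhs);        /* first part, up to the end */
                memcpy(ring, data + rhs, len - rhs);    /* remainder wraps to the start */
        } else {
                memcpy(ring + *tail, data, len);
        }
        *tail = (*tail + len) & (size - 1);     /* advance and wrap the tail */
}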
drivers/gpu/drm/xe/xe_ring_ops.c
267
*head = lrc->ring.tail;
drivers/gpu/drm/xe/xe_ring_ops.c
325
*head = lrc->ring.tail;
drivers/gpu/drm/xe/xe_ring_ops.c
382
*head = lrc->ring.tail;
drivers/gpu/drm/xe/xe_ring_ops.c
434
*head = lrc->ring.tail;
drivers/gpu/drm/xe/xe_sriov_vf_ccs.c
190
lrc->ring.tail = 0;
drivers/gpu/drm/xe/xe_sriov_vf_ccs.c
201
xe_lrc_set_ring_tail(lrc, lrc->ring.tail);
drivers/gpu/drm/xen/xen_drm_front.c
94
req = RING_GET_REQUEST(&evtchnl->u.req.ring,
drivers/gpu/drm/xen/xen_drm_front.c
95
evtchnl->u.req.ring.req_prod_pvt);
drivers/gpu/drm/xen/xen_drm_front_evtchnl.c
129
page = evtchnl->u.req.ring.sring;
drivers/gpu/drm/xen/xen_drm_front_evtchnl.c
181
XEN_FRONT_RING_INIT(&evtchnl->u.req.ring, sring, XEN_PAGE_SIZE);
drivers/gpu/drm/xen/xen_drm_front_evtchnl.c
328
evtchnl->u.req.ring.req_prod_pvt++;
drivers/gpu/drm/xen/xen_drm_front_evtchnl.c
329
RING_PUSH_REQUESTS_AND_CHECK_NOTIFY(&evtchnl->u.req.ring, notify);
drivers/gpu/drm/xen/xen_drm_front_evtchnl.c
37
rp = evtchnl->u.req.ring.sring->rsp_prod;
drivers/gpu/drm/xen/xen_drm_front_evtchnl.c
41
for (i = evtchnl->u.req.ring.rsp_cons; i != rp; i++) {
drivers/gpu/drm/xen/xen_drm_front_evtchnl.c
42
resp = RING_GET_RESPONSE(&evtchnl->u.req.ring, i);
drivers/gpu/drm/xen/xen_drm_front_evtchnl.c
64
evtchnl->u.req.ring.rsp_cons = i;
drivers/gpu/drm/xen/xen_drm_front_evtchnl.c
66
if (i != evtchnl->u.req.ring.req_prod_pvt) {
drivers/gpu/drm/xen/xen_drm_front_evtchnl.c
69
RING_FINAL_CHECK_FOR_RESPONSES(&evtchnl->u.req.ring,
drivers/gpu/drm/xen/xen_drm_front_evtchnl.c
74
evtchnl->u.req.ring.sring->rsp_event = i + 1;
drivers/gpu/drm/xen/xen_drm_front_evtchnl.h
52
struct xen_displif_front_ring ring;
drivers/hid/wacom_wac.c
1481
int ring = data[285] & 0x7F;
drivers/hid/wacom_wac.c
1486
ring = 71 - ring;
drivers/hid/wacom_wac.c
1487
ring += 3*72/16;
drivers/hid/wacom_wac.c
1488
if (ring > 71)
drivers/hid/wacom_wac.c
1489
ring -= 72;
drivers/hid/wacom_wac.c
1494
input_report_abs(pad_input, ABS_WHEEL, ringstatus ? ring : 0);
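The wacom_wac.c lines above mirror the raw 0..71 touch-ring value and then rotate it by 3*72/16 = 13 positions (integer division) before wrapping back into range. A standalone illustration of that remapping, printing a few sample values:

#include <stdio.h>

/* Standalone version of the wheel remapping shown above: mirror the
 * 72-position ring value, rotate by 13 steps, wrap back into 0..71. */
int main(void)
{
        for (int raw = 0; raw < 72; raw += 24) {
                int ring = 71 - raw;
                ring += 3 * 72 / 16;    /* integer division: 216 / 16 = 13 */
                if (ring > 71)
                        ring -= 72;
                printf("raw %2d -> ring %2d\n", raw, ring);
        }
        return 0;
}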
drivers/hv/mshv_synic.c
32
volatile struct hv_synic_event_ring *ring;
drivers/hv/mshv_synic.c
53
ring = &(*event_ring_page)->sint_event_ring[sint_index];
drivers/hv/mshv_synic.c
58
message = ring->data[tail];
drivers/hv/mshv_synic.c
61
if (ring->ring_full) {
drivers/hv/mshv_synic.c
67
ring->ring_full = 0;
drivers/hv/mshv_synic.c
69
message = ring->data[tail];
drivers/hv/mshv_synic.c
73
ring->signal_masked = 0;
drivers/hv/mshv_synic.c
79
message = ring->data[tail];
drivers/hv/mshv_synic.c
88
ring->signal_masked = 1;
drivers/hv/mshv_synic.c
94
ring->data[tail] = 0;
drivers/i3c/master/mipi-i3c-hci/dma.c
473
unsigned int i, ring, enqueue_ptr;
drivers/i3c/master/mipi-i3c-hci/dma.c
482
ring = 0;
drivers/i3c/master/mipi-i3c-hci/dma.c
483
rh = &rings->headers[ring];
drivers/i3c/master/mipi-i3c-hci/dma.c
526
xfer->ring_number = ring;
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
1330
struct hns_roce_v2_cmq_ring *ring)
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
1332
int size = ring->desc_num * sizeof(struct hns_roce_cmq_desc);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
1334
ring->desc = dma_alloc_coherent(hr_dev->dev, size,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
1335
&ring->desc_dma_addr, GFP_KERNEL);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
1336
if (!ring->desc)
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
1343
struct hns_roce_v2_cmq_ring *ring)
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
1346
ring->desc_num * sizeof(struct hns_roce_cmq_desc),
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
1347
ring->desc, ring->desc_dma_addr);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
1349
ring->desc_dma_addr = 0;
drivers/infiniband/hw/mlx4/mad.c
1326
sg_list.addr = tun_qp->ring[index].map;
drivers/infiniband/hw/mlx4/mad.c
1335
ib_dma_sync_single_for_device(ctx->ib_dev, tun_qp->ring[index].map,
drivers/infiniband/hw/mlx4/mad.c
1482
struct mlx4_tunnel_mad *tunnel = tun_qp->ring[wr_ix].addr;
drivers/infiniband/hw/mlx4/mad.c
1509
ib_dma_sync_single_for_cpu(ctx->ib_dev, tun_qp->ring[wr_ix].map,
drivers/infiniband/hw/mlx4/mad.c
1615
tun_qp->ring = kzalloc_objs(struct mlx4_ib_buf, nmbr_bufs);
drivers/infiniband/hw/mlx4/mad.c
1616
if (!tun_qp->ring)
drivers/infiniband/hw/mlx4/mad.c
1621
kfree(tun_qp->ring);
drivers/infiniband/hw/mlx4/mad.c
1622
tun_qp->ring = NULL;
drivers/infiniband/hw/mlx4/mad.c
1635
tun_qp->ring[i].addr = kmalloc(rx_buf_size, GFP_KERNEL);
drivers/infiniband/hw/mlx4/mad.c
1636
if (!tun_qp->ring[i].addr)
drivers/infiniband/hw/mlx4/mad.c
1638
tun_qp->ring[i].map = ib_dma_map_single(ctx->ib_dev,
drivers/infiniband/hw/mlx4/mad.c
1639
tun_qp->ring[i].addr,
drivers/infiniband/hw/mlx4/mad.c
1642
if (ib_dma_mapping_error(ctx->ib_dev, tun_qp->ring[i].map)) {
drivers/infiniband/hw/mlx4/mad.c
1643
kfree(tun_qp->ring[i].addr);
drivers/infiniband/hw/mlx4/mad.c
1683
ib_dma_unmap_single(ctx->ib_dev, tun_qp->ring[i].map,
drivers/infiniband/hw/mlx4/mad.c
1685
kfree(tun_qp->ring[i].addr);
drivers/infiniband/hw/mlx4/mad.c
1689
kfree(tun_qp->ring);
drivers/infiniband/hw/mlx4/mad.c
1690
tun_qp->ring = NULL;
drivers/infiniband/hw/mlx4/mad.c
1716
ib_dma_unmap_single(ctx->ib_dev, tun_qp->ring[i].map,
drivers/infiniband/hw/mlx4/mad.c
1718
kfree(tun_qp->ring[i].addr);
drivers/infiniband/hw/mlx4/mad.c
1729
kfree(tun_qp->ring);
drivers/infiniband/hw/mlx4/mad.c
1920
(sqp->ring[wc.wr_id &
drivers/infiniband/hw/mlx4/mad.c
1923
(sqp->ring[wc.wr_id &
drivers/infiniband/hw/mlx4/mlx4_ib.h
462
struct mlx4_ib_buf *ring;
drivers/infiniband/hw/vmw_pvrdma/pvrdma.h
152
struct pvrdma_ring *ring;
drivers/infiniband/hw/vmw_pvrdma/pvrdma.h
173
struct pvrdma_ring_state *ring;
drivers/infiniband/hw/vmw_pvrdma/pvrdma_main.c
424
struct pvrdma_ring *ring = &dev->async_ring_state->rx;
drivers/infiniband/hw/vmw_pvrdma/pvrdma_main.c
438
while (pvrdma_idx_ring_has_data(ring, ring_slots, &head) > 0) {
drivers/infiniband/hw/vmw_pvrdma/pvrdma_main.c
482
pvrdma_idx_ring_inc(&ring->cons_head, ring_slots);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_main.c
500
struct pvrdma_ring *ring = &dev->cq_ring_state->rx;
drivers/infiniband/hw/vmw_pvrdma/pvrdma_main.c
507
while (pvrdma_idx_ring_has_data(ring, ring_slots, &head) > 0) {
drivers/infiniband/hw/vmw_pvrdma/pvrdma_main.c
524
pvrdma_idx_ring_inc(&ring->cons_head, ring_slots);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c
120
if (qp->rq.ring) {
drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c
121
atomic_set(&qp->rq.ring->cons_head, 0);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c
122
atomic_set(&qp->rq.ring->prod_tail, 0);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c
124
if (qp->sq.ring) {
drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c
125
atomic_set(&qp->sq.ring->cons_head, 0);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c
126
atomic_set(&qp->sq.ring->prod_tail, 0);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c
344
qp->sq.ring = qp->pdir.pages[0];
drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c
345
qp->rq.ring = is_srq ? NULL : &qp->sq.ring[1];
drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c
696
qp->sq.ring, qp->sq.wqe_cnt, &tail))) {
drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c
848
pvrdma_idx_ring_inc(&qp->sq.ring->prod_tail,
drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c
914
qp->rq.ring, qp->rq.wqe_cnt, &tail))) {
drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c
939
pvrdma_idx_ring_inc(&qp->rq.ring->prod_tail,
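The pvrdma entries above consume events with a has-data/process/increment loop over cons_head and prod_tail. A simplified consumer loop in the same spirit (the real pvrdma helpers also validate the indices; here the counters are plain free-running values and the slot count is assumed to be a power of two so masking stays valid on wrap):

#include <stdint.h>
#include <stdio.h>

/* Simplified stand-in for the pvrdma_idx_ring_has_data()/pvrdma_idx_ring_inc()
 * pattern listed above; field names follow the listing, the rest is illustrative. */
struct idx_ring {
        uint32_t prod_tail;     /* advanced by the producer */
        uint32_t cons_head;     /* advanced by the consumer */
};

static int idx_ring_has_data(const struct idx_ring *r, uint32_t slots, uint32_t *head)
{
        if (r->cons_head == r->prod_tail)
                return 0;                       /* nothing to consume */
        *head = r->cons_head & (slots - 1);     /* slot index to read next */
        return 1;
}

static void consume_all(struct idx_ring *r, uint32_t slots, const int *slot_data)
{
        uint32_t head;

        while (idx_ring_has_data(r, slots, &head)) {
                printf("slot %u -> %d\n", head, slot_data[head]);
                r->cons_head++;                 /* the equivalent of the _inc() call */
        }
}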
drivers/infiniband/ulp/srpt/ib_srpt.c
787
struct srpt_ioctx **ring;
drivers/infiniband/ulp/srpt/ib_srpt.c
793
ring = kvmalloc_objs(ring[0], ring_size);
drivers/infiniband/ulp/srpt/ib_srpt.c
794
if (!ring)
drivers/infiniband/ulp/srpt/ib_srpt.c
797
ring[i] = srpt_alloc_ioctx(sdev, ioctx_size, buf_cache, dir);
drivers/infiniband/ulp/srpt/ib_srpt.c
798
if (!ring[i])
drivers/infiniband/ulp/srpt/ib_srpt.c
800
ring[i]->index = i;
drivers/infiniband/ulp/srpt/ib_srpt.c
801
ring[i]->offset = alignment_offset;
drivers/infiniband/ulp/srpt/ib_srpt.c
807
srpt_free_ioctx(sdev, ring[i], buf_cache, dir);
drivers/infiniband/ulp/srpt/ib_srpt.c
808
kvfree(ring);
drivers/infiniband/ulp/srpt/ib_srpt.c
809
ring = NULL;
drivers/infiniband/ulp/srpt/ib_srpt.c
811
return ring;
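The srpt entries above allocate an array of per-slot I/O contexts and, on a partial failure, free the contexts already allocated before freeing the array itself. A minimal sketch of that allocate-or-unwind pattern, with stand-in types rather than the driver's:

#include <stdlib.h>

/* Illustrative allocation of a ring of per-slot objects with cleanup on
 * partial failure; 'struct ioctx' and the helpers are placeholders. */
struct ioctx {
        int index;
};

static struct ioctx **alloc_ring(int ring_size)
{
        struct ioctx **ring;
        int i;

        ring = calloc(ring_size, sizeof(ring[0]));
        if (!ring)
                return NULL;

        for (i = 0; i < ring_size; i++) {
                ring[i] = calloc(1, sizeof(*ring[i]));
                if (!ring[i])
                        goto err;
                ring[i]->index = i;     /* each slot remembers its position */
        }
        return ring;

err:
        while (--i >= 0)
                free(ring[i]);          /* undo the allocations that succeeded */
        free(ring);
        return NULL;
}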
drivers/mailbox/bcm-flexrm-mailbox.c
1000
ret = flexrm_dma_map(ring->mbox->dev, msg);
drivers/mailbox/bcm-flexrm-mailbox.c
1002
ring->requests[reqid] = NULL;
drivers/mailbox/bcm-flexrm-mailbox.c
1003
spin_lock_irqsave(&ring->lock, flags);
drivers/mailbox/bcm-flexrm-mailbox.c
1004
bitmap_release_region(ring->requests_bmap, reqid, 0);
drivers/mailbox/bcm-flexrm-mailbox.c
1005
spin_unlock_irqrestore(&ring->lock, flags);
drivers/mailbox/bcm-flexrm-mailbox.c
1010
read_offset = readl_relaxed(ring->regs + RING_BD_READ_PTR);
drivers/mailbox/bcm-flexrm-mailbox.c
1011
val = readl_relaxed(ring->regs + RING_BD_START_ADDR);
drivers/mailbox/bcm-flexrm-mailbox.c
1013
read_offset += (u32)(BD_START_ADDR_DECODE(val) - ring->bd_dma_base);
drivers/mailbox/bcm-flexrm-mailbox.c
1024
write_offset = ring->bd_write_offset;
drivers/mailbox/bcm-flexrm-mailbox.c
1026
if (!flexrm_is_next_table_desc(ring->bd_base + write_offset))
drivers/mailbox/bcm-flexrm-mailbox.c
1042
ring->bd_base + ring->bd_write_offset,
drivers/mailbox/bcm-flexrm-mailbox.c
1043
RING_BD_TOGGLE_VALID(ring->bd_write_offset),
drivers/mailbox/bcm-flexrm-mailbox.c
1044
ring->bd_base, ring->bd_base + RING_BD_SIZE);
drivers/mailbox/bcm-flexrm-mailbox.c
1052
ring->bd_write_offset = (unsigned long)(next - ring->bd_base);
drivers/mailbox/bcm-flexrm-mailbox.c
1055
atomic_inc_return(&ring->msg_send_count);
drivers/mailbox/bcm-flexrm-mailbox.c
1063
flexrm_dma_unmap(ring->mbox->dev, msg);
drivers/mailbox/bcm-flexrm-mailbox.c
1064
ring->requests[reqid] = NULL;
drivers/mailbox/bcm-flexrm-mailbox.c
1065
spin_lock_irqsave(&ring->lock, flags);
drivers/mailbox/bcm-flexrm-mailbox.c
1066
bitmap_release_region(ring->requests_bmap, reqid, 0);
drivers/mailbox/bcm-flexrm-mailbox.c
1067
spin_unlock_irqrestore(&ring->lock, flags);
drivers/mailbox/bcm-flexrm-mailbox.c
1073
static int flexrm_process_completions(struct flexrm_ring *ring)
drivers/mailbox/bcm-flexrm-mailbox.c
1080
struct mbox_chan *chan = &ring->mbox->controller.chans[ring->num];
drivers/mailbox/bcm-flexrm-mailbox.c
1082
spin_lock_irqsave(&ring->lock, flags);
drivers/mailbox/bcm-flexrm-mailbox.c
1092
cmpl_write_offset = readl_relaxed(ring->regs + RING_CMPL_WRITE_PTR);
drivers/mailbox/bcm-flexrm-mailbox.c
1094
cmpl_read_offset = ring->cmpl_read_offset;
drivers/mailbox/bcm-flexrm-mailbox.c
1095
ring->cmpl_read_offset = cmpl_write_offset;
drivers/mailbox/bcm-flexrm-mailbox.c
1097
spin_unlock_irqrestore(&ring->lock, flags);
drivers/mailbox/bcm-flexrm-mailbox.c
1103
desc = *((u64 *)(ring->cmpl_base + cmpl_read_offset));
drivers/mailbox/bcm-flexrm-mailbox.c
1113
dev_warn(ring->mbox->dev,
drivers/mailbox/bcm-flexrm-mailbox.c
1115
ring->num, (unsigned long)desc, err);
drivers/mailbox/bcm-flexrm-mailbox.c
1122
msg = ring->requests[reqid];
drivers/mailbox/bcm-flexrm-mailbox.c
1124
dev_warn(ring->mbox->dev,
drivers/mailbox/bcm-flexrm-mailbox.c
1126
ring->num, (unsigned long)desc);
drivers/mailbox/bcm-flexrm-mailbox.c
1131
ring->requests[reqid] = NULL;
drivers/mailbox/bcm-flexrm-mailbox.c
1132
spin_lock_irqsave(&ring->lock, flags);
drivers/mailbox/bcm-flexrm-mailbox.c
1133
bitmap_release_region(ring->requests_bmap, reqid, 0);
drivers/mailbox/bcm-flexrm-mailbox.c
1134
spin_unlock_irqrestore(&ring->lock, flags);
drivers/mailbox/bcm-flexrm-mailbox.c
1137
flexrm_dma_unmap(ring->mbox->dev, msg);
drivers/mailbox/bcm-flexrm-mailbox.c
1144
atomic_inc_return(&ring->msg_cmpl_count);
drivers/mailbox/bcm-flexrm-mailbox.c
1187
struct flexrm_ring *ring = chan->con_priv;
drivers/mailbox/bcm-flexrm-mailbox.c
1193
rc = flexrm_new_request(ring, msg,
drivers/mailbox/bcm-flexrm-mailbox.c
1204
return flexrm_new_request(ring, NULL, data);
drivers/mailbox/bcm-flexrm-mailbox.c
1220
struct flexrm_ring *ring = chan->con_priv;
drivers/mailbox/bcm-flexrm-mailbox.c
1223
ring->bd_base = dma_pool_alloc(ring->mbox->bd_pool,
drivers/mailbox/bcm-flexrm-mailbox.c
1224
GFP_KERNEL, &ring->bd_dma_base);
drivers/mailbox/bcm-flexrm-mailbox.c
1225
if (!ring->bd_base) {
drivers/mailbox/bcm-flexrm-mailbox.c
1226
dev_err(ring->mbox->dev,
drivers/mailbox/bcm-flexrm-mailbox.c
1228
ring->num);
drivers/mailbox/bcm-flexrm-mailbox.c
1238
next_addr += ring->bd_dma_base;
drivers/mailbox/bcm-flexrm-mailbox.c
1244
flexrm_write_desc(ring->bd_base + off, d);
drivers/mailbox/bcm-flexrm-mailbox.c
1248
ring->cmpl_base = dma_pool_zalloc(ring->mbox->cmpl_pool,
drivers/mailbox/bcm-flexrm-mailbox.c
1249
GFP_KERNEL, &ring->cmpl_dma_base);
drivers/mailbox/bcm-flexrm-mailbox.c
1250
if (!ring->cmpl_base) {
drivers/mailbox/bcm-flexrm-mailbox.c
1251
dev_err(ring->mbox->dev,
drivers/mailbox/bcm-flexrm-mailbox.c
1253
ring->num);
drivers/mailbox/bcm-flexrm-mailbox.c
1259
if (ring->irq == UINT_MAX) {
drivers/mailbox/bcm-flexrm-mailbox.c
1260
dev_err(ring->mbox->dev,
drivers/mailbox/bcm-flexrm-mailbox.c
1261
"ring%d IRQ not available\n", ring->num);
drivers/mailbox/bcm-flexrm-mailbox.c
1265
ret = request_threaded_irq(ring->irq, NULL, flexrm_irq_thread,
drivers/mailbox/bcm-flexrm-mailbox.c
1266
IRQF_ONESHOT, dev_name(ring->mbox->dev), ring);
drivers/mailbox/bcm-flexrm-mailbox.c
1268
dev_err(ring->mbox->dev,
drivers/mailbox/bcm-flexrm-mailbox.c
1269
"failed to request ring%d IRQ\n", ring->num);
drivers/mailbox/bcm-flexrm-mailbox.c
1272
ring->irq_requested = true;
drivers/mailbox/bcm-flexrm-mailbox.c
1275
ring->irq_aff_hint = CPU_MASK_NONE;
drivers/mailbox/bcm-flexrm-mailbox.c
1276
val = ring->mbox->num_rings;
drivers/mailbox/bcm-flexrm-mailbox.c
1278
cpumask_set_cpu((ring->num / val) % num_online_cpus(),
drivers/mailbox/bcm-flexrm-mailbox.c
1279
&ring->irq_aff_hint);
drivers/mailbox/bcm-flexrm-mailbox.c
1280
ret = irq_update_affinity_hint(ring->irq, &ring->irq_aff_hint);
drivers/mailbox/bcm-flexrm-mailbox.c
1282
dev_err(ring->mbox->dev,
drivers/mailbox/bcm-flexrm-mailbox.c
1284
ring->num);
drivers/mailbox/bcm-flexrm-mailbox.c
1289
writel_relaxed(0x0, ring->regs + RING_CONTROL);
drivers/mailbox/bcm-flexrm-mailbox.c
1292
val = BD_START_ADDR_VALUE(ring->bd_dma_base);
drivers/mailbox/bcm-flexrm-mailbox.c
1293
writel_relaxed(val, ring->regs + RING_BD_START_ADDR);
drivers/mailbox/bcm-flexrm-mailbox.c
1296
ring->bd_write_offset =
drivers/mailbox/bcm-flexrm-mailbox.c
1297
readl_relaxed(ring->regs + RING_BD_WRITE_PTR);
drivers/mailbox/bcm-flexrm-mailbox.c
1298
ring->bd_write_offset *= RING_DESC_SIZE;
drivers/mailbox/bcm-flexrm-mailbox.c
1301
val = CMPL_START_ADDR_VALUE(ring->cmpl_dma_base);
drivers/mailbox/bcm-flexrm-mailbox.c
1302
writel_relaxed(val, ring->regs + RING_CMPL_START_ADDR);
drivers/mailbox/bcm-flexrm-mailbox.c
1305
ring->cmpl_read_offset =
drivers/mailbox/bcm-flexrm-mailbox.c
1306
readl_relaxed(ring->regs + RING_CMPL_WRITE_PTR);
drivers/mailbox/bcm-flexrm-mailbox.c
1307
ring->cmpl_read_offset *= RING_DESC_SIZE;
drivers/mailbox/bcm-flexrm-mailbox.c
1310
readl_relaxed(ring->regs + RING_NUM_REQ_RECV_LS);
drivers/mailbox/bcm-flexrm-mailbox.c
1311
readl_relaxed(ring->regs + RING_NUM_REQ_RECV_MS);
drivers/mailbox/bcm-flexrm-mailbox.c
1312
readl_relaxed(ring->regs + RING_NUM_REQ_TRANS_LS);
drivers/mailbox/bcm-flexrm-mailbox.c
1313
readl_relaxed(ring->regs + RING_NUM_REQ_TRANS_MS);
drivers/mailbox/bcm-flexrm-mailbox.c
1314
readl_relaxed(ring->regs + RING_NUM_REQ_OUTSTAND);
drivers/mailbox/bcm-flexrm-mailbox.c
1318
val |= (ring->msi_timer_val << MSI_TIMER_VAL_SHIFT);
drivers/mailbox/bcm-flexrm-mailbox.c
1320
val |= (ring->msi_count_threshold & MSI_COUNT_MASK) << MSI_COUNT_SHIFT;
drivers/mailbox/bcm-flexrm-mailbox.c
1321
writel_relaxed(val, ring->regs + RING_MSI_CONTROL);
drivers/mailbox/bcm-flexrm-mailbox.c
1325
writel_relaxed(val, ring->regs + RING_CONTROL);
drivers/mailbox/bcm-flexrm-mailbox.c
1328
atomic_set(&ring->msg_send_count, 0);
drivers/mailbox/bcm-flexrm-mailbox.c
1329
atomic_set(&ring->msg_cmpl_count, 0);
drivers/mailbox/bcm-flexrm-mailbox.c
1334
free_irq(ring->irq, ring);
drivers/mailbox/bcm-flexrm-mailbox.c
1335
ring->irq_requested = false;
drivers/mailbox/bcm-flexrm-mailbox.c
1337
dma_pool_free(ring->mbox->cmpl_pool,
drivers/mailbox/bcm-flexrm-mailbox.c
1338
ring->cmpl_base, ring->cmpl_dma_base);
drivers/mailbox/bcm-flexrm-mailbox.c
1339
ring->cmpl_base = NULL;
drivers/mailbox/bcm-flexrm-mailbox.c
1341
dma_pool_free(ring->mbox->bd_pool,
drivers/mailbox/bcm-flexrm-mailbox.c
1342
ring->bd_base, ring->bd_dma_base);
drivers/mailbox/bcm-flexrm-mailbox.c
1343
ring->bd_base = NULL;
drivers/mailbox/bcm-flexrm-mailbox.c
1353
struct flexrm_ring *ring = chan->con_priv;
drivers/mailbox/bcm-flexrm-mailbox.c
1356
writel_relaxed(0x0, ring->regs + RING_CONTROL);
drivers/mailbox/bcm-flexrm-mailbox.c
1361
ring->regs + RING_CONTROL);
drivers/mailbox/bcm-flexrm-mailbox.c
1363
if (readl_relaxed(ring->regs + RING_FLUSH_DONE) &
drivers/mailbox/bcm-flexrm-mailbox.c
1369
dev_err(ring->mbox->dev,
drivers/mailbox/bcm-flexrm-mailbox.c
1370
"setting ring%d flush state timedout\n", ring->num);
drivers/mailbox/bcm-flexrm-mailbox.c
1374
writel_relaxed(0x0, ring->regs + RING_CONTROL);
drivers/mailbox/bcm-flexrm-mailbox.c
1376
if (!(readl_relaxed(ring->regs + RING_FLUSH_DONE) &
drivers/mailbox/bcm-flexrm-mailbox.c
1382
dev_err(ring->mbox->dev,
drivers/mailbox/bcm-flexrm-mailbox.c
1383
"clearing ring%d flush state timedout\n", ring->num);
drivers/mailbox/bcm-flexrm-mailbox.c
1387
msg = ring->requests[reqid];
drivers/mailbox/bcm-flexrm-mailbox.c
1392
ring->requests[reqid] = NULL;
drivers/mailbox/bcm-flexrm-mailbox.c
1395
flexrm_dma_unmap(ring->mbox->dev, msg);
drivers/mailbox/bcm-flexrm-mailbox.c
1403
bitmap_zero(ring->requests_bmap, RING_MAX_REQ_COUNT);
drivers/mailbox/bcm-flexrm-mailbox.c
1406
if (ring->irq_requested) {
drivers/mailbox/bcm-flexrm-mailbox.c
1407
irq_update_affinity_hint(ring->irq, NULL);
drivers/mailbox/bcm-flexrm-mailbox.c
1408
free_irq(ring->irq, ring);
drivers/mailbox/bcm-flexrm-mailbox.c
1409
ring->irq_requested = false;
drivers/mailbox/bcm-flexrm-mailbox.c
1413
if (ring->cmpl_base) {
drivers/mailbox/bcm-flexrm-mailbox.c
1414
dma_pool_free(ring->mbox->cmpl_pool,
drivers/mailbox/bcm-flexrm-mailbox.c
1415
ring->cmpl_base, ring->cmpl_dma_base);
drivers/mailbox/bcm-flexrm-mailbox.c
1416
ring->cmpl_base = NULL;
drivers/mailbox/bcm-flexrm-mailbox.c
1420
if (ring->bd_base) {
drivers/mailbox/bcm-flexrm-mailbox.c
1421
dma_pool_free(ring->mbox->bd_pool,
drivers/mailbox/bcm-flexrm-mailbox.c
1422
ring->bd_base, ring->bd_dma_base);
drivers/mailbox/bcm-flexrm-mailbox.c
1423
ring->bd_base = NULL;
drivers/mailbox/bcm-flexrm-mailbox.c
1438
struct flexrm_ring *ring;
drivers/mailbox/bcm-flexrm-mailbox.c
1453
ring = chan->con_priv;
drivers/mailbox/bcm-flexrm-mailbox.c
1454
ring->msi_count_threshold = pa->args[1];
drivers/mailbox/bcm-flexrm-mailbox.c
1455
ring->msi_timer_val = pa->args[2];
drivers/mailbox/bcm-flexrm-mailbox.c
1466
struct flexrm_ring *ring = &mbox->rings[desc->msi_index];
drivers/mailbox/bcm-flexrm-mailbox.c
1469
writel_relaxed(msg->address_lo, ring->regs + RING_MSI_ADDR_LS);
drivers/mailbox/bcm-flexrm-mailbox.c
1470
writel_relaxed(msg->address_hi, ring->regs + RING_MSI_ADDR_MS);
drivers/mailbox/bcm-flexrm-mailbox.c
1471
writel_relaxed(msg->data, ring->regs + RING_MSI_DATA_VALUE);
drivers/mailbox/bcm-flexrm-mailbox.c
1480
struct flexrm_ring *ring;
drivers/mailbox/bcm-flexrm-mailbox.c
1516
ring = devm_kcalloc(dev, mbox->num_rings, sizeof(*ring), GFP_KERNEL);
drivers/mailbox/bcm-flexrm-mailbox.c
1517
if (!ring) {
drivers/mailbox/bcm-flexrm-mailbox.c
1521
mbox->rings = ring;
drivers/mailbox/bcm-flexrm-mailbox.c
1526
ring = &mbox->rings[index];
drivers/mailbox/bcm-flexrm-mailbox.c
1527
ring->num = index;
drivers/mailbox/bcm-flexrm-mailbox.c
1528
ring->mbox = mbox;
drivers/mailbox/bcm-flexrm-mailbox.c
1536
ring->regs = regs;
drivers/mailbox/bcm-flexrm-mailbox.c
1538
ring->irq = UINT_MAX;
drivers/mailbox/bcm-flexrm-mailbox.c
1539
ring->irq_requested = false;
drivers/mailbox/bcm-flexrm-mailbox.c
1540
ring->msi_timer_val = MSI_TIMER_VAL_MASK;
drivers/mailbox/bcm-flexrm-mailbox.c
1541
ring->msi_count_threshold = 0x1;
drivers/mailbox/bcm-flexrm-mailbox.c
1542
memset(ring->requests, 0, sizeof(ring->requests));
drivers/mailbox/bcm-flexrm-mailbox.c
1543
ring->bd_base = NULL;
drivers/mailbox/bcm-flexrm-mailbox.c
1544
ring->bd_dma_base = 0;
drivers/mailbox/bcm-flexrm-mailbox.c
1545
ring->cmpl_base = NULL;
drivers/mailbox/bcm-flexrm-mailbox.c
1546
ring->cmpl_dma_base = 0;
drivers/mailbox/bcm-flexrm-mailbox.c
1547
atomic_set(&ring->msg_send_count, 0);
drivers/mailbox/bcm-flexrm-mailbox.c
1548
atomic_set(&ring->msg_cmpl_count, 0);
drivers/mailbox/bcm-flexrm-mailbox.c
1549
spin_lock_init(&ring->lock);
drivers/mailbox/bcm-flexrm-mailbox.c
1550
bitmap_zero(ring->requests_bmap, RING_MAX_REQ_COUNT);
drivers/mailbox/bcm-flexrm-mailbox.c
1551
ring->cmpl_read_offset = 0;
drivers/mailbox/bcm-flexrm-mailbox.c
923
struct flexrm_ring *ring;
drivers/mailbox/bcm-flexrm-mailbox.c
930
ring = &mbox->rings[i];
drivers/mailbox/bcm-flexrm-mailbox.c
931
if (readl(ring->regs + RING_CONTROL) &
drivers/mailbox/bcm-flexrm-mailbox.c
938
ring->num, state,
drivers/mailbox/bcm-flexrm-mailbox.c
939
(unsigned long long)ring->bd_dma_base,
drivers/mailbox/bcm-flexrm-mailbox.c
941
(unsigned long long)ring->cmpl_dma_base,
drivers/mailbox/bcm-flexrm-mailbox.c
951
struct flexrm_ring *ring;
drivers/mailbox/bcm-flexrm-mailbox.c
958
ring = &mbox->rings[i];
drivers/mailbox/bcm-flexrm-mailbox.c
959
bd_read_offset = readl_relaxed(ring->regs + RING_BD_READ_PTR);
drivers/mailbox/bcm-flexrm-mailbox.c
960
val = readl_relaxed(ring->regs + RING_BD_START_ADDR);
drivers/mailbox/bcm-flexrm-mailbox.c
963
ring->bd_dma_base);
drivers/mailbox/bcm-flexrm-mailbox.c
965
ring->num,
drivers/mailbox/bcm-flexrm-mailbox.c
967
(u32)ring->bd_write_offset,
drivers/mailbox/bcm-flexrm-mailbox.c
968
(u32)ring->cmpl_read_offset,
drivers/mailbox/bcm-flexrm-mailbox.c
969
(u32)atomic_read(&ring->msg_send_count),
drivers/mailbox/bcm-flexrm-mailbox.c
970
(u32)atomic_read(&ring->msg_cmpl_count));
drivers/mailbox/bcm-flexrm-mailbox.c
974
static int flexrm_new_request(struct flexrm_ring *ring,
drivers/mailbox/bcm-flexrm-mailbox.c
991
spin_lock_irqsave(&ring->lock, flags);
drivers/mailbox/bcm-flexrm-mailbox.c
992
reqid = bitmap_find_free_region(ring->requests_bmap,
drivers/mailbox/bcm-flexrm-mailbox.c
994
spin_unlock_irqrestore(&ring->lock, flags);
drivers/mailbox/bcm-flexrm-mailbox.c
997
ring->requests[reqid] = msg;
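The bcm-flexrm entries above reserve a request id by finding a free bit in requests_bmap under the ring lock, and release it the same way on completion or error. A user-space sketch of that slot allocator, using a mutex where the driver uses a spinlock with interrupts disabled; sizes and names here are examples only:

#include <pthread.h>

/* Illustrative request-id allocator in the spirit of the
 * bitmap_find_free_region()/bitmap_release_region() calls listed above. */
#define MAX_REQ 1024

static unsigned long req_bmap[MAX_REQ / (8 * sizeof(unsigned long))];
static pthread_mutex_t req_lock = PTHREAD_MUTEX_INITIALIZER;

static int alloc_reqid(void)
{
        int id = -1;

        pthread_mutex_lock(&req_lock);
        for (int i = 0; i < MAX_REQ; i++) {
                unsigned long *word = &req_bmap[i / (8 * sizeof(unsigned long))];
                unsigned long bit = 1UL << (i % (8 * sizeof(unsigned long)));

                if (!(*word & bit)) {   /* first clear bit is a free request id */
                        *word |= bit;
                        id = i;
                        break;
                }
        }
        pthread_mutex_unlock(&req_lock);
        return id;                      /* -1 if the ring is out of request ids */
}

static void free_reqid(int id)
{
        pthread_mutex_lock(&req_lock);
        req_bmap[id / (8 * sizeof(unsigned long))] &=
                ~(1UL << (id % (8 * sizeof(unsigned long))));
        pthread_mutex_unlock(&req_lock);
}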
drivers/misc/genwqe/card_utils.c
832
int entries = 0, ring, traps, traces, trace_entries;
drivers/misc/genwqe/card_utils.c
864
for (ring = 0; ring < 8; ring++) {
drivers/misc/genwqe/card_utils.c
865
addr = GENWQE_UID_OFFS(uid) | IO_EXTENDED_DIAG_MAP(ring);
drivers/misc/genwqe/card_utils.c
890
int i, traps, traces, trace, trace_entries, trace_entry, ring;
drivers/misc/genwqe/card_utils.c
932
for (ring = 0; ring < 8; ring++) { /* 0 is fls, 1 is fds,
drivers/misc/genwqe/card_utils.c
934
addr = GENWQE_UID_OFFS(uid) | IO_EXTENDED_DIAG_MAP(ring);
drivers/misc/genwqe/card_utils.c
949
GENWQE_EXTENDED_DIAG_SELECTOR(ring, trace);
drivers/net/can/c_can/c_can.h
228
static inline u8 c_can_get_tx_head(const struct c_can_tx_ring *ring)
drivers/net/can/c_can/c_can.h
230
return ring->head & (ring->obj_num - 1);
drivers/net/can/c_can/c_can.h
233
static inline u8 c_can_get_tx_tail(const struct c_can_tx_ring *ring)
drivers/net/can/c_can/c_can.h
235
return ring->tail & (ring->obj_num - 1);
drivers/net/can/c_can/c_can.h
239
const struct c_can_tx_ring *ring)
drivers/net/can/c_can/c_can.h
241
u8 head = c_can_get_tx_head(ring);
drivers/net/can/c_can/c_can.h
242
u8 tail = c_can_get_tx_tail(ring);
drivers/net/can/c_can/c_can.h
245
return ring->obj_num - (ring->head - ring->tail);
drivers/net/can/c_can/c_can.h
253
return ring->obj_num - head;
drivers/net/can/c_can/c_can_ethtool.c
15
struct ethtool_ringparam *ring,
drivers/net/can/c_can/c_can_ethtool.c
21
ring->rx_max_pending = priv->msg_obj_num;
drivers/net/can/c_can/c_can_ethtool.c
22
ring->tx_max_pending = priv->msg_obj_num;
drivers/net/can/c_can/c_can_ethtool.c
23
ring->rx_pending = priv->msg_obj_rx_num;
drivers/net/can/c_can/c_can_ethtool.c
24
ring->tx_pending = priv->msg_obj_tx_num;
drivers/net/can/flexcan/flexcan-ethtool.c
21
flexcan_get_ringparam(struct net_device *ndev, struct ethtool_ringparam *ring,
drivers/net/can/flexcan/flexcan-ethtool.c
27
ring->rx_max_pending = priv->mb_count;
drivers/net/can/flexcan/flexcan-ethtool.c
28
ring->tx_max_pending = priv->mb_count;
drivers/net/can/flexcan/flexcan-ethtool.c
31
ring->rx_pending = priv->offload.mb_last -
drivers/net/can/flexcan/flexcan-ethtool.c
34
ring->rx_pending = 6; /* RX-FIFO depth is fixed */
drivers/net/can/flexcan/flexcan-ethtool.c
37
ring->tx_pending = 1;
drivers/net/can/spi/mcp251xfd/mcp251xfd-chip-fifo.c
21
const struct mcp251xfd_rx_ring *ring)
drivers/net/can/spi/mcp251xfd/mcp251xfd-chip-fifo.c
32
ring->obj_num - 1) |
drivers/net/can/spi/mcp251xfd/mcp251xfd-chip-fifo.c
45
MCP251XFD_REG_FIFOCON(ring->fifo_nr), fifo_con);
drivers/net/can/spi/mcp251xfd/mcp251xfd-chip-fifo.c
50
const struct mcp251xfd_rx_ring *ring)
drivers/net/can/spi/mcp251xfd/mcp251xfd-chip-fifo.c
54
fltcon = MCP251XFD_REG_FLTCON_FLTEN(ring->nr) |
drivers/net/can/spi/mcp251xfd/mcp251xfd-chip-fifo.c
55
MCP251XFD_REG_FLTCON_FBP(ring->nr, ring->fifo_nr);
drivers/net/can/spi/mcp251xfd/mcp251xfd-chip-fifo.c
58
MCP251XFD_REG_FLTCON(ring->nr >> 2),
drivers/net/can/spi/mcp251xfd/mcp251xfd-chip-fifo.c
59
MCP251XFD_REG_FLTCON_FLT_MASK(ring->nr),
drivers/net/can/spi/mcp251xfd/mcp251xfd-core.c
900
struct mcp251xfd_rx_ring *ring;
drivers/net/can/spi/mcp251xfd/mcp251xfd-core.c
913
mcp251xfd_for_each_rx_ring(priv, ring, i) {
drivers/net/can/spi/mcp251xfd/mcp251xfd-core.c
914
if (!(rxovif & BIT(ring->fifo_nr)))
drivers/net/can/spi/mcp251xfd/mcp251xfd-core.c
922
ring->nr);
drivers/net/can/spi/mcp251xfd/mcp251xfd-core.c
927
ring->nr);
drivers/net/can/spi/mcp251xfd/mcp251xfd-core.c
931
MCP251XFD_REG_FIFOSTA(ring->fifo_nr),
drivers/net/can/spi/mcp251xfd/mcp251xfd-ethtool.c
16
struct ethtool_ringparam *ring,
drivers/net/can/spi/mcp251xfd/mcp251xfd-ethtool.c
25
ring->rx_max_pending = layout.max_rx;
drivers/net/can/spi/mcp251xfd/mcp251xfd-ethtool.c
26
ring->tx_max_pending = layout.max_tx;
drivers/net/can/spi/mcp251xfd/mcp251xfd-ethtool.c
28
ring->rx_pending = priv->rx_obj_num;
drivers/net/can/spi/mcp251xfd/mcp251xfd-ethtool.c
29
ring->tx_pending = priv->tx->obj_num;
drivers/net/can/spi/mcp251xfd/mcp251xfd-ethtool.c
34
struct ethtool_ringparam *ring,
drivers/net/can/spi/mcp251xfd/mcp251xfd-ethtool.c
42
can_ram_get_layout(&layout, &mcp251xfd_ram_config, ring, NULL, fd_mode);
drivers/net/can/spi/mcp251xfd/mcp251xfd-ethtool.c
93
const struct ethtool_ringparam ring = {
drivers/net/can/spi/mcp251xfd/mcp251xfd-ethtool.c
99
can_ram_get_layout(&layout, &mcp251xfd_ram_config, &ring, ec, fd_mode);
drivers/net/can/spi/mcp251xfd/mcp251xfd-ram.c
105
num_rx = ring->rx_pending;
drivers/net/can/spi/mcp251xfd/mcp251xfd-ram.c
131
num_tx = min_t(u8, ring->tx_pending, num_tx);
drivers/net/can/spi/mcp251xfd/mcp251xfd-ram.c
62
const struct ethtool_ringparam *ring,
drivers/net/can/spi/mcp251xfd/mcp251xfd-ram.c
97
if (ring) {
drivers/net/can/spi/mcp251xfd/mcp251xfd-ram.h
58
const struct ethtool_ringparam *ring,
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
110
mcp251xfd_for_each_rx_ring(priv, ring, n) {
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
111
if (reg == MCP251XFD_REG_FIFOCON(ring->fifo_nr))
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
113
if (reg == MCP251XFD_REG_FIFOSTA(ring->fifo_nr))
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
94
struct mcp251xfd_rx_ring *ring;
drivers/net/can/spi/mcp251xfd/mcp251xfd-ring.c
134
const struct mcp251xfd_tx_ring *ring,
drivers/net/can/spi/mcp251xfd/mcp251xfd-ring.c
143
addr = mcp251xfd_get_tx_obj_addr(ring, n);
drivers/net/can/spi/mcp251xfd/mcp251xfd-ring.c
160
xfer->tx_buf = &ring->rts_buf;
drivers/net/can/spi/mcp251xfd/mcp251xfd-ring.c
417
struct mcp251xfd_rx_ring *ring = priv->rx[0];
drivers/net/can/spi/mcp251xfd/mcp251xfd-ring.c
422
spi_async(priv->spi, &ring->irq_enable_msg);
drivers/net/can/spi/mcp251xfd/mcp251xfd-ring.c
431
struct mcp251xfd_tef_ring *ring = priv->tef;
drivers/net/can/spi/mcp251xfd/mcp251xfd-ring.c
436
spi_async(priv->spi, &ring->irq_enable_msg);
drivers/net/can/spi/mcp251xfd/mcp251xfd-ring.c
480
const struct ethtool_ringparam ring = {
drivers/net/can/spi/mcp251xfd/mcp251xfd-ring.c
494
can_ram_get_layout(&layout, &mcp251xfd_ram_config, &ring, &ec, fd_mode);
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
100
err = mcp251xfd_check_rx_tail(priv, ring);
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
103
tail = mcp251xfd_get_rx_tail(ring);
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
109
BUILD_BUG_ON(sizeof(ring->obj_num) != sizeof(chip_head));
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
110
BUILD_BUG_ON(sizeof(ring->obj_num) != sizeof(tail));
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
111
BUILD_BUG_ON(sizeof(ring->obj_num) != sizeof(len));
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
166
struct mcp251xfd_rx_ring *ring,
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
185
if (timestamp <= ring->last_valid) {
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
190
ring->last_valid = timestamp;
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
213
const struct mcp251xfd_rx_ring *ring,
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
221
mcp251xfd_get_rx_obj_addr(ring, offset),
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
223
len * ring->obj_size / val_bytes);
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
230
struct mcp251xfd_rx_ring *ring,
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
239
ring->head += len;
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
250
offset = ARRAY_SIZE(ring->uinc_xfer) - len;
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
252
ring->uinc_xfer + offset, len);
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
256
ring->tail += len;
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
263
struct mcp251xfd_rx_ring *ring)
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
265
struct mcp251xfd_hw_rx_obj_canfd *hw_rx_obj = ring->obj;
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
269
err = mcp251xfd_get_rx_len(priv, ring, &len);
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
273
while ((l = mcp251xfd_get_rx_linear_len(ring, len))) {
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
274
rx_tail = mcp251xfd_get_rx_tail(ring);
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
276
err = mcp251xfd_rx_obj_read(priv, ring, hw_rx_obj,
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
282
err = mcp251xfd_handle_rxif_one(priv, ring,
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
284
i * ring->obj_size);
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
293
return mcp251xfd_handle_rxif_ring_uinc(priv, ring, i);
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
298
err = mcp251xfd_handle_rxif_ring_uinc(priv, ring, l);
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
31
const struct mcp251xfd_rx_ring *ring,
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
310
struct mcp251xfd_rx_ring *ring;
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
313
mcp251xfd_for_each_rx_ring(priv, ring, n) {
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
317
if ((ring->nr > 0 || !priv->rx_obj_num_coalesce_irq) &&
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
318
!(priv->regs_status.rxif & BIT(ring->fifo_nr)))
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
321
err = mcp251xfd_handle_rxif_ring(priv, ring);
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
37
err = regmap_read(priv->map_reg, MCP251XFD_REG_FIFOUA(ring->fifo_nr),
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
42
fifo_ua -= ring->base - MCP251XFD_RAM_START;
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
43
*rx_tail = fifo_ua / ring->obj_size;
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
50
const struct mcp251xfd_rx_ring *ring)
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
58
err = mcp251xfd_rx_tail_get_from_chip(priv, ring, &rx_tail_chip);
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
62
rx_tail = mcp251xfd_get_rx_tail(ring);
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
75
const struct mcp251xfd_rx_ring *ring,
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
78
const u8 shift = ring->obj_num_shift_to_u8;
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
83
err = regmap_read(priv->map_reg, MCP251XFD_REG_FIFOSTA(ring->fifo_nr),
drivers/net/can/spi/mcp251xfd/mcp251xfd-rx.c
94
*len_p = ring->obj_num;
drivers/net/can/spi/mcp251xfd/mcp251xfd-tef.c
256
struct mcp251xfd_tef_ring *ring = priv->tef;
drivers/net/can/spi/mcp251xfd/mcp251xfd-tef.c
260
ring->head += len;
drivers/net/can/spi/mcp251xfd/mcp251xfd-tef.c
271
offset = ARRAY_SIZE(ring->uinc_xfer) - len;
drivers/net/can/spi/mcp251xfd/mcp251xfd-tef.c
273
ring->uinc_xfer + offset, len);
drivers/net/can/spi/mcp251xfd/mcp251xfd.h
856
mcp251xfd_get_tx_obj_addr(const struct mcp251xfd_tx_ring *ring, u8 n)
drivers/net/can/spi/mcp251xfd/mcp251xfd.h
858
return ring->base + ring->obj_size * n;
drivers/net/can/spi/mcp251xfd/mcp251xfd.h
862
mcp251xfd_get_rx_obj_addr(const struct mcp251xfd_rx_ring *ring, u8 n)
drivers/net/can/spi/mcp251xfd/mcp251xfd.h
864
return ring->base + ring->obj_size * n;
drivers/net/can/spi/mcp251xfd/mcp251xfd.h
900
static inline u8 mcp251xfd_get_tx_head(const struct mcp251xfd_tx_ring *ring)
drivers/net/can/spi/mcp251xfd/mcp251xfd.h
902
return ring->head & (ring->obj_num - 1);
drivers/net/can/spi/mcp251xfd/mcp251xfd.h
905
static inline u8 mcp251xfd_get_tx_tail(const struct mcp251xfd_tx_ring *ring)
drivers/net/can/spi/mcp251xfd/mcp251xfd.h
907
return ring->tail & (ring->obj_num - 1);
drivers/net/can/spi/mcp251xfd/mcp251xfd.h
910
static inline u8 mcp251xfd_get_tx_free(const struct mcp251xfd_tx_ring *ring)
drivers/net/can/spi/mcp251xfd/mcp251xfd.h
912
return ring->obj_num - (ring->head - ring->tail);
drivers/net/can/spi/mcp251xfd/mcp251xfd.h
929
static inline u8 mcp251xfd_get_rx_head(const struct mcp251xfd_rx_ring *ring)
drivers/net/can/spi/mcp251xfd/mcp251xfd.h
931
return ring->head & (ring->obj_num - 1);
drivers/net/can/spi/mcp251xfd/mcp251xfd.h
934
static inline u8 mcp251xfd_get_rx_tail(const struct mcp251xfd_rx_ring *ring)
drivers/net/can/spi/mcp251xfd/mcp251xfd.h
936
return ring->tail & (ring->obj_num - 1);
drivers/net/can/spi/mcp251xfd/mcp251xfd.h
940
mcp251xfd_get_rx_linear_len(const struct mcp251xfd_rx_ring *ring, u8 len)
drivers/net/can/spi/mcp251xfd/mcp251xfd.h
942
return min_t(u8, len, ring->obj_num - mcp251xfd_get_rx_tail(ring));
drivers/net/can/spi/mcp251xfd/mcp251xfd.h
945
#define mcp251xfd_for_each_tx_obj(ring, _obj, n) \
drivers/net/can/spi/mcp251xfd/mcp251xfd.h
946
for ((n) = 0, (_obj) = &(ring)->obj[(n)]; \
drivers/net/can/spi/mcp251xfd/mcp251xfd.h
947
(n) < (ring)->obj_num; \
drivers/net/can/spi/mcp251xfd/mcp251xfd.h
948
(n)++, (_obj) = &(ring)->obj[(n)])
drivers/net/can/spi/mcp251xfd/mcp251xfd.h
950
#define mcp251xfd_for_each_rx_ring(priv, ring, n) \
drivers/net/can/spi/mcp251xfd/mcp251xfd.h
951
for ((n) = 0, (ring) = *((priv)->rx + (n)); \
drivers/net/can/spi/mcp251xfd/mcp251xfd.h
953
(n)++, (ring) = *((priv)->rx + (n)))
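The c_can and mcp251xfd helpers listed above keep free-running head and tail counters and mask them with obj_num - 1 only when indexing. A small standalone sketch of that accounting, assuming obj_num is a power of two (which the masking requires); the struct is illustrative, not the driver's:

#include <stdint.h>

/* Free-running head/tail accounting for a power-of-two ring, matching the
 * masking used by the c_can and mcp251xfd helpers in the listing. */
struct tx_ring {
        uint8_t head;           /* incremented by the producer, never wrapped */
        uint8_t tail;           /* incremented by the consumer, never wrapped */
        uint8_t obj_num;        /* must be a power of two */
};

static inline uint8_t tx_head_idx(const struct tx_ring *r)
{
        return r->head & (r->obj_num - 1);
}

static inline uint8_t tx_tail_idx(const struct tx_ring *r)
{
        return r->tail & (r->obj_num - 1);
}

static inline uint8_t tx_free(const struct tx_ring *r)
{
        /* head - tail counts objects in flight; truncating the result to u8
         * keeps the arithmetic correct across counter wrap. */
        return r->obj_num - (r->head - r->tail);
}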
drivers/net/ethernet/3com/typhoon.c
1559
struct basic_ring *ring = &tp->rxBuffRing;
drivers/net/ethernet/3com/typhoon.c
1562
if ((ring->lastWrite + sizeof(*r)) % (RXFREE_ENTRIES * sizeof(*r)) ==
drivers/net/ethernet/3com/typhoon.c
1571
r = (struct rx_free *) (ring->ringBase + ring->lastWrite);
drivers/net/ethernet/3com/typhoon.c
1572
typhoon_inc_rxfree_index(&ring->lastWrite, 1);
drivers/net/ethernet/3com/typhoon.c
1578
indexes->rxBuffReady = cpu_to_le32(ring->lastWrite);
drivers/net/ethernet/3com/typhoon.c
1586
struct basic_ring *ring = &tp->rxBuffRing;
drivers/net/ethernet/3com/typhoon.c
1593
if ((ring->lastWrite + sizeof(*r)) % (RXFREE_ENTRIES * sizeof(*r)) ==
drivers/net/ethernet/3com/typhoon.c
1614
r = (struct rx_free *) (ring->ringBase + ring->lastWrite);
drivers/net/ethernet/3com/typhoon.c
1615
typhoon_inc_rxfree_index(&ring->lastWrite, 1);
drivers/net/ethernet/3com/typhoon.c
1623
indexes->rxBuffReady = cpu_to_le32(ring->lastWrite);
drivers/net/ethernet/3com/typhoon.c
456
struct basic_ring *ring = &tp->cmdRing;
drivers/net/ethernet/3com/typhoon.c
464
cmd = (struct cmd_desc *)(ring->ringBase + ring->lastWrite);
drivers/net/ethernet/3com/typhoon.c
465
typhoon_inc_cmd_index(&ring->lastWrite, 1);
drivers/net/ethernet/3com/typhoon.c
469
iowrite32(ring->lastWrite, tp->ioaddr + TYPHOON_REG_CMD_READY);
drivers/net/ethernet/3com/typhoon.c
563
typhoon_num_free_tx(struct transmit_ring *ring)
drivers/net/ethernet/3com/typhoon.c
566
return typhoon_num_free(ring->lastWrite, ring->lastRead, TXLO_ENTRIES);
drivers/net/ethernet/3com/typhoon.c
574
struct basic_ring *ring = &tp->cmdRing;
drivers/net/ethernet/3com/typhoon.c
606
if (unlikely(ring->lastWrite + len > COMMAND_RING_SIZE)) {
drivers/net/ethernet/3com/typhoon.c
607
wrap_len = ring->lastWrite + len - COMMAND_RING_SIZE;
drivers/net/ethernet/3com/typhoon.c
608
len = COMMAND_RING_SIZE - ring->lastWrite;
drivers/net/ethernet/3com/typhoon.c
611
memcpy(ring->ringBase + ring->lastWrite, cmd, len);
drivers/net/ethernet/3com/typhoon.c
615
memcpy(ring->ringBase, wrap_ptr, wrap_len);
drivers/net/ethernet/3com/typhoon.c
618
typhoon_inc_cmd_index(&ring->lastWrite, num_cmd);
drivers/net/ethernet/3com/typhoon.c
623
iowrite32(ring->lastWrite, tp->ioaddr + TYPHOON_REG_CMD_READY);
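The typhoon entries above track the command ring with lastWrite/lastRead offsets and query free space via typhoon_num_free(), whose body the listing does not show. As a generic illustration only (not the driver's implementation): with write and read indices that wrap at 'entries' and one slot always kept empty to distinguish full from empty, the free count can be computed as below.

/* Generic free-slot count for a ring with one slot kept empty; illustrative,
 * not taken from typhoon.c. */
static inline int ring_num_free(int last_write, int last_read, int entries)
{
        return (entries + last_read - last_write - 1) % entries;
}

For example, with last_write == last_read the ring is empty and the formula yields entries - 1; with the writer one slot behind the reader it yields 0.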
drivers/net/ethernet/actions/owl-emac.c
140
static unsigned int owl_emac_ring_num_unused(struct owl_emac_ring *ring)
drivers/net/ethernet/actions/owl-emac.c
142
return CIRC_SPACE(ring->head, ring->tail, ring->size);
drivers/net/ethernet/actions/owl-emac.c
145
static unsigned int owl_emac_ring_get_next(struct owl_emac_ring *ring,
drivers/net/ethernet/actions/owl-emac.c
148
return (cur + 1) & (ring->size - 1);
drivers/net/ethernet/actions/owl-emac.c
151
static void owl_emac_ring_push_head(struct owl_emac_ring *ring)
drivers/net/ethernet/actions/owl-emac.c
153
ring->head = owl_emac_ring_get_next(ring, ring->head);
drivers/net/ethernet/actions/owl-emac.c
156
static void owl_emac_ring_pop_tail(struct owl_emac_ring *ring)
drivers/net/ethernet/actions/owl-emac.c
158
ring->tail = owl_emac_ring_get_next(ring, ring->tail);
drivers/net/ethernet/actions/owl-emac.c
181
struct owl_emac_ring *ring = &priv->rx_ring;
drivers/net/ethernet/actions/owl-emac.c
189
for (i = 0; i < ring->size; i++) {
drivers/net/ethernet/actions/owl-emac.c
200
desc = &ring->descs[i];
drivers/net/ethernet/actions/owl-emac.c
206
ring->skbs[i] = skb;
drivers/net/ethernet/actions/owl-emac.c
207
ring->skbs_dma[i] = dma_addr;
drivers/net/ethernet/actions/owl-emac.c
212
ring->head = 0;
drivers/net/ethernet/actions/owl-emac.c
213
ring->tail = 0;
drivers/net/ethernet/actions/owl-emac.c
220
struct owl_emac_ring *ring = &priv->tx_ring;
drivers/net/ethernet/actions/owl-emac.c
224
for (i = 0; i < ring->size; i++) {
drivers/net/ethernet/actions/owl-emac.c
225
desc = &ring->descs[i];
drivers/net/ethernet/actions/owl-emac.c
235
memset(ring->skbs_dma, 0, sizeof(dma_addr_t) * ring->size);
drivers/net/ethernet/actions/owl-emac.c
237
ring->head = 0;
drivers/net/ethernet/actions/owl-emac.c
238
ring->tail = 0;
drivers/net/ethernet/actions/owl-emac.c
243
struct owl_emac_ring *ring = &priv->rx_ring;
drivers/net/ethernet/actions/owl-emac.c
246
for (i = 0; i < ring->size; i++) {
drivers/net/ethernet/actions/owl-emac.c
247
ring->descs[i].status = 0;
drivers/net/ethernet/actions/owl-emac.c
249
if (!ring->skbs_dma[i])
drivers/net/ethernet/actions/owl-emac.c
252
owl_emac_dma_unmap_rx(priv, ring->skbs[i], ring->skbs_dma[i]);
drivers/net/ethernet/actions/owl-emac.c
253
ring->skbs_dma[i] = 0;
drivers/net/ethernet/actions/owl-emac.c
255
dev_kfree_skb(ring->skbs[i]);
drivers/net/ethernet/actions/owl-emac.c
256
ring->skbs[i] = NULL;
drivers/net/ethernet/actions/owl-emac.c
262
struct owl_emac_ring *ring = &priv->tx_ring;
drivers/net/ethernet/actions/owl-emac.c
265
for (i = 0; i < ring->size; i++) {
drivers/net/ethernet/actions/owl-emac.c
266
ring->descs[i].status = 0;
drivers/net/ethernet/actions/owl-emac.c
268
if (!ring->skbs_dma[i])
drivers/net/ethernet/actions/owl-emac.c
271
owl_emac_dma_unmap_tx(priv, ring->skbs[i], ring->skbs_dma[i]);
drivers/net/ethernet/actions/owl-emac.c
272
ring->skbs_dma[i] = 0;
drivers/net/ethernet/actions/owl-emac.c
274
dev_kfree_skb(ring->skbs[i]);
drivers/net/ethernet/actions/owl-emac.c
275
ring->skbs[i] = NULL;
drivers/net/ethernet/actions/owl-emac.c
279
static int owl_emac_ring_alloc(struct device *dev, struct owl_emac_ring *ring,
drivers/net/ethernet/actions/owl-emac.c
282
ring->descs = dmam_alloc_coherent(dev,
drivers/net/ethernet/actions/owl-emac.c
284
&ring->descs_dma, GFP_KERNEL);
drivers/net/ethernet/actions/owl-emac.c
285
if (!ring->descs)
drivers/net/ethernet/actions/owl-emac.c
288
ring->skbs = devm_kcalloc(dev, size, sizeof(struct sk_buff *),
drivers/net/ethernet/actions/owl-emac.c
290
if (!ring->skbs)
drivers/net/ethernet/actions/owl-emac.c
293
ring->skbs_dma = devm_kcalloc(dev, size, sizeof(dma_addr_t),
drivers/net/ethernet/actions/owl-emac.c
295
if (!ring->skbs_dma)
drivers/net/ethernet/actions/owl-emac.c
298
ring->size = size;
drivers/net/ethernet/actions/owl-emac.c
492
struct owl_emac_ring *ring = &priv->tx_ring;
drivers/net/ethernet/actions/owl-emac.c
515
tx_head = ring->head;
drivers/net/ethernet/actions/owl-emac.c
516
desc = &ring->descs[tx_head];
drivers/net/ethernet/actions/owl-emac.c
523
!owl_emac_ring_num_unused(ring)) {
drivers/net/ethernet/actions/owl-emac.c
530
ring->skbs[tx_head] = skb;
drivers/net/ethernet/actions/owl-emac.c
531
ring->skbs_dma[tx_head] = dma_addr;
drivers/net/ethernet/actions/owl-emac.c
542
owl_emac_ring_push_head(ring);
drivers/net/ethernet/actions/owl-emac.c
570
struct owl_emac_ring *ring = &priv->tx_ring;
drivers/net/ethernet/actions/owl-emac.c
586
tx_head = ring->head;
drivers/net/ethernet/actions/owl-emac.c
587
desc = &ring->descs[tx_head];
drivers/net/ethernet/actions/owl-emac.c
593
if (!owl_emac_ring_num_unused(ring) ||
drivers/net/ethernet/actions/owl-emac.c
605
ring->skbs[tx_head] = skb;
drivers/net/ethernet/actions/owl-emac.c
606
ring->skbs_dma[tx_head] = dma_addr;
drivers/net/ethernet/actions/owl-emac.c
618
owl_emac_ring_push_head(ring);
drivers/net/ethernet/actions/owl-emac.c
633
struct owl_emac_ring *ring = &priv->tx_ring;
drivers/net/ethernet/actions/owl-emac.c
640
tx_tail = ring->tail;
drivers/net/ethernet/actions/owl-emac.c
641
desc = &ring->descs[tx_tail];
drivers/net/ethernet/actions/owl-emac.c
673
netdev->stats.tx_bytes += ring->skbs[tx_tail]->len;
drivers/net/ethernet/actions/owl-emac.c
680
skb = ring->skbs[tx_tail];
drivers/net/ethernet/actions/owl-emac.c
681
owl_emac_dma_unmap_tx(priv, skb, ring->skbs_dma[tx_tail]);
drivers/net/ethernet/actions/owl-emac.c
684
ring->skbs[tx_tail] = NULL;
drivers/net/ethernet/actions/owl-emac.c
685
ring->skbs_dma[tx_tail] = 0;
drivers/net/ethernet/actions/owl-emac.c
687
owl_emac_ring_pop_tail(ring);
drivers/net/ethernet/actions/owl-emac.c
697
struct owl_emac_ring *ring = &priv->tx_ring;
drivers/net/ethernet/actions/owl-emac.c
704
while (ring->tail != ring->head) {
drivers/net/ethernet/actions/owl-emac.c
721
if (unlikely(!owl_emac_ring_num_unused(ring))) {
drivers/net/ethernet/actions/owl-emac.c
722
tx_next = ring->tail;
drivers/net/ethernet/actions/owl-emac.c
724
while ((tx_next = owl_emac_ring_get_next(ring, tx_next)) != ring->head) {
drivers/net/ethernet/actions/owl-emac.c
725
status = READ_ONCE(ring->descs[tx_next].status);
drivers/net/ethernet/actions/owl-emac.c
733
status = READ_ONCE(ring->descs[ring->tail].status);
drivers/net/ethernet/actions/owl-emac.c
736
WRITE_ONCE(ring->descs[ring->tail].status, status);
drivers/net/ethernet/actions/owl-emac.c
748
struct owl_emac_ring *ring = &priv->rx_ring;
drivers/net/ethernet/actions/owl-emac.c
761
rx_tail = ring->tail;
drivers/net/ethernet/actions/owl-emac.c
762
desc = &ring->descs[rx_tail];
drivers/net/ethernet/actions/owl-emac.c
772
curr_skb = ring->skbs[rx_tail];
drivers/net/ethernet/actions/owl-emac.c
773
curr_dma = ring->skbs_dma[rx_tail];
drivers/net/ethernet/actions/owl-emac.c
774
owl_emac_ring_pop_tail(ring);
drivers/net/ethernet/actions/owl-emac.c
850
ring->skbs[ring->head] = new_skb;
drivers/net/ethernet/actions/owl-emac.c
851
ring->skbs_dma[ring->head] = new_dma;
drivers/net/ethernet/actions/owl-emac.c
857
owl_emac_ring_push_head(ring);
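The owl-emac entries above lean on CIRC_SPACE() for producer-side accounting and on power-of-two masking for the next-index computation. Below is a standalone reimplementation of the same accounting for illustration (the kernel's own definitions live in include/linux/circ_buf.h); the sizes used are examples:

#include <stdio.h>

/* Standalone CIRC_CNT()/CIRC_SPACE() accounting; 'size' must be a power of two. */
#define CIRC_CNT(head, tail, size)   (((head) - (tail)) & ((size) - 1))
#define CIRC_SPACE(head, tail, size) CIRC_CNT((tail), ((head) + 1), (size))

int main(void)
{
        unsigned int size = 8, head = 0, tail = 0;

        /* Producer fills three slots. */
        head = (head + 3) & (size - 1);
        printf("used %u, free %u\n",
               CIRC_CNT(head, tail, size),      /* 3 slots occupied */
               CIRC_SPACE(head, tail, size));   /* 4 free; one slot stays empty */
        return 0;
}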
drivers/net/ethernet/amazon/ena/ena_ethtool.c
222
struct ena_ring *ring;
drivers/net/ethernet/amazon/ena/ena_ethtool.c
229
ring = &adapter->tx_ring[i];
drivers/net/ethernet/amazon/ena/ena_ethtool.c
234
ptr = (u64 *)&ring->tx_stats + ena_stats->stat_offset;
drivers/net/ethernet/amazon/ena/ena_ethtool.c
236
ena_safe_update_stat(ptr, (*data)++, &ring->syncp);
drivers/net/ethernet/amazon/ena/ena_ethtool.c
241
ring = &adapter->rx_ring[i];
drivers/net/ethernet/amazon/ena/ena_ethtool.c
246
ptr = (u64 *)&ring->rx_stats +
drivers/net/ethernet/amazon/ena/ena_ethtool.c
249
ena_safe_update_stat(ptr, (*data)++, &ring->syncp);
drivers/net/ethernet/amazon/ena/ena_ethtool.c
603
struct ethtool_ringparam *ring,
drivers/net/ethernet/amazon/ena/ena_ethtool.c
609
ring->tx_max_pending = adapter->max_tx_ring_size;
drivers/net/ethernet/amazon/ena/ena_ethtool.c
610
ring->rx_max_pending = adapter->max_rx_ring_size;
drivers/net/ethernet/amazon/ena/ena_ethtool.c
626
ring->tx_pending = adapter->tx_ring[0].ring_size;
drivers/net/ethernet/amazon/ena/ena_ethtool.c
627
ring->rx_pending = adapter->rx_ring[0].ring_size;
drivers/net/ethernet/amazon/ena/ena_ethtool.c
631
struct ethtool_ringparam *ring,
drivers/net/ethernet/amazon/ena/ena_ethtool.c
639
new_tx_size = ring->tx_pending < ENA_MIN_RING_SIZE ?
drivers/net/ethernet/amazon/ena/ena_ethtool.c
640
ENA_MIN_RING_SIZE : ring->tx_pending;
drivers/net/ethernet/amazon/ena/ena_ethtool.c
643
new_rx_size = ring->rx_pending < ENA_MIN_RING_SIZE ?
drivers/net/ethernet/amazon/ena/ena_ethtool.c
644
ENA_MIN_RING_SIZE : ring->rx_pending;
drivers/net/ethernet/amazon/ena/ena_netdev.c
118
struct ena_ring *ring,
drivers/net/ethernet/amazon/ena/ena_netdev.c
126
if (unlikely(ena_com_is_doorbell_needed(ring->ena_com_io_sq,
drivers/net/ethernet/amazon/ena/ena_netdev.c
130
ring->qid);
drivers/net/ethernet/amazon/ena/ena_netdev.c
131
ena_ring_tx_doorbell(ring);
drivers/net/ethernet/amazon/ena/ena_netdev.c
135
rc = ena_com_prepare_tx(ring->ena_com_io_sq, ena_tx_ctx,
drivers/net/ethernet/amazon/ena/ena_netdev.c
145
ena_increase_stat(&ring->tx_stats.prepare_ctx_err, 1, &ring->syncp);
drivers/net/ethernet/amazon/ena/ena_netdev.c
151
u64_stats_update_begin(&ring->syncp);
drivers/net/ethernet/amazon/ena/ena_netdev.c
152
ring->tx_stats.cnt++;
drivers/net/ethernet/amazon/ena/ena_netdev.c
153
ring->tx_stats.bytes += bytes;
drivers/net/ethernet/amazon/ena/ena_netdev.c
154
u64_stats_update_end(&ring->syncp);
drivers/net/ethernet/amazon/ena/ena_netdev.c
161
ring->next_to_use = ENA_TX_RING_IDX_NEXT(next_to_use,
drivers/net/ethernet/amazon/ena/ena_netdev.c
162
ring->ring_size);
drivers/net/ethernet/amazon/ena/ena_netdev.c
167
struct ena_ring *ring, u16 qid)
drivers/net/ethernet/amazon/ena/ena_netdev.c
169
ring->qid = qid;
drivers/net/ethernet/amazon/ena/ena_netdev.c
170
ring->pdev = adapter->pdev;
drivers/net/ethernet/amazon/ena/ena_netdev.c
171
ring->dev = &adapter->pdev->dev;
drivers/net/ethernet/amazon/ena/ena_netdev.c
172
ring->netdev = adapter->netdev;
drivers/net/ethernet/amazon/ena/ena_netdev.c
173
ring->napi = &adapter->ena_napi[qid].napi;
drivers/net/ethernet/amazon/ena/ena_netdev.c
174
ring->adapter = adapter;
drivers/net/ethernet/amazon/ena/ena_netdev.c
175
ring->ena_dev = adapter->ena_dev;
drivers/net/ethernet/amazon/ena/ena_netdev.c
176
ring->per_napi_packets = 0;
drivers/net/ethernet/amazon/ena/ena_netdev.c
177
ring->cpu = 0;
drivers/net/ethernet/amazon/ena/ena_netdev.c
178
ring->numa_node = 0;
drivers/net/ethernet/amazon/ena/ena_netdev.c
179
ring->no_interrupt_event_cnt = 0;
drivers/net/ethernet/amazon/ena/ena_netdev.c
180
u64_stats_init(&ring->syncp);
drivers/net/ethernet/amazon/ena/ena_netdev.c
774
int handle_invalid_req_id(struct ena_ring *ring, u16 req_id,
drivers/net/ethernet/amazon/ena/ena_netdev.c
778
netif_err(ring->adapter,
drivers/net/ethernet/amazon/ena/ena_netdev.c
780
ring->netdev,
drivers/net/ethernet/amazon/ena/ena_netdev.c
782
is_xdp ? "xdp frame" : "skb", ring->qid, req_id);
drivers/net/ethernet/amazon/ena/ena_netdev.c
784
netif_err(ring->adapter,
drivers/net/ethernet/amazon/ena/ena_netdev.c
786
ring->netdev,
drivers/net/ethernet/amazon/ena/ena_netdev.c
788
req_id, ring->qid);
drivers/net/ethernet/amazon/ena/ena_netdev.c
790
ena_increase_stat(&ring->tx_stats.bad_req_id, 1, &ring->syncp);
drivers/net/ethernet/amazon/ena/ena_netdev.c
791
ena_reset_device(ring->adapter, ENA_REGS_RESET_INV_TX_REQ_ID);
drivers/net/ethernet/amazon/ena/ena_netdev.h
429
int handle_invalid_req_id(struct ena_ring *ring, u16 req_id,
drivers/net/ethernet/amazon/ena/ena_netdev.h
448
struct ena_ring *ring,
drivers/net/ethernet/amazon/ena/ena_xdp.h
59
static inline bool ena_xdp_present_ring(struct ena_ring *ring)
drivers/net/ethernet/amazon/ena/ena_xdp.h
61
return !!ring->xdp_bpf_prog;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
107
struct xgbe_ring *ring, unsigned int rdesc_count)
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
111
if (!ring)
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
117
ring->rdesc_count = rdesc_count;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
118
ring->rdesc = xgbe_dma_alloc_node(pdata->dev, size, &ring->rdesc_dma,
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
119
ring->node);
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
120
if (!ring->rdesc)
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
126
ring->rdata = xgbe_alloc_node(size, ring->node);
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
127
if (!ring->rdata)
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
132
ring->rdesc, &ring->rdesc_dma, ring->rdata, ring->node);
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
14
struct xgbe_ring *ring)
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
19
if (!ring)
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
22
if (ring->rdata) {
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
23
for (i = 0; i < ring->rdesc_count; i++) {
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
24
rdata = XGBE_GET_DESC_DATA(ring, i);
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
248
struct xgbe_ring *ring,
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
253
if (!ring->rx_hdr_pa.pages) {
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
254
ret = xgbe_alloc_pages(pdata, &ring->rx_hdr_pa, 0, ring->node);
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
259
if (!ring->rx_buf_pa.pages) {
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
260
ret = xgbe_alloc_pages(pdata, &ring->rx_buf_pa,
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
261
PAGE_ALLOC_COSTLY_ORDER, ring->node);
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
268
xgbe_set_buffer_data(&rdata->rx.hdr, &ring->rx_hdr_pa,
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
271
xgbe_set_buffer_data(&rdata->rx.hdr, &ring->rx_hdr_pa,
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
276
xgbe_set_buffer_data(&rdata->rx.buf, &ring->rx_buf_pa,
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
28
kfree(ring->rdata);
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
286
struct xgbe_ring *ring;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
29
ring->rdata = NULL;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
296
ring = channel->tx_ring;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
297
if (!ring)
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
300
rdesc = ring->rdesc;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
301
rdesc_dma = ring->rdesc_dma;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
303
for (j = 0; j < ring->rdesc_count; j++) {
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
304
rdata = XGBE_GET_DESC_DATA(ring, j);
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
313
ring->cur = 0;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
314
ring->dirty = 0;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
315
memset(&ring->tx, 0, sizeof(ring->tx));
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
32
if (ring->rx_hdr_pa.pages) {
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
327
struct xgbe_ring *ring;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
33
dma_unmap_page(pdata->dev, ring->rx_hdr_pa.pages_dma,
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
337
ring = channel->rx_ring;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
338
if (!ring)
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
34
ring->rx_hdr_pa.pages_len, DMA_FROM_DEVICE);
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
341
rdesc = ring->rdesc;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
342
rdesc_dma = ring->rdesc_dma;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
344
for (j = 0; j < ring->rdesc_count; j++) {
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
345
rdata = XGBE_GET_DESC_DATA(ring, j);
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
35
put_page(ring->rx_hdr_pa.pages);
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
350
if (xgbe_map_rx_buffer(pdata, ring, rdata))
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
357
ring->cur = 0;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
358
ring->dirty = 0;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
37
ring->rx_hdr_pa.pages = NULL;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
38
ring->rx_hdr_pa.pages_len = 0;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
39
ring->rx_hdr_pa.pages_offset = 0;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
40
ring->rx_hdr_pa.pages_dma = 0;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
422
struct xgbe_ring *ring = channel->tx_ring;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
43
if (ring->rx_buf_pa.pages) {
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
431
DBGPR("-->xgbe_map_tx_skb: cur = %d\n", ring->cur);
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
434
start_index = ring->cur;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
435
cur_index = ring->cur;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
437
packet = &ring->packet_data;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
44
dma_unmap_page(pdata->dev, ring->rx_buf_pa.pages_dma,
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
447
if ((tso && (packet->mss != ring->tx.cur_mss)) ||
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
448
(vlan && (packet->vlan_ctag != ring->tx.cur_vlan_ctag)))
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
45
ring->rx_buf_pa.pages_len, DMA_FROM_DEVICE);
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
450
rdata = XGBE_GET_DESC_DATA(ring, cur_index);
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
46
put_page(ring->rx_buf_pa.pages);
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
471
rdata = XGBE_GET_DESC_DATA(ring, cur_index);
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
48
ring->rx_buf_pa.pages = NULL;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
49
ring->rx_buf_pa.pages_len = 0;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
496
rdata = XGBE_GET_DESC_DATA(ring, cur_index);
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
50
ring->rx_buf_pa.pages_offset = 0;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
51
ring->rx_buf_pa.pages_dma = 0;
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
530
rdata = XGBE_GET_DESC_DATA(ring, cur_index);
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
538
rdata = XGBE_GET_DESC_DATA(ring, cur_index - 1);
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
54
if (ring->rdesc) {
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
550
rdata = XGBE_GET_DESC_DATA(ring, start_index++);
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
57
ring->rdesc_count),
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
58
ring->rdesc, ring->rdesc_dma);
drivers/net/ethernet/amd/xgbe/xgbe-desc.c
59
ring->rdesc = NULL;
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1451
struct xgbe_ring *ring = channel->tx_ring;
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1454
int start_index = ring->cur;
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1459
for (i = 0; i < ring->rdesc_count; i++) {
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1460
rdata = XGBE_GET_DESC_DATA(ring, i);
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1467
XGMAC_DMA_IOWRITE(channel, DMA_CH_TDRLR, ring->rdesc_count - 1);
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1470
rdata = XGBE_GET_DESC_DATA(ring, start_index);
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1530
struct xgbe_ring *ring = channel->rx_ring;
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1532
unsigned int start_index = ring->cur;
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1538
for (i = 0; i < ring->rdesc_count; i++) {
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1539
rdata = XGBE_GET_DESC_DATA(ring, i);
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1546
XGMAC_DMA_IOWRITE(channel, DMA_CH_RDRLR, ring->rdesc_count - 1);
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1549
rdata = XGBE_GET_DESC_DATA(ring, start_index);
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1556
rdata = XGBE_GET_DESC_DATA(ring, start_index + ring->rdesc_count - 1);
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1564
struct xgbe_ring *ring)
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1574
rdata = XGBE_GET_DESC_DATA(ring, ring->cur);
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1585
ring->tx.xmit_more = 0;
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1591
struct xgbe_ring *ring = channel->tx_ring;
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1594
struct xgbe_packet_data *packet = &ring->packet_data;
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1599
int start_index = ring->cur;
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1600
int cur_index = ring->cur;
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1617
if (tso && (packet->mss != ring->tx.cur_mss))
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1622
if (vlan && (packet->vlan_ctag != ring->tx.cur_vlan_ctag))
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1637
ring->coalesce_count += tx_packets;
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1642
else if ((ring->coalesce_count % pdata->tx_frames) < tx_packets)
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1647
rdata = XGBE_GET_DESC_DATA(ring, cur_index);
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1669
ring->tx.cur_mss = packet->mss;
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1689
ring->tx.cur_vlan_ctag = packet->vlan_ctag;
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1693
rdata = XGBE_GET_DESC_DATA(ring, cur_index);
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1756
rdata = XGBE_GET_DESC_DATA(ring, cur_index);
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1800
rdata = XGBE_GET_DESC_DATA(ring, start_index);
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1805
xgbe_dump_tx_desc(pdata, ring, start_index,
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1811
ring->cur = cur_index + 1;
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1815
xgbe_tx_start_xmit(channel, ring);
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1817
ring->tx.xmit_more = 1;
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1820
channel->name, start_index & (ring->rdesc_count - 1),
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1821
(ring->cur - 1) & (ring->rdesc_count - 1));
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1829
struct xgbe_ring *ring = channel->rx_ring;
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1832
struct xgbe_packet_data *packet = &ring->packet_data;
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1836
DBGPR("-->xgbe_dev_read: cur = %d\n", ring->cur);
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1838
rdata = XGBE_GET_DESC_DATA(ring, ring->cur);
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1849
xgbe_dump_rx_desc(pdata, ring, ring->cur);
drivers/net/ethernet/amd/xgbe/xgbe-dev.c
1981
ring->cur & (ring->rdesc_count - 1), ring->cur);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
1069
struct xgbe_ring *ring;
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
1076
ring = pdata->channel[i]->tx_ring;
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
1077
if (!ring)
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
1080
for (j = 0; j < ring->rdesc_count; j++) {
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
1081
rdata = XGBE_GET_DESC_DATA(ring, j);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
1092
struct xgbe_ring *ring;
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
1099
ring = pdata->channel[i]->rx_ring;
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
1100
if (!ring)
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
1103
for (j = 0; j < ring->rdesc_count; j++) {
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
1104
rdata = XGBE_GET_DESC_DATA(ring, j);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
114
ring = xgbe_alloc_node(sizeof(*ring), node);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
115
if (!ring)
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
118
spin_lock_init(&ring->lock);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
119
ring->node = node;
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
121
channel->tx_ring = ring;
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
125
ring = xgbe_alloc_node(sizeof(*ring), node);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
126
if (!ring)
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
129
spin_lock_init(&ring->lock);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
130
ring->node = node;
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
132
channel->rx_ring = ring;
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
1507
struct xgbe_ring *ring, struct sk_buff *skb,
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
1525
if (skb_shinfo(skb)->gso_size != ring->tx.cur_mss) {
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
154
static inline unsigned int xgbe_tx_avail_desc(struct xgbe_ring *ring)
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
1547
if (skb_vlan_tag_get(skb) != ring->tx.cur_vlan_ctag)
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
156
return (ring->rdesc_count - (ring->cur - ring->dirty));
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
159
static inline unsigned int xgbe_rx_dirty_desc(struct xgbe_ring *ring)
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
161
return (ring->cur - ring->dirty);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
165
struct xgbe_ring *ring, unsigned int count)
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
1687
struct xgbe_ring *ring;
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
169
if (count > xgbe_tx_avail_desc(ring)) {
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
1696
ring = channel->tx_ring;
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
1697
packet = &ring->packet_data;
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
1710
xgbe_packet_info(pdata, ring, skb, packet);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
1713
ret = xgbe_maybe_stop_tx_queue(channel, ring, packet->rdesc_count);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
173
ring->tx.queue_stopped = 1;
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
1743
xgbe_maybe_stop_tx_queue(channel, ring, XGBE_TX_MAX_DESCS);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
178
if (ring->tx.xmit_more)
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
179
pdata->hw_if.tx_start_xmit(channel, ring);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2055
struct xgbe_ring *ring = channel->rx_ring;
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2058
while (ring->dirty != ring->cur) {
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2059
rdata = XGBE_GET_DESC_DATA(ring, ring->dirty);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2064
if (desc_if->map_rx_buffer(pdata, ring, rdata))
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2067
hw_if->rx_desc_reset(pdata, rdata, ring->dirty);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2069
ring->dirty++;
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2077
rdata = XGBE_GET_DESC_DATA(ring, ring->dirty - 1);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2151
struct xgbe_ring *ring = channel->tx_ring;
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2163
if (!ring)
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2166
cur = ring->cur;
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2174
(ring->dirty != cur)) {
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2175
rdata = XGBE_GET_DESC_DATA(ring, ring->dirty);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2186
xgbe_dump_tx_desc(pdata, ring, ring->dirty, 1, 0);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2198
ring->dirty++;
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2206
if ((ring->tx.queue_stopped == 1) &&
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2207
(xgbe_tx_avail_desc(ring) > XGBE_TX_DESC_MIN_FREE)) {
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2208
ring->tx.queue_stopped = 0;
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2221
struct xgbe_ring *ring = channel->rx_ring;
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2236
if (!ring)
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2244
rdata = XGBE_GET_DESC_DATA(ring, ring->cur);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2245
packet = &ring->packet_data;
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2247
DBGPR(" cur = %d\n", ring->cur);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2262
rdata = XGBE_GET_DESC_DATA(ring, ring->cur);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2264
if (xgbe_rx_dirty_desc(ring) > (XGBE_RX_DESC_CNT >> 3))
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2271
ring->cur++;
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2398
rdata = XGBE_GET_DESC_DATA(ring, ring->cur);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2479
void xgbe_dump_tx_desc(struct xgbe_prv_data *pdata, struct xgbe_ring *ring,
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2486
rdata = XGBE_GET_DESC_DATA(ring, idx);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2499
void xgbe_dump_rx_desc(struct xgbe_prv_data *pdata, struct xgbe_ring *ring,
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2505
rdata = XGBE_GET_DESC_DATA(ring, idx);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
84
struct xgbe_ring *ring;
drivers/net/ethernet/apm/xgene-v2/main.c
107
ring->tail = tail;
drivers/net/ethernet/apm/xgene-v2/main.c
350
struct xge_desc_ring *ring)
drivers/net/ethernet/apm/xgene-v2/main.c
356
if (!ring)
drivers/net/ethernet/apm/xgene-v2/main.c
360
if (ring->desc_addr)
drivers/net/ethernet/apm/xgene-v2/main.c
361
dma_free_coherent(dev, size, ring->desc_addr, ring->dma_addr);
drivers/net/ethernet/apm/xgene-v2/main.c
363
kfree(ring->pkt_info);
drivers/net/ethernet/apm/xgene-v2/main.c
364
kfree(ring);
drivers/net/ethernet/apm/xgene-v2/main.c
370
struct xge_desc_ring *ring = pdata->rx_ring;
drivers/net/ethernet/apm/xgene-v2/main.c
377
skb = ring->pkt_info[i].skb;
drivers/net/ethernet/apm/xgene-v2/main.c
378
dma_addr = ring->pkt_info[i].dma_addr;
drivers/net/ethernet/apm/xgene-v2/main.c
405
struct xge_desc_ring *ring;
drivers/net/ethernet/apm/xgene-v2/main.c
408
ring = kzalloc_obj(*ring);
drivers/net/ethernet/apm/xgene-v2/main.c
409
if (!ring)
drivers/net/ethernet/apm/xgene-v2/main.c
412
ring->ndev = ndev;
drivers/net/ethernet/apm/xgene-v2/main.c
415
ring->desc_addr = dma_alloc_coherent(dev, size, &ring->dma_addr,
drivers/net/ethernet/apm/xgene-v2/main.c
417
if (!ring->desc_addr)
drivers/net/ethernet/apm/xgene-v2/main.c
420
ring->pkt_info = kzalloc_objs(*ring->pkt_info, XGENE_ENET_NUM_DESC);
drivers/net/ethernet/apm/xgene-v2/main.c
421
if (!ring->pkt_info)
drivers/net/ethernet/apm/xgene-v2/main.c
424
xge_setup_desc(ring);
drivers/net/ethernet/apm/xgene-v2/main.c
426
return ring;
drivers/net/ethernet/apm/xgene-v2/main.c
429
xge_delete_desc_ring(ndev, ring);
drivers/net/ethernet/apm/xgene-v2/main.c
437
struct xge_desc_ring *ring;
drivers/net/ethernet/apm/xgene-v2/main.c
441
ring = xge_create_desc_ring(ndev);
drivers/net/ethernet/apm/xgene-v2/main.c
442
if (!ring)
drivers/net/ethernet/apm/xgene-v2/main.c
445
pdata->tx_ring = ring;
drivers/net/ethernet/apm/xgene-v2/main.c
449
ring = xge_create_desc_ring(ndev);
drivers/net/ethernet/apm/xgene-v2/main.c
450
if (!ring)
drivers/net/ethernet/apm/xgene-v2/main.c
453
pdata->rx_ring = ring;
drivers/net/ethernet/apm/xgene-v2/main.c
65
struct xge_desc_ring *ring = pdata->rx_ring;
drivers/net/ethernet/apm/xgene-v2/main.c
70
u8 tail = ring->tail;
drivers/net/ethernet/apm/xgene-v2/main.c
77
raw_desc = &ring->raw_desc[tail];
drivers/net/ethernet/apm/xgene-v2/main.c
91
ring->pkt_info[tail].skb = skb;
drivers/net/ethernet/apm/xgene-v2/main.c
92
ring->pkt_info[tail].dma_addr = dma_addr;
drivers/net/ethernet/apm/xgene-v2/ring.c
13
void xge_setup_desc(struct xge_desc_ring *ring)
drivers/net/ethernet/apm/xgene-v2/ring.c
21
raw_desc = &ring->raw_desc[i];
drivers/net/ethernet/apm/xgene-v2/ring.c
24
next_dma = ring->dma_addr + (offset * XGENE_ENET_DESC_SIZE);
drivers/net/ethernet/apm/xgene-v2/ring.c
36
struct xge_desc_ring *ring = pdata->tx_ring;
drivers/net/ethernet/apm/xgene-v2/ring.c
37
dma_addr_t dma_addr = ring->dma_addr;
drivers/net/ethernet/apm/xgene-v2/ring.c
42
ring->head = 0;
drivers/net/ethernet/apm/xgene-v2/ring.c
43
ring->tail = 0;
drivers/net/ethernet/apm/xgene-v2/ring.c
48
struct xge_desc_ring *ring = pdata->rx_ring;
drivers/net/ethernet/apm/xgene-v2/ring.c
49
dma_addr_t dma_addr = ring->dma_addr;
drivers/net/ethernet/apm/xgene-v2/ring.c
54
ring->head = 0;
drivers/net/ethernet/apm/xgene-v2/ring.c
55
ring->tail = 0;
drivers/net/ethernet/apm/xgene-v2/ring.h
101
void xge_setup_desc(struct xge_desc_ring *ring);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
100
xgene_enet_ring_set_recombbuf(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
102
xgene_enet_ring_init(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
103
xgene_enet_write_ring_state(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
106
static void xgene_enet_set_ring_id(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
111
is_bufpool = xgene_enet_is_bufpool(ring->id);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
113
ring_id_val = ring->id & GENMASK(9, 0);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
116
ring_id_buf = (ring->num << 9) & GENMASK(18, 9);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
121
xgene_enet_ring_wr32(ring, CSR_RING_ID, ring_id_val);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
122
xgene_enet_ring_wr32(ring, CSR_RING_ID_BUF, ring_id_buf);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
125
static void xgene_enet_clr_desc_ring_id(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
129
ring_id = ring->id | OVERWRITE;
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
13
static void xgene_enet_ring_init(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
130
xgene_enet_ring_wr32(ring, CSR_RING_ID, ring_id);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
131
xgene_enet_ring_wr32(ring, CSR_RING_ID_BUF, 0);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
135
struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
137
u32 size = ring->size;
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
141
xgene_enet_clr_ring_state(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
142
xgene_enet_set_ring_state(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
143
xgene_enet_set_ring_id(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
145
ring->slots = xgene_enet_get_numslots(ring->id, size);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
147
is_bufpool = xgene_enet_is_bufpool(ring->id);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
148
if (is_bufpool || xgene_enet_ring_owner(ring->id) != RING_OWNER_CPU)
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
149
return ring;
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
15
u32 *ring_cfg = ring->state;
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
151
for (i = 0; i < ring->slots; i++)
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
152
xgene_enet_mark_desc_slot_empty(&ring->raw_desc[i]);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
154
xgene_enet_ring_rd32(ring, CSR_RING_NE_INT_MODE, &data);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
155
data |= BIT(31 - xgene_enet_ring_bufnum(ring->id));
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
156
xgene_enet_ring_wr32(ring, CSR_RING_NE_INT_MODE, data);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
158
return ring;
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
16
u64 addr = ring->dma;
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
161
static void xgene_enet_clear_ring(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
166
is_bufpool = xgene_enet_is_bufpool(ring->id);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
167
if (is_bufpool || xgene_enet_ring_owner(ring->id) != RING_OWNER_CPU)
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
17
enum xgene_enet_ring_cfgsize cfgsize = ring->cfgsize;
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
170
xgene_enet_ring_rd32(ring, CSR_RING_NE_INT_MODE, &data);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
171
data &= ~BIT(31 - xgene_enet_ring_bufnum(ring->id));
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
172
xgene_enet_ring_wr32(ring, CSR_RING_NE_INT_MODE, data);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
175
xgene_enet_clr_desc_ring_id(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
176
xgene_enet_clr_ring_state(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
179
static void xgene_enet_wr_cmd(struct xgene_enet_desc_ring *ring, int count)
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
181
iowrite32(count, ring->cmd);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
184
static u32 xgene_enet_ring_len(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
186
u32 __iomem *cmd_base = ring->cmd_base;
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
195
void xgene_enet_parse_error(struct xgene_enet_desc_ring *ring,
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
200
ring->rx_crc_errors++;
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
204
ring->rx_errors++;
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
207
ring->rx_frame_errors++;
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
210
ring->rx_length_errors++;
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
213
ring->rx_frame_errors++;
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
216
ring->rx_fifo_errors++;
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
33
static void xgene_enet_ring_set_type(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
35
u32 *ring_cfg = ring->state;
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
39
is_bufpool = xgene_enet_is_bufpool(ring->id);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
50
static void xgene_enet_ring_set_recombbuf(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
52
u32 *ring_cfg = ring->state;
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
60
static void xgene_enet_ring_wr32(struct xgene_enet_desc_ring *ring,
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
63
struct xgene_enet_pdata *pdata = netdev_priv(ring->ndev);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
68
static void xgene_enet_ring_rd32(struct xgene_enet_desc_ring *ring,
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
71
struct xgene_enet_pdata *pdata = netdev_priv(ring->ndev);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
727
struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
731
if (xgene_enet_is_bufpool(ring->id)) {
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
733
data = BIT(xgene_enet_get_fpsel(ring->id));
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
736
data = BIT(xgene_enet_ring_bufnum(ring->id));
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
76
static void xgene_enet_write_ring_state(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
78
struct xgene_enet_pdata *pdata = netdev_priv(ring->ndev);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
81
xgene_enet_ring_wr32(ring, CSR_RING_CONFIG, ring->num);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
83
xgene_enet_ring_wr32(ring, CSR_RING_WR_BASE + (i * 4),
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
84
ring->state[i]);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
88
static void xgene_enet_clr_ring_state(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
90
memset(ring->state, 0, sizeof(ring->state));
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
91
xgene_enet_write_ring_state(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
94
static void xgene_enet_set_ring_state(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
96
xgene_enet_ring_set_type(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
98
if (xgene_enet_ring_owner(ring->id) == RING_OWNER_ETH0 ||
drivers/net/ethernet/apm/xgene/xgene_enet_hw.c
99
xgene_enet_ring_owner(ring->id) == RING_OWNER_ETH1)
drivers/net/ethernet/apm/xgene/xgene_enet_hw.h
422
void xgene_enet_parse_error(struct xgene_enet_desc_ring *ring,
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1048
static void xgene_enet_delete_ring(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1053
pdata = netdev_priv(ring->ndev);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1054
dev = ndev_to_dev(ring->ndev);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1056
pdata->ring_ops->clear(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1057
dmam_free_coherent(dev, ring->size, ring->desc_addr, ring->dma);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1063
struct xgene_enet_desc_ring *ring;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1067
ring = pdata->tx_ring[i];
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1068
if (ring) {
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1069
xgene_enet_delete_ring(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1070
pdata->port_ops->clear(pdata, ring);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1072
xgene_enet_delete_ring(ring->cp_ring);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1079
ring = pdata->rx_ring[i];
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1080
if (ring) {
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1081
page_pool = ring->page_pool;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1088
buf_pool = ring->buf_pool;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1093
xgene_enet_delete_ring(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1129
static void xgene_enet_free_desc_ring(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1134
if (!ring)
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1137
dev = ndev_to_dev(ring->ndev);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1138
pdata = netdev_priv(ring->ndev);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1140
if (ring->desc_addr) {
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1141
pdata->ring_ops->clear(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1142
dmam_free_coherent(dev, ring->size, ring->desc_addr, ring->dma);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1144
devm_kfree(dev, ring);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1151
struct xgene_enet_desc_ring *ring;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1156
ring = pdata->tx_ring[i];
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1157
if (ring) {
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1158
if (ring->cp_ring && ring->cp_ring->cp_skb)
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1159
devm_kfree(dev, ring->cp_ring->cp_skb);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1161
if (ring->cp_ring && pdata->cq_cnt)
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1162
xgene_enet_free_desc_ring(ring->cp_ring);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1164
xgene_enet_free_desc_ring(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1170
ring = pdata->rx_ring[i];
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1171
if (ring) {
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1172
if (ring->buf_pool) {
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1173
if (ring->buf_pool->rx_skb)
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1174
devm_kfree(dev, ring->buf_pool->rx_skb);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1176
xgene_enet_free_desc_ring(ring->buf_pool);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1179
page_pool = ring->page_pool;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1190
xgene_enet_free_desc_ring(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1196
struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1199
(xgene_enet_ring_owner(ring->id) == RING_OWNER_CPU)) {
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1207
struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1211
return pdata->ring_cmd_addr + (ring->num << num_ring_id_shift);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1220
struct xgene_enet_desc_ring *ring;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1228
ring = devm_kzalloc(dev, sizeof(struct xgene_enet_desc_ring),
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1230
if (!ring)
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1233
ring->ndev = ndev;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1234
ring->num = ring_num;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1235
ring->cfgsize = cfgsize;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1236
ring->id = ring_id;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1238
ring->desc_addr = dmam_alloc_coherent(dev, size, &ring->dma,
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1240
if (!ring->desc_addr) {
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1241
devm_kfree(dev, ring);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1244
ring->size = size;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1246
if (is_irq_mbox_required(pdata, ring)) {
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1248
&ring->irq_mbox_dma,
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1251
dmam_free_coherent(dev, size, ring->desc_addr,
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1252
ring->dma);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1253
devm_kfree(dev, ring);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1256
ring->irq_mbox_addr = irq_mbox_addr;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1259
ring->cmd_base = xgene_enet_ring_cmd_base(pdata, ring);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1260
ring->cmd = ring->cmd_base + INC_DEC_CMD_ADDR;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1261
ring = pdata->ring_ops->setup(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1263
ring->num, ring->size, ring->id, ring->slots);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1265
return ring;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1477
struct xgene_enet_desc_ring *ring;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1481
ring = pdata->tx_ring[i];
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1482
if (ring) {
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1483
stats->tx_packets += ring->tx_packets;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1484
stats->tx_bytes += ring->tx_bytes;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1485
stats->tx_dropped += ring->tx_dropped;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1486
stats->tx_errors += ring->tx_errors;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1491
ring = pdata->rx_ring[i];
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1492
if (ring) {
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1493
stats->rx_packets += ring->rx_packets;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1494
stats->rx_bytes += ring->rx_bytes;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1495
stats->rx_dropped += ring->rx_dropped;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1496
stats->rx_errors += ring->rx_errors +
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1497
ring->rx_length_errors +
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1498
ring->rx_crc_errors +
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1499
ring->rx_frame_errors +
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1500
ring->rx_fifo_errors;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1501
stats->rx_length_errors += ring->rx_length_errors;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1502
stats->rx_crc_errors += ring->rx_crc_errors;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1503
stats->rx_frame_errors += ring->rx_frame_errors;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
1504
stats->rx_fifo_errors += ring->rx_fifo_errors;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
390
static __le64 *xgene_enet_get_exp_bufs(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
394
exp_bufs = &ring->exp_bufs[ring->exp_buf_tail * MAX_EXP_BUFFS];
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
396
ring->exp_buf_tail = (ring->exp_buf_tail + 1) & ((ring->slots / 2) - 1);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
401
static dma_addr_t *xgene_get_frag_dma_array(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
403
return &ring->cp_ring->frag_dma_addr[ring->tail * MAX_SKB_FRAGS];
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
783
static int xgene_enet_process_ring(struct xgene_enet_desc_ring *ring,
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
786
struct net_device *ndev = ring->ndev;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
789
u16 head = ring->head;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
790
u16 slots = ring->slots - 1;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
795
raw_desc = &ring->raw_desc[head];
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
806
exp_desc = &ring->raw_desc[head];
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
817
ret = xgene_enet_rx_frame(ring, raw_desc, exp_desc);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
819
ret = xgene_enet_tx_completion(ring, raw_desc);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
831
pdata->txc_level[ring->index] += desc_count;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
838
pdata->ring_ops->wr_cmd(ring, -count);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
839
ring->head = head;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
841
if (__netif_subqueue_stopped(ndev, ring->index))
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
842
netif_start_subqueue(ndev, ring->index);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
850
struct xgene_enet_desc_ring *ring;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
853
ring = container_of(napi, struct xgene_enet_desc_ring, napi);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
854
processed = xgene_enet_process_ring(ring, budget);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
858
enable_irq(ring->irq);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
882
struct xgene_enet_desc_ring *ring;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
886
ring = pdata->rx_ring[i];
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
888
snprintf(ring->irq_name, IRQ_ID_SIZE, "%s-rx-txc",
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
891
snprintf(ring->irq_name, IRQ_ID_SIZE, "%s-rx-%d",
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
897
ring = pdata->tx_ring[i]->cp_ring;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
898
snprintf(ring->irq_name, IRQ_ID_SIZE, "%s-txc-%d",
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
907
struct xgene_enet_desc_ring *ring;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
912
ring = pdata->rx_ring[i];
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
913
irq_set_status_flags(ring->irq, IRQ_DISABLE_UNLAZY);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
914
ret = devm_request_irq(dev, ring->irq, xgene_enet_rx_irq,
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
915
0, ring->irq_name, ring);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
918
ring->irq_name);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
923
ring = pdata->tx_ring[i]->cp_ring;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
924
irq_set_status_flags(ring->irq, IRQ_DISABLE_UNLAZY);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
925
ret = devm_request_irq(dev, ring->irq, xgene_enet_rx_irq,
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
926
0, ring->irq_name, ring);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
929
ring->irq_name);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
939
struct xgene_enet_desc_ring *ring;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
947
ring = pdata->rx_ring[i];
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
948
irq_clear_status_flags(ring->irq, IRQ_DISABLE_UNLAZY);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
949
devm_free_irq(dev, ring->irq, ring);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
953
ring = pdata->tx_ring[i]->cp_ring;
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
954
irq_clear_status_flags(ring->irq, IRQ_DISABLE_UNLAZY);
drivers/net/ethernet/apm/xgene/xgene_enet_main.c
955
devm_free_irq(dev, ring->irq, ring);
drivers/net/ethernet/apm/xgene/xgene_enet_main.h
163
struct xgene_enet_desc_ring *ring);
drivers/net/ethernet/apm/xgene/xgene_enet_main.h
255
static inline u16 xgene_enet_dst_ring_num(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_main.h
257
struct xgene_enet_pdata *pdata = netdev_priv(ring->ndev);
drivers/net/ethernet/apm/xgene/xgene_enet_main.h
259
return ((u16)pdata->rm << 10) | ring->num;
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
100
if (xgene_enet_ring_owner(ring->id) == RING_OWNER_CPU)
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
103
is_bufpool = xgene_enet_is_bufpool(ring->id);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
105
ring_id_val = ring->id & GENMASK(9, 0);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
108
ring_id_buf = (ring->num << 9) & GENMASK(18, 9);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
114
xgene_enet_ring_wr32(ring, CSR_RING_ID, ring_id_val);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
115
xgene_enet_ring_wr32(ring, CSR_RING_ID_BUF, ring_id_buf);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
118
static void xgene_enet_clr_desc_ring_id(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
12
static void xgene_enet_ring_init(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
122
ring_id = ring->id | OVERWRITE;
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
123
xgene_enet_ring_wr32(ring, CSR_RING_ID, ring_id);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
124
xgene_enet_ring_wr32(ring, CSR_RING_ID_BUF, 0);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
128
struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
133
xgene_enet_clr_ring_state(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
134
xgene_enet_set_ring_state(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
135
xgene_enet_set_ring_id(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
137
ring->slots = xgene_enet_get_numslots(ring->id, ring->size);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
139
is_bufpool = xgene_enet_is_bufpool(ring->id);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
14
u32 *ring_cfg = ring->state;
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
140
if (is_bufpool || xgene_enet_ring_owner(ring->id) != RING_OWNER_CPU)
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
141
return ring;
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
143
addr = CSR_VMID0_INTR_MBOX + (4 * (ring->id & RING_BUFNUM_MASK));
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
144
xgene_enet_ring_wr32(ring, addr, ring->irq_mbox_dma >> 10);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
146
for (i = 0; i < ring->slots; i++)
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
147
xgene_enet_mark_desc_slot_empty(&ring->raw_desc[i]);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
149
return ring;
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
15
u64 addr = ring->dma;
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
152
static void xgene_enet_clear_ring(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
154
xgene_enet_clr_desc_ring_id(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
155
xgene_enet_clr_ring_state(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
158
static void xgene_enet_wr_cmd(struct xgene_enet_desc_ring *ring, int count)
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
162
if (xgene_enet_ring_owner(ring->id) == RING_OWNER_CPU) {
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
163
data = SET_VAL(X2_INTLINE, ring->id & RING_BUFNUM_MASK) |
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
168
iowrite32(data, ring->cmd);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
17
if (xgene_enet_ring_owner(ring->id) == RING_OWNER_CPU) {
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
171
static u32 xgene_enet_ring_len(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
173
u32 __iomem *cmd_base = ring->cmd_base;
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
18
ring_cfg[0] |= SET_VAL(X2_INTLINE, ring->id & RING_BUFNUM_MASK);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
182
static void xgene_enet_setup_coalescing(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
186
xgene_enet_ring_wr32(ring, CSR_PBM_COAL, 0x8e);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
187
xgene_enet_ring_wr32(ring, CSR_PBM_CTICK0, data);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
188
xgene_enet_ring_wr32(ring, CSR_PBM_CTICK1, data);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
189
xgene_enet_ring_wr32(ring, CSR_PBM_CTICK2, data);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
190
xgene_enet_ring_wr32(ring, CSR_PBM_CTICK3, data);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
191
xgene_enet_ring_wr32(ring, CSR_THRESHOLD0_SET1, 0x08);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
192
xgene_enet_ring_wr32(ring, CSR_THRESHOLD1_SET1, 0x10);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
27
ring_cfg[3] |= SET_VAL(RINGSIZE, ring->cfgsize)
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
34
static void xgene_enet_ring_set_type(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
36
u32 *ring_cfg = ring->state;
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
40
is_bufpool = xgene_enet_is_bufpool(ring->id);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
47
static void xgene_enet_ring_set_recombbuf(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
49
u32 *ring_cfg = ring->state;
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
55
static void xgene_enet_ring_wr32(struct xgene_enet_desc_ring *ring,
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
58
struct xgene_enet_pdata *pdata = netdev_priv(ring->ndev);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
63
static void xgene_enet_write_ring_state(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
65
struct xgene_enet_pdata *pdata = netdev_priv(ring->ndev);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
68
xgene_enet_ring_wr32(ring, CSR_RING_CONFIG, ring->num);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
70
xgene_enet_ring_wr32(ring, CSR_RING_WR_BASE + (i * 4),
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
71
ring->state[i]);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
75
static void xgene_enet_clr_ring_state(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
77
memset(ring->state, 0, sizeof(ring->state));
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
78
xgene_enet_write_ring_state(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
81
static void xgene_enet_set_ring_state(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
85
xgene_enet_ring_set_type(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
87
owner = xgene_enet_ring_owner(ring->id);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
89
xgene_enet_ring_set_recombbuf(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
91
xgene_enet_ring_init(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
92
xgene_enet_write_ring_state(ring);
drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c
95
static void xgene_enet_set_ring_id(struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_sgmac.c
509
struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_sgmac.c
513
if (xgene_enet_is_bufpool(ring->id)) {
drivers/net/ethernet/apm/xgene/xgene_enet_sgmac.c
515
data = BIT(xgene_enet_get_fpsel(ring->id));
drivers/net/ethernet/apm/xgene/xgene_enet_sgmac.c
518
data = BIT(xgene_enet_ring_bufnum(ring->id));
drivers/net/ethernet/apm/xgene/xgene_enet_xgmac.c
451
struct xgene_enet_desc_ring *ring)
drivers/net/ethernet/apm/xgene/xgene_enet_xgmac.c
455
if (xgene_enet_is_bufpool(ring->id)) {
drivers/net/ethernet/apm/xgene/xgene_enet_xgmac.c
457
data = BIT(xgene_enet_get_fpsel(ring->id));
drivers/net/ethernet/apm/xgene/xgene_enet_xgmac.c
460
data = BIT(xgene_enet_ring_bufnum(ring->id));
drivers/net/ethernet/aquantia/atlantic/aq_ethtool.c
822
struct ethtool_ringparam *ring,
drivers/net/ethernet/aquantia/atlantic/aq_ethtool.c
831
ring->rx_pending = cfg->rxds;
drivers/net/ethernet/aquantia/atlantic/aq_ethtool.c
832
ring->tx_pending = cfg->txds;
drivers/net/ethernet/aquantia/atlantic/aq_ethtool.c
834
ring->rx_max_pending = cfg->aq_hw_caps->rxds_max;
drivers/net/ethernet/aquantia/atlantic/aq_ethtool.c
835
ring->tx_max_pending = cfg->aq_hw_caps->txds_max;
drivers/net/ethernet/aquantia/atlantic/aq_ethtool.c
839
struct ethtool_ringparam *ring,
drivers/net/ethernet/aquantia/atlantic/aq_ethtool.c
852
if (ring->rx_mini_pending || ring->rx_jumbo_pending) {
drivers/net/ethernet/aquantia/atlantic/aq_ethtool.c
862
cfg->rxds = max(ring->rx_pending, hw_caps->rxds_min);
drivers/net/ethernet/aquantia/atlantic/aq_ethtool.c
866
cfg->txds = max(ring->tx_pending, hw_caps->txds_min);
drivers/net/ethernet/aquantia/atlantic/aq_hw.h
314
struct aq_ring_s *ring);
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
395
struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
397
self->aq_ring_tx[idx] = ring;
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
577
struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
581
unsigned int dx = ring->sw_tail;
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
589
dx_buff = &ring->buff_ring[dx];
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
630
dx = aq_ring_next_dx(ring, dx);
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
631
dx_buff = &ring->buff_ring[dx];
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
653
for (dx = ring->sw_tail;
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
655
--ret, dx = aq_ring_next_dx(ring, dx)) {
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
656
dx_buff = &ring->buff_ring[dx];
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
673
struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
692
dx = ring->sw_tail;
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
693
dx_buff = &ring->buff_ring[dx];
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
724
dx = aq_ring_next_dx(ring, dx);
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
725
dx_buff = &ring->buff_ring[dx];
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
778
dx = aq_ring_next_dx(ring, dx);
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
779
dx_buff = &ring->buff_ring[dx];
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
801
for (dx = ring->sw_tail;
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
803
--ret, dx = aq_ring_next_dx(ring, dx)) {
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
804
dx_buff = &ring->buff_ring[dx];
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
869
struct aq_ring_s *ring = NULL;
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
875
ring = self->aq_ring_tx[AQ_NIC_CFG_TCVEC2RING(cfg, tc, vec)];
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
882
aq_ring_update_queue_state(ring);
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
891
AQ_NIC_RING2QMAP(self, ring->idx))) {
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
896
frags = aq_nic_map_skb(self, skb, ring);
drivers/net/ethernet/aquantia/atlantic/aq_nic.c
902
ring, frags);
drivers/net/ethernet/aquantia/atlantic/aq_nic.h
176
struct aq_ring_s *ring);
drivers/net/ethernet/aquantia/atlantic/aq_nic.h
184
struct aq_ring_s *ring);
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
119
static int __aq_ptp_skb_put(struct ptp_skb_ring *ring, struct sk_buff *skb)
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
121
unsigned int next_head = (ring->head + 1) % ring->size;
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
123
if (next_head == ring->tail)
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
126
ring->buff[ring->head] = skb_get(skb);
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
127
ring->head = next_head;
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
132
static int aq_ptp_skb_put(struct ptp_skb_ring *ring, struct sk_buff *skb)
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
137
spin_lock_irqsave(&ring->lock, flags);
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
138
ret = __aq_ptp_skb_put(ring, skb);
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
139
spin_unlock_irqrestore(&ring->lock, flags);
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
144
static struct sk_buff *__aq_ptp_skb_get(struct ptp_skb_ring *ring)
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
148
if (ring->tail == ring->head)
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
151
skb = ring->buff[ring->tail];
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
152
ring->tail = (ring->tail + 1) % ring->size;
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
157
static struct sk_buff *aq_ptp_skb_get(struct ptp_skb_ring *ring)
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
162
spin_lock_irqsave(&ring->lock, flags);
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
163
skb = __aq_ptp_skb_get(ring);
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
164
spin_unlock_irqrestore(&ring->lock, flags);
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
169
static unsigned int aq_ptp_skb_buf_len(struct ptp_skb_ring *ring)
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
174
spin_lock_irqsave(&ring->lock, flags);
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
175
len = (ring->head >= ring->tail) ?
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
176
ring->head - ring->tail :
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
177
ring->size - ring->tail + ring->head;
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
178
spin_unlock_irqrestore(&ring->lock, flags);
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
183
static int aq_ptp_skb_ring_init(struct ptp_skb_ring *ring, unsigned int size)
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
190
spin_lock_init(&ring->lock);
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
192
ring->buff = buff;
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
193
ring->size = size;
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
194
ring->head = 0;
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
195
ring->tail = 0;
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
200
static void aq_ptp_skb_ring_clean(struct ptp_skb_ring *ring)
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
204
while ((skb = aq_ptp_skb_get(ring)) != NULL)
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
208
static void aq_ptp_skb_ring_release(struct ptp_skb_ring *ring)
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
210
if (ring->buff) {
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
211
aq_ptp_skb_ring_clean(ring);
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
212
kfree(ring->buff);
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
213
ring->buff = NULL;
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
631
bool aq_ptp_ring(struct aq_nic_s *aq_nic, struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
638
return &aq_ptp->ptp_tx == ring ||
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
639
&aq_ptp->ptp_rx == ring || &aq_ptp->hwts_rx == ring;
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
749
struct aq_ring_s *ring = &aq_ptp->ptp_tx;
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
764
if (frags > AQ_CFG_SKB_FRAGS_MAX || frags > aq_ring_avail_dx(ring)) {
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
773
ring->size);
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
781
frags = aq_nic_map_skb(aq_nic, skb, ring);
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
785
ring, frags);
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
787
u64_stats_update_begin(&ring->stats.tx.syncp);
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
788
++ring->stats.tx.packets;
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
789
ring->stats.tx.bytes += skb->len;
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
790
u64_stats_update_end(&ring->stats.tx.syncp);
drivers/net/ethernet/aquantia/atlantic/aq_ptp.h
140
static inline bool aq_ptp_ring(struct aq_nic_s *aq_nic, struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/aq_ptp.h
68
bool aq_ptp_ring(struct aq_nic_s *aq_nic, struct aq_ring_s *ring);
drivers/net/ethernet/aquantia/atlantic/aq_ring.c
251
void aq_ring_update_queue_state(struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/aq_ring.c
253
if (aq_ring_avail_dx(ring) <= AQ_CFG_SKB_FRAGS_MAX)
drivers/net/ethernet/aquantia/atlantic/aq_ring.c
254
aq_ring_queue_stop(ring);
drivers/net/ethernet/aquantia/atlantic/aq_ring.c
255
else if (aq_ring_avail_dx(ring) > AQ_CFG_RESTART_DESC_THRES)
drivers/net/ethernet/aquantia/atlantic/aq_ring.c
256
aq_ring_queue_wake(ring);
drivers/net/ethernet/aquantia/atlantic/aq_ring.c
259
void aq_ring_queue_wake(struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/aq_ring.c
261
struct net_device *ndev = aq_nic_get_ndev(ring->aq_nic);
drivers/net/ethernet/aquantia/atlantic/aq_ring.c
264
AQ_NIC_RING2QMAP(ring->aq_nic,
drivers/net/ethernet/aquantia/atlantic/aq_ring.c
265
ring->idx))) {
drivers/net/ethernet/aquantia/atlantic/aq_ring.c
267
AQ_NIC_RING2QMAP(ring->aq_nic, ring->idx));
drivers/net/ethernet/aquantia/atlantic/aq_ring.c
268
u64_stats_update_begin(&ring->stats.tx.syncp);
drivers/net/ethernet/aquantia/atlantic/aq_ring.c
269
ring->stats.tx.queue_restarts++;
drivers/net/ethernet/aquantia/atlantic/aq_ring.c
270
u64_stats_update_end(&ring->stats.tx.syncp);
drivers/net/ethernet/aquantia/atlantic/aq_ring.c
274
void aq_ring_queue_stop(struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/aq_ring.c
276
struct net_device *ndev = aq_nic_get_ndev(ring->aq_nic);
drivers/net/ethernet/aquantia/atlantic/aq_ring.c
279
AQ_NIC_RING2QMAP(ring->aq_nic,
drivers/net/ethernet/aquantia/atlantic/aq_ring.c
280
ring->idx)))
drivers/net/ethernet/aquantia/atlantic/aq_ring.c
282
AQ_NIC_RING2QMAP(ring->aq_nic, ring->idx));
drivers/net/ethernet/aquantia/atlantic/aq_ring.c
370
struct aq_ring_s *ring;
drivers/net/ethernet/aquantia/atlantic/aq_ring.c
374
ring = aq_nic->aq_ring_tx[AQ_NIC_CFG_TCVEC2RING(aq_cfg, 0, vec)];
drivers/net/ethernet/aquantia/atlantic/aq_ring.c
379
if (aq_nic_xmit_xdpf(aq_nic, ring, xdpf) == NETDEV_TX_BUSY)
drivers/net/ethernet/aquantia/atlantic/aq_ring.c
485
struct aq_ring_s *ring,
drivers/net/ethernet/aquantia/atlantic/aq_ring.c
500
buff_ = &ring->buff_ring[buff_->next];
drivers/net/ethernet/aquantia/atlantic/aq_ring.h
198
void aq_ring_update_queue_state(struct aq_ring_s *ring);
drivers/net/ethernet/aquantia/atlantic/aq_ring.h
199
void aq_ring_queue_wake(struct aq_ring_s *ring);
drivers/net/ethernet/aquantia/atlantic/aq_ring.h
200
void aq_ring_queue_stop(struct aq_ring_s *ring);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
131
struct aq_ring_s *ring = NULL;
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
139
ring = &self->ring[i][AQ_VEC_TX_ID];
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
140
err = aq_ring_tx_alloc(ring, aq_nic, idx_ring, aq_nic_cfg);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
146
aq_nic_set_tx_ring(aq_nic, idx_ring, ring);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
148
ring = &self->ring[i][AQ_VEC_RX_ID];
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
149
if (xdp_rxq_info_reg(&ring->xdp_rxq,
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
155
if (xdp_rxq_info_reg_mem_model(&ring->xdp_rxq,
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
157
xdp_rxq_info_unreg(&ring->xdp_rxq);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
162
err = aq_ring_rx_alloc(ring, aq_nic, idx_ring, aq_nic_cfg);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
164
xdp_rxq_info_unreg(&ring->xdp_rxq);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
183
struct aq_ring_s *ring = NULL;
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
191
ring = self->ring[i];
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
192
err = aq_ring_init(&ring[AQ_VEC_TX_ID], ATL_RING_TX);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
197
&ring[AQ_VEC_TX_ID],
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
202
err = aq_ring_init(&ring[AQ_VEC_RX_ID], ATL_RING_RX);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
207
&ring[AQ_VEC_RX_ID],
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
212
err = aq_ring_rx_fill(&ring[AQ_VEC_RX_ID]);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
217
&ring[AQ_VEC_RX_ID], 0U);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
22
struct aq_ring_s ring[AQ_CFG_TCS_MAX][2];
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
228
struct aq_ring_s *ring = NULL;
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
233
ring = self->ring[i];
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
235
&ring[AQ_VEC_TX_ID]);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
240
&ring[AQ_VEC_RX_ID]);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
253
struct aq_ring_s *ring = NULL;
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
257
ring = self->ring[i];
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
259
&ring[AQ_VEC_TX_ID]);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
262
&ring[AQ_VEC_RX_ID]);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
270
struct aq_ring_s *ring = NULL;
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
277
ring = self->ring[i];
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
278
aq_ring_tx_clean(&ring[AQ_VEC_TX_ID]);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
279
aq_ring_rx_deinit(&ring[AQ_VEC_RX_ID]);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
299
struct aq_ring_s *ring = NULL;
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
306
ring = self->ring[i];
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
307
aq_ring_free(&ring[AQ_VEC_TX_ID]);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
309
xdp_rxq_info_unreg(&ring[AQ_VEC_RX_ID].xdp_rxq);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
310
aq_ring_free(&ring[AQ_VEC_RX_ID]);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
32
struct aq_ring_s *ring = NULL;
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
375
count = aq_ring_fill_stats_data(&self->ring[tc][AQ_VEC_RX_ID], data);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
376
count += aq_ring_fill_stats_data(&self->ring[tc][AQ_VEC_TX_ID], data + count);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
42
ring = self->ring[i];
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
43
u64_stats_update_begin(&ring[AQ_VEC_RX_ID].stats.rx.syncp);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
44
ring[AQ_VEC_RX_ID].stats.rx.polls++;
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
45
u64_stats_update_end(&ring[AQ_VEC_RX_ID].stats.rx.syncp);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
49
&ring[AQ_VEC_TX_ID]);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
54
if (ring[AQ_VEC_TX_ID].sw_head !=
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
55
ring[AQ_VEC_TX_ID].hw_head) {
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
56
was_tx_cleaned = aq_ring_tx_clean(&ring[AQ_VEC_TX_ID]);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
57
aq_ring_update_queue_state(&ring[AQ_VEC_TX_ID]);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
61
&ring[AQ_VEC_RX_ID]);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
65
if (ring[AQ_VEC_RX_ID].sw_head !=
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
66
ring[AQ_VEC_RX_ID].hw_head) {
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
67
err = aq_ring_rx_clean(&ring[AQ_VEC_RX_ID],
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
74
sw_tail_old = ring[AQ_VEC_RX_ID].sw_tail;
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
76
err = aq_ring_rx_fill(&ring[AQ_VEC_RX_ID]);
drivers/net/ethernet/aquantia/atlantic/aq_vec.c
82
&ring[AQ_VEC_RX_ID], sw_tail_old);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
406
struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
408
hw_atl_tdm_tx_desc_en_set(self, 1, ring->idx);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
414
struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
416
hw_atl_rdm_rx_desc_en_set(self, 1, ring->idx);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
430
struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
432
hw_atl_reg_tx_dma_desc_tail_ptr_set(self, ring->sw_tail, ring->idx);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
438
struct aq_ring_s *ring,
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
448
buff = &ring->buff_ring[ring->sw_tail];
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
452
txd = (struct hw_atl_txd_s *)&ring->dx_ring[ring->sw_tail *
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
458
buff = &ring->buff_ring[ring->sw_tail];
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
505
ring->sw_tail = aq_ring_next_dx(ring, ring->sw_tail);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
508
hw_atl_a0_hw_tx_ring_tail_update(self, ring);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
586
struct aq_ring_s *ring,
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
589
for (; sw_tail_old != ring->sw_tail;
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
590
sw_tail_old = aq_ring_next_dx(ring, sw_tail_old)) {
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
592
(struct hw_atl_rxd_s *)&ring->dx_ring[sw_tail_old *
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
595
struct aq_ring_buff_s *buff = &ring->buff_ring[sw_tail_old];
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
601
hw_atl_reg_rx_dma_desc_tail_ptr_set(self, sw_tail_old, ring->idx);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
607
struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
609
unsigned int hw_head = hw_atl_tdm_tx_desc_head_ptr_get(self, ring->idx);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
616
ring->hw_head = hw_head;
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
624
struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
626
for (; ring->hw_head != ring->sw_tail;
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
627
ring->hw_head = aq_ring_next_dx(ring, ring->hw_head)) {
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
630
&ring->dx_ring[ring->hw_head * HW_ATL_A0_RXD_SIZE];
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
638
hw_atl_reg_rx_dma_desc_status_get(self, ring->idx)) {
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
639
hw_atl_rdm_rx_desc_en_set(self, false, ring->idx);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
640
hw_atl_rdm_rx_desc_res_set(self, true, ring->idx);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
641
hw_atl_rdm_rx_desc_res_set(self, false, ring->idx);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
642
hw_atl_rdm_rx_desc_en_set(self, true, ring->idx);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
645
if (ring->hw_head ||
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
647
ring->idx) < 2U)) {
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
652
(&ring->dx_ring[(1U) *
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
664
buff = &ring->buff_ring[ring->hw_head];
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
709
ring->frame_max;
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
711
buff->len : ring->frame_max;
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
716
buff->next = aq_ring_next_dx(ring,
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
717
ring->hw_head);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
718
++ring->stats.rx.jumbo_packets;
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
875
struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
877
hw_atl_tdm_tx_desc_en_set(self, 0U, ring->idx);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
883
struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c
885
hw_atl_rdm_rx_desc_en_set(self, 0U, ring->idx);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
1206
int hw_atl_b0_hw_ring_tx_stop(struct aq_hw_s *self, struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
1208
hw_atl_tdm_tx_desc_en_set(self, 0U, ring->idx);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
1213
int hw_atl_b0_hw_ring_rx_stop(struct aq_hw_s *self, struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
1215
hw_atl_rdm_rx_desc_en_set(self, 0U, ring->idx);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
631
int hw_atl_b0_hw_ring_tx_start(struct aq_hw_s *self, struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
633
hw_atl_tdm_tx_desc_en_set(self, 1, ring->idx);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
638
int hw_atl_b0_hw_ring_rx_start(struct aq_hw_s *self, struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
640
hw_atl_rdm_rx_desc_en_set(self, 1, ring->idx);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
654
struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
656
hw_atl_reg_tx_dma_desc_tail_ptr_set(self, ring->sw_tail, ring->idx);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
661
int hw_atl_b0_hw_ring_tx_xmit(struct aq_hw_s *self, struct aq_ring_s *ring,
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
672
buff = &ring->buff_ring[ring->sw_tail];
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
676
txd = (struct hw_atl_txd_s *)&ring->dx_ring[ring->sw_tail *
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
682
buff = &ring->buff_ring[ring->sw_tail];
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
741
ring->sw_tail = aq_ring_next_dx(ring, ring->sw_tail);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
744
hw_atl_b0_hw_tx_ring_tail_update(self, ring);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
820
int hw_atl_b0_hw_ring_rx_fill(struct aq_hw_s *self, struct aq_ring_s *ring,
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
823
for (; sw_tail_old != ring->sw_tail;
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
824
sw_tail_old = aq_ring_next_dx(ring, sw_tail_old)) {
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
826
(struct hw_atl_rxd_s *)&ring->dx_ring[sw_tail_old *
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
829
struct aq_ring_buff_s *buff = &ring->buff_ring[sw_tail_old];
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
835
hw_atl_reg_rx_dma_desc_tail_ptr_set(self, sw_tail_old, ring->idx);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
841
struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
845
for (i = aq_ring_avail_dx(ring); i--;
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
846
ring->sw_tail = aq_ring_next_dx(ring, ring->sw_tail)) {
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
849
&ring->dx_ring[ring->sw_tail * HW_ATL_B0_RXD_SIZE];
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
851
rxd->buf_addr = ring->dx_ring_pa + ring->size * ring->dx_size;
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
857
hw_atl_reg_rx_dma_desc_tail_ptr_set(self, ring->sw_tail, ring->idx);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
863
struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
865
while (ring->hw_head != ring->sw_tail) {
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
868
(ring->dx_ring + (ring->hw_head * HW_ATL_B0_RXD_SIZE));
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
874
ring->hw_head = aq_ring_next_dx(ring, ring->hw_head);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
881
struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
886
hw_head_ = hw_atl_tdm_tx_desc_head_ptr_get(self, ring->idx);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
894
if (hw_head_ >= ring->size) {
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
899
ring->hw_head = hw_head_;
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
906
int hw_atl_b0_hw_ring_rx_receive(struct aq_hw_s *self, struct aq_ring_s *ring)
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
908
for (; ring->hw_head != ring->sw_tail;
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
909
ring->hw_head = aq_ring_next_dx(ring, ring->hw_head)) {
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
912
&ring->dx_ring[ring->hw_head * HW_ATL_B0_RXD_SIZE];
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
922
buff = &ring->buff_ring[ring->hw_head];
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
979
ring->frame_max;
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
981
buff->len : ring->frame_max;
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
986
rxd_wb->pkt_len > ring->frame_max ?
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
987
ring->frame_max : rxd_wb->pkt_len;
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
992
++ring->stats.rx.lro_packets;
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
996
aq_ring_next_dx(ring,
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
997
ring->hw_head);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c
998
++ring->stats.rx.jumbo_packets;
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.h
40
int hw_atl_b0_hw_ring_tx_start(struct aq_hw_s *self, struct aq_ring_s *ring);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.h
41
int hw_atl_b0_hw_ring_rx_start(struct aq_hw_s *self, struct aq_ring_s *ring);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.h
45
int hw_atl_b0_hw_ring_rx_fill(struct aq_hw_s *self, struct aq_ring_s *ring,
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.h
47
int hw_atl_b0_hw_ring_rx_receive(struct aq_hw_s *self, struct aq_ring_s *ring);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.h
51
int hw_atl_b0_hw_ring_tx_xmit(struct aq_hw_s *self, struct aq_ring_s *ring,
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.h
54
struct aq_ring_s *ring);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.h
56
int hw_atl_b0_hw_ring_tx_stop(struct aq_hw_s *self, struct aq_ring_s *ring);
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.h
57
int hw_atl_b0_hw_ring_rx_stop(struct aq_hw_s *self, struct aq_ring_s *ring);
drivers/net/ethernet/atheros/ag71xx.c
1120
struct ag71xx_ring *ring = &ag->tx_ring;
drivers/net/ethernet/atheros/ag71xx.c
1121
int ring_mask = BIT(ring->order) - 1;
drivers/net/ethernet/atheros/ag71xx.c
1125
while (ring->curr != ring->dirty) {
drivers/net/ethernet/atheros/ag71xx.c
1127
u32 i = ring->dirty & ring_mask;
drivers/net/ethernet/atheros/ag71xx.c
1129
desc = ag71xx_ring_desc(ring, i);
drivers/net/ethernet/atheros/ag71xx.c
1135
if (ring->buf[i].tx.skb) {
drivers/net/ethernet/atheros/ag71xx.c
1136
bytes_compl += ring->buf[i].tx.len;
drivers/net/ethernet/atheros/ag71xx.c
1138
dev_kfree_skb_any(ring->buf[i].tx.skb);
drivers/net/ethernet/atheros/ag71xx.c
1140
ring->buf[i].tx.skb = NULL;
drivers/net/ethernet/atheros/ag71xx.c
1141
ring->dirty++;
drivers/net/ethernet/atheros/ag71xx.c
1152
struct ag71xx_ring *ring = &ag->tx_ring;
drivers/net/ethernet/atheros/ag71xx.c
1153
int ring_size = BIT(ring->order);
drivers/net/ethernet/atheros/ag71xx.c
1158
struct ag71xx_desc *desc = ag71xx_ring_desc(ring, i);
drivers/net/ethernet/atheros/ag71xx.c
1160
desc->next = (u32)(ring->descs_dma +
drivers/net/ethernet/atheros/ag71xx.c
1164
ring->buf[i].tx.skb = NULL;
drivers/net/ethernet/atheros/ag71xx.c
1170
ring->curr = 0;
drivers/net/ethernet/atheros/ag71xx.c
1171
ring->dirty = 0;
drivers/net/ethernet/atheros/ag71xx.c
1177
struct ag71xx_ring *ring = &ag->rx_ring;
drivers/net/ethernet/atheros/ag71xx.c
1178
int ring_size = BIT(ring->order);
drivers/net/ethernet/atheros/ag71xx.c
1181
if (!ring->buf)
drivers/net/ethernet/atheros/ag71xx.c
1185
if (ring->buf[i].rx.rx_buf) {
drivers/net/ethernet/atheros/ag71xx.c
1187
ring->buf[i].rx.dma_addr,
drivers/net/ethernet/atheros/ag71xx.c
1189
skb_free_frag(ring->buf[i].rx.rx_buf);
drivers/net/ethernet/atheros/ag71xx.c
1203
struct ag71xx_ring *ring = &ag->rx_ring;
drivers/net/ethernet/atheros/ag71xx.c
1207
desc = ag71xx_ring_desc(ring, buf - &ring->buf[0]);
drivers/net/ethernet/atheros/ag71xx.c
1227
struct ag71xx_ring *ring = &ag->rx_ring;
drivers/net/ethernet/atheros/ag71xx.c
1229
int ring_mask = BIT(ring->order) - 1;
drivers/net/ethernet/atheros/ag71xx.c
1230
int ring_size = BIT(ring->order);
drivers/net/ethernet/atheros/ag71xx.c
1236
struct ag71xx_desc *desc = ag71xx_ring_desc(ring, i);
drivers/net/ethernet/atheros/ag71xx.c
1238
desc->next = (u32)(ring->descs_dma +
drivers/net/ethernet/atheros/ag71xx.c
1246
struct ag71xx_desc *desc = ag71xx_ring_desc(ring, i);
drivers/net/ethernet/atheros/ag71xx.c
1248
if (!ag71xx_fill_rx_buf(ag, &ring->buf[i], ag->rx_buf_offset,
drivers/net/ethernet/atheros/ag71xx.c
1260
ring->curr = 0;
drivers/net/ethernet/atheros/ag71xx.c
1261
ring->dirty = 0;
drivers/net/ethernet/atheros/ag71xx.c
1268
struct ag71xx_ring *ring = &ag->rx_ring;
drivers/net/ethernet/atheros/ag71xx.c
1269
int ring_mask = BIT(ring->order) - 1;
drivers/net/ethernet/atheros/ag71xx.c
1274
for (; ring->curr - ring->dirty > 0; ring->dirty++) {
drivers/net/ethernet/atheros/ag71xx.c
1278
i = ring->dirty & ring_mask;
drivers/net/ethernet/atheros/ag71xx.c
1279
desc = ag71xx_ring_desc(ring, i);
drivers/net/ethernet/atheros/ag71xx.c
1281
if (!ring->buf[i].rx.rx_buf &&
drivers/net/ethernet/atheros/ag71xx.c
1282
!ag71xx_fill_rx_buf(ag, &ring->buf[i], offset,
drivers/net/ethernet/atheros/ag71xx.c
1450
static int ag71xx_fill_dma_desc(struct ag71xx_ring *ring, u32 addr, int len)
drivers/net/ethernet/atheros/ag71xx.c
1455
ring_mask = BIT(ring->order) - 1;
drivers/net/ethernet/atheros/ag71xx.c
1457
split = ring->desc_split;
drivers/net/ethernet/atheros/ag71xx.c
1465
i = (ring->curr + ndesc) & ring_mask;
drivers/net/ethernet/atheros/ag71xx.c
1466
desc = ag71xx_ring_desc(ring, i);
drivers/net/ethernet/atheros/ag71xx.c
1504
struct ag71xx_ring *ring;
drivers/net/ethernet/atheros/ag71xx.c
1508
ring = &ag->tx_ring;
drivers/net/ethernet/atheros/ag71xx.c
1509
ring_mask = BIT(ring->order) - 1;
drivers/net/ethernet/atheros/ag71xx.c
1510
ring_size = BIT(ring->order);
drivers/net/ethernet/atheros/ag71xx.c
1524
i = ring->curr & ring_mask;
drivers/net/ethernet/atheros/ag71xx.c
1525
desc = ag71xx_ring_desc(ring, i);
drivers/net/ethernet/atheros/ag71xx.c
1528
n = ag71xx_fill_dma_desc(ring, (u32)dma_addr,
drivers/net/ethernet/atheros/ag71xx.c
1533
i = (ring->curr + n - 1) & ring_mask;
drivers/net/ethernet/atheros/ag71xx.c
1534
ring->buf[i].tx.len = skb->len;
drivers/net/ethernet/atheros/ag71xx.c
1535
ring->buf[i].tx.skb = skb;
drivers/net/ethernet/atheros/ag71xx.c
1542
ring->curr += n;
drivers/net/ethernet/atheros/ag71xx.c
1548
if (ring->desc_split)
drivers/net/ethernet/atheros/ag71xx.c
1551
if (ring->curr - ring->dirty >= ring_size - ring_min) {
drivers/net/ethernet/atheros/ag71xx.c
1609
struct ag71xx_ring *ring;
drivers/net/ethernet/atheros/ag71xx.c
1613
ring = &ag->rx_ring;
drivers/net/ethernet/atheros/ag71xx.c
1616
ring_mask = BIT(ring->order) - 1;
drivers/net/ethernet/atheros/ag71xx.c
1617
ring_size = BIT(ring->order);
drivers/net/ethernet/atheros/ag71xx.c
1620
limit, ring->curr, ring->dirty);
drivers/net/ethernet/atheros/ag71xx.c
1623
unsigned int i = ring->curr & ring_mask;
drivers/net/ethernet/atheros/ag71xx.c
1624
struct ag71xx_desc *desc = ag71xx_ring_desc(ring, i);
drivers/net/ethernet/atheros/ag71xx.c
1630
if ((ring->dirty + ring_size) == ring->curr) {
drivers/net/ethernet/atheros/ag71xx.c
1640
dma_unmap_single(&ag->pdev->dev, ring->buf[i].rx.dma_addr,
drivers/net/ethernet/atheros/ag71xx.c
1646
skb = napi_build_skb(ring->buf[i].rx.rx_buf, ag71xx_buffer_size(ag));
drivers/net/ethernet/atheros/ag71xx.c
1649
skb_free_frag(ring->buf[i].rx.rx_buf);
drivers/net/ethernet/atheros/ag71xx.c
1662
ring->buf[i].rx.rx_buf = NULL;
drivers/net/ethernet/atheros/ag71xx.c
1665
ring->curr++;
drivers/net/ethernet/atheros/ag71xx.c
1673
ring->curr, ring->dirty, done);
drivers/net/ethernet/atheros/ag71xx.c
390
static struct ag71xx_desc *ag71xx_ring_desc(struct ag71xx_ring *ring, int idx)
drivers/net/ethernet/atheros/ag71xx.c
392
return (struct ag71xx_desc *)&ring->descs_cpu[idx * AG71XX_DESC_SIZE];
drivers/net/ethernet/atheros/ag71xx.c
771
struct ag71xx_ring *ring = &ag->tx_ring;
drivers/net/ethernet/atheros/ag71xx.c
777
ring_mask = BIT(ring->order) - 1;
drivers/net/ethernet/atheros/ag71xx.c
778
ring_size = BIT(ring->order);
drivers/net/ethernet/atheros/ag71xx.c
782
while (ring->dirty + n != ring->curr) {
drivers/net/ethernet/atheros/ag71xx.c
787
i = (ring->dirty + n) & ring_mask;
drivers/net/ethernet/atheros/ag71xx.c
788
desc = ag71xx_ring_desc(ring, i);
drivers/net/ethernet/atheros/ag71xx.c
789
skb = ring->buf[i].tx.skb;
drivers/net/ethernet/atheros/ag71xx.c
809
ring->buf[i].tx.skb = NULL;
drivers/net/ethernet/atheros/ag71xx.c
811
bytes_compl += ring->buf[i].tx.len;
drivers/net/ethernet/atheros/ag71xx.c
814
ring->dirty += n;
drivers/net/ethernet/atheros/ag71xx.c
831
if ((ring->curr - ring->dirty) < (ring_size * 3) / 4)
drivers/net/ethernet/atheros/atlx/atl1.c
3476
struct ethtool_ringparam *ring,
drivers/net/ethernet/atheros/atlx/atl1.c
3484
ring->rx_max_pending = ATL1_MAX_RFD;
drivers/net/ethernet/atheros/atlx/atl1.c
3485
ring->tx_max_pending = ATL1_MAX_TPD;
drivers/net/ethernet/atheros/atlx/atl1.c
3486
ring->rx_pending = rxdr->count;
drivers/net/ethernet/atheros/atlx/atl1.c
3487
ring->tx_pending = txdr->count;
drivers/net/ethernet/atheros/atlx/atl1.c
3491
struct ethtool_ringparam *ring,
drivers/net/ethernet/atheros/atlx/atl1.c
3516
rfdr->count = (u16) max(ring->rx_pending, (u32) ATL1_MIN_RFD);
drivers/net/ethernet/atheros/atlx/atl1.c
3522
tpdr->count = (u16) max(ring->tx_pending, (u32) ATL1_MIN_TPD);
drivers/net/ethernet/broadcom/bcm4908_enet.c
148
struct bcm4908_enet_dma_ring *ring)
drivers/net/ethernet/broadcom/bcm4908_enet.c
150
enet_write(enet, ring->cfg_block + ENET_DMA_CH_CFG_INT_MASK, ENET_DMA_INT_DEFAULTS);
drivers/net/ethernet/broadcom/bcm4908_enet.c
154
struct bcm4908_enet_dma_ring *ring)
drivers/net/ethernet/broadcom/bcm4908_enet.c
156
enet_write(enet, ring->cfg_block + ENET_DMA_CH_CFG_INT_MASK, 0);
drivers/net/ethernet/broadcom/bcm4908_enet.c
160
struct bcm4908_enet_dma_ring *ring)
drivers/net/ethernet/broadcom/bcm4908_enet.c
162
enet_write(enet, ring->cfg_block + ENET_DMA_CH_CFG_INT_STAT, ENET_DMA_INT_DEFAULTS);
drivers/net/ethernet/broadcom/bcm4908_enet.c
170
struct bcm4908_enet_dma_ring *ring)
drivers/net/ethernet/broadcom/bcm4908_enet.c
172
int size = ring->length * sizeof(struct bcm4908_enet_dma_ring_bd);
drivers/net/ethernet/broadcom/bcm4908_enet.c
175
ring->cpu_addr = dma_alloc_coherent(dev, size, &ring->dma_addr, GFP_KERNEL);
drivers/net/ethernet/broadcom/bcm4908_enet.c
176
if (!ring->cpu_addr)
drivers/net/ethernet/broadcom/bcm4908_enet.c
179
if (((uintptr_t)ring->cpu_addr) & (0x40 - 1)) {
drivers/net/ethernet/broadcom/bcm4908_enet.c
184
ring->slots = kzalloc_objs(*ring->slots, ring->length);
drivers/net/ethernet/broadcom/bcm4908_enet.c
185
if (!ring->slots)
drivers/net/ethernet/broadcom/bcm4908_enet.c
191
dma_free_coherent(dev, size, ring->cpu_addr, ring->dma_addr);
drivers/net/ethernet/broadcom/bcm4908_enet.c
192
ring->cpu_addr = NULL;
drivers/net/ethernet/broadcom/bcm4908_enet.c
257
struct bcm4908_enet_dma_ring *ring = rings[i];
drivers/net/ethernet/broadcom/bcm4908_enet.c
259
enet_write(enet, ring->st_ram_block + ENET_DMA_CH_STATE_RAM_BASE_DESC_PTR, 0);
drivers/net/ethernet/broadcom/bcm4908_enet.c
260
enet_write(enet, ring->st_ram_block + ENET_DMA_CH_STATE_RAM_STATE_DATA, 0);
drivers/net/ethernet/broadcom/bcm4908_enet.c
261
enet_write(enet, ring->st_ram_block + ENET_DMA_CH_STATE_RAM_DESC_LEN_STATUS, 0);
drivers/net/ethernet/broadcom/bcm4908_enet.c
262
enet_write(enet, ring->st_ram_block + ENET_DMA_CH_STATE_RAM_DESC_BASE_BUFPTR, 0);
drivers/net/ethernet/broadcom/bcm4908_enet.c
299
struct bcm4908_enet_dma_ring *ring)
drivers/net/ethernet/broadcom/bcm4908_enet.c
302
int reset_subch = ring->is_tx ? 1 : 0;
drivers/net/ethernet/broadcom/bcm4908_enet.c
308
enet_write(enet, ring->cfg_block + ENET_DMA_CH_CFG, 0);
drivers/net/ethernet/broadcom/bcm4908_enet.c
309
enet_write(enet, ring->cfg_block + ENET_DMA_CH_CFG_MAX_BURST, ENET_DMA_MAX_BURST_LEN);
drivers/net/ethernet/broadcom/bcm4908_enet.c
310
enet_write(enet, ring->cfg_block + ENET_DMA_CH_CFG_INT_MASK, 0);
drivers/net/ethernet/broadcom/bcm4908_enet.c
312
enet_write(enet, ring->st_ram_block + ENET_DMA_CH_STATE_RAM_BASE_DESC_PTR,
drivers/net/ethernet/broadcom/bcm4908_enet.c
313
(uint32_t)ring->dma_addr);
drivers/net/ethernet/broadcom/bcm4908_enet.c
315
ring->read_idx = 0;
drivers/net/ethernet/broadcom/bcm4908_enet.c
316
ring->write_idx = 0;
drivers/net/ethernet/broadcom/bcm4908_enet.c
359
struct bcm4908_enet_dma_ring *ring)
drivers/net/ethernet/broadcom/bcm4908_enet.c
361
enet_write(enet, ring->cfg_block + ENET_DMA_CH_CFG, ENET_DMA_CH_CFG_ENABLE);
drivers/net/ethernet/broadcom/bcm4908_enet.c
365
struct bcm4908_enet_dma_ring *ring)
drivers/net/ethernet/broadcom/bcm4908_enet.c
367
enet_write(enet, ring->cfg_block + ENET_DMA_CH_CFG, 0);
drivers/net/ethernet/broadcom/bcm4908_enet.c
371
struct bcm4908_enet_dma_ring *ring)
drivers/net/ethernet/broadcom/bcm4908_enet.c
373
enet_set(enet, ring->cfg_block + ENET_DMA_CH_CFG, ENET_DMA_CH_CFG_ENABLE);
drivers/net/ethernet/broadcom/bcm4908_enet.c
377
struct bcm4908_enet_dma_ring *ring)
drivers/net/ethernet/broadcom/bcm4908_enet.c
382
enet_maskset(enet, ring->cfg_block + ENET_DMA_CH_CFG, ENET_DMA_CH_CFG_ENABLE, 0);
drivers/net/ethernet/broadcom/bcm4908_enet.c
386
tmp = enet_read(enet, ring->cfg_block + ENET_DMA_CH_CFG);
drivers/net/ethernet/broadcom/bcm4908_enet.c
389
enet_maskset(enet, ring->cfg_block + ENET_DMA_CH_CFG, ENET_DMA_CH_CFG_ENABLE, 0);
drivers/net/ethernet/broadcom/bcm4908_enet.c
436
struct bcm4908_enet_dma_ring *ring;
drivers/net/ethernet/broadcom/bcm4908_enet.c
438
ring = (irq == enet->irq_tx) ? &enet->tx_ring : &enet->rx_ring;
drivers/net/ethernet/broadcom/bcm4908_enet.c
440
bcm4908_enet_dma_ring_intrs_off(enet, ring);
drivers/net/ethernet/broadcom/bcm4908_enet.c
441
bcm4908_enet_dma_ring_intrs_ack(enet, ring);
drivers/net/ethernet/broadcom/bcm4908_enet.c
443
napi_schedule(&ring->napi);
drivers/net/ethernet/broadcom/bcm4908_enet.c
524
struct bcm4908_enet_dma_ring *ring = &enet->tx_ring;
drivers/net/ethernet/broadcom/bcm4908_enet.c
533
!(le32_to_cpu(ring->buf_desc[ring->read_idx].ctl) & DMA_CTL_STATUS_OWN))
drivers/net/ethernet/broadcom/bcm4908_enet.c
537
if (ring->read_idx <= ring->write_idx)
drivers/net/ethernet/broadcom/bcm4908_enet.c
538
free_buf_descs = ring->read_idx - ring->write_idx + ring->length;
drivers/net/ethernet/broadcom/bcm4908_enet.c
540
free_buf_descs = ring->read_idx - ring->write_idx;
drivers/net/ethernet/broadcom/bcm4908_enet.c
547
buf_desc = &ring->buf_desc[ring->write_idx];
drivers/net/ethernet/broadcom/bcm4908_enet.c
553
slot = &ring->slots[ring->write_idx];
drivers/net/ethernet/broadcom/bcm4908_enet.c
565
if (ring->write_idx + 1 == ring->length - 1)
drivers/net/ethernet/broadcom/bcm4908_enet.c
575
if (++ring->write_idx == ring->length - 1)
drivers/net/ethernet/broadcom/bcm4908_enet.c
576
ring->write_idx = 0;
drivers/net/ethernet/broadcom/bcmsysport.c
1109
unsigned int ring, ring_bit;
drivers/net/ethernet/broadcom/bcmsysport.c
1138
for (ring = 0; ring < dev->num_tx_queues; ring++) {
drivers/net/ethernet/broadcom/bcmsysport.c
1139
ring_bit = BIT(ring + INTRL2_0_TDMA_MBDONE_SHIFT);
drivers/net/ethernet/broadcom/bcmsysport.c
1143
txr = &priv->tx_rings[ring];
drivers/net/ethernet/broadcom/bcmsysport.c
1160
unsigned int ring;
drivers/net/ethernet/broadcom/bcmsysport.c
1171
for (ring = 0; ring < dev->num_tx_queues; ring++) {
drivers/net/ethernet/broadcom/bcmsysport.c
1172
if (!(priv->irq1_stat & BIT(ring)))
drivers/net/ethernet/broadcom/bcmsysport.c
1175
txr = &priv->tx_rings[ring];
drivers/net/ethernet/broadcom/bcmsysport.c
1178
intrl2_1_mask_set(priv, BIT(ring));
drivers/net/ethernet/broadcom/bcmsysport.c
1288
struct bcm_sysport_tx_ring *ring;
drivers/net/ethernet/broadcom/bcmsysport.c
1300
ring = &priv->tx_rings[queue];
drivers/net/ethernet/broadcom/bcmsysport.c
1303
spin_lock_irqsave(&ring->lock, flags);
drivers/net/ethernet/broadcom/bcmsysport.c
1304
if (unlikely(ring->desc_count == 0)) {
drivers/net/ethernet/broadcom/bcmsysport.c
1333
cb = &ring->cbs[ring->curr_desc];
drivers/net/ethernet/broadcom/bcmsysport.c
1348
ring->curr_desc++;
drivers/net/ethernet/broadcom/bcmsysport.c
1349
if (ring->curr_desc == ring->size)
drivers/net/ethernet/broadcom/bcmsysport.c
1350
ring->curr_desc = 0;
drivers/net/ethernet/broadcom/bcmsysport.c
1351
ring->desc_count--;
drivers/net/ethernet/broadcom/bcmsysport.c
1355
tdma_writel(priv, len_status, TDMA_WRITE_PORT_HI(ring->index));
drivers/net/ethernet/broadcom/bcmsysport.c
1356
tdma_writel(priv, addr_lo, TDMA_WRITE_PORT_LO(ring->index));
drivers/net/ethernet/broadcom/bcmsysport.c
1360
if (ring->desc_count == 0)
drivers/net/ethernet/broadcom/bcmsysport.c
1364
ring->index, ring->desc_count, ring->curr_desc);
drivers/net/ethernet/broadcom/bcmsysport.c
1368
spin_unlock_irqrestore(&ring->lock, flags);
drivers/net/ethernet/broadcom/bcmsysport.c
1482
struct bcm_sysport_tx_ring *ring = &priv->tx_rings[index];
drivers/net/ethernet/broadcom/bcmsysport.c
1489
ring->cbs = kzalloc_objs(struct bcm_sysport_cb, size);
drivers/net/ethernet/broadcom/bcmsysport.c
1490
if (!ring->cbs) {
drivers/net/ethernet/broadcom/bcmsysport.c
1496
spin_lock_init(&ring->lock);
drivers/net/ethernet/broadcom/bcmsysport.c
1497
ring->priv = priv;
drivers/net/ethernet/broadcom/bcmsysport.c
1498
netif_napi_add_tx(priv->netdev, &ring->napi, bcm_sysport_tx_poll);
drivers/net/ethernet/broadcom/bcmsysport.c
1499
ring->index = index;
drivers/net/ethernet/broadcom/bcmsysport.c
1500
ring->size = size;
drivers/net/ethernet/broadcom/bcmsysport.c
1501
ring->clean_index = 0;
drivers/net/ethernet/broadcom/bcmsysport.c
1502
ring->alloc_size = ring->size;
drivers/net/ethernet/broadcom/bcmsysport.c
1503
ring->desc_count = ring->size;
drivers/net/ethernet/broadcom/bcmsysport.c
1504
ring->curr_desc = 0;
drivers/net/ethernet/broadcom/bcmsysport.c
1515
if (ring->inspect) {
drivers/net/ethernet/broadcom/bcmsysport.c
1516
reg |= ring->switch_queue & RING_QID_MASK;
drivers/net/ethernet/broadcom/bcmsysport.c
1517
reg |= ring->switch_port << RING_PORT_ID_SHIFT;
drivers/net/ethernet/broadcom/bcmsysport.c
1551
tdma_writel(priv, ring->size |
drivers/net/ethernet/broadcom/bcmsysport.c
1560
napi_enable(&ring->napi);
drivers/net/ethernet/broadcom/bcmsysport.c
1564
ring->size, ring->switch_queue,
drivers/net/ethernet/broadcom/bcmsysport.c
1565
ring->switch_port);
drivers/net/ethernet/broadcom/bcmsysport.c
1573
struct bcm_sysport_tx_ring *ring = &priv->tx_rings[index];
drivers/net/ethernet/broadcom/bcmsysport.c
1585
if (!ring->cbs)
drivers/net/ethernet/broadcom/bcmsysport.c
1588
napi_disable(&ring->napi);
drivers/net/ethernet/broadcom/bcmsysport.c
1589
netif_napi_del(&ring->napi);
drivers/net/ethernet/broadcom/bcmsysport.c
1591
bcm_sysport_tx_clean(priv, ring);
drivers/net/ethernet/broadcom/bcmsysport.c
1593
kfree(ring->cbs);
drivers/net/ethernet/broadcom/bcmsysport.c
1594
ring->cbs = NULL;
drivers/net/ethernet/broadcom/bcmsysport.c
1595
ring->size = 0;
drivers/net/ethernet/broadcom/bcmsysport.c
1596
ring->alloc_size = 0;
drivers/net/ethernet/broadcom/bcmsysport.c
2306
struct bcm_sysport_tx_ring *ring;
drivers/net/ethernet/broadcom/bcmsysport.c
2338
ring = &priv->tx_rings[q];
drivers/net/ethernet/broadcom/bcmsysport.c
2340
if (ring->inspect)
drivers/net/ethernet/broadcom/bcmsysport.c
2346
ring->switch_queue = qp;
drivers/net/ethernet/broadcom/bcmsysport.c
2347
ring->switch_port = port;
drivers/net/ethernet/broadcom/bcmsysport.c
2348
ring->inspect = true;
drivers/net/ethernet/broadcom/bcmsysport.c
2349
priv->ring_map[qp + port * num_tx_queues] = ring;
drivers/net/ethernet/broadcom/bcmsysport.c
2361
struct bcm_sysport_tx_ring *ring;
drivers/net/ethernet/broadcom/bcmsysport.c
2370
ring = &priv->tx_rings[q];
drivers/net/ethernet/broadcom/bcmsysport.c
2372
if (ring->switch_port != port)
drivers/net/ethernet/broadcom/bcmsysport.c
2375
if (!ring->inspect)
drivers/net/ethernet/broadcom/bcmsysport.c
2378
ring->inspect = false;
drivers/net/ethernet/broadcom/bcmsysport.c
2379
qp = ring->switch_queue;
drivers/net/ethernet/broadcom/bcmsysport.c
429
struct bcm_sysport_tx_ring *ring;
drivers/net/ethernet/broadcom/bcmsysport.c
435
ring = &priv->tx_rings[q];
drivers/net/ethernet/broadcom/bcmsysport.c
438
bytes = ring->bytes;
drivers/net/ethernet/broadcom/bcmsysport.c
439
packets = ring->packets;
drivers/net/ethernet/broadcom/bcmsysport.c
453
struct bcm_sysport_tx_ring *ring;
drivers/net/ethernet/broadcom/bcmsysport.c
501
ring = &priv->tx_rings[i];
drivers/net/ethernet/broadcom/bcmsysport.c
502
data[j] = ring->packets;
drivers/net/ethernet/broadcom/bcmsysport.c
504
data[j] = ring->bytes;
drivers/net/ethernet/broadcom/bcmsysport.c
571
static void bcm_sysport_set_tx_coalesce(struct bcm_sysport_tx_ring *ring,
drivers/net/ethernet/broadcom/bcmsysport.c
574
struct bcm_sysport_priv *priv = ring->priv;
drivers/net/ethernet/broadcom/bcmsysport.c
577
reg = tdma_readl(priv, TDMA_DESC_RING_INTR_CONTROL(ring->index));
drivers/net/ethernet/broadcom/bcmsysport.c
583
tdma_writel(priv, reg, TDMA_DESC_RING_INTR_CONTROL(ring->index));
drivers/net/ethernet/broadcom/bcmsysport.c
851
static void bcm_sysport_tx_reclaim_one(struct bcm_sysport_tx_ring *ring,
drivers/net/ethernet/broadcom/bcmsysport.c
856
struct bcm_sysport_priv *priv = ring->priv;
drivers/net/ethernet/broadcom/bcmsysport.c
877
struct bcm_sysport_tx_ring *ring)
drivers/net/ethernet/broadcom/bcmsysport.c
888
if (!ring->priv->is_lite)
drivers/net/ethernet/broadcom/bcmsysport.c
889
intrl2_1_writel(ring->priv, BIT(ring->index), INTRL2_CPU_CLEAR);
drivers/net/ethernet/broadcom/bcmsysport.c
891
intrl2_0_writel(ring->priv, BIT(ring->index +
drivers/net/ethernet/broadcom/bcmsysport.c
895
hw_ind = tdma_readl(priv, TDMA_DESC_RING_PROD_CONS_INDEX(ring->index));
drivers/net/ethernet/broadcom/bcmsysport.c
897
txbds_ready = (c_index - ring->c_index) & RING_CONS_INDEX_MASK;
drivers/net/ethernet/broadcom/bcmsysport.c
901
ring->index, ring->c_index, c_index, txbds_ready);
drivers/net/ethernet/broadcom/bcmsysport.c
904
cb = &ring->cbs[ring->clean_index];
drivers/net/ethernet/broadcom/bcmsysport.c
905
bcm_sysport_tx_reclaim_one(ring, cb, &bytes_compl, &pkts_compl);
drivers/net/ethernet/broadcom/bcmsysport.c
907
ring->desc_count++;
drivers/net/ethernet/broadcom/bcmsysport.c
910
if (likely(ring->clean_index < ring->size - 1))
drivers/net/ethernet/broadcom/bcmsysport.c
911
ring->clean_index++;
drivers/net/ethernet/broadcom/bcmsysport.c
913
ring->clean_index = 0;
drivers/net/ethernet/broadcom/bcmsysport.c
917
ring->packets += pkts_compl;
drivers/net/ethernet/broadcom/bcmsysport.c
918
ring->bytes += bytes_compl;
drivers/net/ethernet/broadcom/bcmsysport.c
921
ring->c_index = c_index;
drivers/net/ethernet/broadcom/bcmsysport.c
925
ring->index, ring->c_index, pkts_compl, bytes_compl);
drivers/net/ethernet/broadcom/bcmsysport.c
932
struct bcm_sysport_tx_ring *ring)
drivers/net/ethernet/broadcom/bcmsysport.c
938
txq = netdev_get_tx_queue(priv->netdev, ring->index);
drivers/net/ethernet/broadcom/bcmsysport.c
940
spin_lock_irqsave(&ring->lock, flags);
drivers/net/ethernet/broadcom/bcmsysport.c
941
released = __bcm_sysport_tx_reclaim(priv, ring);
drivers/net/ethernet/broadcom/bcmsysport.c
945
spin_unlock_irqrestore(&ring->lock, flags);
drivers/net/ethernet/broadcom/bcmsysport.c
952
struct bcm_sysport_tx_ring *ring)
drivers/net/ethernet/broadcom/bcmsysport.c
956
spin_lock_irqsave(&ring->lock, flags);
drivers/net/ethernet/broadcom/bcmsysport.c
957
__bcm_sysport_tx_reclaim(priv, ring);
drivers/net/ethernet/broadcom/bcmsysport.c
958
spin_unlock_irqrestore(&ring->lock, flags);
drivers/net/ethernet/broadcom/bcmsysport.c
963
struct bcm_sysport_tx_ring *ring =
drivers/net/ethernet/broadcom/bcmsysport.c
967
work_done = bcm_sysport_tx_reclaim(ring->priv, ring);
drivers/net/ethernet/broadcom/bcmsysport.c
972
if (!ring->priv->is_lite)
drivers/net/ethernet/broadcom/bcmsysport.c
973
intrl2_1_mask_clear(ring->priv, BIT(ring->index));
drivers/net/ethernet/broadcom/bcmsysport.c
975
intrl2_0_mask_clear(ring->priv, BIT(ring->index +
drivers/net/ethernet/broadcom/bgmac.c
107
bgmac_write(bgmac, ring->mmio_base + BGMAC_DMA_TX_CTL, ctl);
drivers/net/ethernet/broadcom/bgmac.c
111
bgmac_dma_tx_add_buf(struct bgmac *bgmac, struct bgmac_dma_ring *ring,
drivers/net/ethernet/broadcom/bgmac.c
1227
struct bgmac_dma_ring *ring;
drivers/net/ethernet/broadcom/bgmac.c
123
slot = &ring->slots[i];
drivers/net/ethernet/broadcom/bgmac.c
1230
ring = &bgmac->tx_ring[0];
drivers/net/ethernet/broadcom/bgmac.c
1231
return bgmac_dma_tx_add(bgmac, ring, skb);
drivers/net/ethernet/broadcom/bgmac.c
124
dma_desc = &ring->cpu_base[i];
drivers/net/ethernet/broadcom/bgmac.c
132
struct bgmac_dma_ring *ring,
drivers/net/ethernet/broadcom/bgmac.c
137
int index = ring->end % BGMAC_TX_RING_SLOTS;
drivers/net/ethernet/broadcom/bgmac.c
138
struct bgmac_slot_info *slot = &ring->slots[index];
drivers/net/ethernet/broadcom/bgmac.c
156
if (ring->end - ring->start + nr_frags + 1 >= BGMAC_TX_RING_SLOTS) {
drivers/net/ethernet/broadcom/bgmac.c
171
bgmac_dma_tx_add_buf(bgmac, ring, index, skb_headlen(skb), flags);
drivers/net/ethernet/broadcom/bgmac.c
179
slot = &ring->slots[index];
drivers/net/ethernet/broadcom/bgmac.c
188
bgmac_dma_tx_add_buf(bgmac, ring, index, len, flags);
drivers/net/ethernet/broadcom/bgmac.c
193
ring->end += nr_frags + 1;
drivers/net/ethernet/broadcom/bgmac.c
200
bgmac_write(bgmac, ring->mmio_base + BGMAC_DMA_TX_INDEX,
drivers/net/ethernet/broadcom/bgmac.c
201
ring->index_base +
drivers/net/ethernet/broadcom/bgmac.c
202
(ring->end % BGMAC_TX_RING_SLOTS) *
drivers/net/ethernet/broadcom/bgmac.c
205
if (ring->end - ring->start >= BGMAC_TX_RING_SLOTS - 8)
drivers/net/ethernet/broadcom/bgmac.c
215
int index = (ring->end + i) % BGMAC_TX_RING_SLOTS;
drivers/net/ethernet/broadcom/bgmac.c
216
struct bgmac_slot_info *slot = &ring->slots[index];
drivers/net/ethernet/broadcom/bgmac.c
217
u32 ctl1 = le32_to_cpu(ring->cpu_base[index].ctl1);
drivers/net/ethernet/broadcom/bgmac.c
225
ring->mmio_base);
drivers/net/ethernet/broadcom/bgmac.c
235
static void bgmac_dma_tx_free(struct bgmac *bgmac, struct bgmac_dma_ring *ring)
drivers/net/ethernet/broadcom/bgmac.c
242
empty_slot = bgmac_read(bgmac, ring->mmio_base + BGMAC_DMA_TX_STATUS);
drivers/net/ethernet/broadcom/bgmac.c
244
empty_slot -= ring->index_base;
drivers/net/ethernet/broadcom/bgmac.c
248
while (ring->start != ring->end) {
drivers/net/ethernet/broadcom/bgmac.c
249
int slot_idx = ring->start % BGMAC_TX_RING_SLOTS;
drivers/net/ethernet/broadcom/bgmac.c
250
struct bgmac_slot_info *slot = &ring->slots[slot_idx];
drivers/net/ethernet/broadcom/bgmac.c
257
ctl0 = le32_to_cpu(ring->cpu_base[slot_idx].ctl0);
drivers/net/ethernet/broadcom/bgmac.c
258
ctl1 = le32_to_cpu(ring->cpu_base[slot_idx].ctl1);
drivers/net/ethernet/broadcom/bgmac.c
280
ring->start++;
drivers/net/ethernet/broadcom/bgmac.c
292
static void bgmac_dma_rx_reset(struct bgmac *bgmac, struct bgmac_dma_ring *ring)
drivers/net/ethernet/broadcom/bgmac.c
294
if (!ring->mmio_base)
drivers/net/ethernet/broadcom/bgmac.c
297
bgmac_write(bgmac, ring->mmio_base + BGMAC_DMA_RX_CTL, 0);
drivers/net/ethernet/broadcom/bgmac.c
299
ring->mmio_base + BGMAC_DMA_RX_STATUS,
drivers/net/ethernet/broadcom/bgmac.c
303
ring->mmio_base);
drivers/net/ethernet/broadcom/bgmac.c
307
struct bgmac_dma_ring *ring)
drivers/net/ethernet/broadcom/bgmac.c
311
ctl = bgmac_read(bgmac, ring->mmio_base + BGMAC_DMA_RX_CTL);
drivers/net/ethernet/broadcom/bgmac.c
330
bgmac_write(bgmac, ring->mmio_base + BGMAC_DMA_RX_CTL, ctl);
drivers/net/ethernet/broadcom/bgmac.c
368
struct bgmac_dma_ring *ring)
drivers/net/ethernet/broadcom/bgmac.c
372
bgmac_write(bgmac, ring->mmio_base + BGMAC_DMA_RX_INDEX,
drivers/net/ethernet/broadcom/bgmac.c
373
ring->index_base +
drivers/net/ethernet/broadcom/bgmac.c
374
ring->end * sizeof(struct bgmac_dma_desc));
drivers/net/ethernet/broadcom/bgmac.c
378
struct bgmac_dma_ring *ring, int desc_idx)
drivers/net/ethernet/broadcom/bgmac.c
380
struct bgmac_dma_desc *dma_desc = ring->cpu_base + desc_idx;
drivers/net/ethernet/broadcom/bgmac.c
391
dma_desc->addr_low = cpu_to_le32(lower_32_bits(ring->slots[desc_idx].dma_addr));
drivers/net/ethernet/broadcom/bgmac.c
392
dma_desc->addr_high = cpu_to_le32(upper_32_bits(ring->slots[desc_idx].dma_addr));
drivers/net/ethernet/broadcom/bgmac.c
396
ring->end = desc_idx;
drivers/net/ethernet/broadcom/bgmac.c
41
static void bgmac_dma_tx_reset(struct bgmac *bgmac, struct bgmac_dma_ring *ring)
drivers/net/ethernet/broadcom/bgmac.c
412
static int bgmac_dma_rx_read(struct bgmac *bgmac, struct bgmac_dma_ring *ring,
drivers/net/ethernet/broadcom/bgmac.c
418
end_slot = bgmac_read(bgmac, ring->mmio_base + BGMAC_DMA_RX_STATUS);
drivers/net/ethernet/broadcom/bgmac.c
420
end_slot -= ring->index_base;
drivers/net/ethernet/broadcom/bgmac.c
424
while (ring->start != end_slot) {
drivers/net/ethernet/broadcom/bgmac.c
426
struct bgmac_slot_info *slot = &ring->slots[ring->start];
drivers/net/ethernet/broadcom/bgmac.c
451
ring->start);
drivers/net/ethernet/broadcom/bgmac.c
459
ring->start);
drivers/net/ethernet/broadcom/bgmac.c
46
if (!ring->mmio_base)
drivers/net/ethernet/broadcom/bgmac.c
489
bgmac_dma_rx_setup_desc(bgmac, ring, ring->start);
drivers/net/ethernet/broadcom/bgmac.c
491
if (++ring->start >= BGMAC_RX_RING_SLOTS)
drivers/net/ethernet/broadcom/bgmac.c
492
ring->start = 0;
drivers/net/ethernet/broadcom/bgmac.c
498
bgmac_dma_rx_update_index(bgmac, ring);
drivers/net/ethernet/broadcom/bgmac.c
505
struct bgmac_dma_ring *ring,
drivers/net/ethernet/broadcom/bgmac.c
510
bgmac_write(bgmac, ring->mmio_base + BGMAC_DMA_TX_RINGLO,
drivers/net/ethernet/broadcom/bgmac.c
512
if (bgmac_read(bgmac, ring->mmio_base + BGMAC_DMA_TX_RINGLO))
drivers/net/ethernet/broadcom/bgmac.c
516
bgmac_write(bgmac, ring->mmio_base + BGMAC_DMA_RX_RINGLO,
drivers/net/ethernet/broadcom/bgmac.c
518
if (bgmac_read(bgmac, ring->mmio_base + BGMAC_DMA_RX_RINGLO))
drivers/net/ethernet/broadcom/bgmac.c
526
struct bgmac_dma_ring *ring)
drivers/net/ethernet/broadcom/bgmac.c
529
struct bgmac_dma_desc *dma_desc = ring->cpu_base;
drivers/net/ethernet/broadcom/bgmac.c
53
bgmac_write(bgmac, ring->mmio_base + BGMAC_DMA_TX_CTL,
drivers/net/ethernet/broadcom/bgmac.c
537
slot = &ring->slots[i];
drivers/net/ethernet/broadcom/bgmac.c
553
struct bgmac_dma_ring *ring)
drivers/net/ethernet/broadcom/bgmac.c
56
val = bgmac_read(bgmac, ring->mmio_base + BGMAC_DMA_TX_STATUS);
drivers/net/ethernet/broadcom/bgmac.c
560
slot = &ring->slots[i];
drivers/net/ethernet/broadcom/bgmac.c
573
struct bgmac_dma_ring *ring,
drivers/net/ethernet/broadcom/bgmac.c
579
if (!ring->cpu_base)
drivers/net/ethernet/broadcom/bgmac.c
584
dma_free_coherent(dma_dev, size, ring->cpu_base,
drivers/net/ethernet/broadcom/bgmac.c
585
ring->dma_base);
drivers/net/ethernet/broadcom/bgmac.c
615
struct bgmac_dma_ring *ring;
drivers/net/ethernet/broadcom/bgmac.c
632
ring = &bgmac->tx_ring[i];
drivers/net/ethernet/broadcom/bgmac.c
633
ring->mmio_base = ring_base[i];
drivers/net/ethernet/broadcom/bgmac.c
637
ring->cpu_base = dma_alloc_coherent(dma_dev, size,
drivers/net/ethernet/broadcom/bgmac.c
638
&ring->dma_base,
drivers/net/ethernet/broadcom/bgmac.c
640
if (!ring->cpu_base) {
drivers/net/ethernet/broadcom/bgmac.c
642
ring->mmio_base);
drivers/net/ethernet/broadcom/bgmac.c
646
ring->unaligned = bgmac_dma_unaligned(bgmac, ring,
drivers/net/ethernet/broadcom/bgmac.c
648
if (ring->unaligned)
drivers/net/ethernet/broadcom/bgmac.c
649
ring->index_base = lower_32_bits(ring->dma_base);
drivers/net/ethernet/broadcom/bgmac.c
651
ring->index_base = 0;
drivers/net/ethernet/broadcom/bgmac.c
657
ring = &bgmac->rx_ring[i];
drivers/net/ethernet/broadcom/bgmac.c
658
ring->mmio_base = ring_base[i];
drivers/net/ethernet/broadcom/bgmac.c
662
ring->cpu_base = dma_alloc_coherent(dma_dev, size,
drivers/net/ethernet/broadcom/bgmac.c
663
&ring->dma_base,
drivers/net/ethernet/broadcom/bgmac.c
665
if (!ring->cpu_base) {
drivers/net/ethernet/broadcom/bgmac.c
667
ring->mmio_base);
drivers/net/ethernet/broadcom/bgmac.c
671
ring->unaligned = bgmac_dma_unaligned(bgmac, ring,
drivers/net/ethernet/broadcom/bgmac.c
673
if (ring->unaligned)
drivers/net/ethernet/broadcom/bgmac.c
674
ring->index_base = lower_32_bits(ring->dma_base);
drivers/net/ethernet/broadcom/bgmac.c
676
ring->index_base = 0;
drivers/net/ethernet/broadcom/bgmac.c
68
ring->mmio_base, val);
drivers/net/ethernet/broadcom/bgmac.c
688
struct bgmac_dma_ring *ring;
drivers/net/ethernet/broadcom/bgmac.c
692
ring = &bgmac->tx_ring[i];
drivers/net/ethernet/broadcom/bgmac.c
694
if (!ring->unaligned)
drivers/net/ethernet/broadcom/bgmac.c
695
bgmac_dma_tx_enable(bgmac, ring);
drivers/net/ethernet/broadcom/bgmac.c
696
bgmac_write(bgmac, ring->mmio_base + BGMAC_DMA_TX_RINGLO,
drivers/net/ethernet/broadcom/bgmac.c
697
lower_32_bits(ring->dma_base));
drivers/net/ethernet/broadcom/bgmac.c
698
bgmac_write(bgmac, ring->mmio_base + BGMAC_DMA_TX_RINGHI,
drivers/net/ethernet/broadcom/bgmac.c
699
upper_32_bits(ring->dma_base));
drivers/net/ethernet/broadcom/bgmac.c
700
if (ring->unaligned)
drivers/net/ethernet/broadcom/bgmac.c
701
bgmac_dma_tx_enable(bgmac, ring);
drivers/net/ethernet/broadcom/bgmac.c
703
ring->start = 0;
drivers/net/ethernet/broadcom/bgmac.c
704
ring->end = 0; /* Points the slot that should *not* be read */
drivers/net/ethernet/broadcom/bgmac.c
71
bgmac_write(bgmac, ring->mmio_base + BGMAC_DMA_TX_CTL, 0);
drivers/net/ethernet/broadcom/bgmac.c
710
ring = &bgmac->rx_ring[i];
drivers/net/ethernet/broadcom/bgmac.c
712
if (!ring->unaligned)
drivers/net/ethernet/broadcom/bgmac.c
713
bgmac_dma_rx_enable(bgmac, ring);
drivers/net/ethernet/broadcom/bgmac.c
714
bgmac_write(bgmac, ring->mmio_base + BGMAC_DMA_RX_RINGLO,
drivers/net/ethernet/broadcom/bgmac.c
715
lower_32_bits(ring->dma_base));
drivers/net/ethernet/broadcom/bgmac.c
716
bgmac_write(bgmac, ring->mmio_base + BGMAC_DMA_RX_RINGHI,
drivers/net/ethernet/broadcom/bgmac.c
717
upper_32_bits(ring->dma_base));
drivers/net/ethernet/broadcom/bgmac.c
718
if (ring->unaligned)
drivers/net/ethernet/broadcom/bgmac.c
719
bgmac_dma_rx_enable(bgmac, ring);
drivers/net/ethernet/broadcom/bgmac.c
721
ring->start = 0;
drivers/net/ethernet/broadcom/bgmac.c
722
ring->end = 0;
drivers/net/ethernet/broadcom/bgmac.c
724
err = bgmac_dma_rx_skb_for_slot(bgmac, &ring->slots[j]);
drivers/net/ethernet/broadcom/bgmac.c
728
bgmac_dma_rx_setup_desc(bgmac, ring, j);
drivers/net/ethernet/broadcom/bgmac.c
73
ring->mmio_base + BGMAC_DMA_TX_STATUS,
drivers/net/ethernet/broadcom/bgmac.c
731
bgmac_dma_rx_update_index(bgmac, ring);
drivers/net/ethernet/broadcom/bgmac.c
77
ring->mmio_base);
drivers/net/ethernet/broadcom/bgmac.c
79
val = bgmac_read(bgmac, ring->mmio_base + BGMAC_DMA_TX_STATUS);
drivers/net/ethernet/broadcom/bgmac.c
82
ring->mmio_base);
drivers/net/ethernet/broadcom/bgmac.c
87
struct bgmac_dma_ring *ring)
drivers/net/ethernet/broadcom/bgmac.c
91
ctl = bgmac_read(bgmac, ring->mmio_base + BGMAC_DMA_TX_CTL);
drivers/net/ethernet/broadcom/bnge/bnge_hwrm.c
264
u32 ring = le16_to_cpu(req->cmpl_ring);
drivers/net/ethernet/broadcom/bnge/bnge_hwrm.c
270
if (unlikely(ring != (u16)BNGE_HWRM_NO_CMPL_RING))
drivers/net/ethernet/broadcom/bnge/bnge_hwrm.c
271
snprintf(opt, 16, " ring %d\n", ring);
drivers/net/ethernet/broadcom/bnge/bnge_hwrm_lib.c
1041
struct bnge_ring_struct *ring,
drivers/net/ethernet/broadcom/bnge/bnge_hwrm_lib.c
1054
req->ring_id = cpu_to_le16(ring->fw_ring_id);
drivers/net/ethernet/broadcom/bnge/bnge_hwrm_lib.c
1068
struct bnge_ring_struct *ring,
drivers/net/ethernet/broadcom/bnge/bnge_hwrm_lib.c
1071
struct bnge_ring_mem_info *rmem = &ring->ring_mem;
drivers/net/ethernet/broadcom/bnge/bnge_hwrm_lib.c
1100
txr = container_of(ring, struct bnge_tx_ring_info,
drivers/net/ethernet/broadcom/bnge/bnge_hwrm_lib.c
1104
grp_info = &bn->grp_info[ring->grp_idx];
drivers/net/ethernet/broadcom/bnge/bnge_hwrm_lib.c
1108
req->queue_id = cpu_to_le16(ring->queue_id);
drivers/net/ethernet/broadcom/bnge/bnge_hwrm_lib.c
1117
grp_info = &bn->grp_info[ring->grp_idx];
drivers/net/ethernet/broadcom/bnge/bnge_hwrm_lib.c
1129
grp_info = &bn->grp_info[ring->grp_idx];
drivers/net/ethernet/broadcom/bnge/bnge_hwrm_lib.c
1144
req->cq_handle = cpu_to_le64(ring->handle);
drivers/net/ethernet/broadcom/bnge/bnge_hwrm_lib.c
1168
ring->fw_ring_id = ring_id;
drivers/net/ethernet/broadcom/bnge/bnge_hwrm_lib.h
52
int hwrm_ring_free_send_msg(struct bnge_net *bn, struct bnge_ring_struct *ring,
drivers/net/ethernet/broadcom/bnge/bnge_hwrm_lib.h
55
struct bnge_ring_struct *ring,
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1031
struct bnge_ring_struct *ring = &nqr->ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1033
ring->fw_ring_id = INVALID_HW_RING_ID;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1037
ring = &cpr->ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1038
ring->fw_ring_id = INVALID_HW_RING_ID;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
106
struct bnge_ring_struct *ring = &nqr->ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
110
ring->ring_mem.pg_arr = NULL;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
113
ring->ring_mem.dma_arr = NULL;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
118
struct bnge_ring_struct *ring = &cpr->ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
122
ring->ring_mem.pg_arr = NULL;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
125
ring->ring_mem.dma_arr = NULL;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1263
static void bnge_init_rxbd_pages(struct bnge_ring_struct *ring, u32 type)
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1269
rx_desc_ring = (struct rx_bd **)ring->ring_mem.pg_arr;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1270
for (i = 0, prod = 0; i < ring->ring_mem.nr_pages; i++) {
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1284
struct bnge_ring_struct *ring;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1293
ring = &rxr->rx_ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1294
bnge_init_rxbd_pages(ring, type);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1295
ring->fw_ring_id = INVALID_HW_RING_ID;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1301
struct bnge_ring_struct *ring;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1304
ring = &rxr->rx_agg_ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1305
ring->fw_ring_id = INVALID_HW_RING_ID;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1310
bnge_init_rxbd_pages(ring, type);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1364
struct bnge_ring_struct *ring = &txr->tx_ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1366
ring->fw_ring_id = INVALID_HW_RING_ID;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1474
struct bnge_ring_struct *ring;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1478
ring = &cpr->ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1479
ring->handle = BNGE_SET_NQ_HDL(cpr);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1480
rc = hwrm_ring_alloc_send_msg(bn, ring, type, map_idx);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1484
bnge_set_db(bn, &cpr->cp_db, type, map_idx, ring->fw_ring_id);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1493
struct bnge_ring_struct *ring = &txr->tx_ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1497
rc = hwrm_ring_alloc_send_msg(bn, ring, type, tx_idx);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1501
bnge_set_db(bn, &txr->tx_db, type, tx_idx, ring->fw_ring_id);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1509
struct bnge_ring_struct *ring = &rxr->rx_agg_ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1512
u32 grp_idx = ring->grp_idx;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1517
rc = hwrm_ring_alloc_send_msg(bn, ring, type, map_idx);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1522
ring->fw_ring_id);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1525
bn->grp_info[grp_idx].agg_fw_ring_id = ring->fw_ring_id;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1533
struct bnge_ring_struct *ring = &rxr->rx_ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1539
rc = hwrm_ring_alloc_send_msg(bn, ring, type, map_idx);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1543
bnge_set_db(bn, &rxr->rx_db, type, map_idx, ring->fw_ring_id);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1544
bn->grp_info[map_idx].rx_fw_ring_id = ring->fw_ring_id;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1559
struct bnge_ring_struct *ring = &nqr->ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1561
u32 map_idx = ring->map_idx;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1566
rc = hwrm_ring_alloc_send_msg(bn, ring, type, map_idx);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1571
bnge_set_db(bn, &nqr->nq_db, type, map_idx, ring->fw_ring_id);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1574
bn->grp_info[i].nq_fw_ring_id = ring->fw_ring_id;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1577
rc = bnge_hwrm_set_async_event_cr(bd, ring->fw_ring_id);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1597
struct bnge_ring_struct *ring;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1613
ring = &cpr->ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1614
ring->handle = BNGE_SET_NQ_HDL(cpr);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1615
rc = hwrm_ring_alloc_send_msg(bn, ring, type, map_idx);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1619
ring->fw_ring_id);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1938
struct bnge_ring_struct *ring;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1941
ring = &nqr->ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
1943
if (ring->fw_ring_id != INVALID_HW_RING_ID)
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
201
struct bnge_ring_struct *ring;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
205
ring = &nqr->ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
2055
struct bnge_ring_struct *ring = &rxr->rx_ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
2059
if (ring->fw_ring_id == INVALID_HW_RING_ID)
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
2063
hwrm_ring_free_send_msg(bn, ring,
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
2067
ring->fw_ring_id = INVALID_HW_RING_ID;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
207
bnge_free_ring(bd, &ring->ring_mem);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
2075
struct bnge_ring_struct *ring = &rxr->rx_agg_ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
2079
if (ring->fw_ring_id == INVALID_HW_RING_ID)
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
2083
hwrm_ring_free_send_msg(bn, ring, RING_FREE_REQ_RING_TYPE_RX_AGG,
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
2086
ring->fw_ring_id = INVALID_HW_RING_ID;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
2094
struct bnge_ring_struct *ring = &txr->tx_ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
2097
if (ring->fw_ring_id == INVALID_HW_RING_ID)
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
2102
hwrm_ring_free_send_msg(bn, ring, RING_FREE_REQ_RING_TYPE_TX,
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
2104
ring->fw_ring_id = INVALID_HW_RING_ID;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
2110
struct bnge_ring_struct *ring;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
2112
ring = &cpr->ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
2113
if (ring->fw_ring_id == INVALID_HW_RING_ID)
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
2116
hwrm_ring_free_send_msg(bn, ring, RING_FREE_REQ_RING_TYPE_L2_CMPL,
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
2118
ring->fw_ring_id = INVALID_HW_RING_ID;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
2146
struct bnge_ring_struct *ring;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
215
ring = &cpr->ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
2153
ring = &nqr->ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
2154
if (ring->fw_ring_id != INVALID_HW_RING_ID) {
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
2155
hwrm_ring_free_send_msg(bn, ring,
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
2158
ring->fw_ring_id = INVALID_HW_RING_ID;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
216
bnge_free_ring(bd, &ring->ring_mem);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
229
struct bnge_ring_struct *ring;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
236
ring = &cpr->ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
237
rmem = &ring->ring_mem;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
265
struct bnge_ring_struct *ring;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
271
ring = &nqr->ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
273
rc = bnge_alloc_ring(bd, &ring->ring_mem);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
277
ring->map_idx = ulp_msix + i;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
543
struct bnge_ring_struct *ring;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
552
ring = &rxr->rx_ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
553
bnge_free_ring(bd, &ring->ring_mem);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
555
ring = &rxr->rx_agg_ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
556
bnge_free_ring(bd, &ring->ring_mem);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
634
struct bnge_ring_struct *ring;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
637
ring = &rxr->rx_ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
648
rc = bnge_alloc_ring(bd, &ring->ring_mem);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
652
ring->grp_idx = i;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
654
ring = &rxr->rx_agg_ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
655
rc = bnge_alloc_ring(bd, &ring->ring_mem);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
659
ring->grp_idx = i;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
685
struct bnge_ring_struct *ring;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
687
ring = &txr->tx_ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
689
bnge_free_ring(bd, &ring->ring_mem);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
700
struct bnge_ring_struct *ring;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
703
ring = &txr->tx_ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
705
rc = bnge_alloc_ring(bd, &ring->ring_mem);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
709
ring->grp_idx = txr->bnapi->index;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
711
ring->queue_id = bd->q_info[qidx].queue_id;
drivers/net/ethernet/broadcom/bnge/bnge_rmem.c
453
struct bnge_ring_struct *ring;
drivers/net/ethernet/broadcom/bnge/bnge_rmem.c
456
ring = &nqr->ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_rmem.c
457
rmem = &ring->ring_mem;
drivers/net/ethernet/broadcom/bnge/bnge_rmem.c
468
ring = &rxr->rx_ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_rmem.c
469
rmem = &ring->ring_mem;
drivers/net/ethernet/broadcom/bnge/bnge_rmem.c
477
ring = &rxr->rx_agg_ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_rmem.c
478
rmem = &ring->ring_mem;
drivers/net/ethernet/broadcom/bnge/bnge_rmem.c
488
ring = &txr->tx_ring_struct;
drivers/net/ethernet/broadcom/bnge/bnge_rmem.c
489
rmem = &ring->ring_mem;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
16030
struct bnxt_ring_struct *ring;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
16063
ring = &clone->rx_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
16064
rc = bnxt_alloc_ring(bp, &ring->ring_mem);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
16069
ring = &clone->rx_agg_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
16070
rc = bnxt_alloc_ring(bp, &ring->ring_mem);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
16116
struct bnxt_ring_struct *ring;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
16128
ring = &rxr->rx_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
16129
bnxt_free_ring(bp, &ring->ring_mem);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
16131
ring = &rxr->rx_agg_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
16132
bnxt_free_ring(bp, &ring->ring_mem);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3807
struct bnxt_ring_struct *ring;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3822
ring = &rxr->rx_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3823
bnxt_free_ring(bp, &ring->ring_mem);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3825
ring = &rxr->rx_agg_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3826
bnxt_free_ring(bp, &ring->ring_mem);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3928
struct bnxt_ring_struct *ring;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3931
ring = &rxr->rx_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3954
rc = bnxt_alloc_ring(bp, &ring->ring_mem);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3958
ring->grp_idx = i;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3960
ring = &rxr->rx_agg_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3961
rc = bnxt_alloc_ring(bp, &ring->ring_mem);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3965
ring->grp_idx = i;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3986
struct bnxt_ring_struct *ring;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3994
ring = &txr->tx_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3996
bnxt_free_ring(bp, &ring->ring_mem);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4031
struct bnxt_ring_struct *ring;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4034
ring = &txr->tx_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4036
rc = bnxt_alloc_ring(bp, &ring->ring_mem);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4040
ring->grp_idx = txr->bnapi->index;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4060
ring->queue_id = bp->q_info[qidx].queue_id;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4072
struct bnxt_ring_struct *ring = &cpr->cp_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4076
ring->ring_mem.pg_arr = NULL;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4079
ring->ring_mem.dma_arr = NULL;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4135
struct bnxt_ring_struct *ring;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4142
ring = &cpr->cp_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4144
bnxt_free_ring(bp, &ring->ring_mem);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4152
ring = &cpr2->cp_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4153
bnxt_free_ring(bp, &ring->ring_mem);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4166
struct bnxt_ring_struct *ring;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4174
ring = &cpr->cp_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4175
rmem = &ring->ring_mem;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4201
struct bnxt_ring_struct *ring;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4210
ring = &cpr->cp_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4212
rc = bnxt_alloc_ring(bp, &ring->ring_mem);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4216
ring->map_idx = ulp_msix + i;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4268
struct bnxt_ring_struct *ring;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4270
ring = &rxr->rx_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4271
rmem = &ring->ring_mem;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4279
ring = &rxr->rx_agg_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4280
rmem = &ring->ring_mem;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4293
struct bnxt_ring_struct *ring;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4302
ring = &rxr->rx_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4303
rmem = &ring->ring_mem;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4312
ring = &rxr->rx_agg_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4313
rmem = &ring->ring_mem;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4334
struct bnxt_ring_struct *ring;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4340
ring = &cpr->cp_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4341
rmem = &ring->ring_mem;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4355
ring = &rxr->rx_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4356
rmem = &ring->ring_mem;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4364
ring = &rxr->rx_agg_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4365
rmem = &ring->ring_mem;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4375
ring = &txr->tx_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4376
rmem = &ring->ring_mem;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4387
static void bnxt_init_rxbd_pages(struct bnxt_ring_struct *ring, u32 type)
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4393
rx_buf_ring = (struct rx_bd **)ring->ring_mem.pg_arr;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4394
for (i = 0, prod = 0; i < ring->ring_mem.nr_pages; i++) {
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4493
struct bnxt_ring_struct *ring;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4502
ring = &rxr->rx_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4503
bnxt_init_rxbd_pages(ring, type);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4504
ring->fw_ring_id = INVALID_HW_RING_ID;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4510
struct bnxt_ring_struct *ring;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4513
ring = &rxr->rx_agg_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4514
ring->fw_ring_id = INVALID_HW_RING_ID;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4526
bnxt_init_rxbd_pages(ring, type);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4556
struct bnxt_ring_struct *ring = &cpr->cp_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4558
ring->fw_ring_id = INVALID_HW_RING_ID;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4566
ring = &cpr2->cp_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4567
ring->fw_ring_id = INVALID_HW_RING_ID;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4604
struct bnxt_ring_struct *ring = &txr->tx_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
4606
ring->fw_ring_id = INVALID_HW_RING_ID;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
5639
struct bnxt_ring_struct *ring = &cpr->cp_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
5641
if (ring->fw_ring_id != INVALID_HW_RING_ID)
drivers/net/ethernet/broadcom/bnxt/bnxt.c
6507
static u16 bnxt_cp_ring_from_grp(struct bnxt *bp, struct bnxt_ring_struct *ring)
drivers/net/ethernet/broadcom/bnxt/bnxt.c
6511
grp_info = &bp->grp_info[ring->grp_idx];
drivers/net/ethernet/broadcom/bnxt/bnxt.c
6835
unsigned int ring = 0, grp_idx;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
6879
ring = 0;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
6881
ring = vnic->vnic_id - 1;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
6883
ring = bp->rx_nr_rings - 1;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
6885
grp_idx = bp->rx_ring[ring].bnapi->index;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7097
struct bnxt_ring_struct *ring)
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7099
struct bnxt_ring_grp_info *grp_info = &bp->grp_info[ring->grp_idx];
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7121
struct bnxt_ring_struct *ring,
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7126
struct bnxt_ring_mem_info *rmem = &ring->ring_mem;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7153
txr = container_of(ring, struct bnxt_tx_ring_info,
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7157
grp_info = &bp->grp_info[ring->grp_idx];
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7161
req->queue_id = cpu_to_le16(ring->queue_id);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7178
rxr, ring);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7187
req->cq_handle = cpu_to_le64(ring->handle);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7217
ring->fw_ring_id = ring_id;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7320
struct bnxt_ring_struct *ring = &rxr->rx_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7326
rc = hwrm_ring_alloc_send_msg(bp, rxr, ring, type, map_idx);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7330
bnxt_set_db(bp, &rxr->rx_db, type, map_idx, ring->fw_ring_id);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7331
bp->grp_info[map_idx].rx_fw_ring_id = ring->fw_ring_id;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7339
struct bnxt_ring_struct *ring = &rxr->rx_agg_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7341
u32 grp_idx = ring->grp_idx;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7346
rc = hwrm_ring_alloc_send_msg(bp, rxr, ring, type, map_idx);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7351
ring->fw_ring_id);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7354
bp->grp_info[grp_idx].agg_fw_ring_id = ring->fw_ring_id;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7364
struct bnxt_ring_struct *ring;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7368
ring = &cpr->cp_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7369
ring->handle = BNXT_SET_NQ_HDL(cpr);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7370
rc = hwrm_ring_alloc_send_msg(bp, NULL, ring, type, map_idx);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7373
bnxt_set_db(bp, &cpr->cp_db, type, map_idx, ring->fw_ring_id);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7381
struct bnxt_ring_struct *ring = &txr->tx_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7385
rc = hwrm_ring_alloc_send_msg(bp, NULL, ring, type, tx_idx);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7388
bnxt_set_db(bp, &txr->tx_db, type, tx_idx, ring->fw_ring_id);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7405
struct bnxt_ring_struct *ring = &cpr->cp_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7406
u32 map_idx = ring->map_idx;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7411
rc = hwrm_ring_alloc_send_msg(bp, NULL, ring, type, map_idx);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7416
bnxt_set_db(bp, &cpr->cp_db, type, map_idx, ring->fw_ring_id);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7419
bp->grp_info[i].cp_fw_ring_id = ring->fw_ring_id;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7422
rc = bnxt_hwrm_set_async_event_cr(bp, ring->fw_ring_id);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7489
struct bnxt_ring_struct *ring,
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7506
req->ring_id = cpu_to_le16(ring->fw_ring_id);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7525
struct bnxt_ring_struct *ring = &txr->tx_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7528
if (ring->fw_ring_id == INVALID_HW_RING_ID)
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7533
hwrm_ring_free_send_msg(bp, ring, RING_FREE_REQ_RING_TYPE_TX,
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7535
ring->fw_ring_id = INVALID_HW_RING_ID;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7542
struct bnxt_ring_struct *ring = &rxr->rx_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7546
if (ring->fw_ring_id == INVALID_HW_RING_ID)
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7550
hwrm_ring_free_send_msg(bp, ring,
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7554
ring->fw_ring_id = INVALID_HW_RING_ID;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7562
struct bnxt_ring_struct *ring = &rxr->rx_agg_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7571
if (ring->fw_ring_id == INVALID_HW_RING_ID)
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7575
hwrm_ring_free_send_msg(bp, ring, type,
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7578
ring->fw_ring_id = INVALID_HW_RING_ID;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7585
struct bnxt_ring_struct *ring;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7587
ring = &cpr->cp_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7588
if (ring->fw_ring_id == INVALID_HW_RING_ID)
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7591
hwrm_ring_free_send_msg(bp, ring, RING_FREE_REQ_RING_TYPE_L2_CMPL,
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7593
ring->fw_ring_id = INVALID_HW_RING_ID;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7598
struct bnxt_ring_struct *ring = &cpr->cp_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7599
int i, size = ring->ring_mem.page_size;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7639
struct bnxt_ring_struct *ring;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7645
ring = &cpr->cp_ring_struct;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7646
if (ring->fw_ring_id != INVALID_HW_RING_ID) {
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7647
hwrm_ring_free_send_msg(bp, ring, type,
drivers/net/ethernet/broadcom/bnxt/bnxt.c
7649
ring->fw_ring_id = INVALID_HW_RING_ID;
drivers/net/ethernet/broadcom/bnxt/bnxt_ethtool.c
1263
u32 ring = ethtool_get_flow_spec_ring(fs->ring_cookie);
drivers/net/ethernet/broadcom/bnxt/bnxt_ethtool.c
1295
vnic_id = bp->vnic_info[ring + 1].fw_vnic_id;
drivers/net/ethernet/broadcom/bnxt/bnxt_ethtool.c
1302
fltr->base.rxq = ring;
drivers/net/ethernet/broadcom/bnxt/bnxt_ethtool.c
1504
u32 ring, flow_type;
drivers/net/ethernet/broadcom/bnxt/bnxt_ethtool.c
1527
ring = ethtool_get_flow_spec_ring(fs->ring_cookie);
drivers/net/ethernet/broadcom/bnxt/bnxt_ethtool.c
1533
if (!vf && ring >= bp->rx_nr_rings)
drivers/net/ethernet/broadcom/bnxt/bnxt_hwrm.c
423
u32 ring = le16_to_cpu(req->cmpl_ring);
drivers/net/ethernet/broadcom/bnxt/bnxt_hwrm.c
429
if (unlikely(ring != (u16)BNXT_HWRM_NO_CMPL_RING))
drivers/net/ethernet/broadcom/bnxt/bnxt_hwrm.c
430
snprintf(opt, 16, " ring %d\n", ring);
drivers/net/ethernet/broadcom/bnxt/bnxt_xdp.c
339
int ring;
drivers/net/ethernet/broadcom/bnxt/bnxt_xdp.c
347
ring = smp_processor_id() % bp->tx_nr_rings_xdp;
drivers/net/ethernet/broadcom/bnxt/bnxt_xdp.c
348
txr = &bp->tx_ring[ring];
drivers/net/ethernet/broadcom/cnic.c
1786
ictx->ustorm_st_context.ring.rq.pbl_base.lo =
drivers/net/ethernet/broadcom/cnic.c
1788
ictx->ustorm_st_context.ring.rq.pbl_base.hi =
drivers/net/ethernet/broadcom/cnic.c
1790
ictx->ustorm_st_context.ring.rq.curr_pbe.lo = req3->qp_first_pte[0].hi;
drivers/net/ethernet/broadcom/cnic.c
1791
ictx->ustorm_st_context.ring.rq.curr_pbe.hi = req3->qp_first_pte[0].lo;
drivers/net/ethernet/broadcom/cnic.c
1792
ictx->ustorm_st_context.ring.r2tq.pbl_base.lo =
drivers/net/ethernet/broadcom/cnic.c
1794
ictx->ustorm_st_context.ring.r2tq.pbl_base.hi =
drivers/net/ethernet/broadcom/cnic.c
1796
ictx->ustorm_st_context.ring.r2tq.curr_pbe.lo =
drivers/net/ethernet/broadcom/cnic.c
1798
ictx->ustorm_st_context.ring.r2tq.curr_pbe.hi =
drivers/net/ethernet/broadcom/cnic.c
1800
ictx->ustorm_st_context.ring.cq_pbl_base.lo =
drivers/net/ethernet/broadcom/cnic.c
1802
ictx->ustorm_st_context.ring.cq_pbl_base.hi =
drivers/net/ethernet/broadcom/cnic.c
1804
ictx->ustorm_st_context.ring.cq[0].cq_sn = ISCSI_INITIAL_SN;
drivers/net/ethernet/broadcom/cnic.c
1805
ictx->ustorm_st_context.ring.cq[0].curr_pbe.lo = req2->cq_first_pte.hi;
drivers/net/ethernet/broadcom/cnic.c
1806
ictx->ustorm_st_context.ring.cq[0].curr_pbe.hi = req2->cq_first_pte.lo;
drivers/net/ethernet/broadcom/cnic.c
1819
ictx->ustorm_st_context.ring.cq[i].cq_sn = ISCSI_INITIAL_SN;
drivers/net/ethernet/broadcom/cnic.c
1820
ictx->ustorm_st_context.ring.cq[i].curr_pbe.lo =
drivers/net/ethernet/broadcom/cnic.c
1822
ictx->ustorm_st_context.ring.cq[i].curr_pbe.hi =
drivers/net/ethernet/broadcom/cnic.c
223
struct drv_ctl_l2_ring *ring = &info.data.ring;
drivers/net/ethernet/broadcom/cnic.c
231
ring->cid = cid;
drivers/net/ethernet/broadcom/cnic.c
232
ring->client_id = cl_id;
drivers/net/ethernet/broadcom/cnic_defs.h
3963
struct rings_db ring;
drivers/net/ethernet/broadcom/cnic_if.h
162
struct drv_ctl_l2_ring ring;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1801
struct bcmgenet_tx_ring *ring)
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1805
tx_cb_ptr = ring->cbs;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1806
tx_cb_ptr += ring->write_ptr - ring->cb_ptr;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1809
if (ring->write_ptr == ring->end_ptr)
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1810
ring->write_ptr = ring->cb_ptr;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1812
ring->write_ptr++;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1818
struct bcmgenet_tx_ring *ring)
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1822
tx_cb_ptr = ring->cbs;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1823
tx_cb_ptr += ring->write_ptr - ring->cb_ptr;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1826
if (ring->write_ptr == ring->cb_ptr)
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1827
ring->write_ptr = ring->end_ptr;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1829
ring->write_ptr--;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1834
static inline void bcmgenet_rx_ring_int_disable(struct bcmgenet_rx_ring *ring)
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1836
bcmgenet_intrl2_1_writel(ring->priv,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1837
1 << (UMAC_IRQ1_RX_INTR_SHIFT + ring->index),
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1841
static inline void bcmgenet_rx_ring_int_enable(struct bcmgenet_rx_ring *ring)
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1843
bcmgenet_intrl2_1_writel(ring->priv,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1844
1 << (UMAC_IRQ1_RX_INTR_SHIFT + ring->index),
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1848
static inline void bcmgenet_tx_ring_int_enable(struct bcmgenet_tx_ring *ring)
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1850
bcmgenet_intrl2_1_writel(ring->priv, 1 << ring->index,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1854
static inline void bcmgenet_tx_ring_int_disable(struct bcmgenet_tx_ring *ring)
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1856
bcmgenet_intrl2_1_writel(ring->priv, 1 << ring->index,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1917
struct bcmgenet_tx_ring *ring)
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1919
struct bcmgenet_tx_stats64 *stats = &ring->stats64;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1929
bcmgenet_intrl2_1_writel(priv, (1 << ring->index), INTRL2_CPU_CLEAR);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1932
c_index = bcmgenet_tdma_ring_readl(priv, ring->index, TDMA_CONS_INDEX)
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1934
txbds_ready = (c_index - ring->c_index) & DMA_C_INDEX_MASK;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1938
__func__, ring->index, ring->c_index, c_index, txbds_ready);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1943
&priv->tx_cbs[ring->clean_ptr]);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1951
if (likely(ring->clean_ptr < ring->end_ptr))
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1952
ring->clean_ptr++;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1954
ring->clean_ptr = ring->cb_ptr;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1957
ring->free_bds += txbds_processed;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1958
ring->c_index = c_index;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1965
netdev_tx_completed_queue(netdev_get_tx_queue(dev, ring->index),
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1972
struct bcmgenet_tx_ring *ring,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1981
spin_lock_bh(&ring->lock);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1982
released = __bcmgenet_tx_reclaim(dev, ring);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1985
drop = (ring->prod_index - ring->c_index) & DMA_C_INDEX_MASK;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1987
ring->prod_index = ring->c_index & DMA_C_INDEX_MASK;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1989
cb_ptr = bcmgenet_put_txcb(priv, ring);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
1999
bcmgenet_tdma_ring_writel(priv, ring->index,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2000
ring->prod_index, TDMA_PROD_INDEX);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2001
wr_ptr = ring->write_ptr * WORDS_PER_BD(priv);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2002
bcmgenet_tdma_ring_writel(priv, ring->index, wr_ptr,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2005
spin_unlock_bh(&ring->lock);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2012
struct bcmgenet_tx_ring *ring =
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2017
spin_lock(&ring->lock);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2018
work_done = __bcmgenet_tx_reclaim(ring->priv->dev, ring);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2019
if (ring->free_bds > (MAX_SKB_FRAGS + 1)) {
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2020
txq = netdev_get_tx_queue(ring->priv->dev, ring->index);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2023
spin_unlock(&ring->lock);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2027
bcmgenet_tx_ring_int_enable(ring);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2050
struct bcmgenet_tx_ring *ring)
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2052
struct bcmgenet_tx_stats64 *stats = &ring->stats64;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2119
struct bcmgenet_tx_ring *ring = NULL;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2138
ring = &priv->tx_rings[index];
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2143
spin_lock(&ring->lock);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2144
if (ring->free_bds <= (nr_frags + 1)) {
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2157
skb = bcmgenet_add_tsb(dev, skb, ring);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2164
tx_cb_ptr = bcmgenet_get_txcb(priv, ring);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2219
ring->free_bds -= nr_frags + 1;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2220
ring->prod_index += nr_frags + 1;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2221
ring->prod_index &= DMA_P_INDEX_MASK;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2225
if (ring->free_bds <= (MAX_SKB_FRAGS + 1))
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2230
bcmgenet_tdma_ring_writel(priv, ring->index,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2231
ring->prod_index, TDMA_PROD_INDEX);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2233
spin_unlock(&ring->lock);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2239
bcmgenet_put_txcb(priv, ring);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2243
tx_cb_ptr = bcmgenet_put_txcb(priv, ring);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2296
static unsigned int bcmgenet_desc_rx(struct bcmgenet_rx_ring *ring,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2299
struct bcmgenet_rx_stats64 *stats = &ring->stats64;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2300
struct bcmgenet_priv *priv = ring->priv;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2313
mask = 1 << (UMAC_IRQ1_RX_INTR_SHIFT + ring->index);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2316
p_index = bcmgenet_rdma_ring_readl(priv, ring->index, RDMA_PROD_INDEX);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2320
if (discards > ring->old_discards) {
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2321
discards = discards - ring->old_discards;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2323
ring->old_discards += discards;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2326
if (ring->old_discards >= 0xC000) {
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2327
ring->old_discards = 0;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2328
bcmgenet_rdma_ring_writel(priv, ring->index, 0,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2334
rxpkttoprocess = (p_index - ring->c_index) & DMA_C_INDEX_MASK;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2344
cb = &priv->rx_cbs[ring->read_ptr];
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2370
__func__, p_index, ring->c_index,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2371
ring->read_ptr, dma_length_status);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2442
napi_gro_receive(&ring->napi, skb);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2447
if (likely(ring->read_ptr < ring->end_ptr))
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2448
ring->read_ptr++;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2450
ring->read_ptr = ring->cb_ptr;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2452
ring->c_index = (ring->c_index + 1) & DMA_C_INDEX_MASK;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2453
bcmgenet_rdma_ring_writel(priv, ring->index, ring->c_index, RDMA_CONS_INDEX);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2456
ring->dim.bytes = bytes_processed;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2457
ring->dim.packets = rxpktprocessed;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2465
struct bcmgenet_rx_ring *ring = container_of(napi,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2470
work_done = bcmgenet_desc_rx(ring, budget);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2473
bcmgenet_rx_ring_int_enable(ring);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2475
if (ring->dim.use_dim) {
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2476
dim_update_sample(ring->dim.event_ctr, ring->dim.packets,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2477
ring->dim.bytes, &dim_sample);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2478
net_dim(&ring->dim.dim, &dim_sample);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2489
struct bcmgenet_rx_ring *ring =
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2494
bcmgenet_set_rx_coalesce(ring, cur_profile.usec, cur_profile.pkts);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2500
struct bcmgenet_rx_ring *ring)
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2509
for (i = 0; i < ring->size; i++) {
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2510
cb = ring->cbs + i;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2669
static void bcmgenet_init_dim(struct bcmgenet_rx_ring *ring,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2672
struct bcmgenet_net_dim *dim = &ring->dim;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2681
static void bcmgenet_init_rx_coalesce(struct bcmgenet_rx_ring *ring)
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2683
struct bcmgenet_net_dim *dim = &ring->dim;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2687
usecs = ring->rx_coalesce_usecs;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2688
pkts = ring->rx_max_coalesced_frames;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2697
bcmgenet_set_rx_coalesce(ring, usecs, pkts);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2705
struct bcmgenet_tx_ring *ring = &priv->tx_rings[index];
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2709
spin_lock_init(&ring->lock);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2710
ring->priv = priv;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2711
ring->index = index;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2712
ring->cbs = priv->tx_cbs + start_ptr;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2713
ring->size = size;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2714
ring->clean_ptr = start_ptr;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2715
ring->c_index = 0;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2716
ring->free_bds = size;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2717
ring->write_ptr = start_ptr;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2718
ring->cb_ptr = start_ptr;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2719
ring->end_ptr = end_ptr - 1;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2720
ring->prod_index = 0;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2747
netif_napi_add_tx(priv->dev, &ring->napi, bcmgenet_tx_poll);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2755
struct bcmgenet_rx_ring *ring = &priv->rx_rings[index];
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2759
ring->priv = priv;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2760
ring->index = index;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2761
ring->cbs = priv->rx_cbs + start_ptr;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2762
ring->size = size;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2763
ring->c_index = 0;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2764
ring->read_ptr = start_ptr;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2765
ring->cb_ptr = start_ptr;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2766
ring->end_ptr = end_ptr - 1;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2768
ret = bcmgenet_alloc_rx_buffers(priv, ring);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2772
bcmgenet_init_dim(ring, bcmgenet_dim_work);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2773
bcmgenet_init_rx_coalesce(ring);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2776
netif_napi_add(priv->dev, &ring->napi, bcmgenet_rx_poll);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2804
struct bcmgenet_tx_ring *ring;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2807
ring = &priv->tx_rings[i];
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2808
napi_enable(&ring->napi);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2809
bcmgenet_tx_ring_int_enable(ring);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2816
struct bcmgenet_tx_ring *ring;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2819
ring = &priv->tx_rings[i];
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2820
napi_disable(&ring->napi);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2827
struct bcmgenet_tx_ring *ring;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2830
ring = &priv->tx_rings[i];
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2831
netif_napi_del(&ring->napi);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2932
struct bcmgenet_rx_ring *ring;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2935
ring = &priv->rx_rings[i];
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2936
napi_enable(&ring->napi);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2937
bcmgenet_rx_ring_int_enable(ring);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2944
struct bcmgenet_rx_ring *ring;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2947
ring = &priv->rx_rings[i];
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2948
napi_disable(&ring->napi);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2949
cancel_work_sync(&ring->dim.dim.work);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2956
struct bcmgenet_rx_ring *ring;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2959
ring = &priv->rx_rings[i];
drivers/net/ethernet/broadcom/genet/bcmgenet.c
2960
netif_napi_del(&ring->napi);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
3436
static void bcmgenet_dump_tx_queue(struct bcmgenet_tx_ring *ring)
drivers/net/ethernet/broadcom/genet/bcmgenet.c
3438
struct bcmgenet_priv *priv = ring->priv;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
3447
txq = netdev_get_tx_queue(priv->dev, ring->index);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
3449
spin_lock(&ring->lock);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
3451
intmsk = 1 << ring->index;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
3452
c_index = bcmgenet_tdma_ring_readl(priv, ring->index, TDMA_CONS_INDEX);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
3453
p_index = bcmgenet_tdma_ring_readl(priv, ring->index, TDMA_PROD_INDEX);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
3455
free_bds = ring->free_bds;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
3456
spin_unlock(&ring->lock);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
3465
ring->index, ring->index,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
3468
free_bds, ring->size,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
3469
ring->prod_index, p_index & DMA_P_INDEX_MASK,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
3470
ring->c_index, c_index & DMA_C_INDEX_MASK,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
3471
ring->clean_ptr, ring->write_ptr,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
3472
ring->cb_ptr, ring->end_ptr);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
406
unsigned int ring,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
410
(DMA_RING_SIZE * ring) +
drivers/net/ethernet/broadcom/genet/bcmgenet.c
415
unsigned int ring, u32 val,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
419
(DMA_RING_SIZE * ring) +
drivers/net/ethernet/broadcom/genet/bcmgenet.c
424
unsigned int ring,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
428
(DMA_RING_SIZE * ring) +
drivers/net/ethernet/broadcom/genet/bcmgenet.c
433
unsigned int ring, u32 val,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
437
(DMA_RING_SIZE * ring) +
drivers/net/ethernet/broadcom/genet/bcmgenet.c
833
struct bcmgenet_rx_ring *ring;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
844
ring = &priv->rx_rings[i];
drivers/net/ethernet/broadcom/genet/bcmgenet.c
845
ec->use_adaptive_rx_coalesce |= ring->dim.use_dim;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
851
static void bcmgenet_set_rx_coalesce(struct bcmgenet_rx_ring *ring,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
854
struct bcmgenet_priv *priv = ring->priv;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
855
unsigned int i = ring->index;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
866
static void bcmgenet_set_ring_rx_coalesce(struct bcmgenet_rx_ring *ring,
drivers/net/ethernet/broadcom/genet/bcmgenet.c
872
ring->rx_coalesce_usecs = ec->rx_coalesce_usecs;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
873
ring->rx_max_coalesced_frames = ec->rx_max_coalesced_frames;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
874
usecs = ring->rx_coalesce_usecs;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
875
pkts = ring->rx_max_coalesced_frames;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
877
if (ec->use_adaptive_rx_coalesce && !ring->dim.use_dim) {
drivers/net/ethernet/broadcom/genet/bcmgenet.c
878
moder = net_dim_get_def_rx_moderation(ring->dim.dim.mode);
drivers/net/ethernet/broadcom/genet/bcmgenet.c
883
ring->dim.use_dim = ec->use_adaptive_rx_coalesce;
drivers/net/ethernet/broadcom/genet/bcmgenet.c
884
bcmgenet_set_rx_coalesce(ring, usecs, pkts);
drivers/net/ethernet/cadence/macb_main.c
3568
struct ethtool_ringparam *ring,
drivers/net/ethernet/cadence/macb_main.c
3574
ring->rx_max_pending = MAX_RX_RING_SIZE;
drivers/net/ethernet/cadence/macb_main.c
3575
ring->tx_max_pending = MAX_TX_RING_SIZE;
drivers/net/ethernet/cadence/macb_main.c
3577
ring->rx_pending = bp->rx_ring_size;
drivers/net/ethernet/cadence/macb_main.c
3578
ring->tx_pending = bp->tx_ring_size;
drivers/net/ethernet/cadence/macb_main.c
3582
struct ethtool_ringparam *ring,
drivers/net/ethernet/cadence/macb_main.c
3590
if ((ring->rx_mini_pending) || (ring->rx_jumbo_pending))
drivers/net/ethernet/cadence/macb_main.c
3593
new_rx_size = clamp_t(u32, ring->rx_pending,
drivers/net/ethernet/cadence/macb_main.c
3597
new_tx_size = clamp_t(u32, ring->tx_pending,
drivers/net/ethernet/cavium/thunder/nicvf_ethtool.c
470
struct ethtool_ringparam *ring,
drivers/net/ethernet/cavium/thunder/nicvf_ethtool.c
477
ring->rx_max_pending = MAX_CMP_QUEUE_LEN;
drivers/net/ethernet/cavium/thunder/nicvf_ethtool.c
478
ring->rx_pending = qs->cq_len;
drivers/net/ethernet/cavium/thunder/nicvf_ethtool.c
479
ring->tx_max_pending = MAX_SND_QUEUE_LEN;
drivers/net/ethernet/cavium/thunder/nicvf_ethtool.c
480
ring->tx_pending = qs->sq_len;
drivers/net/ethernet/cavium/thunder/nicvf_ethtool.c
484
struct ethtool_ringparam *ring,
drivers/net/ethernet/cavium/thunder/nicvf_ethtool.c
496
if ((ring->rx_mini_pending) || (ring->rx_jumbo_pending))
drivers/net/ethernet/cavium/thunder/nicvf_ethtool.c
499
tx_count = clamp_t(u32, ring->tx_pending,
drivers/net/ethernet/cavium/thunder/nicvf_ethtool.c
501
rx_count = clamp_t(u32, ring->rx_pending,
drivers/net/ethernet/chelsio/cxgb4/cxgb4_tc_mqprio.c
103
ring = kzalloc_objs(*ring, CXGB4_EOSW_TXQ_DEFAULT_DESC_NUM);
drivers/net/ethernet/chelsio/cxgb4/cxgb4_tc_mqprio.c
104
if (!ring)
drivers/net/ethernet/chelsio/cxgb4/cxgb4_tc_mqprio.c
107
eosw_txq->desc = ring;
drivers/net/ethernet/chelsio/cxgb4/cxgb4_tc_mqprio.c
99
struct tx_sw_desc *ring;
drivers/net/ethernet/cisco/enic/enic_ethtool.c
218
struct ethtool_ringparam *ring,
drivers/net/ethernet/cisco/enic/enic_ethtool.c
225
ring->rx_max_pending = c->max_rq_ring;
drivers/net/ethernet/cisco/enic/enic_ethtool.c
226
ring->rx_pending = c->rq_desc_count;
drivers/net/ethernet/cisco/enic/enic_ethtool.c
227
ring->tx_max_pending = c->max_wq_ring;
drivers/net/ethernet/cisco/enic/enic_ethtool.c
228
ring->tx_pending = c->wq_desc_count;
drivers/net/ethernet/cisco/enic/enic_ethtool.c
232
struct ethtool_ringparam *ring,
drivers/net/ethernet/cisco/enic/enic_ethtool.c
243
if (ring->rx_mini_max_pending || ring->rx_mini_pending) {
drivers/net/ethernet/cisco/enic/enic_ethtool.c
248
if (ring->rx_jumbo_max_pending || ring->rx_jumbo_pending) {
drivers/net/ethernet/cisco/enic/enic_ethtool.c
255
if (ring->rx_pending > c->max_rq_ring ||
drivers/net/ethernet/cisco/enic/enic_ethtool.c
256
ring->rx_pending < ENIC_MIN_RQ_DESCS) {
drivers/net/ethernet/cisco/enic/enic_ethtool.c
258
ring->rx_pending, ENIC_MIN_RQ_DESCS,
drivers/net/ethernet/cisco/enic/enic_ethtool.c
262
if (ring->tx_pending > c->max_wq_ring ||
drivers/net/ethernet/cisco/enic/enic_ethtool.c
263
ring->tx_pending < ENIC_MIN_WQ_DESCS) {
drivers/net/ethernet/cisco/enic/enic_ethtool.c
265
ring->tx_pending, ENIC_MIN_WQ_DESCS,
drivers/net/ethernet/cisco/enic/enic_ethtool.c
272
ring->rx_pending & 0xffffffe0; /* must be aligned to groups of 32 */
drivers/net/ethernet/cisco/enic/enic_ethtool.c
274
ring->tx_pending & 0xffffffe0; /* must be aligned to groups of 32 */
drivers/net/ethernet/cisco/enic/enic_main.c
823
wq->ring.desc_avail++;
drivers/net/ethernet/cisco/enic/enic_rq.c
403
vrq->ring.desc_avail++;
drivers/net/ethernet/cisco/enic/enic_wq.c
96
ext_wq = cq->ring.size > ENIC_MAX_WQ_DESCS_DEFAULT;
drivers/net/ethernet/cisco/enic/vnic_cq.c
18
vnic_dev_free_desc_ring(cq->vdev, &cq->ring);
drivers/net/ethernet/cisco/enic/vnic_cq.c
35
return vnic_dev_alloc_desc_ring(vdev, &cq->ring, desc_count, desc_size);
drivers/net/ethernet/cisco/enic/vnic_cq.c
46
paddr = (u64)cq->ring.base_addr | VNIC_PADDR_TARGET;
drivers/net/ethernet/cisco/enic/vnic_cq.c
48
iowrite32(cq->ring.desc_count, &cq->ctrl->ring_size);
drivers/net/ethernet/cisco/enic/vnic_cq.c
72
vnic_dev_clear_desc_ring(&cq->ring);
drivers/net/ethernet/cisco/enic/vnic_cq.h
49
struct vnic_dev_ring ring;
drivers/net/ethernet/cisco/enic/vnic_cq.h
61
return ((u8 *)cq->ring.descs + cq->ring.desc_size * cq->to_clean);
drivers/net/ethernet/cisco/enic/vnic_cq.h
67
if (cq->to_clean == cq->ring.desc_count) {
drivers/net/ethernet/cisco/enic/vnic_dev.c
146
static unsigned int vnic_dev_desc_ring_size(struct vnic_dev_ring *ring,
drivers/net/ethernet/cisco/enic/vnic_dev.c
151
ring->base_align = VNIC_DESC_BASE_ALIGN;
drivers/net/ethernet/cisco/enic/vnic_dev.c
158
ring->desc_count = ALIGN(desc_count, VNIC_DESC_COUNT_ALIGN);
drivers/net/ethernet/cisco/enic/vnic_dev.c
161
ring->desc_size = ALIGN(desc_size, VNIC_DESC_SIZE_ALIGN);
drivers/net/ethernet/cisco/enic/vnic_dev.c
163
ring->size = ring->desc_count * ring->desc_size;
drivers/net/ethernet/cisco/enic/vnic_dev.c
164
ring->size_unaligned = ring->size + ring->base_align;
drivers/net/ethernet/cisco/enic/vnic_dev.c
166
return ring->size_unaligned;
drivers/net/ethernet/cisco/enic/vnic_dev.c
169
void vnic_dev_clear_desc_ring(struct vnic_dev_ring *ring)
drivers/net/ethernet/cisco/enic/vnic_dev.c
171
memset(ring->descs, 0, ring->size);
drivers/net/ethernet/cisco/enic/vnic_dev.c
174
int vnic_dev_alloc_desc_ring(struct vnic_dev *vdev, struct vnic_dev_ring *ring,
drivers/net/ethernet/cisco/enic/vnic_dev.c
177
vnic_dev_desc_ring_size(ring, desc_count, desc_size);
drivers/net/ethernet/cisco/enic/vnic_dev.c
179
ring->descs_unaligned = dma_alloc_coherent(&vdev->pdev->dev,
drivers/net/ethernet/cisco/enic/vnic_dev.c
180
ring->size_unaligned,
drivers/net/ethernet/cisco/enic/vnic_dev.c
181
&ring->base_addr_unaligned,
drivers/net/ethernet/cisco/enic/vnic_dev.c
184
if (!ring->descs_unaligned) {
drivers/net/ethernet/cisco/enic/vnic_dev.c
186
(int)ring->size);
drivers/net/ethernet/cisco/enic/vnic_dev.c
190
ring->base_addr = ALIGN(ring->base_addr_unaligned,
drivers/net/ethernet/cisco/enic/vnic_dev.c
191
ring->base_align);
drivers/net/ethernet/cisco/enic/vnic_dev.c
192
ring->descs = (u8 *)ring->descs_unaligned +
drivers/net/ethernet/cisco/enic/vnic_dev.c
193
(ring->base_addr - ring->base_addr_unaligned);
drivers/net/ethernet/cisco/enic/vnic_dev.c
195
vnic_dev_clear_desc_ring(ring);
drivers/net/ethernet/cisco/enic/vnic_dev.c
197
ring->desc_avail = ring->desc_count - 1;
drivers/net/ethernet/cisco/enic/vnic_dev.c
202
void vnic_dev_free_desc_ring(struct vnic_dev *vdev, struct vnic_dev_ring *ring)
drivers/net/ethernet/cisco/enic/vnic_dev.c
204
if (ring->descs) {
drivers/net/ethernet/cisco/enic/vnic_dev.c
205
dma_free_coherent(&vdev->pdev->dev, ring->size_unaligned,
drivers/net/ethernet/cisco/enic/vnic_dev.c
206
ring->descs_unaligned,
drivers/net/ethernet/cisco/enic/vnic_dev.c
207
ring->base_addr_unaligned);
drivers/net/ethernet/cisco/enic/vnic_dev.c
208
ring->descs = NULL;
drivers/net/ethernet/cisco/enic/vnic_dev.c
403
vdev->devcmd2->cmd_ring = vdev->devcmd2->wq.ring.descs;
drivers/net/ethernet/cisco/enic/vnic_dev.h
114
void vnic_dev_clear_desc_ring(struct vnic_dev_ring *ring);
drivers/net/ethernet/cisco/enic/vnic_dev.h
115
int vnic_dev_alloc_desc_ring(struct vnic_dev *vdev, struct vnic_dev_ring *ring,
drivers/net/ethernet/cisco/enic/vnic_dev.h
118
struct vnic_dev_ring *ring);
drivers/net/ethernet/cisco/enic/vnic_rq.c
107
unsigned int count = rq->ring.desc_count;
drivers/net/ethernet/cisco/enic/vnic_rq.c
109
paddr = (u64)rq->ring.base_addr | VNIC_PADDR_TARGET;
drivers/net/ethernet/cisco/enic/vnic_rq.c
176
unsigned int count = rq->ring.desc_count;
drivers/net/ethernet/cisco/enic/vnic_rq.c
181
for (i = 0; i < rq->ring.desc_count; i++) {
drivers/net/ethernet/cisco/enic/vnic_rq.c
185
rq->ring.desc_avail = rq->ring.desc_count - 1;
drivers/net/ethernet/cisco/enic/vnic_rq.c
204
vnic_dev_clear_desc_ring(&rq->ring);
drivers/net/ethernet/cisco/enic/vnic_rq.c
21
unsigned int i, j, count = rq->ring.desc_count;
drivers/net/ethernet/cisco/enic/vnic_rq.c
34
buf->desc = (u8 *)rq->ring.descs +
drivers/net/ethernet/cisco/enic/vnic_rq.c
35
rq->ring.desc_size * buf->index;
drivers/net/ethernet/cisco/enic/vnic_rq.c
60
vnic_dev_free_desc_ring(vdev, &rq->ring);
drivers/net/ethernet/cisco/enic/vnic_rq.c
88
err = vnic_dev_alloc_desc_ring(vdev, &rq->ring, desc_count, desc_size);
drivers/net/ethernet/cisco/enic/vnic_rq.h
123
rq->ring.desc_avail--;
drivers/net/ethernet/cisco/enic/vnic_rq.h
145
rq->ring.desc_avail += count;
drivers/net/ethernet/cisco/enic/vnic_rq.h
170
rq->ring.desc_avail++;
drivers/net/ethernet/cisco/enic/vnic_rq.h
78
struct vnic_dev_ring ring;
drivers/net/ethernet/cisco/enic/vnic_rq.h
89
return rq->ring.desc_avail;
drivers/net/ethernet/cisco/enic/vnic_rq.h
95
return rq->ring.desc_count - rq->ring.desc_avail - 1;
drivers/net/ethernet/cisco/enic/vnic_wq.c
116
err = vnic_dev_alloc_desc_ring(vdev, &wq->ring, desc_count, desc_size);
drivers/net/ethernet/cisco/enic/vnic_wq.c
127
unsigned int count = wq->ring.desc_count;
drivers/net/ethernet/cisco/enic/vnic_wq.c
129
paddr = (u64)wq->ring.base_addr | VNIC_PADDR_TARGET;
drivers/net/ethernet/cisco/enic/vnic_wq.c
194
wq->ring.desc_avail++;
drivers/net/ethernet/cisco/enic/vnic_wq.c
203
vnic_dev_clear_desc_ring(&wq->ring);
drivers/net/ethernet/cisco/enic/vnic_wq.c
21
unsigned int i, j, count = wq->ring.desc_count;
drivers/net/ethernet/cisco/enic/vnic_wq.c
34
buf->desc = (u8 *)wq->ring.descs +
drivers/net/ethernet/cisco/enic/vnic_wq.c
35
wq->ring.desc_size * buf->index;
drivers/net/ethernet/cisco/enic/vnic_wq.c
63
vnic_dev_free_desc_ring(vdev, &wq->ring);
drivers/net/ethernet/cisco/enic/vnic_wq.c
91
err = vnic_dev_alloc_desc_ring(vdev, &wq->ring, desc_count, desc_size);
drivers/net/ethernet/cisco/enic/vnic_wq.h
138
wq->ring.desc_avail -= desc_skip_cnt;
drivers/net/ethernet/cisco/enic/vnic_wq.h
154
wq->ring.desc_avail++;
drivers/net/ethernet/cisco/enic/vnic_wq.h
71
struct vnic_dev_ring ring;
drivers/net/ethernet/cisco/enic/vnic_wq.h
93
return wq->ring.desc_avail;
drivers/net/ethernet/cisco/enic/vnic_wq.h
99
return wq->ring.desc_count - wq->ring.desc_avail - 1;
drivers/net/ethernet/cortina/gemini.c
1244
txd = txq->ring + w;
drivers/net/ethernet/cortina/gemini.c
1264
dma_unmap_page(geth->dev, txq->ring[w].word2.buf_adr,
drivers/net/ethernet/cortina/gemini.c
1265
txq->ring[w].word0.bits.buffer_size,
drivers/net/ethernet/cortina/gemini.c
1309
txq->ring[d].word3.bits.eofie = 1;
drivers/net/ethernet/cortina/gemini.c
581
txq->ring = desc_ring;
drivers/net/ethernet/cortina/gemini.c
621
txd = txq->ring + c;
drivers/net/ethernet/cortina/gemini.c
693
n_txq * sizeof(*port->txq->ring) << port->txq_order,
drivers/net/ethernet/cortina/gemini.c
694
port->txq->ring, port->txq_dma_base);
drivers/net/ethernet/cortina/gemini.c
97
struct gmac_txdesc *ring;
drivers/net/ethernet/emulex/benet/be_ethtool.c
686
struct ethtool_ringparam *ring,
drivers/net/ethernet/emulex/benet/be_ethtool.c
692
ring->rx_max_pending = adapter->rx_obj[0].q.len;
drivers/net/ethernet/emulex/benet/be_ethtool.c
693
ring->rx_pending = adapter->rx_obj[0].q.len;
drivers/net/ethernet/emulex/benet/be_ethtool.c
694
ring->tx_max_pending = adapter->tx_obj[0].q.len;
drivers/net/ethernet/emulex/benet/be_ethtool.c
695
ring->tx_pending = adapter->tx_obj[0].q.len;
drivers/net/ethernet/ethoc.c
948
struct ethtool_ringparam *ring,
drivers/net/ethernet/ethoc.c
954
ring->rx_max_pending = priv->num_bd - 1;
drivers/net/ethernet/ethoc.c
955
ring->rx_mini_max_pending = 0;
drivers/net/ethernet/ethoc.c
956
ring->rx_jumbo_max_pending = 0;
drivers/net/ethernet/ethoc.c
957
ring->tx_max_pending = priv->num_bd - 1;
drivers/net/ethernet/ethoc.c
959
ring->rx_pending = priv->num_rx;
drivers/net/ethernet/ethoc.c
960
ring->rx_mini_pending = 0;
drivers/net/ethernet/ethoc.c
961
ring->rx_jumbo_pending = 0;
drivers/net/ethernet/ethoc.c
962
ring->tx_pending = priv->num_tx;
drivers/net/ethernet/ethoc.c
966
struct ethtool_ringparam *ring,
drivers/net/ethernet/ethoc.c
972
if (ring->tx_pending < 1 || ring->rx_pending < 1 ||
drivers/net/ethernet/ethoc.c
973
ring->tx_pending + ring->rx_pending > priv->num_bd)
drivers/net/ethernet/ethoc.c
975
if (ring->rx_mini_pending || ring->rx_jumbo_pending)
drivers/net/ethernet/ethoc.c
985
priv->num_tx = rounddown_pow_of_two(ring->tx_pending);
drivers/net/ethernet/ethoc.c
986
priv->num_rx = ring->rx_pending;
drivers/net/ethernet/freescale/enetc/enetc.h
572
struct enetc_cbdr *ring = &si->cbd_ring;
drivers/net/ethernet/freescale/enetc/enetc.h
576
data = dma_alloc_coherent(ring->dma_dev,
drivers/net/ethernet/freescale/enetc/enetc.h
580
dev_err(ring->dma_dev, "CBD alloc data memory failed!\n");
drivers/net/ethernet/freescale/enetc/enetc.h
597
struct enetc_cbdr *ring = &si->cbd_ring;
drivers/net/ethernet/freescale/enetc/enetc.h
599
dma_free_coherent(ring->dma_dev, size + ENETC_CBD_DATA_MEM_ALIGN,
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
102
i = ring->next_to_clean;
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
104
while (enetc_rd_reg(ring->cir) != i) {
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
105
dest_cbd = ENETC_CBD(*ring, i);
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
108
dev_warn(ring->dma_dev, "CMD err %04x for cmd %04x\n",
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
113
i = (i + 1) % ring->bd_count;
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
116
ring->next_to_clean = i;
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
127
struct enetc_cbdr *ring = &si->cbd_ring;
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
132
if (unlikely(!ring->bd_base))
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
135
if (unlikely(!enetc_cbd_unused(ring)))
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
136
enetc_clean_cbdr(ring);
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
138
i = ring->next_to_use;
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
139
dest_cbd = ENETC_CBD(*ring, i);
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
143
i = (i + 1) % ring->bd_count;
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
145
ring->next_to_use = i;
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
147
enetc_wr_reg(ring->pir, i);
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
150
if (enetc_rd_reg(ring->cir) == i)
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
162
enetc_clean_cbdr(ring);
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
212
struct enetc_cbdr *ring = &si->cbd_ring;
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
233
dev_err(ring->dma_dev, "FS entry add failed (%d)!", err);
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
244
struct enetc_cbdr *ring = &si->cbd_ring;
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
269
dev_err(ring->dma_dev, "RSS cmd failed (%d)!", err);
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
72
user->ring = devm_kcalloc(dev, user->cbdr_num,
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
74
if (!user->ring)
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
84
return ntmp_init_cbdr(user->ring, dev, &regs);
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
92
ntmp_free_cbdr(user->ring);
drivers/net/ethernet/freescale/enetc/enetc_cbdr.c
97
static void enetc_clean_cbdr(struct enetc_cbdr *ring)
drivers/net/ethernet/freescale/enetc/enetc_ethtool.c
818
struct ethtool_ringparam *ring,
drivers/net/ethernet/freescale/enetc/enetc_ethtool.c
824
ring->rx_max_pending = priv->rx_bd_count;
drivers/net/ethernet/freescale/enetc/enetc_ethtool.c
825
ring->tx_max_pending = priv->tx_bd_count;
drivers/net/ethernet/freescale/enetc/enetc_ethtool.c
826
ring->rx_pending = priv->rx_bd_count;
drivers/net/ethernet/freescale/enetc/enetc_ethtool.c
827
ring->tx_pending = priv->tx_bd_count;
drivers/net/ethernet/freescale/enetc/ntmp.c
121
cbdr = &user->ring[0];
drivers/net/ethernet/freescale/ucc_geth_ethtool.c
176
struct ethtool_ringparam *ring,
drivers/net/ethernet/freescale/ucc_geth_ethtool.c
184
ring->rx_max_pending = UCC_GETH_BD_RING_SIZE_MAX;
drivers/net/ethernet/freescale/ucc_geth_ethtool.c
185
ring->rx_mini_max_pending = UCC_GETH_BD_RING_SIZE_MAX;
drivers/net/ethernet/freescale/ucc_geth_ethtool.c
186
ring->rx_jumbo_max_pending = UCC_GETH_BD_RING_SIZE_MAX;
drivers/net/ethernet/freescale/ucc_geth_ethtool.c
187
ring->tx_max_pending = UCC_GETH_BD_RING_SIZE_MAX;
drivers/net/ethernet/freescale/ucc_geth_ethtool.c
189
ring->rx_pending = ug_info->bdRingLenRx[queue];
drivers/net/ethernet/freescale/ucc_geth_ethtool.c
190
ring->rx_mini_pending = ug_info->bdRingLenRx[queue];
drivers/net/ethernet/freescale/ucc_geth_ethtool.c
191
ring->rx_jumbo_pending = ug_info->bdRingLenRx[queue];
drivers/net/ethernet/freescale/ucc_geth_ethtool.c
192
ring->tx_pending = ug_info->bdRingLenTx[queue];
drivers/net/ethernet/freescale/ucc_geth_ethtool.c
197
struct ethtool_ringparam *ring,
drivers/net/ethernet/freescale/ucc_geth_ethtool.c
205
if (ring->rx_pending < UCC_GETH_RX_BD_RING_SIZE_MIN) {
drivers/net/ethernet/freescale/ucc_geth_ethtool.c
210
if (ring->rx_pending % UCC_GETH_RX_BD_RING_SIZE_ALIGNMENT) {
drivers/net/ethernet/freescale/ucc_geth_ethtool.c
215
if (ring->tx_pending < UCC_GETH_TX_BD_RING_SIZE_MIN) {
drivers/net/ethernet/freescale/ucc_geth_ethtool.c
224
ug_info->bdRingLenRx[queue] = ring->rx_pending;
drivers/net/ethernet/freescale/ucc_geth_ethtool.c
225
ug_info->bdRingLenTx[queue] = ring->tx_pending;
drivers/net/ethernet/fungible/funeth/funeth_ethtool.c
553
struct ethtool_ringparam *ring,
drivers/net/ethernet/fungible/funeth/funeth_ethtool.c
563
ring->rx_max_pending = max_depth / 2;
drivers/net/ethernet/fungible/funeth/funeth_ethtool.c
564
ring->tx_max_pending = max_depth;
drivers/net/ethernet/fungible/funeth/funeth_ethtool.c
566
ring->rx_pending = fp->rq_depth;
drivers/net/ethernet/fungible/funeth/funeth_ethtool.c
567
ring->tx_pending = fp->sq_depth;
drivers/net/ethernet/fungible/funeth/funeth_ethtool.c
574
struct ethtool_ringparam *ring,
drivers/net/ethernet/fungible/funeth/funeth_ethtool.c
581
if (ring->rx_mini_pending || ring->rx_jumbo_pending)
drivers/net/ethernet/fungible/funeth/funeth_ethtool.c
585
if (!is_power_of_2(ring->rx_pending) ||
drivers/net/ethernet/fungible/funeth/funeth_ethtool.c
586
!is_power_of_2(ring->tx_pending))
drivers/net/ethernet/fungible/funeth/funeth_ethtool.c
589
if (ring->rx_pending < FUNETH_MIN_QDEPTH ||
drivers/net/ethernet/fungible/funeth/funeth_ethtool.c
590
ring->tx_pending < FUNETH_MIN_QDEPTH)
drivers/net/ethernet/fungible/funeth/funeth_ethtool.c
593
if (fp->sq_depth == ring->tx_pending &&
drivers/net/ethernet/fungible/funeth/funeth_ethtool.c
594
fp->rq_depth == ring->rx_pending)
drivers/net/ethernet/fungible/funeth/funeth_ethtool.c
599
.cq_depth = 2 * ring->rx_pending,
drivers/net/ethernet/fungible/funeth/funeth_ethtool.c
600
.rq_depth = ring->rx_pending,
drivers/net/ethernet/fungible/funeth/funeth_ethtool.c
601
.sq_depth = ring->tx_pending
drivers/net/ethernet/fungible/funeth/funeth_ethtool.c
609
fp->sq_depth = ring->tx_pending;
drivers/net/ethernet/fungible/funeth/funeth_ethtool.c
610
fp->rq_depth = ring->rx_pending;
drivers/net/ethernet/google/gve/gve_ethtool.c
171
int ring;
drivers/net/ethernet/google/gve/gve_ethtool.c
182
for (ring = 0; ring < priv->rx_cfg.num_queues; ring++) {
drivers/net/ethernet/google/gve/gve_ethtool.c
183
rx_qid_to_stats_idx[ring] = -1;
drivers/net/ethernet/google/gve/gve_ethtool.c
184
if (!gve_rx_was_added_to_block(priv, ring))
drivers/net/ethernet/google/gve/gve_ethtool.c
192
for (ring = 0; ring < num_tx_queues; ring++) {
drivers/net/ethernet/google/gve/gve_ethtool.c
193
tx_qid_to_stats_idx[ring] = -1;
drivers/net/ethernet/google/gve/gve_ethtool.c
194
if (!gve_tx_was_added_to_block(priv, ring))
drivers/net/ethernet/google/gve/gve_ethtool.c
202
ring = 0;
drivers/net/ethernet/google/gve/gve_ethtool.c
203
ring < priv->rx_cfg.num_queues; ring++) {
drivers/net/ethernet/google/gve/gve_ethtool.c
206
struct gve_rx_ring *rx = &priv->rx[ring];
drivers/net/ethernet/google/gve/gve_ethtool.c
209
u64_stats_fetch_begin(&priv->rx[ring].statss);
drivers/net/ethernet/google/gve/gve_ethtool.c
222
} while (u64_stats_fetch_retry(&priv->rx[ring].statss,
drivers/net/ethernet/google/gve/gve_ethtool.c
235
for (tx_pkts = 0, tx_bytes = 0, tx_dropped = 0, ring = 0;
drivers/net/ethernet/google/gve/gve_ethtool.c
236
ring < num_tx_queues; ring++) {
drivers/net/ethernet/google/gve/gve_ethtool.c
240
u64_stats_fetch_begin(&priv->tx[ring].statss);
drivers/net/ethernet/google/gve/gve_ethtool.c
241
tmp_tx_pkts = priv->tx[ring].pkt_done;
drivers/net/ethernet/google/gve/gve_ethtool.c
242
tmp_tx_bytes = priv->tx[ring].bytes_done;
drivers/net/ethernet/google/gve/gve_ethtool.c
243
} while (u64_stats_fetch_retry(&priv->tx[ring].statss,
drivers/net/ethernet/google/gve/gve_ethtool.c
247
tx_dropped += priv->tx[ring].dropped_pkt;
drivers/net/ethernet/google/gve/gve_ethtool.c
323
for (ring = 0; ring < priv->rx_cfg.num_queues; ring++) {
drivers/net/ethernet/google/gve/gve_ethtool.c
324
struct gve_rx_ring *rx = &priv->rx[ring];
drivers/net/ethernet/google/gve/gve_ethtool.c
331
u64_stats_fetch_begin(&priv->rx[ring].statss);
drivers/net/ethernet/google/gve/gve_ethtool.c
341
} while (u64_stats_fetch_retry(&priv->rx[ring].statss,
drivers/net/ethernet/google/gve/gve_ethtool.c
357
stats_idx = rx_qid_to_stats_idx[ring];
drivers/net/ethernet/google/gve/gve_ethtool.c
371
start = u64_stats_fetch_begin(&priv->rx[ring].statss);
drivers/net/ethernet/google/gve/gve_ethtool.c
377
} while (u64_stats_fetch_retry(&priv->rx[ring].statss,
drivers/net/ethernet/google/gve/gve_ethtool.c
405
for (ring = 0; ring < num_tx_queues; ring++) {
drivers/net/ethernet/google/gve/gve_ethtool.c
406
struct gve_tx_ring *tx = &priv->tx[ring];
drivers/net/ethernet/google/gve/gve_ethtool.c
424
u64_stats_fetch_begin(&priv->tx[ring].statss);
drivers/net/ethernet/google/gve/gve_ethtool.c
426
} while (u64_stats_fetch_retry(&priv->tx[ring].statss,
drivers/net/ethernet/google/gve/gve_ethtool.c
434
stats_idx = tx_qid_to_stats_idx[ring];
drivers/net/ethernet/google/gve/gve_ethtool.c
447
start = u64_stats_fetch_begin(&priv->tx[ring].statss);
drivers/net/ethernet/google/gve/gve_ethtool.c
451
} while (u64_stats_fetch_retry(&priv->tx[ring].statss,
drivers/net/ethernet/google/gve/gve_main.c
114
int ring;
drivers/net/ethernet/google/gve/gve_main.c
118
for (ring = 0; ring < priv->rx_cfg.num_queues; ring++) {
drivers/net/ethernet/google/gve/gve_main.c
121
u64_stats_fetch_begin(&priv->rx[ring].statss);
drivers/net/ethernet/google/gve/gve_main.c
122
packets = priv->rx[ring].rpackets;
drivers/net/ethernet/google/gve/gve_main.c
123
bytes = priv->rx[ring].rbytes;
drivers/net/ethernet/google/gve/gve_main.c
124
} while (u64_stats_fetch_retry(&priv->rx[ring].statss,
drivers/net/ethernet/google/gve/gve_main.c
131
for (ring = 0; ring < num_tx_queues; ring++) {
drivers/net/ethernet/google/gve/gve_main.c
134
u64_stats_fetch_begin(&priv->tx[ring].statss);
drivers/net/ethernet/google/gve/gve_main.c
135
packets = priv->tx[ring].pkt_done;
drivers/net/ethernet/google/gve/gve_main.c
136
bytes = priv->tx[ring].bytes_done;
drivers/net/ethernet/google/gve/gve_main.c
137
} while (u64_stats_fetch_retry(&priv->tx[ring].statss,
drivers/net/ethernet/hisilicon/hibmcge/hbg_common.h
66
struct hbg_ring *ring;
drivers/net/ethernet/hisilicon/hibmcge/hbg_debugfs.c
24
static void hbg_dbg_ring(struct hbg_priv *priv, struct hbg_ring *ring,
drivers/net/ethernet/hisilicon/hibmcge/hbg_debugfs.c
27
u32 irq_mask = ring->dir == HBG_DIR_TX ? HBG_INT_MSK_TX_B :
drivers/net/ethernet/hisilicon/hibmcge/hbg_debugfs.c
31
hbg_get_queue_used_num(ring));
drivers/net/ethernet/hisilicon/hibmcge/hbg_debugfs.c
32
seq_printf(s, "ring max num: %u\n", ring->len);
drivers/net/ethernet/hisilicon/hibmcge/hbg_debugfs.c
33
seq_printf(s, "ring head: %u, tail: %u\n", ring->head, ring->tail);
drivers/net/ethernet/hisilicon/hibmcge/hbg_debugfs.c
35
hbg_hw_get_fifo_used_num(priv, ring->dir));
drivers/net/ethernet/hisilicon/hibmcge/hbg_debugfs.c
37
hbg_get_spec_fifo_max_num(priv, ring->dir));
drivers/net/ethernet/hisilicon/hibmcge/hbg_main.c
218
struct hbg_ring *ring = &priv->tx_ring;
drivers/net/ethernet/hisilicon/hibmcge/hbg_main.c
219
char *buf = ring->tout_log_buf;
drivers/net/ethernet/hisilicon/hibmcge/hbg_main.c
228
hbg_get_queue_used_num(ring),
drivers/net/ethernet/hisilicon/hibmcge/hbg_main.c
232
ring->ntc, ring->ntu,
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
132
struct hbg_ring *ring = netdev_get_tx_ring(netdev);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
137
u32 ntc = smp_load_acquire(&ring->ntc);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
140
u32 ntu = ring->ntu;
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
150
hbg_queue_left_num(ntc, ntu, ring),
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
154
buffer = &ring->queue[ntu];
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
169
smp_store_release(&ring->ntu, hbg_queue_next_prt(ntu, ring));
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
19
#define hbg_queue_used_num(head, tail, ring) ({ \
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
195
struct hbg_ring *ring = container_of(napi, struct hbg_ring, napi);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
199
u32 ntu = smp_load_acquire(&ring->ntu);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
20
typeof(ring) _ring = (ring); \
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
200
struct hbg_priv *priv = ring->priv;
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
202
u32 ntc = ring->ntc;
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
211
if (unlikely(hbg_queue_is_empty(ntc, ntu, ring)))
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
217
buffer = &ring->queue[ntc];
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
22
#define hbg_queue_left_num(head, tail, ring) ({ \
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
222
ntc = hbg_queue_next_prt(ntc, ring);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
229
smp_store_release(&ring->ntc, ntc);
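Note: the hbg_txrx.c hits above pair smp_load_acquire()/smp_store_release() on ntc/ntu, so each side only publishes its index after it has finished touching the slot. A minimal single-producer/single-consumer sketch of that pattern, using C11 atomics as a userspace stand-in for the kernel barriers; the names, the 8-entry depth, and the payload are illustrative and not taken from the driver:

    #include <stdatomic.h>
    #include <stdint.h>
    #include <stdio.h>

    #define RING_LEN 8                      /* illustrative depth only */

    struct demo_ring {
        _Atomic uint32_t ntu;               /* next to use: producer index */
        _Atomic uint32_t ntc;               /* next to clean: consumer index */
        int slots[RING_LEN];
    };

    /* producer side: read the consumer index with acquire, publish ntu with release */
    static int demo_produce(struct demo_ring *r, int val)
    {
        uint32_t ntc = atomic_load_explicit(&r->ntc, memory_order_acquire);
        uint32_t ntu = atomic_load_explicit(&r->ntu, memory_order_relaxed);

        if ((ntu + 1) % RING_LEN == ntc)    /* full: one slot kept empty */
            return -1;
        r->slots[ntu] = val;                /* fill the slot first ... */
        atomic_store_explicit(&r->ntu, (ntu + 1) % RING_LEN,
                              memory_order_release); /* ... then publish it */
        return 0;
    }

    /* consumer side: read the producer index with acquire, publish ntc with release */
    static int demo_consume(struct demo_ring *r, int *val)
    {
        uint32_t ntu = atomic_load_explicit(&r->ntu, memory_order_acquire);
        uint32_t ntc = atomic_load_explicit(&r->ntc, memory_order_relaxed);

        if (ntc == ntu)                     /* empty */
            return -1;
        *val = r->slots[ntc];
        atomic_store_explicit(&r->ntc, (ntc + 1) % RING_LEN,
                              memory_order_release);
        return 0;
    }

    int main(void)
    {
        struct demo_ring r = { 0 };
        int v;

        demo_produce(&r, 42);
        if (!demo_consume(&r, &v))
            printf("consumed %d\n", v);
        return 0;
    }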
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
23
typeof(ring) _r = (ring); \
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
25
#define hbg_queue_is_empty(head, tail, ring) \
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
26
(hbg_queue_used_num((head), (tail), (ring)) == 0)
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
27
#define hbg_queue_is_full(head, tail, ring) \
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
28
(hbg_queue_left_num((head), (tail), (ring)) == 0)
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
29
#define hbg_queue_next_prt(p, ring) (((p) + 1) % (ring)->len)
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
30
#define hbg_queue_move_next(p, ring) ({ \
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
31
typeof(ring) _ring = (ring); \
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
34
#define hbg_get_page_order(ring) ({ \
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
35
typeof(ring) _ring = (ring); \
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
37
#define hbg_get_page_size(ring) (PAGE_SIZE << hbg_get_page_order((ring)))
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
408
struct hbg_ring *ring = &priv->rx_ring;
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
412
if (hbg_queue_is_full(ring->ntc, ring->ntu, ring) ||
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
413
hbg_fifo_is_full(priv, ring->dir))
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
416
buffer = &ring->queue[ring->ntu];
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
426
hbg_queue_move_next(ntu, ring);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
483
struct hbg_ring *ring = container_of(napi, struct hbg_ring, napi);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
484
struct hbg_priv *priv = ring->priv;
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
492
if (unlikely(hbg_queue_is_empty(ring->ntc, ring->ntu, ring)))
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
495
buffer = &ring->queue[ring->ntc];
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
503
trace_hbg_rx_desc(priv, ring->ntc, rx_desc);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
526
hbg_queue_move_next(ntc, ring);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
537
static void hbg_ring_page_pool_destory(struct hbg_ring *ring)
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
539
if (!ring->page_pool)
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
542
page_pool_destroy(ring->page_pool);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
543
ring->page_pool = NULL;
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
546
static int hbg_ring_page_pool_init(struct hbg_priv *priv, struct hbg_ring *ring)
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
548
u32 buf_size = hbg_spec_max_frame_len(priv, ring->dir);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
551
.order = hbg_get_page_order(ring),
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
552
.pool_size = ring->len * buf_size / hbg_get_page_size(ring),
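Note: the pool_size computed above sizes the page pool from the ring depth and per-buffer size; assuming, purely for illustration, a 1024-entry ring, 2 KiB buffers, and 4 KiB pages, ring->len * buf_size / page_size works out to 1024 * 2048 / 4096 = 512 pages.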
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
555
.napi = &ring->napi,
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
558
.max_len = hbg_get_page_size(ring),
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
562
ring->page_pool = page_pool_create(&pp_params);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
563
if (IS_ERR(ring->page_pool)) {
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
564
ret = PTR_ERR(ring->page_pool);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
567
ring->page_pool = NULL;
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
573
static void hbg_ring_uninit(struct hbg_ring *ring)
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
578
if (!ring->queue)
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
581
napi_disable(&ring->napi);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
582
netif_napi_del(&ring->napi);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
584
for (i = 0; i < ring->len; i++) {
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
585
buffer = &ring->queue[i];
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
587
buffer->ring = NULL;
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
591
hbg_ring_page_pool_destory(ring);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
592
dma_free_coherent(&ring->priv->pdev->dev,
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
593
ring->len * sizeof(*ring->queue),
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
594
ring->queue, ring->queue_dma);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
595
ring->queue = NULL;
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
596
ring->queue_dma = 0;
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
597
ring->len = 0;
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
598
ring->priv = NULL;
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
601
static int hbg_ring_init(struct hbg_priv *priv, struct hbg_ring *ring,
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
620
ring->queue = dma_alloc_coherent(&priv->pdev->dev,
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
621
len * sizeof(*ring->queue),
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
622
&ring->queue_dma, GFP_KERNEL);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
623
if (!ring->queue)
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
627
buffer = &ring->queue[i];
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
630
buffer->ring = ring;
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
632
buffer->state_dma = ring->queue_dma + (i * sizeof(*buffer));
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
635
ring->dir = dir;
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
636
ring->priv = priv;
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
637
ring->ntc = 0;
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
638
ring->ntu = 0;
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
639
ring->len = len;
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
642
netif_napi_add_tx(priv->netdev, &ring->napi, napi_poll);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
644
netif_napi_add(priv->netdev, &ring->napi, napi_poll);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
646
ret = hbg_ring_page_pool_init(priv, ring);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
648
netif_napi_del(&ring->napi);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
649
dma_free_coherent(&ring->priv->pdev->dev,
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
650
ring->len * sizeof(*ring->queue),
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
651
ring->queue, ring->queue_dma);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
652
ring->queue = NULL;
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
653
ring->len = 0;
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
658
napi_enable(&ring->napi);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
75
struct hbg_ring *ring = buffer->ring;
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
80
page_pool_put_full_page(ring->page_pool, buffer->page, false);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
91
struct hbg_ring *ring = buffer->ring;
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
92
u32 len = hbg_get_page_size(ring);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
95
if (unlikely(!ring->page_pool))
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.c
98
buffer->page = page_pool_dev_alloc_frag(ring->page_pool, &offset, len);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.h
30
static inline u32 hbg_get_queue_used_num(struct hbg_ring *ring)
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.h
32
u32 len = READ_ONCE(ring->len);
drivers/net/ethernet/hisilicon/hibmcge/hbg_txrx.h
37
return (READ_ONCE(ring->ntu) + len - READ_ONCE(ring->ntc)) % len;
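Note: the hbg_txrx.h hit above computes occupancy as (ntu + len - ntc) % len. A small standalone check of that arithmetic; the helper name and the sample values below are made up for illustration:

    #include <stdint.h>
    #include <stdio.h>

    /* same modular arithmetic as the helper above: entries between ntc and ntu */
    static uint32_t used_num(uint32_t ntc, uint32_t ntu, uint32_t len)
    {
        return len ? (ntu + len - ntc) % len : 0;  /* guard len == 0 in this sketch */
    }

    int main(void)
    {
        /* illustrative 16-entry ring that has wrapped around */
        printf("%u\n", used_num(14, 3, 16));       /* -> 5 entries in flight */
        printf("%u\n", used_num(3, 3, 16));        /* -> 0, ring empty */
        return 0;
    }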
drivers/net/ethernet/hisilicon/hns/hnae.c
121
static void hnae_free_buffers(struct hnae_ring *ring)
drivers/net/ethernet/hisilicon/hns/hnae.c
125
for (i = 0; i < ring->desc_num; i++)
drivers/net/ethernet/hisilicon/hns/hnae.c
126
hnae_free_buffer_detach(ring, i);
drivers/net/ethernet/hisilicon/hns/hnae.c
130
static int hnae_alloc_buffers(struct hnae_ring *ring)
drivers/net/ethernet/hisilicon/hns/hnae.c
134
for (i = 0; i < ring->desc_num; i++) {
drivers/net/ethernet/hisilicon/hns/hnae.c
135
ret = hnae_alloc_buffer_attach(ring, i);
drivers/net/ethernet/hisilicon/hns/hnae.c
144
hnae_free_buffer_detach(ring, j);
drivers/net/ethernet/hisilicon/hns/hnae.c
149
static void hnae_free_desc(struct hnae_ring *ring)
drivers/net/ethernet/hisilicon/hns/hnae.c
151
dma_unmap_single(ring_to_dev(ring), ring->desc_dma_addr,
drivers/net/ethernet/hisilicon/hns/hnae.c
152
ring->desc_num * sizeof(ring->desc[0]),
drivers/net/ethernet/hisilicon/hns/hnae.c
153
ring_to_dma_dir(ring));
drivers/net/ethernet/hisilicon/hns/hnae.c
154
ring->desc_dma_addr = 0;
drivers/net/ethernet/hisilicon/hns/hnae.c
155
kfree(ring->desc);
drivers/net/ethernet/hisilicon/hns/hnae.c
156
ring->desc = NULL;
drivers/net/ethernet/hisilicon/hns/hnae.c
160
static int hnae_alloc_desc(struct hnae_ring *ring)
drivers/net/ethernet/hisilicon/hns/hnae.c
162
int size = ring->desc_num * sizeof(ring->desc[0]);
drivers/net/ethernet/hisilicon/hns/hnae.c
164
ring->desc = kzalloc(size, GFP_KERNEL);
drivers/net/ethernet/hisilicon/hns/hnae.c
165
if (!ring->desc)
drivers/net/ethernet/hisilicon/hns/hnae.c
168
ring->desc_dma_addr = dma_map_single(ring_to_dev(ring),
drivers/net/ethernet/hisilicon/hns/hnae.c
169
ring->desc, size, ring_to_dma_dir(ring));
drivers/net/ethernet/hisilicon/hns/hnae.c
170
if (dma_mapping_error(ring_to_dev(ring), ring->desc_dma_addr)) {
drivers/net/ethernet/hisilicon/hns/hnae.c
171
ring->desc_dma_addr = 0;
drivers/net/ethernet/hisilicon/hns/hnae.c
172
kfree(ring->desc);
drivers/net/ethernet/hisilicon/hns/hnae.c
173
ring->desc = NULL;
drivers/net/ethernet/hisilicon/hns/hnae.c
181
static void hnae_fini_ring(struct hnae_ring *ring)
drivers/net/ethernet/hisilicon/hns/hnae.c
183
if (is_rx_ring(ring))
drivers/net/ethernet/hisilicon/hns/hnae.c
184
hnae_free_buffers(ring);
drivers/net/ethernet/hisilicon/hns/hnae.c
186
hnae_free_desc(ring);
drivers/net/ethernet/hisilicon/hns/hnae.c
187
kfree(ring->desc_cb);
drivers/net/ethernet/hisilicon/hns/hnae.c
188
ring->desc_cb = NULL;
drivers/net/ethernet/hisilicon/hns/hnae.c
189
ring->next_to_clean = 0;
drivers/net/ethernet/hisilicon/hns/hnae.c
190
ring->next_to_use = 0;
drivers/net/ethernet/hisilicon/hns/hnae.c
195
hnae_init_ring(struct hnae_queue *q, struct hnae_ring *ring, int flags)
drivers/net/ethernet/hisilicon/hns/hnae.c
199
if (ring->desc_num <= 0 || ring->buf_size <= 0)
drivers/net/ethernet/hisilicon/hns/hnae.c
202
ring->q = q;
drivers/net/ethernet/hisilicon/hns/hnae.c
203
ring->flags = flags;
drivers/net/ethernet/hisilicon/hns/hnae.c
204
ring->coal_param = q->handle->coal_param;
drivers/net/ethernet/hisilicon/hns/hnae.c
205
assert(!ring->desc && !ring->desc_cb && !ring->desc_dma_addr);
drivers/net/ethernet/hisilicon/hns/hnae.c
208
assert(ring->next_to_use == 0);
drivers/net/ethernet/hisilicon/hns/hnae.c
209
assert(ring->next_to_clean == 0);
drivers/net/ethernet/hisilicon/hns/hnae.c
211
ring->desc_cb = kzalloc_objs(ring->desc_cb[0], ring->desc_num);
drivers/net/ethernet/hisilicon/hns/hnae.c
212
if (!ring->desc_cb) {
drivers/net/ethernet/hisilicon/hns/hnae.c
217
ret = hnae_alloc_desc(ring);
drivers/net/ethernet/hisilicon/hns/hnae.c
221
if (is_rx_ring(ring)) {
drivers/net/ethernet/hisilicon/hns/hnae.c
222
ret = hnae_alloc_buffers(ring);
drivers/net/ethernet/hisilicon/hns/hnae.c
230
hnae_free_desc(ring);
drivers/net/ethernet/hisilicon/hns/hnae.c
232
kfree(ring->desc_cb);
drivers/net/ethernet/hisilicon/hns/hnae.c
233
ring->desc_cb = NULL;
drivers/net/ethernet/hisilicon/hns/hnae.c
38
static int hnae_alloc_buffer(struct hnae_ring *ring, struct hnae_desc_cb *cb)
drivers/net/ethernet/hisilicon/hns/hnae.c
40
unsigned int order = hnae_page_order(ring);
drivers/net/ethernet/hisilicon/hns/hnae.c
50
cb->length = hnae_page_size(ring);
drivers/net/ethernet/hisilicon/hns/hnae.c
56
static void hnae_free_buffer(struct hnae_ring *ring, struct hnae_desc_cb *cb)
drivers/net/ethernet/hisilicon/hns/hnae.c
63
else if (unlikely(is_rx_ring(ring)))
drivers/net/ethernet/hisilicon/hns/hnae.c
69
static int hnae_map_buffer(struct hnae_ring *ring, struct hnae_desc_cb *cb)
drivers/net/ethernet/hisilicon/hns/hnae.c
71
cb->dma = dma_map_page(ring_to_dev(ring), cb->priv, 0,
drivers/net/ethernet/hisilicon/hns/hnae.c
72
cb->length, ring_to_dma_dir(ring));
drivers/net/ethernet/hisilicon/hns/hnae.c
74
if (dma_mapping_error(ring_to_dev(ring), cb->dma))
drivers/net/ethernet/hisilicon/hns/hnae.c
80
static void hnae_unmap_buffer(struct hnae_ring *ring, struct hnae_desc_cb *cb)
drivers/net/ethernet/hisilicon/hns/hnae.c
83
dma_unmap_single(ring_to_dev(ring), cb->dma, cb->length,
drivers/net/ethernet/hisilicon/hns/hnae.c
84
ring_to_dma_dir(ring));
drivers/net/ethernet/hisilicon/hns/hnae.c
86
dma_unmap_page(ring_to_dev(ring), cb->dma, cb->length,
drivers/net/ethernet/hisilicon/hns/hnae.c
87
ring_to_dma_dir(ring));
drivers/net/ethernet/hisilicon/hns/hnae.h
233
#define is_tx_ring(ring) ((ring)->flags & RINGF_DIR)
drivers/net/ethernet/hisilicon/hns/hnae.h
234
#define is_rx_ring(ring) (!is_tx_ring(ring))
drivers/net/ethernet/hisilicon/hns/hnae.h
235
#define ring_to_dma_dir(ring) (is_tx_ring(ring) ? \
drivers/net/ethernet/hisilicon/hns/hnae.h
300
#define ring_ptr_move_fw(ring, p) \
drivers/net/ethernet/hisilicon/hns/hnae.h
301
((ring)->p = ((ring)->p + 1) % (ring)->desc_num)
drivers/net/ethernet/hisilicon/hns/hnae.h
302
#define ring_ptr_move_bw(ring, p) \
drivers/net/ethernet/hisilicon/hns/hnae.h
303
((ring)->p = ((ring)->p - 1 + (ring)->desc_num) % (ring)->desc_num)
drivers/net/ethernet/hisilicon/hns/hnae.h
310
#define assert_is_ring_idx(ring, idx) \
drivers/net/ethernet/hisilicon/hns/hnae.h
311
assert((idx) >= 0 && (idx) < (ring)->desc_num)
drivers/net/ethernet/hisilicon/hns/hnae.h
316
static inline int ring_dist(struct hnae_ring *ring, int begin, int end)
drivers/net/ethernet/hisilicon/hns/hnae.h
318
assert_is_ring_idx(ring, begin);
drivers/net/ethernet/hisilicon/hns/hnae.h
319
assert_is_ring_idx(ring, end);
drivers/net/ethernet/hisilicon/hns/hnae.h
321
return (end - begin + ring->desc_num) % ring->desc_num;
drivers/net/ethernet/hisilicon/hns/hnae.h
324
static inline int ring_space(struct hnae_ring *ring)
drivers/net/ethernet/hisilicon/hns/hnae.h
326
return ring->desc_num -
drivers/net/ethernet/hisilicon/hns/hnae.h
327
ring_dist(ring, ring->next_to_clean, ring->next_to_use) - 1;
drivers/net/ethernet/hisilicon/hns/hnae.h
330
static inline int is_ring_empty(struct hnae_ring *ring)
drivers/net/ethernet/hisilicon/hns/hnae.h
332
assert_is_ring_idx(ring, ring->next_to_use);
drivers/net/ethernet/hisilicon/hns/hnae.h
333
assert_is_ring_idx(ring, ring->next_to_clean);
drivers/net/ethernet/hisilicon/hns/hnae.h
335
return ring->next_to_use == ring->next_to_clean;
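Note: ring_dist() and ring_space() above encode the usual convention of reserving one descriptor so that next_to_use == next_to_clean unambiguously means "empty". A quick standalone illustration of that arithmetic; the values are hypothetical:

    #include <stdio.h>

    /* mirrors ring_dist(): forward distance from begin to end on a desc_num-entry ring */
    static int demo_ring_dist(int desc_num, int begin, int end)
    {
        return (end - begin + desc_num) % desc_num;
    }

    /* mirrors ring_space(): free slots, keeping one back so full != empty */
    static int demo_ring_space(int desc_num, int next_to_clean, int next_to_use)
    {
        return desc_num - demo_ring_dist(desc_num, next_to_clean, next_to_use) - 1;
    }

    int main(void)
    {
        /* illustrative 512-entry ring: 100 descriptors pending, 411 still usable */
        printf("dist=%d space=%d\n",
               demo_ring_dist(512, 50, 150), demo_ring_space(512, 50, 150));
        return 0;
    }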
drivers/net/ethernet/hisilicon/hns/hnae.h
346
int (*alloc_buffer)(struct hnae_ring *ring, struct hnae_desc_cb *cb);
drivers/net/ethernet/hisilicon/hns/hnae.h
347
void (*free_buffer)(struct hnae_ring *ring, struct hnae_desc_cb *cb);
drivers/net/ethernet/hisilicon/hns/hnae.h
348
int (*map_buffer)(struct hnae_ring *ring, struct hnae_desc_cb *cb);
drivers/net/ethernet/hisilicon/hns/hnae.h
349
void (*unmap_buffer)(struct hnae_ring *ring, struct hnae_desc_cb *cb);
drivers/net/ethernet/hisilicon/hns/hnae.h
476
void (*toggle_ring_irq)(struct hnae_ring *ring, u32 val);
drivers/net/ethernet/hisilicon/hns/hnae.h
564
#define ring_to_dev(ring) ((ring)->q->dev->dev)
drivers/net/ethernet/hisilicon/hns/hnae.h
586
static inline int hnae_reserve_buffer_map(struct hnae_ring *ring,
drivers/net/ethernet/hisilicon/hns/hnae.h
589
struct hnae_buf_ops *bops = ring->q->handle->bops;
drivers/net/ethernet/hisilicon/hns/hnae.h
592
ret = bops->alloc_buffer(ring, cb);
drivers/net/ethernet/hisilicon/hns/hnae.h
596
ret = bops->map_buffer(ring, cb);
drivers/net/ethernet/hisilicon/hns/hnae.h
603
bops->free_buffer(ring, cb);
drivers/net/ethernet/hisilicon/hns/hnae.h
608
static inline int hnae_alloc_buffer_attach(struct hnae_ring *ring, int i)
drivers/net/ethernet/hisilicon/hns/hnae.h
610
int ret = hnae_reserve_buffer_map(ring, &ring->desc_cb[i]);
drivers/net/ethernet/hisilicon/hns/hnae.h
615
ring->desc[i].addr = cpu_to_le64(ring->desc_cb[i].dma);
drivers/net/ethernet/hisilicon/hns/hnae.h
620
static inline void hnae_buffer_detach(struct hnae_ring *ring, int i)
drivers/net/ethernet/hisilicon/hns/hnae.h
622
ring->q->handle->bops->unmap_buffer(ring, &ring->desc_cb[i]);
drivers/net/ethernet/hisilicon/hns/hnae.h
623
ring->desc[i].addr = 0;
drivers/net/ethernet/hisilicon/hns/hnae.h
626
static inline void hnae_free_buffer_detach(struct hnae_ring *ring, int i)
drivers/net/ethernet/hisilicon/hns/hnae.h
628
struct hnae_buf_ops *bops = ring->q->handle->bops;
drivers/net/ethernet/hisilicon/hns/hnae.h
629
struct hnae_desc_cb *cb = &ring->desc_cb[i];
drivers/net/ethernet/hisilicon/hns/hnae.h
631
if (!ring->desc_cb[i].dma)
drivers/net/ethernet/hisilicon/hns/hnae.h
634
hnae_buffer_detach(ring, i);
drivers/net/ethernet/hisilicon/hns/hnae.h
635
bops->free_buffer(ring, cb);
drivers/net/ethernet/hisilicon/hns/hnae.h
639
static inline void hnae_replace_buffer(struct hnae_ring *ring, int i,
drivers/net/ethernet/hisilicon/hns/hnae.h
642
struct hnae_buf_ops *bops = ring->q->handle->bops;
drivers/net/ethernet/hisilicon/hns/hnae.h
644
bops->unmap_buffer(ring, &ring->desc_cb[i]);
drivers/net/ethernet/hisilicon/hns/hnae.h
645
ring->desc_cb[i] = *res_cb;
drivers/net/ethernet/hisilicon/hns/hnae.h
646
ring->desc[i].addr = cpu_to_le64(ring->desc_cb[i].dma);
drivers/net/ethernet/hisilicon/hns/hnae.h
647
ring->desc[i].rx.ipoff_bnum_pid_flag = 0;
drivers/net/ethernet/hisilicon/hns/hnae.h
650
static inline void hnae_reuse_buffer(struct hnae_ring *ring, int i)
drivers/net/ethernet/hisilicon/hns/hnae.h
652
ring->desc_cb[i].reuse_flag = 0;
drivers/net/ethernet/hisilicon/hns/hnae.h
653
ring->desc[i].addr = cpu_to_le64(ring->desc_cb[i].dma
drivers/net/ethernet/hisilicon/hns/hnae.h
654
+ ring->desc_cb[i].page_offset);
drivers/net/ethernet/hisilicon/hns/hnae.h
655
ring->desc[i].rx.ipoff_bnum_pid_flag = 0;
drivers/net/ethernet/hisilicon/hns/hnae.h
662
struct hnae_ring *ring;
drivers/net/ethernet/hisilicon/hns/hnae.h
665
ring = &h->qs[i]->rx_ring;
drivers/net/ethernet/hisilicon/hns/hnae.h
666
for (j = 0; j < ring->desc_num; j++)
drivers/net/ethernet/hisilicon/hns/hnae.h
667
ring->desc[j].addr = cpu_to_le64(ring->desc_cb[j].dma);
drivers/net/ethernet/hisilicon/hns/hnae.h
677
struct hnae_ring *ring;
drivers/net/ethernet/hisilicon/hns/hnae.h
680
ring = &h->qs[i]->rx_ring;
drivers/net/ethernet/hisilicon/hns/hnae.h
681
for (j = 0; j < ring->desc_num; j++) {
drivers/net/ethernet/hisilicon/hns/hnae.h
682
ring->desc_cb[j].page_offset = 0;
drivers/net/ethernet/hisilicon/hns/hnae.h
683
if (ring->desc[j].addr !=
drivers/net/ethernet/hisilicon/hns/hnae.h
684
cpu_to_le64(ring->desc_cb[j].dma))
drivers/net/ethernet/hisilicon/hns/hnae.h
685
ring->desc[j].addr =
drivers/net/ethernet/hisilicon/hns/hnae.h
686
cpu_to_le64(ring->desc_cb[j].dma);
drivers/net/ethernet/hisilicon/hns/hns_ae_adapt.c
194
struct ring_pair_cb *ring =
drivers/net/ethernet/hisilicon/hns/hns_ae_adapt.c
197
hns_rcb_init_hw(ring);
drivers/net/ethernet/hisilicon/hns/hns_ae_adapt.c
388
static void hns_ae_toggle_ring_irq(struct hnae_ring *ring, u32 mask)
drivers/net/ethernet/hisilicon/hns/hns_ae_adapt.c
392
if (is_tx_ring(ring))
drivers/net/ethernet/hisilicon/hns/hns_ae_adapt.c
397
hns_rcb_int_ctrl_hw(ring->q, flag, mask);
drivers/net/ethernet/hisilicon/hns/hns_ae_adapt.c
400
static void hns_aev2_toggle_ring_irq(struct hnae_ring *ring, u32 mask)
drivers/net/ethernet/hisilicon/hns/hns_ae_adapt.c
404
if (is_tx_ring(ring))
drivers/net/ethernet/hisilicon/hns/hns_ae_adapt.c
409
hns_rcbv2_int_ctrl_hw(ring->q, flag, mask);
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
241
struct hnae_ring *ring =
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
243
dma_addr_t dma = ring->desc_dma_addr;
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
251
hns_rcb_set_rx_ring_bs(q, ring->buf_size);
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
263
hns_rcb_set_tx_ring_bs(q, ring->buf_size);
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
276
void hns_rcb_init_hw(struct ring_pair_cb *ring)
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
278
hns_rcb_ring_init(ring, RX_RING);
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
279
hns_rcb_ring_init(ring, TX_RING);
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
434
struct hnae_ring *ring;
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
443
ring = &q->rx_ring;
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
444
ring->io_base = ring_pair_cb->q.io_base;
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
448
ring = &q->tx_ring;
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
449
ring->io_base = ring_pair_cb->q.io_base +
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
459
ring->desc = NULL;
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
460
ring->desc_cb = NULL;
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
462
ring->irq = ring_pair_cb->virq[irq_idx];
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
463
ring->desc_dma_addr = 0;
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
465
ring->buf_size = RCB_DEFAULT_BUFFER_SIZE;
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
466
ring->desc_num = desc_num;
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
467
ring->max_desc_num_per_pkt = mdnum_ppkt;
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
468
ring->max_raw_data_sz_per_desc = HNS_RCB_MAX_PKT_SIZE;
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
469
ring->max_pkt_size = HNS_RCB_MAX_PKT_SIZE;
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
470
ring->next_to_use = 0;
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
471
ring->next_to_clean = 0;
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
811
struct ring_pair_cb *ring =
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
813
struct dsaf_device *dsaf_dev = ring->rcb_common->dsaf_dev;
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
815
= dsaf_dev->ppe_common[ring->rcb_common->comm_index];
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
816
struct hns_ring_hw_stats *hw_stats = &ring->hw_stats;
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
823
PPE_COM_HIS_RX_PKT_QID_OK_CNT_REG + 4 * ring->index);
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
825
PPE_COM_HIS_RX_PKT_QID_DROP_CNT_REG + 4 * ring->index);
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
832
PPE_COM_HIS_TX_PKT_QID_OK_CNT_REG + 4 * ring->index);
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
834
PPE_COM_HIS_TX_PKT_QID_ERR_CNT_REG + 4 * ring->index);
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
845
struct ring_pair_cb *ring =
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c
847
struct hns_ring_hw_stats *hw_stats = &ring->hw_stats;
drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.h
131
void hns_rcb_init_hw(struct ring_pair_cb *ring);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1002
ring->stats.tx_pkts += pkts;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1003
ring->stats.tx_bytes += bytes;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1012
(ring_space(ring) >= ring->max_desc_num_per_pkt * 2))) {
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1020
ring->stats.restart_queue++;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1028
struct hnae_ring *ring = ring_data->ring;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1031
ring_data->ring->q->handle->dev->ops->toggle_ring_irq(ring, 0);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1033
head = readl_relaxed(ring->io_base + RCB_REG_HEAD);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1035
if (head != ring->next_to_clean) {
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1036
ring_data->ring->q->handle->dev->ops->toggle_ring_irq(
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1037
ring_data->ring, 1);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1047
struct hnae_ring *ring = ring_data->ring;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1048
int head = readl_relaxed(ring->io_base + RCB_REG_HEAD);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1050
if (head == ring->next_to_clean)
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1058
struct hnae_ring *ring = ring_data->ring;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1064
head = ring->next_to_use; /* ntu :soft setted ring position*/
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1067
while (head != ring->next_to_clean)
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1068
hns_nic_reclaim_one_desc(ring, &bytes, &pkts);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1079
struct hnae_ring *ring = ring_data->ring;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1088
ring->q->handle->dev->ops->toggle_ring_irq(ring, 0);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1101
ring_data->ring->q->handle->dev->ops->toggle_ring_irq(
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1102
ring_data->ring, 1);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1198
enable_irq(priv->ring_data[idx].ring->irq);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1199
h->dev->ops->toggle_ring_irq(priv->ring_data[idx].ring, 0);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
125
ring_ptr_move_fw(ring, next_to_use);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1250
h->dev->ops->toggle_ring_irq(priv->ring_data[idx].ring, 1);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1251
disable_irq(priv->ring_data[idx].ring->irq);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1257
struct hnae_ring *ring, cpumask_t *mask)
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1266
if (is_tx_ring(ring))
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1271
if (is_tx_ring(ring))
drivers/net/ethernet/hisilicon/hns/hns_enet.c
128
static void fill_v2_desc(struct hnae_ring *ring, void *priv,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1288
if (priv->ring_data[i].ring->irq_init_flag == RCB_IRQ_INITED) {
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1289
irq_set_affinity_hint(priv->ring_data[i].ring->irq,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1291
free_irq(priv->ring_data[i].ring->irq,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1293
priv->ring_data[i].ring->irq_init_flag =
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1310
if (rd->ring->irq_init_flag == RCB_IRQ_INITED)
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1313
snprintf(rd->ring->ring_name, RCB_RING_NAME_LEN,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1315
(is_tx_ring(rd->ring) ? "tx" : "rx"), rd->queue_index);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1317
rd->ring->ring_name[RCB_RING_NAME_LEN - 1] = '\0';
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1319
irq_set_status_flags(rd->ring->irq, IRQ_NOAUTOEN);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
132
fill_v2_desc_hw(ring, priv, size, size, dma, frag_end,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1320
ret = request_irq(rd->ring->irq,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1321
hns_irq_handle, 0, rd->ring->ring_name, rd);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1324
rd->ring->irq);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1329
rd->ring, &rd->mask);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1332
irq_set_affinity_hint(rd->ring->irq,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1335
rd->ring->irq_init_flag = RCB_IRQ_INITED;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
143
static void fill_desc(struct hnae_ring *ring, void *priv,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
148
struct hnae_desc *desc = &ring->desc[ring->next_to_use];
drivers/net/ethernet/hisilicon/hns/hns_enet.c
149
struct hnae_desc_cb *desc_cb = &ring->desc_cb[ring->next_to_use];
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1629
struct hnae_ring *ring;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1664
ring = &h->qs[i]->rx_ring;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1665
head = readl_relaxed(ring->io_base + RCB_REG_HEAD);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1666
tail = readl_relaxed(ring->io_base + RCB_REG_TAIL);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1668
fetch_num = ring_dist(ring, head, tail);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1671
if (ring->desc_cb[head].page_offset != 0) {
drivers/net/ethernet/hisilicon/hns/hns_enet.c
1677
if (head == ring->desc_num)
drivers/net/ethernet/hisilicon/hns/hns_enet.c
202
ring_ptr_move_fw(ring, next_to_use);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
205
static void unfill_desc(struct hnae_ring *ring)
drivers/net/ethernet/hisilicon/hns/hns_enet.c
207
ring_ptr_move_bw(ring, next_to_use);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
211
struct sk_buff **out_skb, int *bnum, struct hnae_ring *ring)
drivers/net/ethernet/hisilicon/hns/hns_enet.c
2123
rd->ring = &h->qs[i]->tx_ring;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
2129
rd->ring->irq_init_flag = RCB_IRQ_NOT_INITED;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
2134
rd->ring = &h->qs[i - h->q_num]->rx_ring;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
2141
rd->ring->irq_init_flag = RCB_IRQ_NOT_INITED;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
2154
if (priv->ring_data[i].ring->irq_init_flag == RCB_IRQ_INITED) {
drivers/net/ethernet/hisilicon/hns/hns_enet.c
2156
priv->ring_data[i].ring->irq,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
2158
free_irq(priv->ring_data[i].ring->irq,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
2162
priv->ring_data[i].ring->irq_init_flag = RCB_IRQ_NOT_INITED;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
220
if (unlikely(buf_num > ring->max_desc_num_per_pkt)) {
drivers/net/ethernet/hisilicon/hns/hns_enet.c
221
if (ring_space(ring) < 1)
drivers/net/ethernet/hisilicon/hns/hns_enet.c
231
} else if (buf_num > ring_space(ring)) {
drivers/net/ethernet/hisilicon/hns/hns_enet.c
240
struct sk_buff **out_skb, int *bnum, struct hnae_ring *ring)
drivers/net/ethernet/hisilicon/hns/hns_enet.c
260
if (unlikely(buf_num > ring->max_desc_num_per_pkt)) {
drivers/net/ethernet/hisilicon/hns/hns_enet.c
262
if (ring_space(ring) < buf_num)
drivers/net/ethernet/hisilicon/hns/hns_enet.c
271
} else if (ring_space(ring) < buf_num) {
drivers/net/ethernet/hisilicon/hns/hns_enet.c
280
struct hnae_ring *ring)
drivers/net/ethernet/hisilicon/hns/hns_enet.c
283
return hns_nic_maybe_stop_tso(out_skb, bnum, ring);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
285
return hns_nic_maybe_stop_tx(out_skb, bnum, ring);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
288
static void fill_tso_desc(struct hnae_ring *ring, void *priv,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
302
fill_v2_desc_hw(ring, priv, k == 0 ? size : 0,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
313
static void fill_desc_v2(struct hnae_ring *ring, void *priv,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
319
fill_tso_desc(ring, priv, size, dma, frag_end, buf_num, type,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
322
fill_v2_desc(ring, priv, size, dma, frag_end, buf_num, type,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
331
struct hnae_ring *ring = ring_data->ring;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
332
struct device *dev = ring_to_dev(ring);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
342
switch (priv->ops.maybe_stop_tx(&skb, &buf_num, ring)) {
drivers/net/ethernet/hisilicon/hns/hns_enet.c
344
ring->stats.tx_busy++;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
347
ring->stats.sw_err_cnt++;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
35
static void fill_v2_desc_hw(struct hnae_ring *ring, void *priv, int size,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
356
next_to_use = ring->next_to_use;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
363
ring->stats.sw_err_cnt++;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
367
priv->ops.fill_desc(ring, skb, size, dma, seg_num == 1 ? 1 : 0,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
377
ring->stats.sw_err_cnt++;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
380
priv->ops.fill_desc(ring, skb_frag_page(frag), size, dma,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
39
struct hnae_desc *desc = &ring->desc[ring->next_to_use];
drivers/net/ethernet/hisilicon/hns/hns_enet.c
40
struct hnae_desc_cb *desc_cb = &ring->desc_cb[ring->next_to_use];
drivers/net/ethernet/hisilicon/hns/hns_enet.c
401
while (ring->next_to_use != next_to_use) {
drivers/net/ethernet/hisilicon/hns/hns_enet.c
402
unfill_desc(ring);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
403
if (ring->next_to_use != next_to_use)
drivers/net/ethernet/hisilicon/hns/hns_enet.c
405
ring->desc_cb[ring->next_to_use].dma,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
406
ring->desc_cb[ring->next_to_use].length,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
410
ring->desc_cb[next_to_use].dma,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
411
ring->desc_cb[next_to_use].length,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
433
struct hnae_ring *ring, int pull_len,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
443
hnae_buf_size(ring) == HNS_BUFFER_SIZE_2048);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
445
desc = &ring->desc[ring->next_to_clean];
drivers/net/ethernet/hisilicon/hns/hns_enet.c
449
truesize = hnae_buf_size(ring);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
452
last_offset = hnae_page_size(ring) - hnae_buf_size(ring);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
565
struct hnae_ring *ring = ring_data->ring;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
576
desc = &ring->desc[ring->next_to_clean];
drivers/net/ethernet/hisilicon/hns/hns_enet.c
577
desc_cb = &ring->desc_cb[ring->next_to_clean];
drivers/net/ethernet/hisilicon/hns/hns_enet.c
589
ring->stats.sw_err_cnt++;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
608
ring_ptr_move_fw(ring, next_to_clean);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
615
ring->stats.seg_pkt_cnt++;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
621
hns_nic_reuse_page(skb, 0, ring, pull_len, desc_cb);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
622
ring_ptr_move_fw(ring, next_to_clean);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
629
desc = &ring->desc[ring->next_to_clean];
drivers/net/ethernet/hisilicon/hns/hns_enet.c
630
desc_cb = &ring->desc_cb[ring->next_to_clean];
drivers/net/ethernet/hisilicon/hns/hns_enet.c
632
hns_nic_reuse_page(skb, i, ring, 0, desc_cb);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
633
ring_ptr_move_fw(ring, next_to_clean);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
638
if (unlikely((!bnum) || (bnum > ring->max_desc_num_per_pkt))) {
drivers/net/ethernet/hisilicon/hns/hns_enet.c
642
bnum, ring->max_desc_num_per_pkt,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
645
ring->stats.err_bd_num++;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
655
ring->stats.non_vld_descs++;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
662
ring->stats.err_pkt_len++;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
668
ring->stats.l2_err++;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
67
HNSV2_TXD_PORTID_S, ring->q->handle->dport_id);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
673
ring->stats.rx_pkts++;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
674
ring->stats.rx_bytes += skb->len;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
690
struct hnae_ring *ring = ring_data->ring;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
694
desc_cb = &ring->desc_cb[ring->next_to_use];
drivers/net/ethernet/hisilicon/hns/hns_enet.c
696
ring->stats.reuse_pg_cnt++;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
697
hnae_reuse_buffer(ring, ring->next_to_use);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
699
ret = hnae_reserve_buffer_map(ring, &res_cbs);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
701
ring->stats.sw_err_cnt++;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
705
hnae_replace_buffer(ring, ring->next_to_use, &res_cbs);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
708
ring_ptr_move_fw(ring, next_to_use);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
712
writel_relaxed(i, ring->io_base + RCB_REG_HEAD);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
726
static int hns_desc_unused(struct hnae_ring *ring)
drivers/net/ethernet/hisilicon/hns/hns_enet.c
728
int ntc = ring->next_to_clean;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
729
int ntu = ring->next_to_use;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
731
return ((ntc >= ntu) ? 0 : ring->desc_num) + ntc - ntu;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
739
static u32 hns_coal_rx_bdnum(struct hnae_ring *ring)
drivers/net/ethernet/hisilicon/hns/hns_enet.c
741
bool coal_enable = ring->q->handle->coal_adapt_en;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
744
ring->coal_last_rx_bytes > HNS_LOWEST_LATENCY_RATE)
drivers/net/ethernet/hisilicon/hns/hns_enet.c
750
static void hns_update_rx_rate(struct hnae_ring *ring)
drivers/net/ethernet/hisilicon/hns/hns_enet.c
752
bool coal_enable = ring->q->handle->coal_adapt_en;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
757
time_before(jiffies, ring->coal_last_jiffies + (HZ >> 4)))
drivers/net/ethernet/hisilicon/hns/hns_enet.c
761
if (ring->coal_last_rx_bytes > ring->stats.rx_bytes) {
drivers/net/ethernet/hisilicon/hns/hns_enet.c
762
ring->coal_last_rx_bytes = ring->stats.rx_bytes;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
763
ring->coal_last_jiffies = jiffies;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
767
total_bytes = ring->stats.rx_bytes - ring->coal_last_rx_bytes;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
768
time_passed_ms = jiffies_to_msecs(jiffies - ring->coal_last_jiffies);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
770
ring->coal_rx_rate = total_bytes >> 10;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
772
ring->coal_last_rx_bytes = ring->stats.rx_bytes;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
773
ring->coal_last_jiffies = jiffies;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
801
struct hnae_ring *ring = ring_data->ring;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
802
struct hnae_handle *handle = ring->q->handle;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
803
u32 new_coal_param, old_coal_param = ring->coal_param;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
805
if (ring->coal_rx_rate < HNS_LOWEST_LATENCY_RATE)
drivers/net/ethernet/hisilicon/hns/hns_enet.c
807
else if (ring->coal_rx_rate < HNS_LOW_LATENCY_RATE)
drivers/net/ethernet/hisilicon/hns/hns_enet.c
817
ring->coal_param = new_coal_param;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
846
struct hnae_ring *ring = ring_data->ring;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
851
int unused_count = hns_desc_unused(ring);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
853
num = readl_relaxed(ring->io_base + RCB_REG_FBDNUM);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
865
unused_count = hns_desc_unused(ring);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
897
struct hnae_ring *ring = ring_data->ring;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
901
hns_update_rx_rate(ring);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
904
ring_data->ring->q->handle->dev->ops->toggle_ring_irq(ring, 0);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
905
num = readl_relaxed(ring->io_base + RCB_REG_FBDNUM);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
907
if (num <= hns_coal_rx_bdnum(ring)) {
drivers/net/ethernet/hisilicon/hns/hns_enet.c
908
if (ring->q->handle->coal_adapt_en)
drivers/net/ethernet/hisilicon/hns/hns_enet.c
913
ring_data->ring->q->handle->dev->ops->toggle_ring_irq(
drivers/net/ethernet/hisilicon/hns/hns_enet.c
914
ring_data->ring, 1);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
924
struct hnae_ring *ring = ring_data->ring;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
927
hns_update_rx_rate(ring);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
928
num = readl_relaxed(ring->io_base + RCB_REG_FBDNUM);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
930
if (num <= hns_coal_rx_bdnum(ring)) {
drivers/net/ethernet/hisilicon/hns/hns_enet.c
931
if (ring->q->handle->coal_adapt_en)
drivers/net/ethernet/hisilicon/hns/hns_enet.c
940
static inline void hns_nic_reclaim_one_desc(struct hnae_ring *ring,
drivers/net/ethernet/hisilicon/hns/hns_enet.c
943
struct hnae_desc_cb *desc_cb = &ring->desc_cb[ring->next_to_clean];
drivers/net/ethernet/hisilicon/hns/hns_enet.c
948
hnae_free_buffer_detach(ring, ring->next_to_clean);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
950
ring_ptr_move_fw(ring, next_to_clean);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
953
static int is_valid_clean_head(struct hnae_ring *ring, int h)
drivers/net/ethernet/hisilicon/hns/hns_enet.c
955
int u = ring->next_to_use;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
956
int c = ring->next_to_clean;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
958
if (unlikely(h > ring->desc_num))
drivers/net/ethernet/hisilicon/hns/hns_enet.c
961
assert(u > 0 && u < ring->desc_num);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
962
assert(c > 0 && c < ring->desc_num);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
974
struct hnae_ring *ring = ring_data->ring;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
981
head = readl_relaxed(ring->io_base + RCB_REG_HEAD);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
984
if (is_ring_empty(ring) || head == ring->next_to_clean)
drivers/net/ethernet/hisilicon/hns/hns_enet.c
987
if (!is_valid_clean_head(ring, head)) {
drivers/net/ethernet/hisilicon/hns/hns_enet.c
989
ring->next_to_use, ring->next_to_clean);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
990
ring->stats.io_err_cnt++;
drivers/net/ethernet/hisilicon/hns/hns_enet.c
996
while (head != ring->next_to_clean) {
drivers/net/ethernet/hisilicon/hns/hns_enet.c
997
hns_nic_reclaim_one_desc(ring, &bytes, &pkts);
drivers/net/ethernet/hisilicon/hns/hns_enet.c
999
prefetch(&ring->desc_cb[ring->next_to_clean]);
drivers/net/ethernet/hisilicon/hns/hns_enet.h
34
struct hnae_ring *ring;
drivers/net/ethernet/hisilicon/hns/hns_enet.h
45
void (*fill_desc)(struct hnae_ring *ring, void *priv,
drivers/net/ethernet/hisilicon/hns/hns_enet.h
50
int *bnum, struct hnae_ring *ring);
drivers/net/ethernet/hisilicon/hns/hns_ethtool.c
375
struct hnae_ring *ring;
drivers/net/ethernet/hisilicon/hns/hns_ethtool.c
404
ring = ring_data->ring;
drivers/net/ethernet/hisilicon/hns/hns_ethtool.c
406
if (is_tx_ring(ring)) { /* for tx queue reset*/
drivers/net/ethernet/hisilicon/hns3/hnae3.h
191
#define ring_ptr_move_fw(ring, p) \
drivers/net/ethernet/hisilicon/hns3/hnae3.h
192
((ring)->p = ((ring)->p + 1) % (ring)->desc_num)
drivers/net/ethernet/hisilicon/hns3/hnae3.h
193
#define ring_ptr_move_bw(ring, p) \
drivers/net/ethernet/hisilicon/hns3/hnae3.h
194
((ring)->p = ((ring)->p - 1 + (ring)->desc_num) % (ring)->desc_num)
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
10
dma_addr_t dma = ring->desc_dma_addr;
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
104
void hclge_comm_free_cmd_desc(struct hclge_comm_cmq_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
106
int size = ring->desc_num * sizeof(struct hclge_desc);
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
108
if (!ring->desc)
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
111
dma_free_coherent(&ring->pdev->dev, size,
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
112
ring->desc, ring->desc_dma_addr);
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
113
ring->desc = NULL;
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
116
static int hclge_comm_alloc_cmd_desc(struct hclge_comm_cmq_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
118
int size = ring->desc_num * sizeof(struct hclge_desc);
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
120
ring->desc = dma_alloc_coherent(&ring->pdev->dev,
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
121
size, &ring->desc_dma_addr, GFP_KERNEL);
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
122
if (!ring->desc)
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
13
if (ring->ring_type == HCLGE_COMM_TYPE_CSQ) {
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
20
reg_val |= ring->desc_num >> HCLGE_COMM_NIC_CMQ_DESC_NUM_S;
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
212
struct hclge_comm_cmq_ring *ring =
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
217
ring->ring_type = ring_type;
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
219
ret = hclge_comm_alloc_cmd_desc(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
221
dev_err(&ring->pdev->dev, "descriptor %s alloc error %d\n",
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
286
static int hclge_comm_ring_space(struct hclge_comm_cmq_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
288
int ntc = ring->next_to_clean;
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
289
int ntu = ring->next_to_use;
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
29
reg_val = ring->desc_num >> HCLGE_COMM_NIC_CMQ_DESC_NUM_S;
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
290
int used = (ntu - ntc + ring->desc_num) % ring->desc_num;
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
292
return ring->desc_num - used - 1;
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
311
static int hclge_comm_is_valid_csq_clean_head(struct hclge_comm_cmq_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
314
int ntc = ring->next_to_clean;
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
315
int ntu = ring->next_to_use;
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.c
8
struct hclge_comm_cmq_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_common/hclge_comm_cmd.h
488
void hclge_comm_free_cmd_desc(struct hclge_comm_cmq_ring *ring);
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
459
static void hns3_dump_rx_queue_info(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
463
void __iomem *base = ring->tqp->io_base;
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
479
seq_printf(s, "%-11u", ring->rx_copybreak);
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
498
struct hns3_enet_ring *ring;
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
501
if (!priv->ring) {
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
518
ring = &priv->ring[(u32)(i + h->kinfo.num_tqps)];
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
519
hns3_dump_rx_queue_info(ring, s, i);
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
525
static void hns3_dump_tx_queue_info(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
529
void __iomem *base = ring->tqp->io_base;
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
563
struct hns3_enet_ring *ring;
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
566
if (!priv->ring) {
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
583
ring = &priv->ring[i];
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
584
hns3_dump_tx_queue_info(ring, s, i);
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
602
if (!priv->ring || !priv->ring[i].tqp_vector)
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
606
priv->ring[i].tqp_vector->vector_irq);
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
642
struct hns3_enet_ring *ring;
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
656
ring = &priv->ring[data->qid + data->handle->kinfo.num_tqps];
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
657
for (i = 0; i < ring->desc_num; i++) {
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
658
desc = &ring->desc[i];
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
687
struct hns3_enet_ring *ring;
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
701
ring = &priv->ring[data->qid];
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
702
for (i = 0; i < ring->desc_num; i++) {
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
703
desc = &ring->desc[i];
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
773
static void hns3_dump_page_pool_info(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
778
READ_ONCE(ring->page_pool->pages_state_hold_cnt),
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
779
atomic_read(&ring->page_pool->pages_state_release_cnt),
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
780
ring->page_pool->p.pool_size,
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
781
ring->page_pool->p.order,
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
782
ring->page_pool->p.nid,
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
783
ring->page_pool->p.max_len / 1024);
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
790
struct hns3_enet_ring *ring;
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
793
if (!priv->ring) {
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
798
if (!priv->ring[h->kinfo.num_tqps].page_pool) {
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
811
ring = &priv->ring[(u32)(i + h->kinfo.num_tqps)];
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
812
hns3_dump_page_pool_info(ring, s, i);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1006
static bool hns3_can_use_tx_bounce(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1010
u32 len = skb->len <= ring->tx_copybreak ? skb->len :
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1013
if (len > ring->tx_copybreak)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1017
hns3_ring_stats_update(ring, tx_spare_full);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1024
static bool hns3_can_use_tx_sgl(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1028
if (skb->len <= ring->tx_copybreak || !tx_sgl ||
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1034
hns3_ring_stats_update(ring, tx_spare_full);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1041
static void hns3_init_tx_spare_buffer(struct hns3_enet_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1043
u32 alloc_size = ring->tqp->handle->kinfo.tx_spare_buf_size;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1044
struct net_device *netdev = ring_to_netdev(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1057
dev_warn(ring_to_dev(ring), "failed to allocate tx spare buffer, exceed to max order\n");
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1061
tx_spare = devm_kzalloc(ring_to_dev(ring), sizeof(*tx_spare),
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1065
dev_warn(ring_to_dev(ring), "failed to allocate hns3_tx_spare\n");
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1069
page = alloc_pages_node(dev_to_node(ring_to_dev(ring)),
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1072
dev_warn(ring_to_dev(ring), "failed to allocate tx spare pages\n");
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1076
dma = dma_map_page(ring_to_dev(ring), page, 0,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1078
if (dma_mapping_error(ring_to_dev(ring), dma)) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1079
dev_warn(ring_to_dev(ring), "failed to map pages for tx spare\n");
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1086
ring->tx_spare = tx_spare;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1087
ring->tx_copybreak = priv->tx_copybreak;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1093
devm_kfree(ring_to_dev(ring), tx_spare);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1095
ring->tqp->handle->kinfo.tx_spare_buf_size = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1102
ring->tx_spare = NULL;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1108
static void *hns3_tx_spare_alloc(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1112
struct hns3_tx_spare *tx_spare = ring->tx_spare;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1135
static void hns3_tx_spare_rollback(struct hns3_enet_ring *ring, u32 len)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1137
struct hns3_tx_spare *tx_spare = ring->tx_spare;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1147
static void hns3_tx_spare_reclaim_cb(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1150
struct hns3_tx_spare *tx_spare = ring->tx_spare;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1173
dma_sync_single_for_cpu(ring_to_dev(ring), dma, len,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1178
dma_unmap_sg(ring_to_dev(ring), sgt->sgl, sgt->orig_nents,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1598
static int hns3_handle_vlan_info(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1604
ret = hns3_handle_vtags(ring, skb);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1606
hns3_ring_stats_update(ring, tx_vlan_err);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1623
static int hns3_handle_csum_partial(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1646
hns3_ring_stats_update(ring, tx_l4_proto_err);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1654
hns3_ring_stats_update(ring, tx_l2l3l4_err);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1661
hns3_ring_stats_update(ring, tx_tso_err);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1667
static int hns3_fill_skb_desc(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1675
ret = hns3_handle_vlan_info(ring, skb, &param);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1682
ret = hns3_handle_csum_partial(ring, skb, desc_cb, &param);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1699
static int hns3_fill_desc(struct hns3_enet_ring *ring, dma_addr_t dma,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1704
struct hns3_desc *desc = &ring->desc[ring->next_to_use];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1714
trace_hns3_tx_desc(ring, ring->next_to_use);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1715
ring_ptr_move_fw(ring, next_to_use);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1732
trace_hns3_tx_desc(ring, ring->next_to_use);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1734
ring_ptr_move_fw(ring, next_to_use);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1736
desc = &ring->desc[ring->next_to_use];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1742
static int hns3_map_and_fill_desc(struct hns3_enet_ring *ring, void *priv,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1745
struct hns3_desc_cb *desc_cb = &ring->desc_cb[ring->next_to_use];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1746
struct device *dev = ring_to_dev(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1774
hns3_ring_stats_update(ring, sw_err_cnt);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1783
return hns3_fill_desc(ring, dma, size);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1912
static int hns3_skb_linearize(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1920
hns3_ring_stats_update(ring, over_max_recursion);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1929
hns3_ring_stats_update(ring, hw_limitation);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1934
hns3_ring_stats_update(ring, sw_err_cnt);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1941
static int hns3_nic_maybe_stop_tx(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1959
if (hns3_skb_linearize(ring, skb, bd_num))
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1964
hns3_ring_stats_update(ring, tx_copy);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1968
if (likely(ring_space(ring) >= bd_num))
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1971
netif_stop_subqueue(netdev, ring->queue_index);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1978
if (ring_space(ring) >= bd_num && netif_carrier_ok(netdev) &&
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1980
netif_start_subqueue(netdev, ring->queue_index);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1984
hns3_ring_stats_update(ring, tx_busy);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1989
static void hns3_clear_desc(struct hns3_enet_ring *ring, int next_to_use_orig)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1991
struct device *dev = ring_to_dev(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1994
for (i = 0; i < ring->desc_num; i++) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
1995
struct hns3_desc *desc = &ring->desc[ring->next_to_use];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2001
if (ring->next_to_use == next_to_use_orig)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2005
ring_ptr_move_bw(ring, next_to_use);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2007
desc_cb = &ring->desc_cb[ring->next_to_use];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2018
hns3_tx_spare_rollback(ring, desc_cb->length);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2029
static int hns3_fill_skb_to_desc(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2035
ret = hns3_map_and_fill_desc(ring, skb, type);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2044
ret = hns3_map_and_fill_desc(ring, frag, DESC_TYPE_PAGE);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2052
ret = hns3_fill_skb_to_desc(ring, frag_skb,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2063
static void hns3_tx_push_bd(struct hns3_enet_ring *ring, int num)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2076
int idx = (ring->next_to_use - num + ring->desc_num) %
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2077
ring->desc_num;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2079
u64_stats_update_begin(&ring->syncp);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2080
ring->stats.tx_push++;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2081
u64_stats_update_end(&ring->syncp);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2082
memcpy(&desc[offset], &ring->desc[idx],
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2087
__iowrite64_copy(ring->tqp->mem_base, desc,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2092
static void hns3_tx_mem_doorbell(struct hns3_enet_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2096
__le64 bd_num = cpu_to_le64((u64)ring->pending_buf);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2103
__iowrite64_copy(ring->tqp->mem_base + HNS3_MEM_DOORBELL_OFFSET,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2105
u64_stats_update_begin(&ring->syncp);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2106
ring->stats.tx_mem_doorbell += ring->pending_buf;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2107
u64_stats_update_end(&ring->syncp);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2110
static void hns3_tx_doorbell(struct hns3_enet_ring *ring, int num,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2113
struct net_device *netdev = ring_to_netdev(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2120
!ring->pending_buf && num <= HNS3_MAX_PUSH_BD_NUM && doorbell) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2125
smp_store_release(&ring->last_to_use, ring->next_to_use);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2126
hns3_tx_push_bd(ring, num);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2130
ring->pending_buf += num;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2133
hns3_ring_stats_update(ring, tx_more);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2140
smp_store_release(&ring->last_to_use, ring->next_to_use);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2142
if (ring->tqp->mem_base)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2143
hns3_tx_mem_doorbell(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2145
writel(ring->pending_buf,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2146
ring->tqp->io_base + HNS3_RING_TX_RING_TAIL_REG);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2148
ring->pending_buf = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2163
static int hns3_handle_tx_bounce(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2166
struct hns3_desc_cb *desc_cb = &ring->desc_cb[ring->next_to_use];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2175
if (skb->len <= ring->tx_copybreak) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2183
buf = hns3_tx_spare_alloc(ring, size, &dma, &cb_len);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2187
hns3_tx_spare_rollback(ring, cb_len);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2188
hns3_ring_stats_update(ring, copy_bits_err);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2197
bd_num += hns3_fill_desc(ring, dma, size);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2200
ret = hns3_fill_skb_to_desc(ring, skb,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2208
dma_sync_single_for_device(ring_to_dev(ring), dma, size,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2211
hns3_ring_stats_update(ring, tx_bounce);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2216
static int hns3_handle_tx_sgl(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2219
struct hns3_desc_cb *desc_cb = &ring->desc_cb[ring->next_to_use];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2233
sgt = hns3_tx_spare_alloc(ring, HNS3_SGL_SIZE(nfrag),
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2241
hns3_tx_spare_rollback(ring, cb_len);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2242
hns3_ring_stats_update(ring, skb2sgl_err);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2247
sgt->nents = dma_map_sg(ring_to_dev(ring), sgt->sgl, sgt->orig_nents,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2250
hns3_tx_spare_rollback(ring, cb_len);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2251
hns3_ring_stats_update(ring, map_sg_err);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2261
bd_num += hns3_fill_desc(ring, sg_dma_address(sgt->sgl + i),
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2263
hns3_ring_stats_update(ring, tx_sgl);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2268
static int hns3_handle_desc_filling(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2273
if (!ring->tx_spare)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2276
space = hns3_tx_spare_space(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2278
if (hns3_can_use_tx_sgl(ring, skb, space))
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2279
return hns3_handle_tx_sgl(ring, skb);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2281
if (hns3_can_use_tx_bounce(ring, skb, space))
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2282
return hns3_handle_tx_bounce(ring, skb);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2285
return hns3_fill_skb_to_desc(ring, skb, DESC_TYPE_SKB);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2288
static int hns3_handle_skb_desc(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2295
ret = hns3_fill_skb_desc(ring, skb, &ring->desc[ring->next_to_use],
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2304
ret = hns3_handle_desc_filling(ring, skb);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2309
hns3_clear_desc(ring, next_to_use_head);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2316
struct hns3_enet_ring *ring = &priv->ring[skb->queue_mapping];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2317
struct hns3_desc_cb *desc_cb = &ring->desc_cb[ring->next_to_use];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2324
hns3_tx_doorbell(ring, 0, !netdev_xmit_more());
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2326
hns3_ring_stats_update(ring, sw_err_cnt);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2334
ret = hns3_nic_maybe_stop_tx(ring, netdev, skb);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2337
hns3_tx_doorbell(ring, 0, true);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2345
ret = hns3_handle_skb_desc(ring, skb, desc_cb, ring->next_to_use);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2349
pre_ntu = ring->next_to_use ? (ring->next_to_use - 1) :
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2350
(ring->desc_num - 1);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2353
hns3_tsyn(netdev, skb, &ring->desc[pre_ntu]);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2355
ring->desc[pre_ntu].tx.bdtp_fe_sc_vld_ra_ri |=
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2357
trace_hns3_tx_desc(ring, pre_ntu);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2362
dev_queue = netdev_get_tx_queue(netdev, ring->queue_index);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2365
hns3_tx_doorbell(ring, ret, doorbell);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2371
hns3_tx_doorbell(ring, 0, !netdev_xmit_more());
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2538
struct hns3_enet_ring *ring, bool is_tx)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2544
start = u64_stats_fetch_begin(&ring->syncp);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2545
ring_stats = ring->stats;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2546
} while (u64_stats_fetch_retry(&ring->syncp, start));
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2590
struct hns3_enet_ring *ring;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2601
ring = &priv->ring[idx];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2602
hns3_fetch_stats(&ring_total_stats, ring, true);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2605
ring = &priv->ring[idx + queue_num];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2606
hns3_fetch_stats(&ring_total_stats, ring, false);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
2907
tx_ring = &priv->ring[timeout_queue];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3412
static int hns3_alloc_buffer(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3415
unsigned int order = hns3_page_order(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3418
if (ring->page_pool) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3419
p = page_pool_dev_alloc_frag(ring->page_pool,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3421
hns3_buf_size(ring));
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3441
cb->length = hns3_page_size(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3449
static void hns3_free_buffer(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3455
else if (!HNAE3_IS_TX_RING(ring)) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3459
page_pool_put_full_page(ring->page_pool, cb->priv,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3465
static int hns3_map_buffer(struct hns3_enet_ring *ring, struct hns3_desc_cb *cb)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3467
cb->dma = dma_map_page(ring_to_dev(ring), cb->priv, 0,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3468
cb->length, ring_to_dma_dir(ring));
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3470
if (unlikely(dma_mapping_error(ring_to_dev(ring), cb->dma)))
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3476
static void hns3_unmap_buffer(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3480
dma_unmap_single(ring_to_dev(ring), cb->dma, cb->length,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3481
ring_to_dma_dir(ring));
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3483
dma_unmap_page(ring_to_dev(ring), cb->dma, cb->length,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3484
ring_to_dma_dir(ring));
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3487
hns3_tx_spare_reclaim_cb(ring, cb);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3490
static void hns3_buffer_detach(struct hns3_enet_ring *ring, int i)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3492
hns3_unmap_buffer(ring, &ring->desc_cb[i]);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3493
ring->desc[i].addr = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3494
ring->desc_cb[i].refill = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3497
static void hns3_free_buffer_detach(struct hns3_enet_ring *ring, int i,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3500
struct hns3_desc_cb *cb = &ring->desc_cb[i];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3502
if (!ring->desc_cb[i].dma)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3505
hns3_buffer_detach(ring, i);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3506
hns3_free_buffer(ring, cb, budget);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3509
static void hns3_free_buffers(struct hns3_enet_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3513
for (i = 0; i < ring->desc_num; i++)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3514
hns3_free_buffer_detach(ring, i, 0);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3518
static void hns3_free_desc(struct hns3_enet_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3520
int size = ring->desc_num * sizeof(ring->desc[0]);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3522
hns3_free_buffers(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3524
if (ring->desc) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3525
dma_free_coherent(ring_to_dev(ring), size,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3526
ring->desc, ring->desc_dma_addr);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3527
ring->desc = NULL;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3531
static int hns3_alloc_desc(struct hns3_enet_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3533
int size = ring->desc_num * sizeof(ring->desc[0]);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3535
ring->desc = dma_alloc_coherent(ring_to_dev(ring), size,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3536
&ring->desc_dma_addr, GFP_KERNEL);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3537
if (!ring->desc)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3543
static int hns3_alloc_and_map_buffer(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3548
ret = hns3_alloc_buffer(ring, cb);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3549
if (ret || ring->page_pool)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3552
ret = hns3_map_buffer(ring, cb);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3559
hns3_free_buffer(ring, cb, 0);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3564
static int hns3_alloc_and_attach_buffer(struct hns3_enet_ring *ring, int i)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3566
int ret = hns3_alloc_and_map_buffer(ring, &ring->desc_cb[i]);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3571
ring->desc[i].addr = cpu_to_le64(ring->desc_cb[i].dma +
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3572
ring->desc_cb[i].page_offset);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3573
ring->desc_cb[i].refill = 1;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3579
static int hns3_alloc_ring_buffers(struct hns3_enet_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3583
for (i = 0; i < ring->desc_num; i++) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3584
ret = hns3_alloc_and_attach_buffer(ring, i);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3596
hns3_free_buffer_detach(ring, j, 0);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3601
static void hns3_replace_buffer(struct hns3_enet_ring *ring, int i,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3604
hns3_unmap_buffer(ring, &ring->desc_cb[i]);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3605
ring->desc_cb[i] = *res_cb;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3606
ring->desc_cb[i].refill = 1;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3607
ring->desc[i].addr = cpu_to_le64(ring->desc_cb[i].dma +
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3608
ring->desc_cb[i].page_offset);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3609
ring->desc[i].rx.bd_base_info = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3612
static void hns3_reuse_buffer(struct hns3_enet_ring *ring, int i)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3614
ring->desc_cb[i].reuse_flag = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3615
ring->desc_cb[i].refill = 1;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3616
ring->desc[i].addr = cpu_to_le64(ring->desc_cb[i].dma +
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3617
ring->desc_cb[i].page_offset);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3618
ring->desc[i].rx.bd_base_info = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3620
dma_sync_single_for_device(ring_to_dev(ring),
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3621
ring->desc_cb[i].dma + ring->desc_cb[i].page_offset,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3622
hns3_buf_size(ring),
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3626
static bool hns3_nic_reclaim_desc(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3632
int ltu = smp_load_acquire(&ring->last_to_use);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3633
int ntc = ring->next_to_clean;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3639
desc = &ring->desc[ntc];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3645
desc_cb = &ring->desc_cb[ntc];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3655
hns3_free_buffer_detach(ring, ntc, budget);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3657
if (++ntc == ring->desc_num)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3661
prefetch(&ring->desc_cb[ntc]);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3671
smp_store_release(&ring->next_to_clean, ntc);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3673
hns3_tx_spare_update(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3678
void hns3_clean_tx_ring(struct hns3_enet_ring *ring, int budget)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3680
struct net_device *netdev = ring_to_netdev(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3688
if (unlikely(!hns3_nic_reclaim_desc(ring, &bytes, &pkts, budget)))
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3691
ring->tqp_vector->tx_group.total_bytes += bytes;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3692
ring->tqp_vector->tx_group.total_packets += pkts;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3694
u64_stats_update_begin(&ring->syncp);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3695
ring->stats.tx_bytes += bytes;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3696
ring->stats.tx_pkts += pkts;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3697
u64_stats_update_end(&ring->syncp);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3699
dev_queue = netdev_get_tx_queue(netdev, ring->tqp->tqp_index);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3703
ring_space(ring) > HNS3_MAX_TSO_BD_NUM)) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3711
ring->stats.restart_queue++;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3716
static int hns3_desc_unused(struct hns3_enet_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3718
int ntc = ring->next_to_clean;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3719
int ntu = ring->next_to_use;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3721
if (unlikely(ntc == ntu && !ring->desc_cb[ntc].refill))
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3722
return ring->desc_num;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3724
return ((ntc >= ntu) ? 0 : ring->desc_num) + ntc - ntu;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3728
static bool hns3_nic_alloc_rx_buffers(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3736
desc_cb = &ring->desc_cb[ring->next_to_use];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3738
hns3_ring_stats_update(ring, reuse_pg_cnt);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3740
hns3_reuse_buffer(ring, ring->next_to_use);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3742
ret = hns3_alloc_and_map_buffer(ring, &res_cbs);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3744
hns3_ring_stats_update(ring, sw_err_cnt);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3746
hns3_rl_err(ring_to_netdev(ring),
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3750
writel(i, ring->tqp->io_base +
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3754
hns3_replace_buffer(ring, ring->next_to_use, &res_cbs);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3756
hns3_ring_stats_update(ring, non_reuse_pg);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3759
ring_ptr_move_fw(ring, next_to_use);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3762
writel(i, ring->tqp->io_base + HNS3_RING_RX_RING_HEAD_REG);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3772
struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3776
struct hns3_desc *desc = &ring->desc[ring->next_to_clean];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3783
hns3_ring_stats_update(ring, frag_alloc_err);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3785
hns3_rl_err(ring_to_netdev(ring),
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3795
hns3_ring_stats_update(ring, frag_alloc);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3800
struct hns3_enet_ring *ring, int pull_len,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3803
struct hns3_desc *desc = &ring->desc[ring->next_to_clean];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3806
u32 truesize = hns3_buf_size(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3811
if (ring->page_pool) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3836
hns3_page_size(ring) && desc_cb->page_offset)) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3842
} else if (frag_size <= ring->rx_copybreak) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3843
ret = hns3_handle_rx_copybreak(skb, i, ring, pull_len, desc_cb);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3921
static void hns3_checksum_complete(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3928
hns3_ring_stats_update(ring, csum_complete);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3971
static void hns3_rx_checksum(struct hns3_enet_ring *ring, struct sk_buff *skb,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3975
struct net_device *netdev = ring_to_netdev(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
3990
hns3_checksum_complete(ring, skb, ptype, csum);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4000
hns3_ring_stats_update(ring, l3l4_csum_err);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4008
static void hns3_rx_skb(struct hns3_enet_ring *ring, struct sk_buff *skb)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4011
napi_gro_flush(&ring->tqp_vector->napi, false);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4013
napi_gro_receive(&ring->tqp_vector->napi, skb);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4016
static bool hns3_parse_vlan_tag(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4020
struct hnae3_handle *handle = ring->tqp->handle;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4021
struct pci_dev *pdev = ring->tqp->handle->pdev;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4069
static void hns3_rx_ring_move_fw(struct hns3_enet_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4071
ring->desc[ring->next_to_clean].rx.bd_base_info &=
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4073
ring->desc_cb[ring->next_to_clean].refill = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4074
ring->next_to_clean += 1;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4076
if (unlikely(ring->next_to_clean == ring->desc_num))
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4077
ring->next_to_clean = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4080
static int hns3_alloc_skb(struct hns3_enet_ring *ring, unsigned int length,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4083
struct hns3_desc_cb *desc_cb = &ring->desc_cb[ring->next_to_clean];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4084
struct net_device *netdev = ring_to_netdev(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4087
ring->skb = napi_alloc_skb(&ring->tqp_vector->napi, HNS3_RX_HEAD_SIZE);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4088
skb = ring->skb;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4091
hns3_ring_stats_update(ring, sw_err_cnt);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4096
trace_hns3_rx_desc(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4099
ring->pending_buf = 1;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4100
ring->frag_num = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4101
ring->tail_skb = NULL;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4109
page_pool_put_full_page(ring->page_pool, desc_cb->priv,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4115
hns3_rx_ring_move_fw(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4119
if (ring->page_pool)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4122
hns3_ring_stats_update(ring, seg_pkt_cnt);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4124
ring->pull_len = eth_get_headlen(netdev, va, HNS3_RX_HEAD_SIZE);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4125
__skb_put(skb, ring->pull_len);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4126
hns3_nic_reuse_page(skb, ring->frag_num++, ring, ring->pull_len,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4128
hns3_rx_ring_move_fw(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4133
static int hns3_add_frag(struct hns3_enet_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4135
struct sk_buff *skb = ring->skb;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4143
desc = &ring->desc[ring->next_to_clean];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4144
desc_cb = &ring->desc_cb[ring->next_to_clean];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4151
if (unlikely(ring->frag_num >= MAX_SKB_FRAGS)) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4152
new_skb = napi_alloc_skb(&ring->tqp_vector->napi, 0);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4154
hns3_rl_err(ring_to_netdev(ring),
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4159
if (ring->page_pool)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4162
ring->frag_num = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4164
if (ring->tail_skb) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4165
ring->tail_skb->next = new_skb;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4166
ring->tail_skb = new_skb;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4169
ring->tail_skb = new_skb;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4173
if (ring->tail_skb) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4174
head_skb->truesize += hns3_buf_size(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4177
skb = ring->tail_skb;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4180
dma_sync_single_for_cpu(ring_to_dev(ring),
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4182
hns3_buf_size(ring),
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4185
hns3_nic_reuse_page(skb, ring->frag_num++, ring, 0, desc_cb);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4186
trace_hns3_rx_desc(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4187
hns3_rx_ring_move_fw(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4188
ring->pending_buf++;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4194
static int hns3_set_gro_and_checksum(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4198
struct net_device *netdev = ring_to_netdev(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4207
hns3_rx_checksum(ring, skb, l234info, bd_base_info, ol_info,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4236
static void hns3_set_rx_skb_rss_type(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4241
struct net_device *netdev = ring_to_netdev(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4284
static void hns3_handle_rx_vlan_tag(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4288
struct net_device *netdev = ring_to_netdev(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4297
if (hns3_parse_vlan_tag(ring, desc, l234info, &vlan_tag))
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
430
if (tqp_vectors->tx_group.ring && tqp_vectors->rx_group.ring) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4303
static int hns3_handle_bdinfo(struct hns3_enet_ring *ring, struct sk_buff *skb)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4305
struct net_device *netdev = ring_to_netdev(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4317
pre_ntc = ring->next_to_clean ? (ring->next_to_clean - 1) :
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4318
(ring->desc_num - 1);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4319
desc = &ring->desc[pre_ntc];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4327
hns3_handle_rx_vlan_tag(ring, desc, skb, l234info);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4331
u64_stats_update_begin(&ring->syncp);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4333
ring->stats.l2_err++;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4335
ring->stats.err_pkt_len++;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4336
u64_stats_update_end(&ring->syncp);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4347
ret = hns3_set_gro_and_checksum(ring, skb, l234info,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4350
hns3_ring_stats_update(ring, rx_err_cnt);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4357
u64_stats_update_begin(&ring->syncp);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4358
ring->stats.rx_pkts++;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4359
ring->stats.rx_bytes += len;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
436
} else if (tqp_vectors->rx_group.ring) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4362
ring->stats.rx_multicast++;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4364
u64_stats_update_end(&ring->syncp);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4366
ring->tqp_vector->rx_group.total_bytes += len;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4368
hns3_set_rx_skb_rss_type(ring, skb, le32_to_cpu(desc->rx.rss_hash),
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4373
static int hns3_handle_rx_bd(struct hns3_enet_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4375
struct sk_buff *skb = ring->skb;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4382
desc = &ring->desc[ring->next_to_clean];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4383
desc_cb = &ring->desc_cb[ring->next_to_clean];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4396
ring->va = desc_cb->buf + desc_cb->page_offset;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4398
dma_sync_single_for_cpu(ring_to_dev(ring),
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4400
hns3_buf_size(ring),
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
441
} else if (tqp_vectors->tx_group.ring) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4410
net_prefetch(ring->va);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4412
ret = hns3_alloc_skb(ring, length, ring->va);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4413
skb = ring->skb;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4418
ret = hns3_add_frag(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4423
ret = hns3_add_frag(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4432
memcpy(skb->data, ring->va,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4433
ALIGN(ring->pull_len, sizeof(long)));
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4435
ret = hns3_handle_bdinfo(ring, skb);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4441
skb_record_rx_queue(skb, ring->tqp->tqp_index);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4445
int hns3_clean_rx_ring(struct hns3_enet_ring *ring, int budget,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4449
int unused_count = hns3_desc_unused(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4454
unused_count -= ring->pending_buf;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4460
hns3_nic_alloc_rx_buffers(ring, unused_count);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4465
err = hns3_handle_rx_bd(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4467
if (unlikely(!ring->skb || err == -ENXIO)) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4470
rx_fn(ring, ring->skb);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4474
unused_count += ring->pending_buf;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4475
ring->skb = NULL;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4476
ring->pending_buf = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4485
hns3_nic_alloc_rx_buffers(ring, unused_count);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4519
struct hns3_enet_ring *ring;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4535
hns3_for_each_ring(ring, tqp_vector->tx_group)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4536
hns3_clean_tx_ring(ring, budget);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4542
hns3_for_each_ring(ring, tqp_vector->rx_group) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4543
int rx_cleaned = hns3_clean_rx_ring(ring, rx_budget,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4576
struct hns3_enet_ring *ring;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4578
ring = is_tx ? tqp_vector->tx_group.ring : tqp_vector->rx_group.ring;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4585
while (ring) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4593
chain->tqp_index = ring->tqp->tqp_index;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4602
ring = ring->next;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4649
struct hns3_enet_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4651
ring->next = group->ring;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4652
group->ring = ring;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4679
struct hns3_enet_tqp_vector *tqp_vector = group->ring->tqp_vector;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4683
hns3_set_vector_coalesce_rx_gl(group->ring->tqp_vector, cur_moder.usec);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4699
struct hns3_enet_tqp_vector *tqp_vector = group->ring->tqp_vector;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4743
&priv->ring[i]);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4746
&priv->ring[i + tqp_num]);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4748
priv->ring[i].tqp_vector = tqp_vector;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4749
priv->ring[i + tqp_num].tqp_vector = tqp_vector;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4866
group->ring = NULL;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4880
if (!tqp_vector->rx_group.ring && !tqp_vector->tx_group.ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4948
struct hns3_enet_ring *ring;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4952
ring = &priv->ring[q->tqp_index];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4954
ring->queue_index = q->tqp_index;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4955
ring->tx_copybreak = priv->tx_copybreak;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4956
ring->last_to_use = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4958
ring = &priv->ring[q->tqp_index + queue_num];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4960
ring->queue_index = q->tqp_index;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4961
ring->rx_copybreak = priv->rx_copybreak;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4964
hnae3_set_bit(ring->flag, HNAE3_RING_TYPE_B, ring_type);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4966
ring->tqp = q;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4967
ring->desc = NULL;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4968
ring->desc_cb = NULL;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4969
ring->dev = priv->dev;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4970
ring->desc_dma_addr = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4971
ring->buf_size = q->buf_size;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4972
ring->desc_num = desc_num;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4973
ring->next_to_use = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4974
ring->next_to_clean = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4990
priv->ring = devm_kzalloc(&pdev->dev,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4992
sizeof(*priv->ring), 2),
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4994
if (!priv->ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5005
if (!priv->ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5008
devm_kfree(priv->dev, priv->ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5009
priv->ring = NULL;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5012
static void hns3_alloc_page_pool(struct hns3_enet_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5016
.order = hns3_page_order(ring),
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5017
.pool_size = ring->desc_num * hns3_buf_size(ring) /
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5018
(PAGE_SIZE << hns3_page_order(ring)),
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5019
.nid = dev_to_node(ring_to_dev(ring)),
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5020
.dev = ring_to_dev(ring),
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5023
.max_len = PAGE_SIZE << hns3_page_order(ring),
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5026
ring->page_pool = page_pool_create(&pp_params);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5027
if (IS_ERR(ring->page_pool)) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5028
dev_warn(ring_to_dev(ring), "page pool creation failed: %ld\n",
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5029
PTR_ERR(ring->page_pool));
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5030
ring->page_pool = NULL;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5034
static int hns3_alloc_ring_memory(struct hns3_enet_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5038
if (ring->desc_num <= 0 || ring->buf_size <= 0)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5041
ring->desc_cb = devm_kcalloc(ring_to_dev(ring), ring->desc_num,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5042
sizeof(ring->desc_cb[0]), GFP_KERNEL);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5043
if (!ring->desc_cb) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5048
ret = hns3_alloc_desc(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5052
if (!HNAE3_IS_TX_RING(ring)) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5054
hns3_alloc_page_pool(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5056
ret = hns3_alloc_ring_buffers(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5060
hns3_init_tx_spare_buffer(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5066
hns3_free_desc(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5068
devm_kfree(ring_to_dev(ring), ring->desc_cb);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5069
ring->desc_cb = NULL;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5074
void hns3_fini_ring(struct hns3_enet_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5076
hns3_free_desc(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5077
devm_kfree(ring_to_dev(ring), ring->desc_cb);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5078
ring->desc_cb = NULL;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5079
ring->next_to_clean = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5080
ring->next_to_use = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5081
ring->last_to_use = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5082
ring->pending_buf = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5083
if (!HNAE3_IS_TX_RING(ring) && ring->skb) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5084
dev_kfree_skb_any(ring->skb);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5085
ring->skb = NULL;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5086
} else if (HNAE3_IS_TX_RING(ring) && ring->tx_spare) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5087
struct hns3_tx_spare *tx_spare = ring->tx_spare;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5089
dma_unmap_page(ring_to_dev(ring), tx_spare->dma, tx_spare->len,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5093
devm_kfree(ring_to_dev(ring), tx_spare);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5094
ring->tx_spare = NULL;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5097
if (!HNAE3_IS_TX_RING(ring) && ring->page_pool) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5098
page_pool_destroy(ring->page_pool);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5099
ring->page_pool = NULL;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5127
static void hns3_init_ring_hw(struct hns3_enet_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5129
dma_addr_t dma = ring->desc_dma_addr;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5130
struct hnae3_queue *q = ring->tqp;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5132
if (!HNAE3_IS_TX_RING(ring)) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5138
hns3_buf_size2type(ring->buf_size));
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5140
ring->desc_num / 8 - 1);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5148
ring->desc_num / 8 - 1);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5164
q = priv->ring[tc_info->tqp_offset[i] + j].tqp;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5179
ret = hns3_alloc_ring_memory(&priv->ring[i]);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5186
u64_stats_init(&priv->ring[i].syncp);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5194
hns3_fini_ring(&priv->ring[j]);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5205
hns3_fini_ring(&priv->ring[i]);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5206
hns3_fini_ring(&priv->ring[i + h->kinfo.num_tqps]);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5497
priv->ring = NULL;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5560
static void hns3_clear_tx_ring(struct hns3_enet_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5562
while (ring->next_to_clean != ring->next_to_use) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5563
ring->desc[ring->next_to_clean].tx.bdtp_fe_sc_vld_ra_ri = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5564
hns3_free_buffer_detach(ring, ring->next_to_clean, 0);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5565
ring_ptr_move_fw(ring, next_to_clean);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5568
ring->pending_buf = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5571
static int hns3_clear_rx_ring(struct hns3_enet_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5576
while (ring->next_to_use != ring->next_to_clean) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5581
if (!ring->desc_cb[ring->next_to_use].reuse_flag) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5582
ret = hns3_alloc_and_map_buffer(ring, &res_cbs);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5584
hns3_ring_stats_update(ring, sw_err_cnt);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5588
netdev_warn(ring_to_netdev(ring),
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5593
hns3_replace_buffer(ring, ring->next_to_use, &res_cbs);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5595
ring_ptr_move_fw(ring, next_to_use);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5599
if (ring->skb) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5600
dev_kfree_skb_any(ring->skb);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5601
ring->skb = NULL;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5602
ring->pending_buf = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5608
static void hns3_force_clear_rx_ring(struct hns3_enet_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5610
while (ring->next_to_use != ring->next_to_clean) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5615
if (!ring->desc_cb[ring->next_to_use].reuse_flag) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5616
hns3_unmap_buffer(ring,
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5617
&ring->desc_cb[ring->next_to_use]);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5618
ring->desc_cb[ring->next_to_use].dma = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5621
ring_ptr_move_fw(ring, next_to_use);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5632
struct hns3_enet_ring *ring;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5634
ring = &priv->ring[i];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5635
hns3_clear_tx_ring(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5637
ring = &priv->ring[i + h->kinfo.num_tqps];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5642
hns3_force_clear_rx_ring(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5644
hns3_clear_rx_ring(ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5661
hns3_init_ring_hw(&priv->ring[i]);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5666
hns3_clear_tx_ring(&priv->ring[i]);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5667
priv->ring[i].next_to_clean = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5668
priv->ring[i].next_to_use = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5669
priv->ring[i].last_to_use = 0;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
5671
rx_ring = &priv->ring[i + h->kinfo.num_tqps];
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
768
struct hns3_enet_ring *ring = tqp_vector->tx_group.ring;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
770
while (ring) {
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
775
ring->tqp->tqp_index);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
780
ring = ring->next;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
835
priv->ring[i].queue_index);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
971
static u32 hns3_tx_spare_space(struct hns3_enet_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
973
struct hns3_tx_spare *tx_spare = ring->tx_spare;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
991
static void hns3_tx_spare_update(struct hns3_enet_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
993
struct hns3_tx_spare *tx_spare = ring->tx_spare;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.h
545
struct hns3_enet_ring *ring;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.h
584
struct hns3_enet_ring *ring;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.h
626
static inline u32 ring_space(struct hns3_enet_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.h
631
int begin = smp_load_acquire(&ring->next_to_clean);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.h
632
int end = READ_ONCE(ring->next_to_use);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.h
634
return ((end >= begin) ? (ring->desc_num - end + begin) :
drivers/net/ethernet/hisilicon/hns3/hns3_enet.h
638
static inline u32 hns3_tqp_read_reg(struct hns3_enet_ring *ring, u32 reg)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.h
640
return readl_relaxed(ring->tqp->io_base + reg);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.h
668
#define ring_to_dev(ring) ((ring)->dev)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.h
670
#define ring_to_netdev(ring) ((ring)->tqp_vector->napi.dev)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.h
672
#define ring_to_dma_dir(ring) (HNAE3_IS_TX_RING(ring) ? \
drivers/net/ethernet/hisilicon/hns3/hns3_enet.h
677
#define hns3_ring_stats_update(ring, cnt) do { \
drivers/net/ethernet/hisilicon/hns3/hns3_enet.h
678
typeof(ring) (tmp) = (ring); \
drivers/net/ethernet/hisilicon/hns3/hns3_enet.h
684
static inline unsigned int hns3_page_order(struct hns3_enet_ring *ring)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.h
687
if (ring->buf_size > (PAGE_SIZE / 2))
drivers/net/ethernet/hisilicon/hns3/hns3_enet.h
697
for ((pos) = (head).ring; (pos); (pos) = (pos)->next)
drivers/net/ethernet/hisilicon/hns3/hns3_enet.h
718
void hns3_clean_tx_ring(struct hns3_enet_ring *ring, int budget);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.h
721
void hns3_fini_ring(struct hns3_enet_ring *ring);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.h
725
struct hns3_enet_ring *ring, int budget,
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
1091
priv->ring[i].desc_num = tx_desc_num;
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
1092
priv->ring[i + h->kinfo.num_tqps].desc_num = rx_desc_num;
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
1108
memcpy(&tmp_rings[i], &priv->ring[i],
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
1125
if (hns3_nic_resetting(ndev) || !priv->ring) {
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
1167
old_ringparam->tx_desc_num = priv->ring[0].desc_num;
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
1168
old_ringparam->rx_desc_num = priv->ring[queue_num].desc_num;
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
1169
old_ringparam->rx_buf_len = priv->ring[queue_num].buf_size;
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
1192
priv->ring[i + h->kinfo.num_tqps].buf_size = rx_buf_len;
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
1273
memcpy(&priv->ring[i], &tmp_rings[i],
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
1541
tx_vector = priv->ring[queue].tqp_vector;
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
1542
rx_vector = priv->ring[queue_num + queue].tqp_vector;
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
185
static void hns3_lb_check_skb_data(struct hns3_enet_ring *ring,
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
188
struct hns3_enet_tqp_vector *tqp_vector = ring->tqp_vector;
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
1969
if (hns3_nic_resetting(netdev) || !priv->ring) {
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
1983
priv->ring[i].tx_copybreak = priv->tx_copybreak;
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
1990
priv->ring[i].rx_copybreak = priv->rx_copybreak;
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
2004
(!priv->ring->tx_spare && new_tx_spare_buf_size != 0)) {
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
2018
if (!priv->ring->tx_spare)
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
2022
priv->ring->tx_spare->len);
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
217
struct hns3_enet_ring *ring = &priv->ring[i];
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
221
rx_group = &ring->tqp_vector->rx_group;
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
225
hns3_clean_rx_ring(ring, budget, hns3_lb_check_skb_data);
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
240
struct hns3_enet_ring *ring = &priv->ring[i];
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
242
hns3_clean_tx_ring(ring, 0);
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
570
struct hns3_enet_ring *ring;
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
576
ring = &nic_priv->ring[i];
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
578
stat = (u8 *)ring + hns3_txq_stats[j].stats_offset;
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
585
ring = &nic_priv->ring[i + kinfo->num_tqps];
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
587
stat = (u8 *)ring + hns3_rxq_stats[j].stats_offset;
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
676
if (hns3_nic_resetting(netdev) || !priv->ring) {
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
684
param->tx_pending = priv->ring[0].desc_num;
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
685
param->rx_pending = priv->ring[rx_queue_index].desc_num;
drivers/net/ethernet/hisilicon/hns3/hns3_ethtool.c
686
kernel_param->rx_buf_len = priv->ring[rx_queue_index].buf_size;
drivers/net/ethernet/hisilicon/hns3/hns3_trace.h
100
TP_ARGS(ring),
drivers/net/ethernet/hisilicon/hns3/hns3_trace.h
109
__string(devname, ring->tqp->handle->kinfo.netdev->name)
drivers/net/ethernet/hisilicon/hns3/hns3_trace.h
113
__entry->index = ring->tqp->tqp_index;
drivers/net/ethernet/hisilicon/hns3/hns3_trace.h
114
__entry->ntu = ring->next_to_use;
drivers/net/ethernet/hisilicon/hns3/hns3_trace.h
115
__entry->ntc = ring->next_to_clean;
drivers/net/ethernet/hisilicon/hns3/hns3_trace.h
116
__entry->desc_dma = ring->desc_dma_addr;
drivers/net/ethernet/hisilicon/hns3/hns3_trace.h
117
__entry->buf_dma = ring->desc_cb[ring->next_to_clean].dma;
drivers/net/ethernet/hisilicon/hns3/hns3_trace.h
118
memcpy(__entry->desc, &ring->desc[ring->next_to_clean],
drivers/net/ethernet/hisilicon/hns3/hns3_trace.h
68
TP_PROTO(struct hns3_enet_ring *ring, int cur_ntu),
drivers/net/ethernet/hisilicon/hns3/hns3_trace.h
69
TP_ARGS(ring, cur_ntu),
drivers/net/ethernet/hisilicon/hns3/hns3_trace.h
77
__string(devname, ring->tqp->handle->kinfo.netdev->name)
drivers/net/ethernet/hisilicon/hns3/hns3_trace.h
81
__entry->index = ring->tqp->tqp_index;
drivers/net/ethernet/hisilicon/hns3/hns3_trace.h
82
__entry->ntu = ring->next_to_use;
drivers/net/ethernet/hisilicon/hns3/hns3_trace.h
83
__entry->ntc = ring->next_to_clean;
drivers/net/ethernet/hisilicon/hns3/hns3_trace.h
84
__entry->desc_dma = ring->desc_dma_addr,
drivers/net/ethernet/hisilicon/hns3/hns3_trace.h
85
memcpy(__entry->desc, &ring->desc[cur_ntu],
drivers/net/ethernet/hisilicon/hns3/hns3_trace.h
99
TP_PROTO(struct hns3_enet_ring *ring),
drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_main.c
6518
u32 ring = ethtool_get_flow_spec_ring(ring_cookie);
drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_main.c
6535
if (ring >= tqps) {
drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_main.c
6538
ring, tqps - 1U);
drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_main.c
6543
*queue_id = ring;
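The hclge lines above map an ethtool flow-spec ring_cookie to a queue index and reject cookies that point past the last tqp. A minimal standalone sketch of that check, assuming the queue index sits in the low 32 bits of the cookie (hypothetical helper, not the driver's function):

    #include <stdint.h>

    /* Illustrative only: reject a flow-spec cookie whose queue index is
     * outside the device's tqp range, as the quoted check does. */
    static int demo_cookie_to_queue(uint64_t ring_cookie, uint32_t num_tqps,
                                    uint16_t *queue_id)
    {
            uint32_t ring = (uint32_t)ring_cookie;  /* low 32 bits = queue index */

            if (ring >= num_tqps)
                    return -1;

            *queue_id = (uint16_t)ring;
            return 0;
    }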
drivers/net/ethernet/huawei/hinic/hinic_ethtool.c
549
struct ethtool_ringparam *ring,
drivers/net/ethernet/huawei/hinic/hinic_ethtool.c
555
ring->rx_max_pending = HINIC_MAX_QUEUE_DEPTH;
drivers/net/ethernet/huawei/hinic/hinic_ethtool.c
556
ring->tx_max_pending = HINIC_MAX_QUEUE_DEPTH;
drivers/net/ethernet/huawei/hinic/hinic_ethtool.c
557
ring->rx_pending = nic_dev->rq_depth;
drivers/net/ethernet/huawei/hinic/hinic_ethtool.c
558
ring->tx_pending = nic_dev->sq_depth;
drivers/net/ethernet/huawei/hinic/hinic_ethtool.c
562
struct ethtool_ringparam *ring)
drivers/net/ethernet/huawei/hinic/hinic_ethtool.c
564
if (ring->rx_jumbo_pending || ring->rx_mini_pending) {
drivers/net/ethernet/huawei/hinic/hinic_ethtool.c
570
if (ring->tx_pending > HINIC_MAX_QUEUE_DEPTH ||
drivers/net/ethernet/huawei/hinic/hinic_ethtool.c
571
ring->tx_pending < HINIC_MIN_QUEUE_DEPTH ||
drivers/net/ethernet/huawei/hinic/hinic_ethtool.c
572
ring->rx_pending > HINIC_MAX_QUEUE_DEPTH ||
drivers/net/ethernet/huawei/hinic/hinic_ethtool.c
573
ring->rx_pending < HINIC_MIN_QUEUE_DEPTH) {
drivers/net/ethernet/huawei/hinic/hinic_ethtool.c
584
struct ethtool_ringparam *ring,
drivers/net/ethernet/huawei/hinic/hinic_ethtool.c
592
err = check_ringparam_valid(nic_dev, ring);
drivers/net/ethernet/huawei/hinic/hinic_ethtool.c
596
new_sq_depth = (u16)(1U << (u16)ilog2(ring->tx_pending));
drivers/net/ethernet/huawei/hinic/hinic_ethtool.c
597
new_rq_depth = (u16)(1U << (u16)ilog2(ring->rx_pending));
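The hinic set_ringparam lines above validate the requested depths against HINIC_MIN/MAX_QUEUE_DEPTH and then round each one down to a power of two with ilog2 before programming the queues. A standalone sketch of that rounding step, assuming the caller has already range-checked the input (open-coded ilog2, not the driver's code):

    #include <stdint.h>

    /* Illustrative only: round a validated queue depth down to a power of
     * two, mirroring (u16)(1U << (u16)ilog2(ring->tx_pending)) above. */
    static uint16_t demo_depth_round_down_pow2(uint32_t depth)
    {
            unsigned int log = 0;

            while (depth >> 1) {
                    depth >>= 1;
                    log++;
            }
            return (uint16_t)(1U << log);
    }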
drivers/net/ethernet/ibm/ibmvnic.c
3777
struct ethtool_ringparam *ring,
drivers/net/ethernet/ibm/ibmvnic.c
3783
ring->rx_max_pending = adapter->max_rx_add_entries_per_subcrq;
drivers/net/ethernet/ibm/ibmvnic.c
3784
ring->tx_max_pending = adapter->max_tx_entries_per_subcrq;
drivers/net/ethernet/ibm/ibmvnic.c
3785
ring->rx_mini_max_pending = 0;
drivers/net/ethernet/ibm/ibmvnic.c
3786
ring->rx_jumbo_max_pending = 0;
drivers/net/ethernet/ibm/ibmvnic.c
3787
ring->rx_pending = adapter->req_rx_add_entries_per_subcrq;
drivers/net/ethernet/ibm/ibmvnic.c
3788
ring->tx_pending = adapter->req_tx_entries_per_subcrq;
drivers/net/ethernet/ibm/ibmvnic.c
3789
ring->rx_mini_pending = 0;
drivers/net/ethernet/ibm/ibmvnic.c
3790
ring->rx_jumbo_pending = 0;
drivers/net/ethernet/ibm/ibmvnic.c
3794
struct ethtool_ringparam *ring,
drivers/net/ethernet/ibm/ibmvnic.c
3800
if (ring->rx_pending > adapter->max_rx_add_entries_per_subcrq ||
drivers/net/ethernet/ibm/ibmvnic.c
3801
ring->tx_pending > adapter->max_tx_entries_per_subcrq) {
drivers/net/ethernet/ibm/ibmvnic.c
3810
adapter->desired.rx_entries = ring->rx_pending;
drivers/net/ethernet/ibm/ibmvnic.c
3811
adapter->desired.tx_entries = ring->tx_pending;
drivers/net/ethernet/intel/e100.c
2555
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/e100.c
2563
ring->rx_max_pending = rfds->max;
drivers/net/ethernet/intel/e100.c
2564
ring->tx_max_pending = cbs->max;
drivers/net/ethernet/intel/e100.c
2565
ring->rx_pending = rfds->count;
drivers/net/ethernet/intel/e100.c
2566
ring->tx_pending = cbs->count;
drivers/net/ethernet/intel/e100.c
2570
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/e100.c
2578
if ((ring->rx_mini_pending) || (ring->rx_jumbo_pending))
drivers/net/ethernet/intel/e100.c
2583
rfds->count = max(ring->rx_pending, rfds->min);
drivers/net/ethernet/intel/e100.c
2585
cbs->count = max(ring->tx_pending, cbs->min);
drivers/net/ethernet/intel/e1000/e1000_ethtool.c
548
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/e1000/e1000_ethtool.c
558
ring->rx_max_pending = (mac_type < e1000_82544) ? E1000_MAX_RXD :
drivers/net/ethernet/intel/e1000/e1000_ethtool.c
560
ring->tx_max_pending = (mac_type < e1000_82544) ? E1000_MAX_TXD :
drivers/net/ethernet/intel/e1000/e1000_ethtool.c
562
ring->rx_pending = rxdr->count;
drivers/net/ethernet/intel/e1000/e1000_ethtool.c
563
ring->tx_pending = txdr->count;
drivers/net/ethernet/intel/e1000/e1000_ethtool.c
567
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/e1000/e1000_ethtool.c
578
if ((ring->rx_mini_pending) || (ring->rx_jumbo_pending))
drivers/net/ethernet/intel/e1000/e1000_ethtool.c
602
rxdr->count = max(ring->rx_pending, (u32)E1000_MIN_RXD);
drivers/net/ethernet/intel/e1000/e1000_ethtool.c
606
txdr->count = max(ring->tx_pending, (u32)E1000_MIN_TXD);
drivers/net/ethernet/intel/e1000/e1000_main.c
2187
struct e1000_rx_ring *ring = &adapter->rx_ring[0];
drivers/net/ethernet/intel/e1000/e1000_main.c
2189
adapter->alloc_rx_buf(adapter, ring, E1000_DESC_UNUSED(ring));
drivers/net/ethernet/intel/e1000/e1000_main.c
375
struct e1000_rx_ring *ring = &adapter->rx_ring[i];
drivers/net/ethernet/intel/e1000/e1000_main.c
376
adapter->alloc_rx_buf(adapter, ring,
drivers/net/ethernet/intel/e1000/e1000_main.c
377
E1000_DESC_UNUSED(ring));
drivers/net/ethernet/intel/e1000e/e1000.h
256
bool (*clean_rx)(struct e1000_ring *ring, int *work_done,
drivers/net/ethernet/intel/e1000e/e1000.h
258
void (*alloc_rx_buf)(struct e1000_ring *ring, int cleaned_count,
drivers/net/ethernet/intel/e1000e/e1000.h
501
int e1000e_setup_rx_resources(struct e1000_ring *ring);
drivers/net/ethernet/intel/e1000e/e1000.h
502
int e1000e_setup_tx_resources(struct e1000_ring *ring);
drivers/net/ethernet/intel/e1000e/e1000.h
503
void e1000e_free_rx_resources(struct e1000_ring *ring);
drivers/net/ethernet/intel/e1000e/e1000.h
504
void e1000e_free_tx_resources(struct e1000_ring *ring);
drivers/net/ethernet/intel/e1000e/ethtool.c
647
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/e1000e/ethtool.c
653
ring->rx_max_pending = E1000_MAX_RXD;
drivers/net/ethernet/intel/e1000e/ethtool.c
654
ring->tx_max_pending = E1000_MAX_TXD;
drivers/net/ethernet/intel/e1000e/ethtool.c
655
ring->rx_pending = adapter->rx_ring_count;
drivers/net/ethernet/intel/e1000e/ethtool.c
656
ring->tx_pending = adapter->tx_ring_count;
drivers/net/ethernet/intel/e1000e/ethtool.c
660
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/e1000e/ethtool.c
670
if ((ring->rx_mini_pending) || (ring->rx_jumbo_pending))
drivers/net/ethernet/intel/e1000e/ethtool.c
673
new_rx_count = clamp_t(u32, ring->rx_pending, E1000_MIN_RXD,
drivers/net/ethernet/intel/e1000e/ethtool.c
677
new_tx_count = clamp_t(u32, ring->tx_pending, E1000_MIN_TXD,
drivers/net/ethernet/intel/e1000e/netdev.c
2307
struct e1000_ring *ring)
drivers/net/ethernet/intel/e1000e/netdev.c
2311
ring->desc = dma_alloc_coherent(&pdev->dev, ring->size, &ring->dma,
drivers/net/ethernet/intel/e1000e/netdev.c
2313
if (!ring->desc)
drivers/net/ethernet/intel/e1000e/netdev.c
473
static int e1000_desc_unused(struct e1000_ring *ring)
drivers/net/ethernet/intel/e1000e/netdev.c
475
if (ring->next_to_clean > ring->next_to_use)
drivers/net/ethernet/intel/e1000e/netdev.c
476
return ring->next_to_clean - ring->next_to_use - 1;
drivers/net/ethernet/intel/e1000e/netdev.c
478
return ring->count + ring->next_to_clean - ring->next_to_use - 1;
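The e1000_desc_unused() lines above show the usual one-slot-reserved ring arithmetic: next_to_clean equal to next_to_use means the ring is empty, so one descriptor is always left unused to tell empty apart from full. A standalone sketch with a hypothetical minimal ring struct (not the driver's struct e1000_ring):

    /* Illustrative only: free descriptors between the consumer
     * (next_to_clean) and producer (next_to_use), keeping one slot back. */
    struct demo_ring {
            unsigned int count;          /* total descriptors in the ring */
            unsigned int next_to_use;    /* producer index */
            unsigned int next_to_clean;  /* consumer index */
    };

    static unsigned int demo_desc_unused(const struct demo_ring *ring)
    {
            if (ring->next_to_clean > ring->next_to_use)
                    return ring->next_to_clean - ring->next_to_use - 1;

            return ring->count + ring->next_to_clean - ring->next_to_use - 1;
    }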
drivers/net/ethernet/intel/fm10k/fm10k.h
146
struct fm10k_ring *ring; /* pointer to linked list of rings */
drivers/net/ethernet/intel/fm10k/fm10k.h
167
static inline struct netdev_queue *txring_txq(const struct fm10k_ring *ring)
drivers/net/ethernet/intel/fm10k/fm10k.h
169
return &ring->netdev->_tx[ring->queue_index];
drivers/net/ethernet/intel/fm10k/fm10k.h
174
for (pos = &(head).ring[(head).count]; (--pos) >= (head).ring;)
drivers/net/ethernet/intel/fm10k/fm10k.h
202
struct fm10k_ring ring[] ____cacheline_internodealigned_in_smp;
drivers/net/ethernet/intel/fm10k/fm10k.h
417
static inline u16 fm10k_desc_unused(struct fm10k_ring *ring)
drivers/net/ethernet/intel/fm10k/fm10k.h
419
s16 unused = ring->next_to_clean - ring->next_to_use - 1;
drivers/net/ethernet/intel/fm10k/fm10k.h
421
return likely(unused < 0) ? unused + ring->count : unused;
drivers/net/ethernet/intel/fm10k/fm10k.h
480
u64 fm10k_get_tx_pending(struct fm10k_ring *ring, bool in_sw);
drivers/net/ethernet/intel/fm10k/fm10k.h
55
#define check_for_tx_hang(ring) \
drivers/net/ethernet/intel/fm10k/fm10k.h
56
test_bit(__FM10K_TX_DETECT_HANG, (ring)->state)
drivers/net/ethernet/intel/fm10k/fm10k.h
57
#define set_check_for_tx_hang(ring) \
drivers/net/ethernet/intel/fm10k/fm10k.h
58
set_bit(__FM10K_TX_DETECT_HANG, (ring)->state)
drivers/net/ethernet/intel/fm10k/fm10k.h
59
#define clear_check_for_tx_hang(ring) \
drivers/net/ethernet/intel/fm10k/fm10k.h
60
clear_bit(__FM10K_TX_DETECT_HANG, (ring)->state)
drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c
115
struct fm10k_ring *ring = inode->i_private;
drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c
116
struct fm10k_q_vector *q_vector = ring->q_vector;
drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c
120
if (ring < q_vector->rx.ring)
drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c
129
((struct seq_file *)filep->private_data)->private = ring;
drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c
15
struct fm10k_ring *ring = s->private;
drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c
166
struct fm10k_ring *ring = &q_vector->tx.ring[i];
drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c
168
snprintf(name, sizeof(name), "tx_ring.%03d", ring->queue_index);
drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c
17
return (*pos < ring->count) ? pos : NULL;
drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c
171
q_vector->dbg_q_vector, ring,
drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c
177
struct fm10k_ring *ring = &q_vector->rx.ring[i];
drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c
179
snprintf(name, sizeof(name), "rx_ring.%03d", ring->queue_index);
drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c
182
q_vector->dbg_q_vector, ring,
drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c
24
struct fm10k_ring *ring = s->private;
drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c
26
return (++(*pos) < ring->count) ? pos : NULL;
drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c
45
struct fm10k_ring *ring = s->private;
drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c
57
if (!ring->desc) {
drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c
60
struct fm10k_tx_desc *txd = FM10K_TX_DESC(ring, i);
drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c
72
struct fm10k_ring *ring = s->private;
drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c
84
if (!ring->desc) {
drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c
87
union fm10k_rx_desc *rxd = FM10K_RX_DESC(ring, i);
drivers/net/ethernet/intel/fm10k/fm10k_ethtool.c
284
struct fm10k_ring *ring;
drivers/net/ethernet/intel/fm10k/fm10k_ethtool.c
286
ring = interface->tx_ring[i];
drivers/net/ethernet/intel/fm10k/fm10k_ethtool.c
287
fm10k_add_ethtool_stats(&data, ring,
drivers/net/ethernet/intel/fm10k/fm10k_ethtool.c
290
ring = interface->rx_ring[i];
drivers/net/ethernet/intel/fm10k/fm10k_ethtool.c
291
fm10k_add_ethtool_stats(&data, ring,
drivers/net/ethernet/intel/fm10k/fm10k_ethtool.c
505
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/fm10k/fm10k_ethtool.c
511
ring->rx_max_pending = FM10K_MAX_RXD;
drivers/net/ethernet/intel/fm10k/fm10k_ethtool.c
512
ring->tx_max_pending = FM10K_MAX_TXD;
drivers/net/ethernet/intel/fm10k/fm10k_ethtool.c
513
ring->rx_mini_max_pending = 0;
drivers/net/ethernet/intel/fm10k/fm10k_ethtool.c
514
ring->rx_jumbo_max_pending = 0;
drivers/net/ethernet/intel/fm10k/fm10k_ethtool.c
515
ring->rx_pending = interface->rx_ring_count;
drivers/net/ethernet/intel/fm10k/fm10k_ethtool.c
516
ring->tx_pending = interface->tx_ring_count;
drivers/net/ethernet/intel/fm10k/fm10k_ethtool.c
517
ring->rx_mini_pending = 0;
drivers/net/ethernet/intel/fm10k/fm10k_ethtool.c
518
ring->rx_jumbo_pending = 0;
drivers/net/ethernet/intel/fm10k/fm10k_ethtool.c
522
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/fm10k/fm10k_ethtool.c
531
if ((ring->rx_mini_pending) || (ring->rx_jumbo_pending))
drivers/net/ethernet/intel/fm10k/fm10k_ethtool.c
534
new_tx_count = clamp_t(u32, ring->tx_pending,
drivers/net/ethernet/intel/fm10k/fm10k_ethtool.c
538
new_rx_count = clamp_t(u32, ring->rx_pending,
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1102
static u64 fm10k_get_tx_completed(struct fm10k_ring *ring)
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1104
return ring->stats.packets;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1112
u64 fm10k_get_tx_pending(struct fm10k_ring *ring, bool in_sw)
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1114
struct fm10k_intfc *interface = ring->q_vector->interface;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1119
head = ring->next_to_clean;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1120
tail = ring->next_to_use;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1122
head = fm10k_read_reg(hw, FM10K_TDH(ring->reg_idx));
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1123
tail = fm10k_read_reg(hw, FM10K_TDT(ring->reg_idx));
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1126
return ((head <= tail) ? tail : tail + ring->count) - head;
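The fm10k_get_tx_pending() lines above compute how many descriptors sit between head (consumer) and tail (producer), wrapping around the ring when the tail has already passed the end. A standalone sketch of that wraparound arithmetic (hypothetical helper, not the driver's function):

    /* Illustrative only: pending descriptors on a ring of 'count' slots. */
    static unsigned int demo_tx_pending(unsigned int head, unsigned int tail,
                                        unsigned int count)
    {
            return ((head <= tail) ? tail : tail + count) - head;
    }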
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1424
struct fm10k_ring *ring;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1428
fm10k_for_each_ring(ring, q_vector->tx) {
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1429
if (!fm10k_clean_tx_irq(q_vector, ring, budget))
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1445
fm10k_for_each_ring(ring, q_vector->rx) {
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1446
int work = fm10k_clean_rx_irq(q_vector, ring, per_ring_budget);
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1594
struct fm10k_ring *ring;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1600
q_vector = kzalloc_flex(*q_vector, ring, ring_count);
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1613
ring = q_vector->ring;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1616
q_vector->tx.ring = ring;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1624
ring->dev = &interface->pdev->dev;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1625
ring->netdev = interface->netdev;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1628
ring->q_vector = q_vector;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1631
ring->count = interface->tx_ring_count;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1632
ring->queue_index = txr_idx;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1635
interface->tx_ring[txr_idx] = ring;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1642
ring++;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1646
q_vector->rx.ring = ring;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1653
ring->dev = &interface->pdev->dev;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1654
ring->netdev = interface->netdev;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1655
rcu_assign_pointer(ring->l2_accel, interface->l2_accel);
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1658
ring->q_vector = q_vector;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1661
ring->count = interface->rx_ring_count;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1662
ring->queue_index = rxr_idx;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1665
interface->rx_ring[rxr_idx] = ring;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1672
ring++;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1692
struct fm10k_ring *ring;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1696
fm10k_for_each_ring(ring, q_vector->tx)
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1697
interface->tx_ring[ring->queue_index] = NULL;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1699
fm10k_for_each_ring(ring, q_vector->rx)
drivers/net/ethernet/intel/fm10k/fm10k_main.c
1700
interface->rx_ring[ring->queue_index] = NULL;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
355
static inline void fm10k_rx_checksum(struct fm10k_ring *ring,
drivers/net/ethernet/intel/fm10k/fm10k_main.c
362
if (!(ring->netdev->features & NETIF_F_RXCSUM))
drivers/net/ethernet/intel/fm10k/fm10k_main.c
371
ring->rx_stats.csum_err++;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
383
ring->rx_stats.csum_good++;
drivers/net/ethernet/intel/fm10k/fm10k_main.c
392
static inline void fm10k_rx_hash(struct fm10k_ring *ring,
drivers/net/ethernet/intel/fm10k/fm10k_main.c
398
if (!(ring->netdev->features & NETIF_F_RXHASH))
drivers/net/ethernet/intel/fm10k/fm10k_netdev.c
1219
struct fm10k_ring *ring;
drivers/net/ethernet/intel/fm10k/fm10k_netdev.c
1226
ring = READ_ONCE(interface->rx_ring[i]);
drivers/net/ethernet/intel/fm10k/fm10k_netdev.c
1228
if (!ring)
drivers/net/ethernet/intel/fm10k/fm10k_netdev.c
1232
start = u64_stats_fetch_begin(&ring->syncp);
drivers/net/ethernet/intel/fm10k/fm10k_netdev.c
1233
packets = ring->stats.packets;
drivers/net/ethernet/intel/fm10k/fm10k_netdev.c
1234
bytes = ring->stats.bytes;
drivers/net/ethernet/intel/fm10k/fm10k_netdev.c
1235
} while (u64_stats_fetch_retry(&ring->syncp, start));
drivers/net/ethernet/intel/fm10k/fm10k_netdev.c
1242
ring = READ_ONCE(interface->tx_ring[i]);
drivers/net/ethernet/intel/fm10k/fm10k_netdev.c
1244
if (!ring)
drivers/net/ethernet/intel/fm10k/fm10k_netdev.c
1248
start = u64_stats_fetch_begin(&ring->syncp);
drivers/net/ethernet/intel/fm10k/fm10k_netdev.c
1249
packets = ring->stats.packets;
drivers/net/ethernet/intel/fm10k/fm10k_netdev.c
1250
bytes = ring->stats.bytes;
drivers/net/ethernet/intel/fm10k/fm10k_netdev.c
1251
} while (u64_stats_fetch_retry(&ring->syncp, start));
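The fm10k_netdev.c lines above read per-ring packet and byte counters inside a u64_stats_fetch_begin()/u64_stats_fetch_retry() loop, retrying until no writer updated the counters mid-read. A kernel-style sketch of that pattern with a hypothetical ring struct (it only builds inside a kernel tree, and it is not the driver's struct fm10k_ring):

    #include <linux/types.h>
    #include <linux/u64_stats_sync.h>

    struct demo_ring {
            struct u64_stats_sync syncp;
            struct {
                    u64 packets;
                    u64 bytes;
            } stats;
    };

    /* Illustrative only: take a consistent snapshot of the 64-bit counters. */
    static void demo_read_stats(const struct demo_ring *ring,
                                u64 *packets, u64 *bytes)
    {
            unsigned int start;

            do {
                    start = u64_stats_fetch_begin(&ring->syncp);
                    *packets = ring->stats.packets;
                    *bytes = ring->stats.bytes;
            } while (u64_stats_fetch_retry(&ring->syncp, start));
    }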
drivers/net/ethernet/intel/fm10k/fm10k_netdev.c
1336
struct fm10k_ring *ring = interface->rx_ring[i];
drivers/net/ethernet/intel/fm10k/fm10k_netdev.c
1338
rcu_assign_pointer(ring->l2_accel, l2_accel);
drivers/net/ethernet/intel/fm10k/fm10k_netdev.c
144
void fm10k_unmap_and_free_tx_resource(struct fm10k_ring *ring,
drivers/net/ethernet/intel/fm10k/fm10k_netdev.c
150
dma_unmap_single(ring->dev,
drivers/net/ethernet/intel/fm10k/fm10k_netdev.c
155
dma_unmap_page(ring->dev,
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
1007
ring->tail = &interface->uc_addr[FM10K_RDT(reg_idx)];
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
1010
ring->next_to_clean = 0;
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
1011
ring->next_to_use = 0;
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
1012
ring->next_to_alloc = 0;
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
1026
if (!(rx_pause & BIT(ring->qos_pc)))
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
1032
ring->vid = hw->mac.default_vid;
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
1036
ring->vid |= FM10K_VLAN_CLEAR;
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
1039
if (ring->q_vector) {
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
1040
rxint = ring->q_vector->v_idx + NON_Q_VECTORS;
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
1052
fm10k_alloc_rx_buffers(ring, fm10k_desc_unused(ring));
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
1073
struct fm10k_ring *ring = interface->rx_ring[i];
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
1075
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
1077
if (!(rx_pause & BIT(ring->qos_pc)))
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
868
struct fm10k_ring *ring)
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
871
u64 tdba = ring->dma;
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
872
u32 size = ring->count * sizeof(struct fm10k_tx_desc);
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
875
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
893
ring->tail = &interface->uc_addr[FM10K_TDT(reg_idx)];
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
896
ring->next_to_clean = 0;
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
897
ring->next_to_use = 0;
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
900
if (ring->q_vector) {
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
901
txint = ring->q_vector->v_idx + NON_Q_VECTORS;
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
912
if (!test_and_set_bit(__FM10K_TX_XPS_INIT_DONE, ring->state) &&
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
913
ring->q_vector)
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
914
netif_set_xps_queue(ring->netdev,
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
915
&ring->q_vector->affinity_mask,
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
916
ring->queue_index);
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
930
struct fm10k_ring *ring)
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
935
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
978
struct fm10k_ring *ring)
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
980
u64 rdba = ring->dma;
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
982
u32 size = ring->count * sizeof(union fm10k_rx_desc);
drivers/net/ethernet/intel/fm10k/fm10k_pci.c
987
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/i40e/i40e_debugfs.c
437
struct i40e_adminq_ring *ring;
drivers/net/ethernet/intel/i40e/i40e_debugfs.c
448
ring = &(hw->aq.asq);
drivers/net/ethernet/intel/i40e/i40e_debugfs.c
449
for (i = 0; i < ring->count; i++) {
drivers/net/ethernet/intel/i40e/i40e_debugfs.c
450
struct libie_aq_desc *d = I40E_ADMINQ_DESC(*ring, i);
drivers/net/ethernet/intel/i40e/i40e_debugfs.c
461
ring = &(hw->aq.arq);
drivers/net/ethernet/intel/i40e/i40e_debugfs.c
462
for (i = 0; i < ring->count; i++) {
drivers/net/ethernet/intel/i40e/i40e_debugfs.c
463
struct libie_aq_desc *d = I40E_ADMINQ_DESC(*ring, i);
drivers/net/ethernet/intel/i40e/i40e_debugfs.c
489
struct i40e_ring *ring;
drivers/net/ethernet/intel/i40e/i40e_debugfs.c
523
ring = kmemdup(vsi->rx_rings[ring_id], sizeof(*ring), GFP_KERNEL);
drivers/net/ethernet/intel/i40e/i40e_debugfs.c
526
ring = kmemdup(vsi->tx_rings[ring_id], sizeof(*ring), GFP_KERNEL);
drivers/net/ethernet/intel/i40e/i40e_debugfs.c
529
ring = kmemdup(vsi->xdp_rings[ring_id], sizeof(*ring), GFP_KERNEL);
drivers/net/ethernet/intel/i40e/i40e_debugfs.c
532
ring = NULL;
drivers/net/ethernet/intel/i40e/i40e_debugfs.c
535
if (!ring)
drivers/net/ethernet/intel/i40e/i40e_debugfs.c
550
for (i = 0; i < ring->count; i++) {
drivers/net/ethernet/intel/i40e/i40e_debugfs.c
552
txd = I40E_TX_DESC(ring, i);
drivers/net/ethernet/intel/i40e/i40e_debugfs.c
558
rxd = I40E_RX_DESC(ring, i);
drivers/net/ethernet/intel/i40e/i40e_debugfs.c
566
if (desc_n >= ring->count || desc_n < 0) {
drivers/net/ethernet/intel/i40e/i40e_debugfs.c
572
txd = I40E_TX_DESC(ring, desc_n);
drivers/net/ethernet/intel/i40e/i40e_debugfs.c
578
rxd = I40E_RX_DESC(ring, desc_n);
drivers/net/ethernet/intel/i40e/i40e_debugfs.c
589
kfree(ring);
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
168
i40e_add_queue_stats(u64 **data, struct i40e_ring *ring)
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
181
start = !ring ? 0 : u64_stats_fetch_begin(&ring->syncp);
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
183
i40e_add_one_ethtool_stat(&(*data)[i], ring,
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
186
} while (ring && u64_stats_fetch_retry(&ring->syncp, start));
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
2017
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
2025
ring->rx_max_pending = i40e_get_max_num_descriptors(pf);
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
2026
ring->tx_max_pending = i40e_get_max_num_descriptors(pf);
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
2027
ring->rx_mini_max_pending = 0;
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
2028
ring->rx_jumbo_max_pending = 0;
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
2029
ring->rx_pending = vsi->rx_rings[0]->count;
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
2030
ring->tx_pending = vsi->tx_rings[0]->count;
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
2031
ring->rx_mini_pending = 0;
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
2032
ring->rx_jumbo_pending = 0;
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
2047
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
2061
if ((ring->rx_mini_pending) || (ring->rx_jumbo_pending))
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
2065
if (ring->tx_pending > max_num_descriptors ||
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
2066
ring->tx_pending < I40E_MIN_NUM_DESCRIPTORS ||
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
2067
ring->rx_pending > max_num_descriptors ||
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
2068
ring->rx_pending < I40E_MIN_NUM_DESCRIPTORS) {
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
2071
ring->tx_pending, ring->rx_pending,
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
2076
new_tx_count = ALIGN(ring->tx_pending, I40E_REQ_DESCRIPTOR_MULTIPLE);
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
2077
new_rx_count = ALIGN(ring->rx_pending, I40E_REQ_DESCRIPTOR_MULTIPLE);
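The i40e (and, further down, iavf) set_ringparam lines above range-check the requested counts and then round them up to the hardware's descriptor multiple with ALIGN(). A standalone sketch of that round-up step, with the multiple passed in rather than taken from I40E_REQ_DESCRIPTOR_MULTIPLE (not the driver's code):

    /* Illustrative only: round a validated descriptor count up to the next
     * multiple required by the hardware, as ALIGN() does above. */
    static unsigned int demo_align_desc_count(unsigned int requested,
                                              unsigned int multiple)
    {
            return ((requested + multiple - 1) / multiple) * multiple;
    }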
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
4848
u32 ring = ethtool_get_flow_spec_ring(fsp->ring_cookie);
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
4852
if (ring >= vsi->num_queue_pairs)
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
4861
if (ring >= pf->vf[vf].num_queue_pairs)
drivers/net/ethernet/intel/i40e/i40e_ethtool.c
4866
q_index = ring;
drivers/net/ethernet/intel/i40e/i40e_main.c
11716
struct i40e_ring *ring;
drivers/net/ethernet/intel/i40e/i40e_main.c
11721
ring = kzalloc_objs(struct i40e_ring, qpv);
drivers/net/ethernet/intel/i40e/i40e_main.c
11722
if (!ring)
drivers/net/ethernet/intel/i40e/i40e_main.c
11725
ring->queue_index = i;
drivers/net/ethernet/intel/i40e/i40e_main.c
11726
ring->reg_idx = vsi->base_queue + i;
drivers/net/ethernet/intel/i40e/i40e_main.c
11727
ring->ring_active = false;
drivers/net/ethernet/intel/i40e/i40e_main.c
11728
ring->vsi = vsi;
drivers/net/ethernet/intel/i40e/i40e_main.c
11729
ring->netdev = vsi->netdev;
drivers/net/ethernet/intel/i40e/i40e_main.c
11730
ring->dev = &pf->pdev->dev;
drivers/net/ethernet/intel/i40e/i40e_main.c
11731
ring->count = vsi->num_tx_desc;
drivers/net/ethernet/intel/i40e/i40e_main.c
11732
ring->size = 0;
drivers/net/ethernet/intel/i40e/i40e_main.c
11733
ring->dcb_tc = 0;
drivers/net/ethernet/intel/i40e/i40e_main.c
11735
ring->flags = I40E_TXR_FLAGS_WB_ON_ITR;
drivers/net/ethernet/intel/i40e/i40e_main.c
11736
ring->itr_setting = pf->tx_itr_default;
drivers/net/ethernet/intel/i40e/i40e_main.c
11737
WRITE_ONCE(vsi->tx_rings[i], ring++);
drivers/net/ethernet/intel/i40e/i40e_main.c
11742
ring->queue_index = vsi->alloc_queue_pairs + i;
drivers/net/ethernet/intel/i40e/i40e_main.c
11743
ring->reg_idx = vsi->base_queue + ring->queue_index;
drivers/net/ethernet/intel/i40e/i40e_main.c
11744
ring->ring_active = false;
drivers/net/ethernet/intel/i40e/i40e_main.c
11745
ring->vsi = vsi;
drivers/net/ethernet/intel/i40e/i40e_main.c
11746
ring->netdev = NULL;
drivers/net/ethernet/intel/i40e/i40e_main.c
11747
ring->dev = &pf->pdev->dev;
drivers/net/ethernet/intel/i40e/i40e_main.c
11748
ring->count = vsi->num_tx_desc;
drivers/net/ethernet/intel/i40e/i40e_main.c
11749
ring->size = 0;
drivers/net/ethernet/intel/i40e/i40e_main.c
11750
ring->dcb_tc = 0;
drivers/net/ethernet/intel/i40e/i40e_main.c
11752
ring->flags = I40E_TXR_FLAGS_WB_ON_ITR;
drivers/net/ethernet/intel/i40e/i40e_main.c
11753
set_ring_xdp(ring);
drivers/net/ethernet/intel/i40e/i40e_main.c
11754
ring->itr_setting = pf->tx_itr_default;
drivers/net/ethernet/intel/i40e/i40e_main.c
11755
WRITE_ONCE(vsi->xdp_rings[i], ring++);
drivers/net/ethernet/intel/i40e/i40e_main.c
11758
ring->queue_index = i;
drivers/net/ethernet/intel/i40e/i40e_main.c
11759
ring->reg_idx = vsi->base_queue + i;
drivers/net/ethernet/intel/i40e/i40e_main.c
11760
ring->ring_active = false;
drivers/net/ethernet/intel/i40e/i40e_main.c
11761
ring->vsi = vsi;
drivers/net/ethernet/intel/i40e/i40e_main.c
11762
ring->netdev = vsi->netdev;
drivers/net/ethernet/intel/i40e/i40e_main.c
11763
ring->dev = &pf->pdev->dev;
drivers/net/ethernet/intel/i40e/i40e_main.c
11764
ring->count = vsi->num_rx_desc;
drivers/net/ethernet/intel/i40e/i40e_main.c
11765
ring->size = 0;
drivers/net/ethernet/intel/i40e/i40e_main.c
11766
ring->dcb_tc = 0;
drivers/net/ethernet/intel/i40e/i40e_main.c
11767
ring->itr_setting = pf->rx_itr_default;
drivers/net/ethernet/intel/i40e/i40e_main.c
11768
WRITE_ONCE(vsi->rx_rings[i], ring);
drivers/net/ethernet/intel/i40e/i40e_main.c
13390
if (q_vector->rx.ring || q_vector->tx.ring) {
drivers/net/ethernet/intel/i40e/i40e_main.c
3399
static void i40e_config_xps_tx_ring(struct i40e_ring *ring)
drivers/net/ethernet/intel/i40e/i40e_main.c
3403
if (!ring->q_vector || !ring->netdev || ring->ch)
drivers/net/ethernet/intel/i40e/i40e_main.c
3407
if (test_and_set_bit(__I40E_TX_XPS_INIT_DONE, ring->state))
drivers/net/ethernet/intel/i40e/i40e_main.c
3410
cpu = cpumask_local_spread(ring->q_vector->v_idx, -1);
drivers/net/ethernet/intel/i40e/i40e_main.c
3411
netif_set_xps_queue(ring->netdev, get_cpu_mask(cpu),
drivers/net/ethernet/intel/i40e/i40e_main.c
3412
ring->queue_index);
drivers/net/ethernet/intel/i40e/i40e_main.c
3421
static struct xsk_buff_pool *i40e_xsk_pool(struct i40e_ring *ring)
drivers/net/ethernet/intel/i40e/i40e_main.c
3423
bool xdp_on = i40e_enabled_xdp_vsi(ring->vsi);
drivers/net/ethernet/intel/i40e/i40e_main.c
3424
int qid = ring->queue_index;
drivers/net/ethernet/intel/i40e/i40e_main.c
3426
if (ring_is_xdp(ring))
drivers/net/ethernet/intel/i40e/i40e_main.c
3427
qid -= ring->vsi->alloc_queue_pairs;
drivers/net/ethernet/intel/i40e/i40e_main.c
3429
if (!xdp_on || !test_bit(qid, ring->vsi->af_xdp_zc_qps))
drivers/net/ethernet/intel/i40e/i40e_main.c
3432
return xsk_get_pool_from_qid(ring->vsi->netdev, qid);
drivers/net/ethernet/intel/i40e/i40e_main.c
3441
static int i40e_configure_tx_ring(struct i40e_ring *ring)
drivers/net/ethernet/intel/i40e/i40e_main.c
3443
struct i40e_vsi *vsi = ring->vsi;
drivers/net/ethernet/intel/i40e/i40e_main.c
3444
u16 pf_q = vsi->base_queue + ring->queue_index;
drivers/net/ethernet/intel/i40e/i40e_main.c
3450
if (ring_is_xdp(ring))
drivers/net/ethernet/intel/i40e/i40e_main.c
3451
ring->xsk_pool = i40e_xsk_pool(ring);
drivers/net/ethernet/intel/i40e/i40e_main.c
3455
ring->atr_sample_rate = I40E_DEFAULT_ATR_SAMPLE_RATE;
drivers/net/ethernet/intel/i40e/i40e_main.c
3456
ring->atr_count = 0;
drivers/net/ethernet/intel/i40e/i40e_main.c
3458
ring->atr_sample_rate = 0;
drivers/net/ethernet/intel/i40e/i40e_main.c
3462
i40e_config_xps_tx_ring(ring);
drivers/net/ethernet/intel/i40e/i40e_main.c
3468
tx_ctx.base = (ring->dma / 128);
drivers/net/ethernet/intel/i40e/i40e_main.c
3469
tx_ctx.qlen = ring->count;
drivers/net/ethernet/intel/i40e/i40e_main.c
3478
tx_ctx.head_wb_addr = ring->dma +
drivers/net/ethernet/intel/i40e/i40e_main.c
3479
(ring->count * sizeof(struct i40e_tx_desc));
drivers/net/ethernet/intel/i40e/i40e_main.c
3492
if (ring->ch)
drivers/net/ethernet/intel/i40e/i40e_main.c
3494
le16_to_cpu(ring->ch->info.qs_handle[ring->dcb_tc]);
drivers/net/ethernet/intel/i40e/i40e_main.c
3497
tx_ctx.rdylist = le16_to_cpu(vsi->info.qs_handle[ring->dcb_tc]);
drivers/net/ethernet/intel/i40e/i40e_main.c
3506
ring->queue_index, pf_q, err);
drivers/net/ethernet/intel/i40e/i40e_main.c
3515
ring->queue_index, pf_q, err);
drivers/net/ethernet/intel/i40e/i40e_main.c
3520
if (ring->ch) {
drivers/net/ethernet/intel/i40e/i40e_main.c
3521
if (ring->ch->type == I40E_VSI_VMDQ2)
drivers/net/ethernet/intel/i40e/i40e_main.c
3527
ring->ch->vsi_number);
drivers/net/ethernet/intel/i40e/i40e_main.c
3543
ring->tail = hw->hw_addr + I40E_QTX_TAIL(pf_q);
drivers/net/ethernet/intel/i40e/i40e_main.c
3565
static int i40e_configure_rx_ring(struct i40e_ring *ring)
drivers/net/ethernet/intel/i40e/i40e_main.c
3567
struct i40e_vsi *vsi = ring->vsi;
drivers/net/ethernet/intel/i40e/i40e_main.c
3569
u16 pf_q = vsi->base_queue + ring->queue_index;
drivers/net/ethernet/intel/i40e/i40e_main.c
3576
bitmap_zero(ring->state, __I40E_RING_STATE_NBITS);
drivers/net/ethernet/intel/i40e/i40e_main.c
3581
ring->rx_buf_len = vsi->rx_buf_len;
drivers/net/ethernet/intel/i40e/i40e_main.c
3582
xdp_frame_sz = i40e_rx_pg_size(ring) / 2;
drivers/net/ethernet/intel/i40e/i40e_main.c
3585
if (ring->vsi->type != I40E_VSI_MAIN)
drivers/net/ethernet/intel/i40e/i40e_main.c
3588
ring->xsk_pool = i40e_xsk_pool(ring);
drivers/net/ethernet/intel/i40e/i40e_main.c
3589
if (ring->xsk_pool) {
drivers/net/ethernet/intel/i40e/i40e_main.c
3590
xdp_frame_sz = xsk_pool_get_rx_frag_step(ring->xsk_pool);
drivers/net/ethernet/intel/i40e/i40e_main.c
3591
ring->rx_buf_len = xsk_pool_get_rx_frame_size(ring->xsk_pool);
drivers/net/ethernet/intel/i40e/i40e_main.c
3592
err = __xdp_rxq_info_reg(&ring->xdp_rxq, ring->netdev,
drivers/net/ethernet/intel/i40e/i40e_main.c
3593
ring->queue_index,
drivers/net/ethernet/intel/i40e/i40e_main.c
3594
ring->q_vector->napi.napi_id,
drivers/net/ethernet/intel/i40e/i40e_main.c
3598
err = xdp_rxq_info_reg_mem_model(&ring->xdp_rxq,
drivers/net/ethernet/intel/i40e/i40e_main.c
3605
ring->queue_index);
drivers/net/ethernet/intel/i40e/i40e_main.c
3608
err = __xdp_rxq_info_reg(&ring->xdp_rxq, ring->netdev,
drivers/net/ethernet/intel/i40e/i40e_main.c
3609
ring->queue_index,
drivers/net/ethernet/intel/i40e/i40e_main.c
3610
ring->q_vector->napi.napi_id,
drivers/net/ethernet/intel/i40e/i40e_main.c
3614
err = xdp_rxq_info_reg_mem_model(&ring->xdp_rxq,
drivers/net/ethernet/intel/i40e/i40e_main.c
3622
xdp_init_buff(&ring->xdp, xdp_frame_sz, &ring->xdp_rxq);
drivers/net/ethernet/intel/i40e/i40e_main.c
3624
rx_ctx.dbuff = DIV_ROUND_UP(ring->rx_buf_len,
drivers/net/ethernet/intel/i40e/i40e_main.c
3627
rx_ctx.base = (ring->dma / 128);
drivers/net/ethernet/intel/i40e/i40e_main.c
3628
rx_ctx.qlen = ring->count;
drivers/net/ethernet/intel/i40e/i40e_main.c
3638
rx_ctx.rxmax = min_t(u16, vsi->max_frame, chain_len * ring->rx_buf_len);
drivers/net/ethernet/intel/i40e/i40e_main.c
3655
ring->queue_index, pf_q, err);
drivers/net/ethernet/intel/i40e/i40e_main.c
3665
ring->queue_index, pf_q, err);
drivers/net/ethernet/intel/i40e/i40e_main.c
3678
clear_ring_build_skb_enabled(ring);
drivers/net/ethernet/intel/i40e/i40e_main.c
3680
set_ring_build_skb_enabled(ring);
drivers/net/ethernet/intel/i40e/i40e_main.c
3683
ring->rx_offset = i40e_rx_offset(ring);
drivers/net/ethernet/intel/i40e/i40e_main.c
3686
ring->tail = hw->hw_addr + I40E_QRX_TAIL(pf_q);
drivers/net/ethernet/intel/i40e/i40e_main.c
3687
writel(0, ring->tail);
drivers/net/ethernet/intel/i40e/i40e_main.c
3689
if (ring->xsk_pool) {
drivers/net/ethernet/intel/i40e/i40e_main.c
3690
xsk_pool_set_rxq_info(ring->xsk_pool, &ring->xdp_rxq);
drivers/net/ethernet/intel/i40e/i40e_main.c
3691
ok = i40e_alloc_rx_buffers_zc(ring, I40E_DESC_UNUSED(ring));
drivers/net/ethernet/intel/i40e/i40e_main.c
3693
ok = !i40e_alloc_rx_buffers(ring, I40E_DESC_UNUSED(ring));
drivers/net/ethernet/intel/i40e/i40e_main.c
3701
ring->xsk_pool ? "AF_XDP ZC enabled " : "",
drivers/net/ethernet/intel/i40e/i40e_main.c
3702
ring->queue_index, pf_q);
drivers/net/ethernet/intel/i40e/i40e_main.c
3707
if (ring->vsi->type == I40E_VSI_MAIN)
drivers/net/ethernet/intel/i40e/i40e_main.c
3708
xdp_rxq_info_unreg(&ring->xdp_rxq);
drivers/net/ethernet/intel/i40e/i40e_main.c
4084
if (!q_vector->tx.ring && !q_vector->rx.ring)
drivers/net/ethernet/intel/i40e/i40e_main.c
4142
if (q_vector->tx.ring && q_vector->rx.ring) {
drivers/net/ethernet/intel/i40e/i40e_main.c
4146
} else if (q_vector->rx.ring) {
drivers/net/ethernet/intel/i40e/i40e_main.c
4149
} else if (q_vector->tx.ring) {
drivers/net/ethernet/intel/i40e/i40e_main.c
444
static void i40e_get_netdev_stats_struct_tx(struct i40e_ring *ring,
drivers/net/ethernet/intel/i40e/i40e_main.c
451
start = u64_stats_fetch_begin(&ring->syncp);
drivers/net/ethernet/intel/i40e/i40e_main.c
4515
if (!q_vector->tx.ring)
drivers/net/ethernet/intel/i40e/i40e_main.c
4518
vsi = q_vector->tx.ring->vsi;
drivers/net/ethernet/intel/i40e/i40e_main.c
4519
i40e_clean_fdir_tx_irq(q_vector->tx.ring, vsi->work_limit);
drivers/net/ethernet/intel/i40e/i40e_main.c
452
packets = ring->stats.packets;
drivers/net/ethernet/intel/i40e/i40e_main.c
453
bytes = ring->stats.bytes;
drivers/net/ethernet/intel/i40e/i40e_main.c
4537
tx_ring->next = q_vector->tx.ring;
drivers/net/ethernet/intel/i40e/i40e_main.c
4538
q_vector->tx.ring = tx_ring;
drivers/net/ethernet/intel/i40e/i40e_main.c
454
} while (u64_stats_fetch_retry(&ring->syncp, start));
drivers/net/ethernet/intel/i40e/i40e_main.c
4546
xdp_ring->next = q_vector->tx.ring;
drivers/net/ethernet/intel/i40e/i40e_main.c
4547
q_vector->tx.ring = xdp_ring;
drivers/net/ethernet/intel/i40e/i40e_main.c
4552
rx_ring->next = q_vector->rx.ring;
drivers/net/ethernet/intel/i40e/i40e_main.c
4553
q_vector->rx.ring = rx_ring;
drivers/net/ethernet/intel/i40e/i40e_main.c
4591
q_vector->rx.ring = NULL;
drivers/net/ethernet/intel/i40e/i40e_main.c
4592
q_vector->tx.ring = NULL;
drivers/net/ethernet/intel/i40e/i40e_main.c
474
struct i40e_ring *ring;
drivers/net/ethernet/intel/i40e/i40e_main.c
488
ring = READ_ONCE(vsi->tx_rings[i]);
drivers/net/ethernet/intel/i40e/i40e_main.c
489
if (!ring)
drivers/net/ethernet/intel/i40e/i40e_main.c
491
i40e_get_netdev_stats_struct_tx(ring, stats);
drivers/net/ethernet/intel/i40e/i40e_main.c
494
ring = READ_ONCE(vsi->xdp_rings[i]);
drivers/net/ethernet/intel/i40e/i40e_main.c
495
if (!ring)
drivers/net/ethernet/intel/i40e/i40e_main.c
497
i40e_get_netdev_stats_struct_tx(ring, stats);
drivers/net/ethernet/intel/i40e/i40e_main.c
500
ring = READ_ONCE(vsi->rx_rings[i]);
drivers/net/ethernet/intel/i40e/i40e_main.c
501
if (!ring)
drivers/net/ethernet/intel/i40e/i40e_main.c
504
start = u64_stats_fetch_begin(&ring->syncp);
drivers/net/ethernet/intel/i40e/i40e_main.c
505
packets = ring->stats.packets;
drivers/net/ethernet/intel/i40e/i40e_main.c
506
bytes = ring->stats.bytes;
drivers/net/ethernet/intel/i40e/i40e_main.c
507
} while (u64_stats_fetch_retry(&ring->syncp, start));
drivers/net/ethernet/intel/i40e/i40e_main.c
5100
struct i40e_ring *ring;
drivers/net/ethernet/intel/i40e/i40e_main.c
5106
i40e_for_each_ring(ring, q_vector->tx)
drivers/net/ethernet/intel/i40e/i40e_main.c
5107
ring->q_vector = NULL;
drivers/net/ethernet/intel/i40e/i40e_main.c
5109
i40e_for_each_ring(ring, q_vector->rx)
drivers/net/ethernet/intel/i40e/i40e_main.c
5110
ring->q_vector = NULL;
drivers/net/ethernet/intel/i40e/i40e_main.c
5196
if (q_vector->rx.ring || q_vector->tx.ring)
drivers/net/ethernet/intel/i40e/i40e_main.c
5215
if (q_vector->rx.ring || q_vector->tx.ring)
drivers/net/ethernet/intel/i40e/i40e_trace.h
111
TP_PROTO(struct i40e_ring *ring,
drivers/net/ethernet/intel/i40e/i40e_trace.h
115
TP_ARGS(ring, desc, buf),
drivers/net/ethernet/intel/i40e/i40e_trace.h
125
__field(void*, ring)
drivers/net/ethernet/intel/i40e/i40e_trace.h
128
__string(devname, ring->netdev->name)
drivers/net/ethernet/intel/i40e/i40e_trace.h
132
__entry->ring = ring;
drivers/net/ethernet/intel/i40e/i40e_trace.h
140
__get_str(devname), __entry->ring,
drivers/net/ethernet/intel/i40e/i40e_trace.h
146
TP_PROTO(struct i40e_ring *ring,
drivers/net/ethernet/intel/i40e/i40e_trace.h
150
TP_ARGS(ring, desc, buf));
drivers/net/ethernet/intel/i40e/i40e_trace.h
154
TP_PROTO(struct i40e_ring *ring,
drivers/net/ethernet/intel/i40e/i40e_trace.h
158
TP_ARGS(ring, desc, buf));
drivers/net/ethernet/intel/i40e/i40e_trace.h
163
TP_PROTO(struct i40e_ring *ring,
drivers/net/ethernet/intel/i40e/i40e_trace.h
167
TP_ARGS(ring, desc, xdp),
drivers/net/ethernet/intel/i40e/i40e_trace.h
170
__field(void*, ring)
drivers/net/ethernet/intel/i40e/i40e_trace.h
173
__string(devname, ring->netdev->name)
drivers/net/ethernet/intel/i40e/i40e_trace.h
177
__entry->ring = ring;
drivers/net/ethernet/intel/i40e/i40e_trace.h
185
__get_str(devname), __entry->ring,
drivers/net/ethernet/intel/i40e/i40e_trace.h
191
TP_PROTO(struct i40e_ring *ring,
drivers/net/ethernet/intel/i40e/i40e_trace.h
195
TP_ARGS(ring, desc, xdp));
drivers/net/ethernet/intel/i40e/i40e_trace.h
199
TP_PROTO(struct i40e_ring *ring,
drivers/net/ethernet/intel/i40e/i40e_trace.h
203
TP_ARGS(ring, desc, xdp));
drivers/net/ethernet/intel/i40e/i40e_trace.h
209
struct i40e_ring *ring),
drivers/net/ethernet/intel/i40e/i40e_trace.h
211
TP_ARGS(skb, ring),
drivers/net/ethernet/intel/i40e/i40e_trace.h
215
__field(void*, ring)
drivers/net/ethernet/intel/i40e/i40e_trace.h
216
__string(devname, ring->netdev->name)
drivers/net/ethernet/intel/i40e/i40e_trace.h
221
__entry->ring = ring;
drivers/net/ethernet/intel/i40e/i40e_trace.h
228
__entry->ring)
drivers/net/ethernet/intel/i40e/i40e_trace.h
234
struct i40e_ring *ring),
drivers/net/ethernet/intel/i40e/i40e_trace.h
236
TP_ARGS(skb, ring));
drivers/net/ethernet/intel/i40e/i40e_trace.h
241
struct i40e_ring *ring),
drivers/net/ethernet/intel/i40e/i40e_trace.h
243
TP_ARGS(skb, ring));
drivers/net/ethernet/intel/i40e/i40e_txrx.c
1060
u16 flags = q_vector->tx.ring[0].flags;
drivers/net/ethernet/intel/i40e/i40e_txrx.c
1166
if (!rc->ring || !ITR_IS_DYNAMIC(rc->ring->itr_setting))
drivers/net/ethernet/intel/i40e/i40e_txrx.c
1814
static inline void i40e_rx_hash(struct i40e_ring *ring,
drivers/net/ethernet/intel/i40e/i40e_txrx.c
1826
if (!libeth_rx_pt_has_hash(ring->netdev, decoded))
drivers/net/ethernet/intel/i40e/i40e_txrx.c
2748
struct i40e_ring *ring;
drivers/net/ethernet/intel/i40e/i40e_txrx.c
2766
i40e_for_each_ring(ring, q_vector->tx) {
drivers/net/ethernet/intel/i40e/i40e_txrx.c
2767
bool wd = ring->xsk_pool ?
drivers/net/ethernet/intel/i40e/i40e_txrx.c
2768
i40e_clean_xdp_tx_irq(vsi, ring) :
drivers/net/ethernet/intel/i40e/i40e_txrx.c
2769
i40e_clean_tx_irq(vsi, ring, budget, &tx_cleaned);
drivers/net/ethernet/intel/i40e/i40e_txrx.c
2775
arm_wb |= ring->arm_wb;
drivers/net/ethernet/intel/i40e/i40e_txrx.c
2776
ring->arm_wb = false;
drivers/net/ethernet/intel/i40e/i40e_txrx.c
2794
i40e_for_each_ring(ring, q_vector->rx) {
drivers/net/ethernet/intel/i40e/i40e_txrx.c
2795
int cleaned = ring->xsk_pool ?
drivers/net/ethernet/intel/i40e/i40e_txrx.c
2796
i40e_clean_rx_irq_zc(ring, budget_per_ring) :
drivers/net/ethernet/intel/i40e/i40e_txrx.c
2797
i40e_clean_rx_irq(ring, budget_per_ring, &rx_cleaned);
drivers/net/ethernet/intel/i40e/i40e_txrx.c
2832
q_vector->tx.ring[0].tx_stats.tx_force_wb++;
drivers/net/ethernet/intel/i40e/i40e_txrx.c
2838
if (q_vector->tx.ring[0].flags & I40E_TXR_FLAGS_WB_ON_ITR)
drivers/net/ethernet/intel/i40e/i40e_txrx.c
751
static void i40e_unmap_and_free_tx_resource(struct i40e_ring *ring,
drivers/net/ethernet/intel/i40e/i40e_txrx.c
757
else if (ring_is_xdp(ring))
drivers/net/ethernet/intel/i40e/i40e_txrx.c
762
dma_unmap_single(ring->dev,
drivers/net/ethernet/intel/i40e/i40e_txrx.c
767
dma_unmap_page(ring->dev,
drivers/net/ethernet/intel/i40e/i40e_txrx.c
844
u32 i40e_get_tx_pending(struct i40e_ring *ring, bool in_sw)
drivers/net/ethernet/intel/i40e/i40e_txrx.c
849
head = i40e_get_head(ring);
drivers/net/ethernet/intel/i40e/i40e_txrx.c
850
tail = readl(ring->tail);
drivers/net/ethernet/intel/i40e/i40e_txrx.c
852
head = ring->next_to_clean;
drivers/net/ethernet/intel/i40e/i40e_txrx.c
853
tail = ring->next_to_use;
drivers/net/ethernet/intel/i40e/i40e_txrx.c
858
tail - head : (tail + ring->count - head);
drivers/net/ethernet/intel/i40e/i40e_txrx.h
405
static inline bool ring_uses_build_skb(struct i40e_ring *ring)
drivers/net/ethernet/intel/i40e/i40e_txrx.h
407
return !!(ring->flags & I40E_RXR_FLAGS_BUILD_SKB_ENABLED);
drivers/net/ethernet/intel/i40e/i40e_txrx.h
410
static inline void set_ring_build_skb_enabled(struct i40e_ring *ring)
drivers/net/ethernet/intel/i40e/i40e_txrx.h
412
ring->flags |= I40E_RXR_FLAGS_BUILD_SKB_ENABLED;
drivers/net/ethernet/intel/i40e/i40e_txrx.h
415
static inline void clear_ring_build_skb_enabled(struct i40e_ring *ring)
drivers/net/ethernet/intel/i40e/i40e_txrx.h
417
ring->flags &= ~I40E_RXR_FLAGS_BUILD_SKB_ENABLED;
drivers/net/ethernet/intel/i40e/i40e_txrx.h
420
static inline bool ring_is_xdp(struct i40e_ring *ring)
drivers/net/ethernet/intel/i40e/i40e_txrx.h
422
return !!(ring->flags & I40E_TXR_FLAGS_XDP);
drivers/net/ethernet/intel/i40e/i40e_txrx.h
425
static inline void set_ring_xdp(struct i40e_ring *ring)
drivers/net/ethernet/intel/i40e/i40e_txrx.h
427
ring->flags |= I40E_TXR_FLAGS_XDP;
drivers/net/ethernet/intel/i40e/i40e_txrx.h
437
struct i40e_ring *ring; /* pointer to linked list of ring(s) */
drivers/net/ethernet/intel/i40e/i40e_txrx.h
448
for (pos = (head).ring; pos != NULL; pos = pos->next)
drivers/net/ethernet/intel/i40e/i40e_txrx.h
450
static inline unsigned int i40e_rx_pg_order(struct i40e_ring *ring)
drivers/net/ethernet/intel/i40e/i40e_txrx.h
453
if (ring->rx_buf_len > (PAGE_SIZE / 2))
drivers/net/ethernet/intel/i40e/i40e_txrx.h
473
u32 i40e_get_tx_pending(struct i40e_ring *ring, bool in_sw);
drivers/net/ethernet/intel/i40e/i40e_txrx.h
562
static inline struct netdev_queue *txring_txq(const struct i40e_ring *ring)
drivers/net/ethernet/intel/i40e/i40e_txrx.h
564
return netdev_get_tx_queue(ring->netdev, ring->queue_index);
drivers/net/ethernet/intel/i40e/i40e_xsk.c
696
struct i40e_ring *ring;
drivers/net/ethernet/intel/i40e/i40e_xsk.c
713
ring = vsi->xdp_rings[queue_id];
drivers/net/ethernet/intel/i40e/i40e_xsk.c
721
if (!napi_if_scheduled_mark_missed(&ring->q_vector->napi))
drivers/net/ethernet/intel/i40e/i40e_xsk.c
722
i40e_force_wb(vsi, ring->q_vector);
drivers/net/ethernet/intel/iavf/iavf_ethtool.c
162
iavf_add_queue_stats(u64 **data, struct iavf_ring *ring)
drivers/net/ethernet/intel/iavf/iavf_ethtool.c
175
start = !ring ? 0 : u64_stats_fetch_begin(&ring->syncp);
drivers/net/ethernet/intel/iavf/iavf_ethtool.c
177
iavf_add_one_ethtool_stat(&(*data)[i], ring, &stats[i]);
drivers/net/ethernet/intel/iavf/iavf_ethtool.c
178
} while (ring && u64_stats_fetch_retry(&ring->syncp, start));
drivers/net/ethernet/intel/iavf/iavf_ethtool.c
465
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/iavf/iavf_ethtool.c
471
ring->rx_max_pending = IAVF_MAX_RXD;
drivers/net/ethernet/intel/iavf/iavf_ethtool.c
472
ring->tx_max_pending = IAVF_MAX_TXD;
drivers/net/ethernet/intel/iavf/iavf_ethtool.c
473
ring->rx_pending = adapter->rx_desc_count;
drivers/net/ethernet/intel/iavf/iavf_ethtool.c
474
ring->tx_pending = adapter->tx_desc_count;
drivers/net/ethernet/intel/iavf/iavf_ethtool.c
488
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/iavf/iavf_ethtool.c
495
if ((ring->rx_mini_pending) || (ring->rx_jumbo_pending))
drivers/net/ethernet/intel/iavf/iavf_ethtool.c
498
if (ring->tx_pending > IAVF_MAX_TXD ||
drivers/net/ethernet/intel/iavf/iavf_ethtool.c
499
ring->tx_pending < IAVF_MIN_TXD ||
drivers/net/ethernet/intel/iavf/iavf_ethtool.c
500
ring->rx_pending > IAVF_MAX_RXD ||
drivers/net/ethernet/intel/iavf/iavf_ethtool.c
501
ring->rx_pending < IAVF_MIN_RXD) {
drivers/net/ethernet/intel/iavf/iavf_ethtool.c
503
ring->tx_pending, ring->rx_pending, IAVF_MIN_TXD,
drivers/net/ethernet/intel/iavf/iavf_ethtool.c
508
new_tx_count = ALIGN(ring->tx_pending, IAVF_REQ_DESCRIPTOR_MULTIPLE);
drivers/net/ethernet/intel/iavf/iavf_ethtool.c
509
if (new_tx_count != ring->tx_pending)
drivers/net/ethernet/intel/iavf/iavf_ethtool.c
513
new_rx_count = ALIGN(ring->rx_pending, IAVF_REQ_DESCRIPTOR_MULTIPLE);
drivers/net/ethernet/intel/iavf/iavf_ethtool.c
514
if (new_rx_count != ring->rx_pending)
drivers/net/ethernet/intel/iavf/iavf_main.c
1220
struct iavf_ring *ring = &adapter->rx_rings[i];
drivers/net/ethernet/intel/iavf/iavf_main.c
1222
iavf_alloc_rx_buffers(ring, IAVF_DESC_UNUSED(ring));
drivers/net/ethernet/intel/iavf/iavf_main.c
414
if (!q_vector->tx.ring && !q_vector->rx.ring)
drivers/net/ethernet/intel/iavf/iavf_main.c
436
rx_ring->next = q_vector->rx.ring;
drivers/net/ethernet/intel/iavf/iavf_main.c
438
q_vector->rx.ring = rx_ring;
drivers/net/ethernet/intel/iavf/iavf_main.c
462
tx_ring->next = q_vector->tx.ring;
drivers/net/ethernet/intel/iavf/iavf_main.c
464
q_vector->tx.ring = tx_ring;
drivers/net/ethernet/intel/iavf/iavf_main.c
530
if (q_vector->tx.ring && q_vector->rx.ring) {
drivers/net/ethernet/intel/iavf/iavf_main.c
534
} else if (q_vector->rx.ring) {
drivers/net/ethernet/intel/iavf/iavf_main.c
537
} else if (q_vector->tx.ring) {
drivers/net/ethernet/intel/iavf/iavf_trace.h
101
TP_ARGS(ring, desc, buf));
drivers/net/ethernet/intel/iavf/iavf_trace.h
105
TP_PROTO(struct iavf_ring *ring,
drivers/net/ethernet/intel/iavf/iavf_trace.h
109
TP_ARGS(ring, desc, buf));
drivers/net/ethernet/intel/iavf/iavf_trace.h
114
TP_PROTO(struct iavf_ring *ring,
drivers/net/ethernet/intel/iavf/iavf_trace.h
118
TP_ARGS(ring, desc, skb),
drivers/net/ethernet/intel/iavf/iavf_trace.h
121
__field(void*, ring)
drivers/net/ethernet/intel/iavf/iavf_trace.h
124
__string(devname, ring->netdev->name)
drivers/net/ethernet/intel/iavf/iavf_trace.h
128
__entry->ring = ring;
drivers/net/ethernet/intel/iavf/iavf_trace.h
136
__get_str(devname), __entry->ring,
drivers/net/ethernet/intel/iavf/iavf_trace.h
142
TP_PROTO(struct iavf_ring *ring,
drivers/net/ethernet/intel/iavf/iavf_trace.h
146
TP_ARGS(ring, desc, skb));
drivers/net/ethernet/intel/iavf/iavf_trace.h
150
TP_PROTO(struct iavf_ring *ring,
drivers/net/ethernet/intel/iavf/iavf_trace.h
154
TP_ARGS(ring, desc, skb));
drivers/net/ethernet/intel/iavf/iavf_trace.h
160
struct iavf_ring *ring),
drivers/net/ethernet/intel/iavf/iavf_trace.h
162
TP_ARGS(skb, ring),
drivers/net/ethernet/intel/iavf/iavf_trace.h
166
__field(void*, ring)
drivers/net/ethernet/intel/iavf/iavf_trace.h
167
__string(devname, ring->netdev->name)
drivers/net/ethernet/intel/iavf/iavf_trace.h
172
__entry->ring = ring;
drivers/net/ethernet/intel/iavf/iavf_trace.h
179
__entry->ring)
drivers/net/ethernet/intel/iavf/iavf_trace.h
185
struct iavf_ring *ring),
drivers/net/ethernet/intel/iavf/iavf_trace.h
187
TP_ARGS(skb, ring));
drivers/net/ethernet/intel/iavf/iavf_trace.h
192
struct iavf_ring *ring),
drivers/net/ethernet/intel/iavf/iavf_trace.h
194
TP_ARGS(skb, ring));
drivers/net/ethernet/intel/iavf/iavf_trace.h
62
TP_PROTO(struct iavf_ring *ring,
drivers/net/ethernet/intel/iavf/iavf_trace.h
66
TP_ARGS(ring, desc, buf),
drivers/net/ethernet/intel/iavf/iavf_trace.h
76
__field(void*, ring)
drivers/net/ethernet/intel/iavf/iavf_trace.h
79
__string(devname, ring->netdev->name)
drivers/net/ethernet/intel/iavf/iavf_trace.h
83
__entry->ring = ring;
drivers/net/ethernet/intel/iavf/iavf_trace.h
91
__get_str(devname), __entry->ring,
drivers/net/ethernet/intel/iavf/iavf_trace.h
97
TP_PROTO(struct iavf_ring *ring,
drivers/net/ethernet/intel/iavf/iavf_txrx.c
1037
static void iavf_legacy_rx_hash(const struct iavf_ring *ring, __le64 qw0,
drivers/net/ethernet/intel/iavf/iavf_txrx.c
1044
if (!libeth_rx_pt_has_hash(ring->netdev, decoded_pt))
drivers/net/ethernet/intel/iavf/iavf_txrx.c
1063
static void iavf_flex_rx_hash(const struct iavf_ring *ring, __le64 qw1,
drivers/net/ethernet/intel/iavf/iavf_txrx.c
1070
if (!libeth_rx_pt_has_hash(ring->netdev, decoded_pt))
drivers/net/ethernet/intel/iavf/iavf_txrx.c
135
static u32 iavf_get_tx_pending(struct iavf_ring *ring, bool in_sw)
drivers/net/ethernet/intel/iavf/iavf_txrx.c
142
head = ring->next_to_clean;
drivers/net/ethernet/intel/iavf/iavf_txrx.c
143
tail = ring->next_to_use;
drivers/net/ethernet/intel/iavf/iavf_txrx.c
147
tail - head : (tail + ring->count - head);
drivers/net/ethernet/intel/iavf/iavf_txrx.c
1601
struct iavf_ring *ring;
drivers/net/ethernet/intel/iavf/iavf_txrx.c
1615
iavf_for_each_ring(ring, q_vector->tx) {
drivers/net/ethernet/intel/iavf/iavf_txrx.c
1616
if (!iavf_clean_tx_irq(vsi, ring, budget)) {
drivers/net/ethernet/intel/iavf/iavf_txrx.c
1620
arm_wb |= !!(ring->flags & IAVF_TXR_FLAGS_ARM_WB);
drivers/net/ethernet/intel/iavf/iavf_txrx.c
1621
ring->flags &= ~IAVF_TXR_FLAGS_ARM_WB;
drivers/net/ethernet/intel/iavf/iavf_txrx.c
1633
iavf_for_each_ring(ring, q_vector->rx) {
drivers/net/ethernet/intel/iavf/iavf_txrx.c
1634
int cleaned = iavf_clean_rx_irq(ring, budget_per_ring);
drivers/net/ethernet/intel/iavf/iavf_txrx.c
1666
q_vector->tx.ring[0].tx_stats.tx_force_wb++;
drivers/net/ethernet/intel/iavf/iavf_txrx.c
378
u16 flags = q_vector->tx.ring[0].flags;
drivers/net/ethernet/intel/iavf/iavf_txrx.c
479
if (!rc->ring || !ITR_IS_DYNAMIC(rc->ring->itr_setting))
drivers/net/ethernet/intel/iavf/iavf_txrx.c
49
static void iavf_unmap_and_free_tx_resource(struct iavf_ring *ring,
drivers/net/ethernet/intel/iavf/iavf_txrx.c
58
dma_unmap_single(ring->dev,
drivers/net/ethernet/intel/iavf/iavf_txrx.c
63
dma_unmap_page(ring->dev,
drivers/net/ethernet/intel/iavf/iavf_txrx.h
299
struct iavf_ring *ring; /* pointer to linked list of ring(s) */
drivers/net/ethernet/intel/iavf/iavf_txrx.h
310
for (pos = (head).ring; pos != NULL; pos = pos->next)
drivers/net/ethernet/intel/iavf/iavf_txrx.h
388
static inline struct netdev_queue *txring_txq(const struct iavf_ring *ring)
drivers/net/ethernet/intel/iavf/iavf_txrx.h
390
return netdev_get_tx_queue(ring->netdev, ring->queue_index);
drivers/net/ethernet/intel/ice/ice.h
754
static inline void ice_set_ring_xdp(struct ice_tx_ring *ring)
drivers/net/ethernet/intel/ice/ice.h
756
ring->flags |= ICE_TX_FLAGS_RING_XDP;
drivers/net/ethernet/intel/ice/ice.h
765
static inline bool ice_is_txtime_ena(const struct ice_tx_ring *ring)
drivers/net/ethernet/intel/ice/ice.h
767
struct ice_vsi *vsi = ring->vsi;
drivers/net/ethernet/intel/ice/ice.h
770
return test_bit(ring->q_index, pf->txtime_txqs);
drivers/net/ethernet/intel/ice/ice.h
779
static inline bool ice_is_txtime_cfg(const struct ice_tx_ring *ring)
drivers/net/ethernet/intel/ice/ice.h
781
return !!(ring->flags & ICE_TX_FLAGS_TXTIME);
drivers/net/ethernet/intel/ice/ice.h
809
static inline void ice_rx_xsk_pool(struct ice_rx_ring *ring)
drivers/net/ethernet/intel/ice/ice.h
811
struct ice_vsi *vsi = ring->vsi;
drivers/net/ethernet/intel/ice/ice.h
812
u16 qid = ring->q_index;
drivers/net/ethernet/intel/ice/ice.h
814
WRITE_ONCE(ring->xsk_pool, ice_get_xp_from_qid(vsi, qid));
drivers/net/ethernet/intel/ice/ice.h
833
struct ice_tx_ring *ring;
drivers/net/ethernet/intel/ice/ice.h
835
ring = vsi->rx_rings[qid]->xdp_ring;
drivers/net/ethernet/intel/ice/ice.h
836
if (!ring)
drivers/net/ethernet/intel/ice/ice.h
839
WRITE_ONCE(ring->xsk_pool, ice_get_xp_from_qid(vsi, qid));
drivers/net/ethernet/intel/ice/ice_base.c
1039
ice_vsi_cfg_txq(const struct ice_vsi *vsi, struct ice_tx_ring *ring,
drivers/net/ethernet/intel/ice/ice_base.c
1045
struct ice_channel *ch = ring->ch;
drivers/net/ethernet/intel/ice/ice_base.c
1053
ice_cfg_xps_tx_ring(ring);
drivers/net/ethernet/intel/ice/ice_base.c
1055
pf_q = ring->reg_idx;
drivers/net/ethernet/intel/ice/ice_base.c
1056
status = ice_setup_tx_ctx(ring, &tlan_ctx, pf_q);
drivers/net/ethernet/intel/ice/ice_base.c
1069
ring->tail = hw->hw_addr + QTX_COMM_DBELL(pf_q);
drivers/net/ethernet/intel/ice/ice_base.c
1072
tc = ring->dcb_tc;
drivers/net/ethernet/intel/ice/ice_base.c
1079
ring->q_handle = ice_calc_txq_handle(vsi, ring, tc);
drivers/net/ethernet/intel/ice/ice_base.c
1088
status = ice_ena_vsi_txq(vsi->port_info, vsi_idx, tc, ring->q_handle,
drivers/net/ethernet/intel/ice/ice_base.c
1102
ring->txq_teid = le32_to_cpu(txq->q_teid);
drivers/net/ethernet/intel/ice/ice_base.c
1104
if (ice_is_txtime_ena(ring)) {
drivers/net/ethernet/intel/ice/ice_base.c
1105
status = ice_alloc_setup_tstamp_ring(ring);
drivers/net/ethernet/intel/ice/ice_base.c
1113
status = ice_cfg_tstamp(ring);
drivers/net/ethernet/intel/ice/ice_base.c
1123
ice_free_tx_tstamp_ring(ring);
drivers/net/ethernet/intel/ice/ice_base.c
1125
ice_dis_vsi_txq(vsi->port_info, vsi_idx, tc, 1, &ring->q_handle,
drivers/net/ethernet/intel/ice/ice_base.c
1126
&ring->reg_idx, &ring->txq_teid, ICE_NO_RESET,
drivers/net/ethernet/intel/ice/ice_base.c
1309
u16 rel_vmvf_num, struct ice_tx_ring *ring,
drivers/net/ethernet/intel/ice/ice_base.c
1319
val = rd32(hw, QINT_TQCTL(ring->reg_idx));
drivers/net/ethernet/intel/ice/ice_base.c
1321
wr32(hw, QINT_TQCTL(ring->reg_idx), val);
drivers/net/ethernet/intel/ice/ice_base.c
1329
q_vector = ring->q_vector;
drivers/net/ethernet/intel/ice/ice_base.c
1366
ice_fill_txq_meta(const struct ice_vsi *vsi, struct ice_tx_ring *ring,
drivers/net/ethernet/intel/ice/ice_base.c
1369
struct ice_channel *ch = ring->ch;
drivers/net/ethernet/intel/ice/ice_base.c
1373
tc = ring->dcb_tc;
drivers/net/ethernet/intel/ice/ice_base.c
1377
txq_meta->q_id = ring->reg_idx;
drivers/net/ethernet/intel/ice/ice_base.c
1378
txq_meta->q_teid = ring->txq_teid;
drivers/net/ethernet/intel/ice/ice_base.c
1379
txq_meta->q_handle = ring->q_handle;
drivers/net/ethernet/intel/ice/ice_base.c
249
ice_calc_txq_handle(const struct ice_vsi *vsi, struct ice_tx_ring *ring, u8 tc)
drivers/net/ethernet/intel/ice/ice_base.c
251
WARN_ONCE(ice_ring_is_xdp(ring) && tc, "XDP ring can't belong to TC other than 0\n");
drivers/net/ethernet/intel/ice/ice_base.c
253
if (ring->ch)
drivers/net/ethernet/intel/ice/ice_base.c
254
return ring->q_index - ring->ch->base_q;
drivers/net/ethernet/intel/ice/ice_base.c
260
return ring->q_index - vsi->tc_cfg.tc_info[tc].qoffset;
drivers/net/ethernet/intel/ice/ice_base.c
270
static void ice_cfg_xps_tx_ring(struct ice_tx_ring *ring)
drivers/net/ethernet/intel/ice/ice_base.c
272
if (!ring->q_vector || !ring->netdev)
drivers/net/ethernet/intel/ice/ice_base.c
276
if (test_and_set_bit(ICE_TX_XPS_INIT_DONE, ring->xps_state))
drivers/net/ethernet/intel/ice/ice_base.c
279
netif_set_xps_queue(ring->netdev,
drivers/net/ethernet/intel/ice/ice_base.c
280
&ring->q_vector->napi.config->affinity_mask,
drivers/net/ethernet/intel/ice/ice_base.c
281
ring->q_index);
drivers/net/ethernet/intel/ice/ice_base.c
293
ice_set_txq_ctx_vmvf(struct ice_tx_ring *ring, u8 *vmvf_type, u16 *vmvf_num)
drivers/net/ethernet/intel/ice/ice_base.c
295
struct ice_vsi *vsi = ring->vsi;
drivers/net/ethernet/intel/ice/ice_base.c
310
if (ring->ch)
drivers/net/ethernet/intel/ice/ice_base.c
343
ice_setup_tx_ctx(struct ice_tx_ring *ring, struct ice_tlan_ctx *tlan_ctx, u16 pf_q)
drivers/net/ethernet/intel/ice/ice_base.c
345
struct ice_vsi *vsi = ring->vsi;
drivers/net/ethernet/intel/ice/ice_base.c
350
tlan_ctx->base = ring->dma >> ICE_TLAN_CTX_BASE_S;
drivers/net/ethernet/intel/ice/ice_base.c
354
tlan_ctx->qlen = ring->count;
drivers/net/ethernet/intel/ice/ice_base.c
356
ice_set_cgd_num(tlan_ctx, ring->dcb_tc);
drivers/net/ethernet/intel/ice/ice_base.c
361
err = ice_set_txq_ctx_vmvf(ring, &tlan_ctx->vmvf_type,
drivers/net/ethernet/intel/ice/ice_base.c
367
if (ring->ch)
drivers/net/ethernet/intel/ice/ice_base.c
368
tlan_ctx->src_vsi = ring->ch->vsi_num;
drivers/net/ethernet/intel/ice/ice_base.c
381
tlan_ctx->quanta_prof_idx = ring->quanta_prof_id;
drivers/net/ethernet/intel/ice/ice_base.c
403
ice_setup_txtime_ctx(const struct ice_tstamp_ring *ring,
drivers/net/ethernet/intel/ice/ice_base.c
406
struct ice_tx_ring *tx_ring = ring->tx_ring;
drivers/net/ethernet/intel/ice/ice_base.c
411
txtime_ctx->base = ring->dma >> ICE_TXTIME_CTX_BASE_S;
drivers/net/ethernet/intel/ice/ice_base.c
414
txtime_ctx->qlen = ring->count;
drivers/net/ethernet/intel/ice/ice_base.c
473
static int ice_setup_rx_ctx(struct ice_rx_ring *ring)
drivers/net/ethernet/intel/ice/ice_base.c
475
struct ice_vsi *vsi = ring->vsi;
drivers/net/ethernet/intel/ice/ice_base.c
485
pf_q = vsi->rxq_map[ring->q_index];
drivers/net/ethernet/intel/ice/ice_base.c
494
rlan_ctx.base = ring->dma >> ICE_RLAN_BASE_S;
drivers/net/ethernet/intel/ice/ice_base.c
496
rlan_ctx.qlen = ring->count;
drivers/net/ethernet/intel/ice/ice_base.c
501
rlan_ctx.dbuf = DIV_ROUND_UP(ring->rx_buf_len,
drivers/net/ethernet/intel/ice/ice_base.c
510
rlan_ctx.crcstrip = !(ring->flags & ICE_RX_FLAGS_CRC_STRIP_DIS);
drivers/net/ethernet/intel/ice/ice_base.c
529
if (ring->hdr_pp) {
drivers/net/ethernet/intel/ice/ice_base.c
530
rlan_ctx.hbuf = ring->rx_hdr_len >> ICE_RLAN_CTX_HBUF_S;
drivers/net/ethernet/intel/ice/ice_base.c
564
ICE_MAX_CHAINED_RX_BUFS * ring->rx_buf_len);
drivers/net/ethernet/intel/ice/ice_base.c
576
ring->flags |= ICE_RX_FLAGS_MULTIDEV;
drivers/net/ethernet/intel/ice/ice_base.c
601
ring->tail = hw->hw_addr + QRX_TAIL(pf_q);
drivers/net/ethernet/intel/ice/ice_base.c
602
writel(0, ring->tail);
drivers/net/ethernet/intel/ice/ice_base.c
660
static int ice_vsi_cfg_rxq(struct ice_rx_ring *ring)
drivers/net/ethernet/intel/ice/ice_base.c
662
struct device *dev = ice_pf_to_dev(ring->vsi->back);
drivers/net/ethernet/intel/ice/ice_base.c
663
u32 num_bufs = ICE_DESC_UNUSED(ring);
drivers/net/ethernet/intel/ice/ice_base.c
666
if (ring->vsi->type == ICE_VSI_PF || ring->vsi->type == ICE_VSI_SF ||
drivers/net/ethernet/intel/ice/ice_base.c
667
ring->vsi->type == ICE_VSI_LB) {
drivers/net/ethernet/intel/ice/ice_base.c
668
ice_rx_xsk_pool(ring);
drivers/net/ethernet/intel/ice/ice_base.c
669
err = ice_realloc_rx_xdp_bufs(ring, ring->xsk_pool);
drivers/net/ethernet/intel/ice/ice_base.c
673
if (ring->xsk_pool) {
drivers/net/ethernet/intel/ice/ice_base.c
675
xsk_pool_get_rx_frag_step(ring->xsk_pool);
drivers/net/ethernet/intel/ice/ice_base.c
676
err = __xdp_rxq_info_reg(&ring->xdp_rxq, ring->netdev,
drivers/net/ethernet/intel/ice/ice_base.c
677
ring->q_index,
drivers/net/ethernet/intel/ice/ice_base.c
678
ring->q_vector->napi.napi_id,
drivers/net/ethernet/intel/ice/ice_base.c
682
err = xdp_rxq_info_reg_mem_model(&ring->xdp_rxq,
drivers/net/ethernet/intel/ice/ice_base.c
687
xsk_pool_set_rxq_info(ring->xsk_pool, &ring->xdp_rxq);
drivers/net/ethernet/intel/ice/ice_base.c
690
ring->q_index);
drivers/net/ethernet/intel/ice/ice_base.c
692
err = ice_rxq_pp_create(ring);
drivers/net/ethernet/intel/ice/ice_base.c
696
err = __xdp_rxq_info_reg(&ring->xdp_rxq, ring->netdev,
drivers/net/ethernet/intel/ice/ice_base.c
697
ring->q_index,
drivers/net/ethernet/intel/ice/ice_base.c
698
ring->q_vector->napi.napi_id,
drivers/net/ethernet/intel/ice/ice_base.c
699
ring->truesize);
drivers/net/ethernet/intel/ice/ice_base.c
703
xdp_rxq_info_attach_page_pool(&ring->xdp_rxq,
drivers/net/ethernet/intel/ice/ice_base.c
704
ring->pp);
drivers/net/ethernet/intel/ice/ice_base.c
708
ring->xdp.data = NULL;
drivers/net/ethernet/intel/ice/ice_base.c
709
err = ice_setup_rx_ctx(ring);
drivers/net/ethernet/intel/ice/ice_base.c
712
ring->q_index, err);
drivers/net/ethernet/intel/ice/ice_base.c
716
if (ring->xsk_pool) {
drivers/net/ethernet/intel/ice/ice_base.c
719
if (!xsk_buff_can_alloc(ring->xsk_pool, num_bufs)) {
drivers/net/ethernet/intel/ice/ice_base.c
721
num_bufs, ring->q_index);
drivers/net/ethernet/intel/ice/ice_base.c
727
ok = ice_alloc_rx_bufs_zc(ring, ring->xsk_pool, num_bufs);
drivers/net/ethernet/intel/ice/ice_base.c
729
u16 pf_q = ring->vsi->rxq_map[ring->q_index];
drivers/net/ethernet/intel/ice/ice_base.c
732
ring->q_index, pf_q);
drivers/net/ethernet/intel/ice/ice_base.c
738
if (ring->vsi->type == ICE_VSI_CTRL)
drivers/net/ethernet/intel/ice/ice_base.c
739
ice_init_ctrl_rx_descs(ring, num_bufs);
drivers/net/ethernet/intel/ice/ice_base.c
741
err = ice_alloc_rx_bufs(ring, num_bufs);
drivers/net/ethernet/intel/ice/ice_base.c
749
ice_rxq_pp_destroy(ring);
drivers/net/ethernet/intel/ice/ice_base.c
770
static void ice_vsi_cfg_frame_size(struct ice_vsi *vsi, struct ice_rx_ring *ring)
drivers/net/ethernet/intel/ice/ice_base.c
792
struct ice_rx_ring *ring = vsi->rx_rings[i];
drivers/net/ethernet/intel/ice/ice_base.c
796
ice_vsi_cfg_frame_size(vsi, ring);
drivers/net/ethernet/intel/ice/ice_base.c
798
err = ice_vsi_cfg_rxq(ring);
drivers/net/ethernet/intel/ice/ice_base.h
30
u16 rel_vmvf_num, struct ice_tx_ring *ring,
drivers/net/ethernet/intel/ice/ice_base.h
33
ice_fill_txq_meta(const struct ice_vsi *vsi, struct ice_tx_ring *ring,
drivers/net/ethernet/intel/ice/ice_controlq.c
132
static void ice_free_cq_ring(struct ice_hw *hw, struct ice_ctl_q_ring *ring)
drivers/net/ethernet/intel/ice/ice_controlq.c
134
dmam_free_coherent(ice_hw_to_dev(hw), ring->desc_buf.size,
drivers/net/ethernet/intel/ice/ice_controlq.c
135
ring->desc_buf.va, ring->desc_buf.pa);
drivers/net/ethernet/intel/ice/ice_controlq.c
136
ring->desc_buf.va = NULL;
drivers/net/ethernet/intel/ice/ice_controlq.c
137
ring->desc_buf.pa = 0;
drivers/net/ethernet/intel/ice/ice_controlq.c
138
ring->desc_buf.size = 0;
drivers/net/ethernet/intel/ice/ice_controlq.c
262
ice_cfg_cq_regs(struct ice_hw *hw, struct ice_ctl_q_ring *ring, u16 num_entries)
drivers/net/ethernet/intel/ice/ice_controlq.c
265
wr32(hw, ring->head, 0);
drivers/net/ethernet/intel/ice/ice_controlq.c
266
wr32(hw, ring->tail, 0);
drivers/net/ethernet/intel/ice/ice_controlq.c
269
wr32(hw, ring->len, (num_entries | ring->len_ena_mask));
drivers/net/ethernet/intel/ice/ice_controlq.c
270
wr32(hw, ring->bal, lower_32_bits(ring->desc_buf.pa));
drivers/net/ethernet/intel/ice/ice_controlq.c
271
wr32(hw, ring->bah, upper_32_bits(ring->desc_buf.pa));
drivers/net/ethernet/intel/ice/ice_controlq.c
274
if (rd32(hw, ring->bal) != lower_32_bits(ring->desc_buf.pa))
drivers/net/ethernet/intel/ice/ice_controlq.c
313
#define ICE_FREE_CQ_BUFS(hw, qi, ring) \
drivers/net/ethernet/intel/ice/ice_controlq.c
316
if ((qi)->ring.r.ring##_bi) { \
drivers/net/ethernet/intel/ice/ice_controlq.c
319
for (i = 0; i < (qi)->num_##ring##_entries; i++) \
drivers/net/ethernet/intel/ice/ice_controlq.c
320
if ((qi)->ring.r.ring##_bi[i].pa) { \
drivers/net/ethernet/intel/ice/ice_controlq.c
322
(qi)->ring.r.ring##_bi[i].size, \
drivers/net/ethernet/intel/ice/ice_controlq.c
323
(qi)->ring.r.ring##_bi[i].va, \
drivers/net/ethernet/intel/ice/ice_controlq.c
324
(qi)->ring.r.ring##_bi[i].pa); \
drivers/net/ethernet/intel/ice/ice_controlq.c
325
(qi)->ring.r.ring##_bi[i].va = NULL;\
drivers/net/ethernet/intel/ice/ice_controlq.c
326
(qi)->ring.r.ring##_bi[i].pa = 0;\
drivers/net/ethernet/intel/ice/ice_controlq.c
327
(qi)->ring.r.ring##_bi[i].size = 0;\
drivers/net/ethernet/intel/ice/ice_controlq.c
331
devm_kfree(ice_hw_to_dev(hw), (qi)->ring.dma_head); \
drivers/net/ethernet/intel/ice/ice_ethtool.c
3162
ice_get_ringparam(struct net_device *netdev, struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/ice/ice_ethtool.c
3171
ring->rx_max_pending = ICE_MAX_NUM_DESC_BY_MAC(hw);
drivers/net/ethernet/intel/ice/ice_ethtool.c
3172
ring->tx_max_pending = ICE_MAX_NUM_DESC_BY_MAC(hw);
drivers/net/ethernet/intel/ice/ice_ethtool.c
3174
ring->rx_pending = vsi->rx_rings[0]->count;
drivers/net/ethernet/intel/ice/ice_ethtool.c
3175
ring->tx_pending = vsi->tx_rings[0]->count;
drivers/net/ethernet/intel/ice/ice_ethtool.c
3177
ring->rx_pending = 0;
drivers/net/ethernet/intel/ice/ice_ethtool.c
3178
ring->tx_pending = 0;
drivers/net/ethernet/intel/ice/ice_ethtool.c
3182
ring->rx_mini_max_pending = 0;
drivers/net/ethernet/intel/ice/ice_ethtool.c
3183
ring->rx_jumbo_max_pending = 0;
drivers/net/ethernet/intel/ice/ice_ethtool.c
3184
ring->rx_mini_pending = 0;
drivers/net/ethernet/intel/ice/ice_ethtool.c
3185
ring->rx_jumbo_pending = 0;
drivers/net/ethernet/intel/ice/ice_ethtool.c
3193
ice_set_ringparam(struct net_device *netdev, struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/ice/ice_ethtool.c
3208
if (ring->tx_pending > ICE_MAX_NUM_DESC_BY_MAC(hw) ||
drivers/net/ethernet/intel/ice/ice_ethtool.c
3209
ring->tx_pending < ICE_MIN_NUM_DESC ||
drivers/net/ethernet/intel/ice/ice_ethtool.c
3210
ring->rx_pending > ICE_MAX_NUM_DESC_BY_MAC(hw) ||
drivers/net/ethernet/intel/ice/ice_ethtool.c
3211
ring->rx_pending < ICE_MIN_NUM_DESC) {
drivers/net/ethernet/intel/ice/ice_ethtool.c
3213
ring->tx_pending, ring->rx_pending,
drivers/net/ethernet/intel/ice/ice_ethtool.c
3223
new_tx_cnt = ALIGN(ring->tx_pending, ICE_REQ_DESC_MULTIPLE);
drivers/net/ethernet/intel/ice/ice_ethtool.c
3224
if (new_tx_cnt != ring->tx_pending)
drivers/net/ethernet/intel/ice/ice_ethtool.c
3227
new_rx_cnt = ALIGN(ring->rx_pending, ICE_REQ_DESC_MULTIPLE);
drivers/net/ethernet/intel/ice/ice_ethtool.c
3228
if (new_rx_cnt != ring->rx_pending)
drivers/net/ethernet/intel/ice/ice_ethtool_fdir.c
1797
ice_update_ring_dest_vsi(struct ice_vsi *vsi, u16 *dest_vsi, u32 *ring)
drivers/net/ethernet/intel/ice/ice_ethtool_fdir.c
1808
if ((*ring < ch->base_q) ||
drivers/net/ethernet/intel/ice/ice_ethtool_fdir.c
1809
(*ring >= (ch->base_q + ch->num_rxq)))
drivers/net/ethernet/intel/ice/ice_ethtool_fdir.c
1816
*ring -= ch->base_q;
drivers/net/ethernet/intel/ice/ice_ethtool_fdir.c
1848
u32 ring = ethtool_get_flow_spec_ring(fsp->ring_cookie);
drivers/net/ethernet/intel/ice/ice_ethtool_fdir.c
1856
if (ring >= vsi->num_rxq)
drivers/net/ethernet/intel/ice/ice_ethtool_fdir.c
1859
orig_q_index = ring;
drivers/net/ethernet/intel/ice/ice_ethtool_fdir.c
1860
ice_update_ring_dest_vsi(vsi, &dest_vsi, &ring);
drivers/net/ethernet/intel/ice/ice_ethtool_fdir.c
1862
q_index = ring;
drivers/net/ethernet/intel/ice/ice_lib.c
1399
struct ice_tx_ring *ring;
drivers/net/ethernet/intel/ice/ice_lib.c
1402
ring = kzalloc_obj(*ring);
drivers/net/ethernet/intel/ice/ice_lib.c
1404
if (!ring)
drivers/net/ethernet/intel/ice/ice_lib.c
1407
ring->q_index = i;
drivers/net/ethernet/intel/ice/ice_lib.c
1408
ring->reg_idx = vsi->txq_map[i];
drivers/net/ethernet/intel/ice/ice_lib.c
1409
ring->vsi = vsi;
drivers/net/ethernet/intel/ice/ice_lib.c
1410
ring->tx_tstamps = &pf->ptp.port.tx;
drivers/net/ethernet/intel/ice/ice_lib.c
1411
ring->dev = dev;
drivers/net/ethernet/intel/ice/ice_lib.c
1412
ring->count = vsi->num_tx_desc;
drivers/net/ethernet/intel/ice/ice_lib.c
1413
ring->txq_teid = ICE_INVAL_TEID;
drivers/net/ethernet/intel/ice/ice_lib.c
1415
ring->flags |= ICE_TX_FLAGS_RING_VLAN_L2TAG2;
drivers/net/ethernet/intel/ice/ice_lib.c
1417
ring->flags |= ICE_TX_FLAGS_RING_VLAN_L2TAG1;
drivers/net/ethernet/intel/ice/ice_lib.c
1418
WRITE_ONCE(vsi->tx_rings[i], ring);
drivers/net/ethernet/intel/ice/ice_lib.c
1423
struct ice_rx_ring *ring;
drivers/net/ethernet/intel/ice/ice_lib.c
1426
ring = kzalloc_obj(*ring);
drivers/net/ethernet/intel/ice/ice_lib.c
1427
if (!ring)
drivers/net/ethernet/intel/ice/ice_lib.c
1430
ring->q_index = i;
drivers/net/ethernet/intel/ice/ice_lib.c
1431
ring->reg_idx = vsi->rxq_map[i];
drivers/net/ethernet/intel/ice/ice_lib.c
1432
ring->vsi = vsi;
drivers/net/ethernet/intel/ice/ice_lib.c
1433
ring->netdev = vsi->netdev;
drivers/net/ethernet/intel/ice/ice_lib.c
1434
ring->count = vsi->num_rx_desc;
drivers/net/ethernet/intel/ice/ice_lib.c
1435
ring->cached_phctime = pf->ptp.cached_phc_time;
drivers/net/ethernet/intel/ice/ice_lib.c
1438
ring->flags |= ICE_RX_FLAGS_RING_GCS;
drivers/net/ethernet/intel/ice/ice_lib.c
1440
WRITE_ONCE(vsi->rx_rings[i], ring);
drivers/net/ethernet/intel/ice/ice_lib.c
3473
void ice_fetch_tx_ring_stats(const struct ice_tx_ring *ring,
drivers/net/ethernet/intel/ice/ice_lib.c
3479
start = u64_stats_fetch_begin(&ring->ring_stats->syncp);
drivers/net/ethernet/intel/ice/ice_lib.c
3480
*pkts = u64_stats_read(&ring->ring_stats->pkts);
drivers/net/ethernet/intel/ice/ice_lib.c
3481
*bytes = u64_stats_read(&ring->ring_stats->bytes);
drivers/net/ethernet/intel/ice/ice_lib.c
3482
} while (u64_stats_fetch_retry(&ring->ring_stats->syncp, start));
drivers/net/ethernet/intel/ice/ice_lib.c
3491
void ice_fetch_rx_ring_stats(const struct ice_rx_ring *ring,
drivers/net/ethernet/intel/ice/ice_lib.c
3497
start = u64_stats_fetch_begin(&ring->ring_stats->syncp);
drivers/net/ethernet/intel/ice/ice_lib.c
3498
*pkts = u64_stats_read(&ring->ring_stats->pkts);
drivers/net/ethernet/intel/ice/ice_lib.c
3499
*bytes = u64_stats_read(&ring->ring_stats->bytes);
drivers/net/ethernet/intel/ice/ice_lib.c
3500
} while (u64_stats_fetch_retry(&ring->ring_stats->syncp, start));
drivers/net/ethernet/intel/ice/ice_lib.c
391
struct ice_tx_ring *ring;
drivers/net/ethernet/intel/ice/ice_lib.c
393
ring = vsi->tx_rings[i];
drivers/net/ethernet/intel/ice/ice_lib.c
406
ring->ring_stats = ring_stats;
drivers/net/ethernet/intel/ice/ice_lib.c
412
struct ice_rx_ring *ring;
drivers/net/ethernet/intel/ice/ice_lib.c
414
ring = vsi->rx_rings[i];
drivers/net/ethernet/intel/ice/ice_lib.c
427
ring->ring_stats = ring_stats;
drivers/net/ethernet/intel/ice/ice_lib.h
91
void ice_update_tx_ring_stats(struct ice_tx_ring *ring, u64 pkts, u64 bytes);
drivers/net/ethernet/intel/ice/ice_lib.h
93
void ice_update_rx_ring_stats(struct ice_rx_ring *ring, u64 pkts, u64 bytes);
drivers/net/ethernet/intel/ice/ice_lib.h
95
void ice_fetch_tx_ring_stats(const struct ice_tx_ring *ring,
drivers/net/ethernet/intel/ice/ice_lib.h
98
void ice_fetch_rx_ring_stats(const struct ice_rx_ring *ring,
drivers/net/ethernet/intel/ice/ice_main.c
102
static u16 ice_get_tx_pending(struct ice_tx_ring *ring)
drivers/net/ethernet/intel/ice/ice_main.c
106
head = ring->next_to_clean;
drivers/net/ethernet/intel/ice/ice_main.c
107
tail = ring->next_to_use;
drivers/net/ethernet/intel/ice/ice_main.c
111
tail - head : (tail + ring->count - head);
drivers/net/ethernet/intel/ice/ice_main.c
2674
struct ice_tx_ring *ring;
drivers/net/ethernet/intel/ice/ice_main.c
2680
ice_for_each_tx_ring(ring, q_vector->tx)
drivers/net/ethernet/intel/ice/ice_main.c
2681
if (ice_ring_is_xdp(ring))
drivers/net/ethernet/intel/ice/ice_main.c
2682
return ring;
drivers/net/ethernet/intel/ice/ice_main.c
2735
struct ice_tx_ring *ring;
drivers/net/ethernet/intel/ice/ice_main.c
2737
ice_for_each_tx_ring(ring, q_vector->tx)
drivers/net/ethernet/intel/ice/ice_main.c
2738
if (!ring->tx_buf || !ice_ring_is_xdp(ring))
drivers/net/ethernet/intel/ice/ice_main.c
2742
q_vector->tx.tx_ring = ring;
drivers/net/ethernet/intel/ice/ice_main.c
6857
static void ice_fetch_u64_tx_stats(struct ice_tx_ring *ring,
drivers/net/ethernet/intel/ice/ice_main.c
6860
struct ice_ring_stats *stats = ring->ring_stats;
drivers/net/ethernet/intel/ice/ice_main.c
6882
static void ice_fetch_u64_rx_stats(struct ice_rx_ring *ring,
drivers/net/ethernet/intel/ice/ice_main.c
6885
struct ice_ring_stats *stats = ring->ring_stats;
drivers/net/ethernet/intel/ice/ice_main.c
6914
struct ice_tx_ring *ring;
drivers/net/ethernet/intel/ice/ice_main.c
6916
ring = READ_ONCE(rings[i]);
drivers/net/ethernet/intel/ice/ice_main.c
6917
if (!ring || !ring->ring_stats)
drivers/net/ethernet/intel/ice/ice_main.c
6920
ice_fetch_u64_tx_stats(ring, &copy);
drivers/net/ethernet/intel/ice/ice_main.c
6945
struct ice_rx_ring *ring;
drivers/net/ethernet/intel/ice/ice_main.c
6947
ring = READ_ONCE(rings[i]);
drivers/net/ethernet/intel/ice/ice_main.c
6948
if (!ring || !ring->ring_stats)
drivers/net/ethernet/intel/ice/ice_main.c
6951
ice_fetch_u64_rx_stats(ring, &copy);
drivers/net/ethernet/intel/ice/ice_main.c
7417
struct ice_tx_ring *ring = vsi->tx_rings[i];
drivers/net/ethernet/intel/ice/ice_main.c
7419
if (!ring)
drivers/net/ethernet/intel/ice/ice_main.c
7423
ring->netdev = vsi->netdev;
drivers/net/ethernet/intel/ice/ice_main.c
7424
err = ice_setup_tx_ring(ring);
drivers/net/ethernet/intel/ice/ice_main.c
7449
struct ice_rx_ring *ring = vsi->rx_rings[i];
drivers/net/ethernet/intel/ice/ice_main.c
7451
if (!ring)
drivers/net/ethernet/intel/ice/ice_main.c
7455
ring->netdev = vsi->netdev;
drivers/net/ethernet/intel/ice/ice_main.c
7456
err = ice_setup_rx_ring(ring);
drivers/net/ethernet/intel/ice/ice_tc_lib.c
1080
struct ice_rx_ring *ring = NULL;
drivers/net/ethernet/intel/ice/ice_tc_lib.c
1104
ring = ice_locate_rx_ring_using_queue(vsi, tc_fltr);
drivers/net/ethernet/intel/ice/ice_tc_lib.c
1105
if (!ring) {
drivers/net/ethernet/intel/ice/ice_trace.h
119
TP_PROTO(struct ice_tx_ring *ring, struct ice_tx_desc *desc,
drivers/net/ethernet/intel/ice/ice_trace.h
122
TP_ARGS(ring, desc, buf),
drivers/net/ethernet/intel/ice/ice_trace.h
123
TP_STRUCT__entry(__field(void *, ring)
drivers/net/ethernet/intel/ice/ice_trace.h
126
__string(devname, ring->netdev->name)),
drivers/net/ethernet/intel/ice/ice_trace.h
128
TP_fast_assign(__entry->ring = ring;
drivers/net/ethernet/intel/ice/ice_trace.h
134
__entry->ring, __entry->desc, __entry->buf)
drivers/net/ethernet/intel/ice/ice_trace.h
139
TP_PROTO(struct ice_tx_ring *ring, \
drivers/net/ethernet/intel/ice/ice_trace.h
142
TP_ARGS(ring, desc, buf))
drivers/net/ethernet/intel/ice/ice_trace.h
149
TP_PROTO(struct ice_rx_ring *ring, union ice_32b_rx_flex_desc *desc),
drivers/net/ethernet/intel/ice/ice_trace.h
151
TP_ARGS(ring, desc),
drivers/net/ethernet/intel/ice/ice_trace.h
153
TP_STRUCT__entry(__field(void *, ring)
drivers/net/ethernet/intel/ice/ice_trace.h
155
__string(devname, ring->netdev->name)),
drivers/net/ethernet/intel/ice/ice_trace.h
157
TP_fast_assign(__entry->ring = ring;
drivers/net/ethernet/intel/ice/ice_trace.h
162
__entry->ring, __entry->desc)
drivers/net/ethernet/intel/ice/ice_trace.h
165
TP_PROTO(struct ice_rx_ring *ring, union ice_32b_rx_flex_desc *desc),
drivers/net/ethernet/intel/ice/ice_trace.h
166
TP_ARGS(ring, desc)
drivers/net/ethernet/intel/ice/ice_trace.h
170
TP_PROTO(struct ice_rx_ring *ring, union ice_32b_rx_flex_desc *desc,
drivers/net/ethernet/intel/ice/ice_trace.h
173
TP_ARGS(ring, desc, skb),
drivers/net/ethernet/intel/ice/ice_trace.h
175
TP_STRUCT__entry(__field(void *, ring)
drivers/net/ethernet/intel/ice/ice_trace.h
178
__string(devname, ring->netdev->name)),
drivers/net/ethernet/intel/ice/ice_trace.h
180
TP_fast_assign(__entry->ring = ring;
drivers/net/ethernet/intel/ice/ice_trace.h
186
__entry->ring, __entry->desc, __entry->skb)
drivers/net/ethernet/intel/ice/ice_trace.h
190
TP_PROTO(struct ice_rx_ring *ring, union ice_32b_rx_flex_desc *desc,
drivers/net/ethernet/intel/ice/ice_trace.h
192
TP_ARGS(ring, desc, skb)
drivers/net/ethernet/intel/ice/ice_trace.h
196
TP_PROTO(struct ice_tx_ring *ring, struct sk_buff *skb),
drivers/net/ethernet/intel/ice/ice_trace.h
198
TP_ARGS(ring, skb),
drivers/net/ethernet/intel/ice/ice_trace.h
200
TP_STRUCT__entry(__field(void *, ring)
drivers/net/ethernet/intel/ice/ice_trace.h
202
__string(devname, ring->netdev->name)),
drivers/net/ethernet/intel/ice/ice_trace.h
204
TP_fast_assign(__entry->ring = ring;
drivers/net/ethernet/intel/ice/ice_trace.h
209
__entry->skb, __entry->ring)
drivers/net/ethernet/intel/ice/ice_trace.h
214
TP_PROTO(struct ice_tx_ring *ring, struct sk_buff *skb), \
drivers/net/ethernet/intel/ice/ice_trace.h
215
TP_ARGS(ring, skb))
drivers/net/ethernet/intel/ice/ice_txrx.c
114
ice_unmap_and_free_tx_buf(struct ice_tx_ring *ring, struct ice_tx_buf *tx_buf)
drivers/net/ethernet/intel/ice/ice_txrx.c
117
dma_unmap_page(ring->dev,
drivers/net/ethernet/intel/ice/ice_txrx.c
124
devm_kfree(ring->dev, tx_buf->raw_buf);
drivers/net/ethernet/intel/ice/ice_txrx.c
143
static struct netdev_queue *txring_txq(const struct ice_tx_ring *ring)
drivers/net/ethernet/intel/ice/ice_txrx.c
145
return netdev_get_tx_queue(ring->netdev, ring->q_index);
drivers/net/ethernet/intel/ice/ice_txrx.h
394
static inline bool ice_ring_ch_enabled(struct ice_tx_ring *ring)
drivers/net/ethernet/intel/ice/ice_txrx.h
396
return !!ring->ch;
drivers/net/ethernet/intel/ice/ice_txrx.h
399
static inline bool ice_ring_is_xdp(struct ice_tx_ring *ring)
drivers/net/ethernet/intel/ice/ice_txrx.h
401
return !!(ring->flags & ICE_TX_FLAGS_RING_XDP);
drivers/net/ethernet/intel/ice/ice_txrx.h
446
static inline unsigned int ice_rx_pg_order(struct ice_rx_ring *ring)
drivers/net/ethernet/intel/ice/ice_txrx_lib.c
108
ice_rx_csum(struct ice_rx_ring *ring, struct sk_buff *skb,
drivers/net/ethernet/intel/ice/ice_txrx_lib.c
119
if (!libeth_rx_pt_has_checksum(ring->netdev, decoded))
drivers/net/ethernet/intel/ice/ice_txrx_lib.c
125
if ((ring->flags & ICE_RX_FLAGS_RING_GCS) &&
drivers/net/ethernet/intel/ice/ice_txrx_lib.c
142
ring->vsi->back->hw_rx_eipe_error++;
drivers/net/ethernet/intel/ice/ice_txrx_lib.c
174
ring->vsi->back->hw_csum_rx_error++;
drivers/net/ethernet/intel/ice/ice_xsk.c
846
struct ice_tx_ring *ring;
drivers/net/ethernet/intel/ice/ice_xsk.c
857
ring = vsi->rx_rings[queue_id]->xdp_ring;
drivers/net/ethernet/intel/ice/ice_xsk.c
859
if (!READ_ONCE(ring->xsk_pool))
drivers/net/ethernet/intel/ice/ice_xsk.c
868
q_vector = ring->q_vector;
drivers/net/ethernet/intel/ice/virt/queues.c
328
struct ice_tx_ring *ring;
drivers/net/ethernet/intel/ice/virt/queues.c
335
ring = vsi->tx_rings[q_id];
drivers/net/ethernet/intel/ice/virt/queues.c
336
if (!ring)
drivers/net/ethernet/intel/ice/virt/queues.c
339
ice_fill_txq_meta(vsi, ring, &txq_meta);
drivers/net/ethernet/intel/ice/virt/queues.c
341
err = ice_vsi_stop_tx_ring(vsi, ICE_NO_RESET, vf->vf_id, ring, &txq_meta);
drivers/net/ethernet/intel/ice/virt/queues.c
830
struct ice_rx_ring *ring = vsi->rx_rings[q_idx];
drivers/net/ethernet/intel/ice/virt/queues.c
833
ring->dma = qpi->rxq.dma_ring_addr;
drivers/net/ethernet/intel/ice/virt/queues.c
834
ring->count = qpi->rxq.ring_len;
drivers/net/ethernet/intel/ice/virt/queues.c
837
ring->flags |= ICE_RX_FLAGS_CRC_STRIP_DIS;
drivers/net/ethernet/intel/ice/virt/queues.c
839
ring->flags &= ~ICE_RX_FLAGS_CRC_STRIP_DIS;
drivers/net/ethernet/intel/ice/virt/queues.c
846
ring->rx_buf_len = qpi->rxq.databuffer_size;
drivers/net/ethernet/intel/idpf/idpf_ethtool.c
633
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/idpf/idpf_ethtool.c
642
ring->rx_max_pending = IDPF_MAX_RXQ_DESC;
drivers/net/ethernet/intel/idpf/idpf_ethtool.c
643
ring->tx_max_pending = IDPF_MAX_TXQ_DESC;
drivers/net/ethernet/intel/idpf/idpf_ethtool.c
644
ring->rx_pending = vport->dflt_qv_rsrc.rxq_desc_count;
drivers/net/ethernet/intel/idpf/idpf_ethtool.c
645
ring->tx_pending = vport->dflt_qv_rsrc.txq_desc_count;
drivers/net/ethernet/intel/idpf/idpf_ethtool.c
663
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/idpf/idpf_ethtool.c
679
if (ring->tx_pending < IDPF_MIN_TXQ_DESC) {
drivers/net/ethernet/intel/idpf/idpf_ethtool.c
681
ring->tx_pending,
drivers/net/ethernet/intel/idpf/idpf_ethtool.c
687
if (ring->rx_pending < IDPF_MIN_RXQ_DESC) {
drivers/net/ethernet/intel/idpf/idpf_ethtool.c
689
ring->rx_pending,
drivers/net/ethernet/intel/idpf/idpf_ethtool.c
695
new_rx_count = ALIGN(ring->rx_pending, IDPF_REQ_RXQ_DESC_MULTIPLE);
drivers/net/ethernet/intel/idpf/idpf_ethtool.c
696
if (new_rx_count != ring->rx_pending)
drivers/net/ethernet/intel/idpf/idpf_ethtool.c
700
new_tx_count = ALIGN(ring->tx_pending, IDPF_REQ_DESC_MULTIPLE);
drivers/net/ethernet/intel/idpf/idpf_ethtool.c
701
if (new_tx_count != ring->tx_pending)
drivers/net/ethernet/intel/idpf/idpf_txrx.c
121
kfree(txq->refillq->ring);
drivers/net/ethernet/intel/idpf/idpf_txrx.c
1352
kfree(bufq_set->refillqs[j].ring);
drivers/net/ethernet/intel/idpf/idpf_txrx.c
1353
bufq_set->refillqs[j].ring = NULL;
drivers/net/ethernet/intel/idpf/idpf_txrx.c
1878
refillq->ring = kzalloc_objs(*refillq->ring,
drivers/net/ethernet/intel/idpf/idpf_txrx.c
1880
if (!refillq->ring) {
drivers/net/ethernet/intel/idpf/idpf_txrx.c
235
refillq->ring = kcalloc(refillq->desc_count, sizeof(u32),
drivers/net/ethernet/intel/idpf/idpf_txrx.c
237
if (!refillq->ring) {
drivers/net/ethernet/intel/idpf/idpf_txrx.c
243
refillq->ring[i] =
drivers/net/ethernet/intel/idpf/idpf_txrx.c
2574
refill_desc = refillq->ring[ntc];
drivers/net/ethernet/intel/idpf/idpf_txrx.c
3709
u32 buf_id, refill_desc = refillq->ring[ntc];
drivers/net/ethernet/intel/idpf/idpf_txrx.c
585
refillq->ring[nta] =
drivers/net/ethernet/intel/idpf/idpf_txrx.h
872
u32 *ring;
drivers/net/ethernet/intel/idpf/idpf_virtchnl.c
19
struct idpf_vc_xn ring[IDPF_VC_XN_RING_LEN];
drivers/net/ethernet/intel/idpf/idpf_virtchnl.c
330
for (i = 0; i < ARRAY_SIZE(vcxn_mngr->ring); i++) {
drivers/net/ethernet/intel/idpf/idpf_virtchnl.c
331
struct idpf_vc_xn *xn = &vcxn_mngr->ring[i];
drivers/net/ethernet/intel/idpf/idpf_virtchnl.c
358
for (i = 0; i < ARRAY_SIZE(vcxn_mngr->ring); i++) {
drivers/net/ethernet/intel/idpf/idpf_virtchnl.c
359
struct idpf_vc_xn *xn = &vcxn_mngr->ring[i];
drivers/net/ethernet/intel/idpf/idpf_virtchnl.c
387
xn = &vcxn_mngr->ring[free_idx];
drivers/net/ethernet/intel/idpf/idpf_virtchnl.c
584
if (xn_idx >= ARRAY_SIZE(adapter->vcxn_mngr->ring)) {
drivers/net/ethernet/intel/idpf/idpf_virtchnl.c
589
xn = &adapter->vcxn_mngr->ring[xn_idx];
drivers/net/ethernet/intel/igb/igb.h
311
struct igb_ring *ring; /* pointer to linked list of rings */
drivers/net/ethernet/intel/igb/igb.h
384
struct igb_ring ring[] ____cacheline_internodealigned_in_smp;
drivers/net/ethernet/intel/igb/igb.h
398
#define ring_uses_large_buffer(ring) \
drivers/net/ethernet/intel/igb/igb.h
399
test_bit(IGB_RING_FLAG_RX_3K_BUFFER, &(ring)->flags)
drivers/net/ethernet/intel/igb/igb.h
400
#define set_ring_uses_large_buffer(ring) \
drivers/net/ethernet/intel/igb/igb.h
401
set_bit(IGB_RING_FLAG_RX_3K_BUFFER, &(ring)->flags)
drivers/net/ethernet/intel/igb/igb.h
402
#define clear_ring_uses_large_buffer(ring) \
drivers/net/ethernet/intel/igb/igb.h
403
clear_bit(IGB_RING_FLAG_RX_3K_BUFFER, &(ring)->flags)
drivers/net/ethernet/intel/igb/igb.h
405
#define ring_uses_build_skb(ring) \
drivers/net/ethernet/intel/igb/igb.h
406
test_bit(IGB_RING_FLAG_RX_BUILD_SKB_ENABLED, &(ring)->flags)
drivers/net/ethernet/intel/igb/igb.h
407
#define set_ring_build_skb_enabled(ring) \
drivers/net/ethernet/intel/igb/igb.h
408
set_bit(IGB_RING_FLAG_RX_BUILD_SKB_ENABLED, &(ring)->flags)
drivers/net/ethernet/intel/igb/igb.h
409
#define clear_ring_build_skb_enabled(ring) \
drivers/net/ethernet/intel/igb/igb.h
410
clear_bit(IGB_RING_FLAG_RX_BUILD_SKB_ENABLED, &(ring)->flags)
drivers/net/ethernet/intel/igb/igb.h
412
static inline unsigned int igb_rx_bufsz(struct igb_ring *ring)
drivers/net/ethernet/intel/igb/igb.h
415
if (ring_uses_large_buffer(ring))
drivers/net/ethernet/intel/igb/igb.h
418
if (ring_uses_build_skb(ring))
drivers/net/ethernet/intel/igb/igb.h
424
static inline unsigned int igb_rx_pg_order(struct igb_ring *ring)
drivers/net/ethernet/intel/igb/igb.h
427
if (ring_uses_large_buffer(ring))
drivers/net/ethernet/intel/igb/igb.h
452
static inline int igb_desc_unused(struct igb_ring *ring)
drivers/net/ethernet/intel/igb/igb.h
454
if (ring->next_to_clean > ring->next_to_use)
drivers/net/ethernet/intel/igb/igb.h
455
return ring->next_to_clean - ring->next_to_use - 1;
drivers/net/ethernet/intel/igb/igb.h
457
return ring->count + ring->next_to_clean - ring->next_to_use - 1;
drivers/net/ethernet/intel/igb/igb.h
729
struct igb_ring *ring,
drivers/net/ethernet/intel/igb/igb.h
823
static inline void igb_xdp_ring_update_tail(struct igb_ring *ring)
drivers/net/ethernet/intel/igb/igb.h
825
lockdep_assert_held(&txring_txq(ring)->_xmit_lock);
drivers/net/ethernet/intel/igb/igb.h
831
writel(ring->next_to_use, ring->tail);
drivers/net/ethernet/intel/igb/igb.h
860
struct igb_ring *ring);
drivers/net/ethernet/intel/igb/igb_ethtool.c
2232
if (q_vector->rx.ring)
drivers/net/ethernet/intel/igb/igb_ethtool.c
2294
struct igb_ring *ring;
drivers/net/ethernet/intel/igb/igb_ethtool.c
2314
ring = adapter->tx_ring[j];
drivers/net/ethernet/intel/igb/igb_ethtool.c
2316
start = u64_stats_fetch_begin(&ring->tx_syncp);
drivers/net/ethernet/intel/igb/igb_ethtool.c
2317
data[i] = ring->tx_stats.packets;
drivers/net/ethernet/intel/igb/igb_ethtool.c
2318
data[i+1] = ring->tx_stats.bytes;
drivers/net/ethernet/intel/igb/igb_ethtool.c
2319
data[i+2] = ring->tx_stats.restart_queue;
drivers/net/ethernet/intel/igb/igb_ethtool.c
2320
} while (u64_stats_fetch_retry(&ring->tx_syncp, start));
drivers/net/ethernet/intel/igb/igb_ethtool.c
2322
start = u64_stats_fetch_begin(&ring->tx_syncp2);
drivers/net/ethernet/intel/igb/igb_ethtool.c
2323
restart2 = ring->tx_stats.restart_queue2;
drivers/net/ethernet/intel/igb/igb_ethtool.c
2324
} while (u64_stats_fetch_retry(&ring->tx_syncp2, start));
drivers/net/ethernet/intel/igb/igb_ethtool.c
2330
ring = adapter->rx_ring[j];
drivers/net/ethernet/intel/igb/igb_ethtool.c
2332
start = u64_stats_fetch_begin(&ring->rx_syncp);
drivers/net/ethernet/intel/igb/igb_ethtool.c
2333
data[i] = ring->rx_stats.packets;
drivers/net/ethernet/intel/igb/igb_ethtool.c
2334
data[i+1] = ring->rx_stats.bytes;
drivers/net/ethernet/intel/igb/igb_ethtool.c
2335
data[i+2] = ring->rx_stats.drops;
drivers/net/ethernet/intel/igb/igb_ethtool.c
2336
data[i+3] = ring->rx_stats.csum_err;
drivers/net/ethernet/intel/igb/igb_ethtool.c
2337
data[i+4] = ring->rx_stats.alloc_failed;
drivers/net/ethernet/intel/igb/igb_ethtool.c
2338
} while (u64_stats_fetch_retry(&ring->rx_syncp, start));
drivers/net/ethernet/intel/igb/igb_ethtool.c
436
struct igb_ring *ring = adapter->rx_ring[i];
drivers/net/ethernet/intel/igb/igb_ethtool.c
438
igb_setup_srrctl(adapter, ring);
drivers/net/ethernet/intel/igb/igb_ethtool.c
870
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/igb/igb_ethtool.c
876
ring->rx_max_pending = IGB_MAX_RXD;
drivers/net/ethernet/intel/igb/igb_ethtool.c
877
ring->tx_max_pending = IGB_MAX_TXD;
drivers/net/ethernet/intel/igb/igb_ethtool.c
878
ring->rx_pending = adapter->rx_ring_count;
drivers/net/ethernet/intel/igb/igb_ethtool.c
879
ring->tx_pending = adapter->tx_ring_count;
drivers/net/ethernet/intel/igb/igb_ethtool.c
883
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/igb/igb_ethtool.c
892
if ((ring->rx_mini_pending) || (ring->rx_jumbo_pending))
drivers/net/ethernet/intel/igb/igb_ethtool.c
895
new_rx_count = min_t(u32, ring->rx_pending, IGB_MAX_RXD);
drivers/net/ethernet/intel/igb/igb_ethtool.c
899
new_tx_count = min_t(u32, ring->tx_pending, IGB_MAX_TXD);
drivers/net/ethernet/intel/igb/igb_main.c
1009
if (q_vector->tx.ring)
drivers/net/ethernet/intel/igb/igb_main.c
1010
adapter->tx_ring[q_vector->tx.ring->queue_index] = NULL;
drivers/net/ethernet/intel/igb/igb_main.c
1012
if (q_vector->rx.ring)
drivers/net/ethernet/intel/igb/igb_main.c
1013
adapter->rx_ring[q_vector->rx.ring->queue_index] = NULL;
drivers/net/ethernet/intel/igb/igb_main.c
1146
static void igb_add_ring(struct igb_ring *ring,
drivers/net/ethernet/intel/igb/igb_main.c
1149
head->ring = ring;
drivers/net/ethernet/intel/igb/igb_main.c
1171
struct igb_ring *ring;
drivers/net/ethernet/intel/igb/igb_main.c
1180
size = kmalloc_size_roundup(struct_size(q_vector, ring, ring_count));
drivers/net/ethernet/intel/igb/igb_main.c
1215
ring = q_vector->ring;
drivers/net/ethernet/intel/igb/igb_main.c
1230
ring->dev = &adapter->pdev->dev;
drivers/net/ethernet/intel/igb/igb_main.c
1231
ring->netdev = adapter->netdev;
drivers/net/ethernet/intel/igb/igb_main.c
1234
ring->q_vector = q_vector;
drivers/net/ethernet/intel/igb/igb_main.c
1237
igb_add_ring(ring, &q_vector->tx);
drivers/net/ethernet/intel/igb/igb_main.c
1241
set_bit(IGB_RING_FLAG_TX_CTX_IDX, &ring->flags);
drivers/net/ethernet/intel/igb/igb_main.c
1244
ring->count = adapter->tx_ring_count;
drivers/net/ethernet/intel/igb/igb_main.c
1245
ring->queue_index = txr_idx;
drivers/net/ethernet/intel/igb/igb_main.c
1247
ring->cbs_enable = false;
drivers/net/ethernet/intel/igb/igb_main.c
1248
ring->idleslope = 0;
drivers/net/ethernet/intel/igb/igb_main.c
1249
ring->sendslope = 0;
drivers/net/ethernet/intel/igb/igb_main.c
1250
ring->hicredit = 0;
drivers/net/ethernet/intel/igb/igb_main.c
1251
ring->locredit = 0;
drivers/net/ethernet/intel/igb/igb_main.c
1253
u64_stats_init(&ring->tx_syncp);
drivers/net/ethernet/intel/igb/igb_main.c
1254
u64_stats_init(&ring->tx_syncp2);
drivers/net/ethernet/intel/igb/igb_main.c
1257
adapter->tx_ring[txr_idx] = ring;
drivers/net/ethernet/intel/igb/igb_main.c
1260
ring++;
drivers/net/ethernet/intel/igb/igb_main.c
1265
ring->dev = &adapter->pdev->dev;
drivers/net/ethernet/intel/igb/igb_main.c
1266
ring->netdev = adapter->netdev;
drivers/net/ethernet/intel/igb/igb_main.c
1269
ring->q_vector = q_vector;
drivers/net/ethernet/intel/igb/igb_main.c
1272
igb_add_ring(ring, &q_vector->rx);
drivers/net/ethernet/intel/igb/igb_main.c
1276
set_bit(IGB_RING_FLAG_RX_SCTP_CSUM, &ring->flags);
drivers/net/ethernet/intel/igb/igb_main.c
1282
set_bit(IGB_RING_FLAG_RX_LB_VLAN_BSWAP, &ring->flags);
drivers/net/ethernet/intel/igb/igb_main.c
1285
ring->count = adapter->rx_ring_count;
drivers/net/ethernet/intel/igb/igb_main.c
1286
ring->queue_index = rxr_idx;
drivers/net/ethernet/intel/igb/igb_main.c
1288
u64_stats_init(&ring->rx_syncp);
drivers/net/ethernet/intel/igb/igb_main.c
1291
adapter->rx_ring[rxr_idx] = ring;
drivers/net/ethernet/intel/igb/igb_main.c
1673
struct igb_ring *ring;
drivers/net/ethernet/intel/igb/igb_main.c
1679
ring = adapter->tx_ring[queue];
drivers/net/ethernet/intel/igb/igb_main.c
1685
if (ring->cbs_enable || ring->launchtime_enable) {
drivers/net/ethernet/intel/igb/igb_main.c
1694
if (ring->cbs_enable || queue == 0) {
drivers/net/ethernet/intel/igb/igb_main.c
1704
if (queue == 0 && !ring->cbs_enable) {
drivers/net/ethernet/intel/igb/igb_main.c
1706
ring->idleslope = 1000000;
drivers/net/ethernet/intel/igb/igb_main.c
1707
ring->hicredit = ETH_FRAME_LEN;
drivers/net/ethernet/intel/igb/igb_main.c
1775
value = DIV_ROUND_UP_ULL(ring->idleslope * 61034ULL, 1000000);
drivers/net/ethernet/intel/igb/igb_main.c
1783
0x80000000 + ring->hicredit * 0x7735);
drivers/net/ethernet/intel/igb/igb_main.c
1806
if (ring->launchtime_enable) {
drivers/net/ethernet/intel/igb/igb_main.c
1838
ring->cbs_enable ? "enabled" : "disabled",
drivers/net/ethernet/intel/igb/igb_main.c
1839
ring->launchtime_enable ? "enabled" : "disabled",
drivers/net/ethernet/intel/igb/igb_main.c
1841
ring->idleslope, ring->sendslope,
drivers/net/ethernet/intel/igb/igb_main.c
1842
ring->hicredit, ring->locredit);
drivers/net/ethernet/intel/igb/igb_main.c
1848
struct igb_ring *ring;
drivers/net/ethernet/intel/igb/igb_main.c
1853
ring = adapter->tx_ring[queue];
drivers/net/ethernet/intel/igb/igb_main.c
1854
ring->launchtime_enable = enable;
drivers/net/ethernet/intel/igb/igb_main.c
1863
struct igb_ring *ring;
drivers/net/ethernet/intel/igb/igb_main.c
1868
ring = adapter->tx_ring[queue];
drivers/net/ethernet/intel/igb/igb_main.c
1870
ring->cbs_enable = enable;
drivers/net/ethernet/intel/igb/igb_main.c
1871
ring->idleslope = idleslope;
drivers/net/ethernet/intel/igb/igb_main.c
1872
ring->sendslope = sendslope;
drivers/net/ethernet/intel/igb/igb_main.c
1873
ring->hicredit = hicredit;
drivers/net/ethernet/intel/igb/igb_main.c
1874
ring->locredit = locredit;
drivers/net/ethernet/intel/igb/igb_main.c
1997
struct igb_ring *ring = adapter->rx_ring[i];
drivers/net/ethernet/intel/igb/igb_main.c
1998
if (ring->xsk_pool)
drivers/net/ethernet/intel/igb/igb_main.c
1999
igb_alloc_rx_buffers_zc(ring, ring->xsk_pool,
drivers/net/ethernet/intel/igb/igb_main.c
2000
igb_desc_unused(ring));
drivers/net/ethernet/intel/igb/igb_main.c
2002
igb_alloc_rx_buffers(ring, igb_desc_unused(ring));
drivers/net/ethernet/intel/igb/igb_main.c
2107
if (q_vector->rx.ring)
drivers/net/ethernet/intel/igb/igb_main.c
2109
q_vector->rx.ring->queue_index,
drivers/net/ethernet/intel/igb/igb_main.c
2112
if (q_vector->tx.ring)
drivers/net/ethernet/intel/igb/igb_main.c
2114
q_vector->tx.ring->queue_index,
drivers/net/ethernet/intel/igb/igb_main.c
2899
struct igb_ring *ring = adapter->rx_ring[i];
drivers/net/ethernet/intel/igb/igb_main.c
2901
if (frame_size > igb_rx_bufsz(ring)) {
drivers/net/ethernet/intel/igb/igb_main.c
2905
igb_rx_bufsz(ring), frame_size);
drivers/net/ethernet/intel/igb/igb_main.c
4383
struct igb_ring *ring)
drivers/net/ethernet/intel/igb/igb_main.c
4387
u64 tdba = ring->dma;
drivers/net/ethernet/intel/igb/igb_main.c
4388
int reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/igb/igb_main.c
4390
WRITE_ONCE(ring->xsk_pool, igb_xsk_pool(adapter, ring));
drivers/net/ethernet/intel/igb/igb_main.c
4393
ring->count * sizeof(union e1000_adv_tx_desc));
drivers/net/ethernet/intel/igb/igb_main.c
4398
ring->tail = adapter->io_addr + E1000_TDT(reg_idx);
drivers/net/ethernet/intel/igb/igb_main.c
4400
writel(0, ring->tail);
drivers/net/ethernet/intel/igb/igb_main.c
4407
memset(ring->tx_buffer_info, 0,
drivers/net/ethernet/intel/igb/igb_main.c
4408
sizeof(struct igb_tx_buffer) * ring->count);
drivers/net/ethernet/intel/igb/igb_main.c
4743
void igb_setup_srrctl(struct igb_adapter *adapter, struct igb_ring *ring)
drivers/net/ethernet/intel/igb/igb_main.c
4746
int reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/igb/igb_main.c
4750
if (ring->xsk_pool)
drivers/net/ethernet/intel/igb/igb_main.c
4751
buf_size = xsk_pool_get_rx_frame_size(ring->xsk_pool);
drivers/net/ethernet/intel/igb/igb_main.c
4752
else if (ring_uses_large_buffer(ring))
drivers/net/ethernet/intel/igb/igb_main.c
4781
struct igb_ring *ring)
drivers/net/ethernet/intel/igb/igb_main.c
4785
u64 rdba = ring->dma;
drivers/net/ethernet/intel/igb/igb_main.c
4786
int reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/igb/igb_main.c
4789
xdp_rxq_info_unreg_mem_model(&ring->xdp_rxq);
drivers/net/ethernet/intel/igb/igb_main.c
4790
WRITE_ONCE(ring->xsk_pool, igb_xsk_pool(adapter, ring));
drivers/net/ethernet/intel/igb/igb_main.c
4791
if (ring->xsk_pool) {
drivers/net/ethernet/intel/igb/igb_main.c
4792
WARN_ON(xdp_rxq_info_reg_mem_model(&ring->xdp_rxq,
drivers/net/ethernet/intel/igb/igb_main.c
4795
xsk_pool_set_rxq_info(ring->xsk_pool, &ring->xdp_rxq);
drivers/net/ethernet/intel/igb/igb_main.c
4797
WARN_ON(xdp_rxq_info_reg_mem_model(&ring->xdp_rxq,
drivers/net/ethernet/intel/igb/igb_main.c
4810
ring->count * sizeof(union e1000_adv_rx_desc));
drivers/net/ethernet/intel/igb/igb_main.c
4813
ring->tail = adapter->io_addr + E1000_RDT(reg_idx);
drivers/net/ethernet/intel/igb/igb_main.c
4815
writel(0, ring->tail);
drivers/net/ethernet/intel/igb/igb_main.c
4818
igb_setup_srrctl(adapter, ring);
drivers/net/ethernet/intel/igb/igb_main.c
4827
if (ring->xsk_pool)
drivers/net/ethernet/intel/igb/igb_main.c
4828
memset(ring->rx_buffer_info_zc, 0,
drivers/net/ethernet/intel/igb/igb_main.c
4829
sizeof(*ring->rx_buffer_info_zc) * ring->count);
drivers/net/ethernet/intel/igb/igb_main.c
4831
memset(ring->rx_buffer_info, 0,
drivers/net/ethernet/intel/igb/igb_main.c
4832
sizeof(*ring->rx_buffer_info) * ring->count);
drivers/net/ethernet/intel/igb/igb_main.c
4835
rx_desc = IGB_RX_DESC(ring, 0);
drivers/net/ethernet/intel/igb/igb_main.c
5761
if (!q_vector->rx.ring)
drivers/net/ethernet/intel/igb/igb_main.c
5764
rx_ring = adapter->rx_ring[q_vector->rx.ring->queue_index];
drivers/net/ethernet/intel/igb/igb_main.c
5866
((q_vector->rx.ring && adapter->rx_itr_setting == 3) ||
drivers/net/ethernet/intel/igb/igb_main.c
5867
(!q_vector->rx.ring && adapter->tx_itr_setting == 3)))
drivers/net/ethernet/intel/igb/igb_main.c
5970
((q_vector->rx.ring && adapter->rx_itr_setting == 3) ||
drivers/net/ethernet/intel/igb/igb_main.c
5971
(!q_vector->rx.ring && adapter->tx_itr_setting == 3)))
drivers/net/ethernet/intel/igb/igb_main.c
6714
struct igb_ring *ring = adapter->rx_ring[i];
drivers/net/ethernet/intel/igb/igb_main.c
6716
if (max_frame > igb_rx_bufsz(ring)) {
drivers/net/ethernet/intel/igb/igb_main.c
6780
struct igb_ring *ring = adapter->rx_ring[i];
drivers/net/ethernet/intel/igb/igb_main.c
6786
ring->rx_stats.drops += rqdpc;
drivers/net/ethernet/intel/igb/igb_main.c
6791
start = u64_stats_fetch_begin(&ring->rx_syncp);
drivers/net/ethernet/intel/igb/igb_main.c
6792
_bytes = ring->rx_stats.bytes;
drivers/net/ethernet/intel/igb/igb_main.c
6793
_packets = ring->rx_stats.packets;
drivers/net/ethernet/intel/igb/igb_main.c
6794
} while (u64_stats_fetch_retry(&ring->rx_syncp, start));
drivers/net/ethernet/intel/igb/igb_main.c
6805
struct igb_ring *ring = adapter->tx_ring[i];
drivers/net/ethernet/intel/igb/igb_main.c
6807
start = u64_stats_fetch_begin(&ring->tx_syncp);
drivers/net/ethernet/intel/igb/igb_main.c
6808
_bytes = ring->tx_stats.bytes;
drivers/net/ethernet/intel/igb/igb_main.c
6809
_packets = ring->tx_stats.packets;
drivers/net/ethernet/intel/igb/igb_main.c
6810
} while (u64_stats_fetch_retry(&ring->tx_syncp, start));
drivers/net/ethernet/intel/igb/igb_main.c
7215
if (q_vector->tx.ring)
drivers/net/ethernet/intel/igb/igb_main.c
7216
igb_update_tx_dca(adapter, q_vector->tx.ring, cpu);
drivers/net/ethernet/intel/igb/igb_main.c
7218
if (q_vector->rx.ring)
drivers/net/ethernet/intel/igb/igb_main.c
7219
igb_update_rx_dca(adapter, q_vector->rx.ring, cpu);
drivers/net/ethernet/intel/igb/igb_main.c
766
if (q_vector->rx.ring)
drivers/net/ethernet/intel/igb/igb_main.c
767
rx_queue = q_vector->rx.ring->reg_idx;
drivers/net/ethernet/intel/igb/igb_main.c
768
if (q_vector->tx.ring)
drivers/net/ethernet/intel/igb/igb_main.c
769
tx_queue = q_vector->tx.ring->reg_idx;
drivers/net/ethernet/intel/igb/igb_main.c
8257
if ((q_vector->rx.ring && (adapter->rx_itr_setting & 3)) ||
drivers/net/ethernet/intel/igb/igb_main.c
8258
(!q_vector->rx.ring && (adapter->tx_itr_setting & 3))) {
drivers/net/ethernet/intel/igb/igb_main.c
8291
if (q_vector->tx.ring)
drivers/net/ethernet/intel/igb/igb_main.c
8294
if (q_vector->rx.ring) {
drivers/net/ethernet/intel/igb/igb_main.c
8297
xsk_pool = READ_ONCE(q_vector->rx.ring->xsk_pool);
drivers/net/ethernet/intel/igb/igb_main.c
8332
struct igb_ring *tx_ring = q_vector->tx.ring;
drivers/net/ethernet/intel/igb/igb_main.c
8779
static inline void igb_rx_checksum(struct igb_ring *ring,
drivers/net/ethernet/intel/igb/igb_main.c
8790
if (!(ring->netdev->features & NETIF_F_RXCSUM))
drivers/net/ethernet/intel/igb/igb_main.c
8802
test_bit(IGB_RING_FLAG_RX_SCTP_CSUM, &ring->flags))) {
drivers/net/ethernet/intel/igb/igb_main.c
8803
u64_stats_update_begin(&ring->rx_syncp);
drivers/net/ethernet/intel/igb/igb_main.c
8804
ring->rx_stats.csum_err++;
drivers/net/ethernet/intel/igb/igb_main.c
8805
u64_stats_update_end(&ring->rx_syncp);
drivers/net/ethernet/intel/igb/igb_main.c
8815
dev_dbg(ring->dev, "cksum success: bits %08X\n",
drivers/net/ethernet/intel/igb/igb_main.c
8819
static inline void igb_rx_hash(struct igb_ring *ring,
drivers/net/ethernet/intel/igb/igb_main.c
8823
if (ring->netdev->features & NETIF_F_RXHASH)
drivers/net/ethernet/intel/igb/igb_main.c
9005
struct igb_ring *ring = q_vector->rx.ring;
drivers/net/ethernet/intel/igb/igb_main.c
9007
u64_stats_update_begin(&ring->rx_syncp);
drivers/net/ethernet/intel/igb/igb_main.c
9008
ring->rx_stats.packets += packets;
drivers/net/ethernet/intel/igb/igb_main.c
9009
ring->rx_stats.bytes += bytes;
drivers/net/ethernet/intel/igb/igb_main.c
9010
u64_stats_update_end(&ring->rx_syncp);
drivers/net/ethernet/intel/igb/igb_main.c
9020
struct igb_ring *rx_ring = q_vector->rx.ring;
drivers/net/ethernet/intel/igb/igb_main.c
933
if (q_vector->rx.ring && q_vector->tx.ring)
drivers/net/ethernet/intel/igb/igb_main.c
935
q_vector->rx.ring->queue_index);
drivers/net/ethernet/intel/igb/igb_main.c
936
else if (q_vector->tx.ring)
drivers/net/ethernet/intel/igb/igb_main.c
938
q_vector->tx.ring->queue_index);
drivers/net/ethernet/intel/igb/igb_main.c
939
else if (q_vector->rx.ring)
drivers/net/ethernet/intel/igb/igb_main.c
941
q_vector->rx.ring->queue_index);
drivers/net/ethernet/intel/igb/igb_xsk.c
11
static int igb_realloc_rx_buffer_info(struct igb_ring *ring, bool pool_present)
drivers/net/ethernet/intel/igb/igb_xsk.c
14
sizeof(*ring->rx_buffer_info_zc) * ring->count :
drivers/net/ethernet/intel/igb/igb_xsk.c
15
sizeof(*ring->rx_buffer_info) * ring->count;
drivers/net/ethernet/intel/igb/igb_xsk.c
22
vfree(ring->rx_buffer_info);
drivers/net/ethernet/intel/igb/igb_xsk.c
23
ring->rx_buffer_info = NULL;
drivers/net/ethernet/intel/igb/igb_xsk.c
24
ring->rx_buffer_info_zc = buff_info;
drivers/net/ethernet/intel/igb/igb_xsk.c
26
vfree(ring->rx_buffer_info_zc);
drivers/net/ethernet/intel/igb/igb_xsk.c
27
ring->rx_buffer_info_zc = NULL;
drivers/net/ethernet/intel/igb/igb_xsk.c
28
ring->rx_buffer_info = buff_info;
drivers/net/ethernet/intel/igb/igb_xsk.c
346
struct igb_ring *rx_ring = q_vector->rx.ring;
drivers/net/ethernet/intel/igb/igb_xsk.c
541
struct igb_ring *ring;
drivers/net/ethernet/intel/igb/igb_xsk.c
553
ring = adapter->tx_ring[qid];
drivers/net/ethernet/intel/igb/igb_xsk.c
555
if (!READ_ONCE(ring->xsk_pool))
drivers/net/ethernet/intel/igb/igb_xsk.c
559
if (test_bit(IGB_RING_FLAG_TX_DISABLED, &ring->flags))
drivers/net/ethernet/intel/igb/igb_xsk.c
562
eics |= igb_sw_irq_prep(ring->q_vector);
drivers/net/ethernet/intel/igb/igb_xsk.c
571
ring = adapter->rx_ring[qid];
drivers/net/ethernet/intel/igb/igb_xsk.c
572
eics |= igb_sw_irq_prep(ring->q_vector);
drivers/net/ethernet/intel/igb/igb_xsk.c
84
struct igb_ring *ring)
drivers/net/ethernet/intel/igb/igb_xsk.c
86
int qid = ring->queue_index;
drivers/net/ethernet/intel/igbvf/ethtool.c
180
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/igbvf/ethtool.c
188
ring->rx_max_pending = IGBVF_MAX_RXD;
drivers/net/ethernet/intel/igbvf/ethtool.c
189
ring->tx_max_pending = IGBVF_MAX_TXD;
drivers/net/ethernet/intel/igbvf/ethtool.c
190
ring->rx_pending = rx_ring->count;
drivers/net/ethernet/intel/igbvf/ethtool.c
191
ring->tx_pending = tx_ring->count;
drivers/net/ethernet/intel/igbvf/ethtool.c
195
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/igbvf/ethtool.c
204
if ((ring->rx_mini_pending) || (ring->rx_jumbo_pending))
drivers/net/ethernet/intel/igbvf/ethtool.c
207
new_rx_count = max_t(u32, ring->rx_pending, IGBVF_MIN_RXD);
drivers/net/ethernet/intel/igbvf/ethtool.c
211
new_tx_count = max_t(u32, ring->tx_pending, IGBVF_MIN_TXD);
drivers/net/ethernet/intel/igbvf/netdev.c
66
static int igbvf_desc_unused(struct igbvf_ring *ring)
drivers/net/ethernet/intel/igbvf/netdev.c
68
if (ring->next_to_clean > ring->next_to_use)
drivers/net/ethernet/intel/igbvf/netdev.c
69
return ring->next_to_clean - ring->next_to_use - 1;
drivers/net/ethernet/intel/igbvf/netdev.c
71
return ring->count + ring->next_to_clean - ring->next_to_use - 1;
drivers/net/ethernet/intel/igc/igc.h
139
struct igc_ring *ring; /* pointer to linked list of rings */
drivers/net/ethernet/intel/igc/igc.h
355
int igc_setup_tx_resources(struct igc_ring *ring);
drivers/net/ethernet/intel/igc/igc.h
356
int igc_setup_rx_resources(struct igc_ring *ring);
drivers/net/ethernet/intel/igc/igc.h
357
void igc_free_tx_resources(struct igc_ring *ring);
drivers/net/ethernet/intel/igc/igc.h
358
void igc_free_rx_resources(struct igc_ring *ring);
drivers/net/ethernet/intel/igc/igc.h
367
void igc_disable_rx_ring(struct igc_ring *ring);
drivers/net/ethernet/intel/igc/igc.h
368
void igc_enable_rx_ring(struct igc_ring *ring);
drivers/net/ethernet/intel/igc/igc.h
369
void igc_disable_tx_ring(struct igc_ring *ring);
drivers/net/ethernet/intel/igc/igc.h
370
void igc_enable_tx_ring(struct igc_ring *ring);
drivers/net/ethernet/intel/igc/igc.h
625
struct igc_ring ring[] ____cacheline_internodealigned_in_smp;
drivers/net/ethernet/intel/igc/igc.h
684
static inline u16 igc_desc_unused(const struct igc_ring *ring)
drivers/net/ethernet/intel/igc/igc.h
686
u16 ntc = ring->next_to_clean;
drivers/net/ethernet/intel/igc/igc.h
687
u16 ntu = ring->next_to_use;
drivers/net/ethernet/intel/igc/igc.h
689
return ((ntc > ntu) ? 0 : ring->count) + ntc - ntu - 1;
drivers/net/ethernet/intel/igc/igc.h
725
#define ring_uses_large_buffer(ring) \
drivers/net/ethernet/intel/igc/igc.h
726
test_bit(IGC_RING_FLAG_RX_3K_BUFFER, &(ring)->flags)
drivers/net/ethernet/intel/igc/igc.h
727
#define set_ring_uses_large_buffer(ring) \
drivers/net/ethernet/intel/igc/igc.h
728
set_bit(IGC_RING_FLAG_RX_3K_BUFFER, &(ring)->flags)
drivers/net/ethernet/intel/igc/igc.h
729
#define clear_ring_uses_large_buffer(ring) \
drivers/net/ethernet/intel/igc/igc.h
730
clear_bit(IGC_RING_FLAG_RX_3K_BUFFER, &(ring)->flags)
drivers/net/ethernet/intel/igc/igc.h
732
#define ring_uses_build_skb(ring) \
drivers/net/ethernet/intel/igc/igc.h
733
test_bit(IGC_RING_FLAG_RX_BUILD_SKB_ENABLED, &(ring)->flags)
drivers/net/ethernet/intel/igc/igc.h
735
static inline unsigned int igc_rx_bufsz(struct igc_ring *ring)
drivers/net/ethernet/intel/igc/igc.h
738
if (ring_uses_large_buffer(ring))
drivers/net/ethernet/intel/igc/igc.h
741
if (ring_uses_build_skb(ring))
drivers/net/ethernet/intel/igc/igc.h
747
static inline unsigned int igc_rx_pg_order(struct igc_ring *ring)
drivers/net/ethernet/intel/igc/igc.h
750
if (ring_uses_large_buffer(ring))
drivers/net/ethernet/intel/igc/igc.h
772
void igc_flush_tx_descriptors(struct igc_ring *ring);
drivers/net/ethernet/intel/igc/igc_ethtool.c
576
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/igc/igc_ethtool.c
582
ring->rx_max_pending = IGC_MAX_RXD;
drivers/net/ethernet/intel/igc/igc_ethtool.c
583
ring->tx_max_pending = IGC_MAX_TXD;
drivers/net/ethernet/intel/igc/igc_ethtool.c
584
ring->rx_pending = adapter->rx_ring_count;
drivers/net/ethernet/intel/igc/igc_ethtool.c
585
ring->tx_pending = adapter->tx_ring_count;
drivers/net/ethernet/intel/igc/igc_ethtool.c
590
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/igc/igc_ethtool.c
599
if (ring->rx_mini_pending || ring->rx_jumbo_pending)
drivers/net/ethernet/intel/igc/igc_ethtool.c
602
new_rx_count = min_t(u32, ring->rx_pending, IGC_MAX_RXD);
drivers/net/ethernet/intel/igc/igc_ethtool.c
606
new_tx_count = min_t(u32, ring->tx_pending, IGC_MAX_TXD);
drivers/net/ethernet/intel/igc/igc_ethtool.c
823
struct igc_ring *ring;
drivers/net/ethernet/intel/igc/igc_ethtool.c
843
ring = adapter->tx_ring[j];
drivers/net/ethernet/intel/igc/igc_ethtool.c
845
start = u64_stats_fetch_begin(&ring->tx_syncp);
drivers/net/ethernet/intel/igc/igc_ethtool.c
846
data[i] = ring->tx_stats.packets;
drivers/net/ethernet/intel/igc/igc_ethtool.c
847
data[i + 1] = ring->tx_stats.bytes;
drivers/net/ethernet/intel/igc/igc_ethtool.c
848
data[i + 2] = ring->tx_stats.restart_queue;
drivers/net/ethernet/intel/igc/igc_ethtool.c
849
} while (u64_stats_fetch_retry(&ring->tx_syncp, start));
drivers/net/ethernet/intel/igc/igc_ethtool.c
851
start = u64_stats_fetch_begin(&ring->tx_syncp2);
drivers/net/ethernet/intel/igc/igc_ethtool.c
852
restart2 = ring->tx_stats.restart_queue2;
drivers/net/ethernet/intel/igc/igc_ethtool.c
853
} while (u64_stats_fetch_retry(&ring->tx_syncp2, start));
drivers/net/ethernet/intel/igc/igc_ethtool.c
859
ring = adapter->rx_ring[j];
drivers/net/ethernet/intel/igc/igc_ethtool.c
861
start = u64_stats_fetch_begin(&ring->rx_syncp);
drivers/net/ethernet/intel/igc/igc_ethtool.c
862
data[i] = ring->rx_stats.packets;
drivers/net/ethernet/intel/igc/igc_ethtool.c
863
data[i + 1] = ring->rx_stats.bytes;
drivers/net/ethernet/intel/igc/igc_ethtool.c
864
data[i + 2] = ring->rx_stats.drops;
drivers/net/ethernet/intel/igc/igc_ethtool.c
865
data[i + 3] = ring->rx_stats.csum_err;
drivers/net/ethernet/intel/igc/igc_ethtool.c
866
data[i + 4] = ring->rx_stats.alloc_failed;
drivers/net/ethernet/intel/igc/igc_ethtool.c
867
} while (u64_stats_fetch_retry(&ring->rx_syncp, start));
drivers/net/ethernet/intel/igc/igc_ethtool.c
949
if (q_vector->rx.ring)
drivers/net/ethernet/intel/igc/igc_main.c
1043
static __le32 igc_tx_launchtime(struct igc_ring *ring, ktime_t txtime,
drivers/net/ethernet/intel/igc/igc_main.c
1046
struct igc_adapter *adapter = netdev_priv(ring->netdev);
drivers/net/ethernet/intel/igc/igc_main.c
1060
if (baset_est != ring->last_ff_cycle) {
drivers/net/ethernet/intel/igc/igc_main.c
1062
ring->last_ff_cycle = baset_est;
drivers/net/ethernet/intel/igc/igc_main.c
1064
if (ktime_compare(end_of_cycle, ring->last_tx_cycle) > 0)
drivers/net/ethernet/intel/igc/igc_main.c
1075
netdev_warn(ring->netdev, "Packet with txtime=%llu may not be honoured\n",
drivers/net/ethernet/intel/igc/igc_main.c
1078
ring->last_tx_cycle = end_of_cycle;
drivers/net/ethernet/intel/igc/igc_main.c
1089
static int igc_init_empty_frame(struct igc_ring *ring,
drivers/net/ethernet/intel/igc/igc_main.c
1098
dma = dma_map_single(ring->dev, skb->data, size, DMA_TO_DEVICE);
drivers/net/ethernet/intel/igc/igc_main.c
1099
if (dma_mapping_error(ring->dev, dma)) {
drivers/net/ethernet/intel/igc/igc_main.c
1101
netdev_name(ring->netdev));
drivers/net/ethernet/intel/igc/igc_main.c
1117
static void igc_init_tx_empty_descriptor(struct igc_ring *ring,
drivers/net/ethernet/intel/igc/igc_main.c
1129
desc = IGC_TX_DESC(ring, ring->next_to_use);
drivers/net/ethernet/intel/igc/igc_main.c
1134
netdev_tx_sent_queue(txring_txq(ring), skb->len);
drivers/net/ethernet/intel/igc/igc_main.c
1138
ring->next_to_use++;
drivers/net/ethernet/intel/igc/igc_main.c
1139
if (ring->next_to_use == ring->count)
drivers/net/ethernet/intel/igc/igc_main.c
1140
ring->next_to_use = 0;
drivers/net/ethernet/intel/igc/igc_main.c
1746
static void igc_rx_checksum(struct igc_ring *ring,
drivers/net/ethernet/intel/igc/igc_main.c
1757
if (!(ring->netdev->features & NETIF_F_RXCSUM))
drivers/net/ethernet/intel/igc/igc_main.c
1769
test_bit(IGC_RING_FLAG_RX_SCTP_CSUM, &ring->flags))) {
drivers/net/ethernet/intel/igc/igc_main.c
1770
u64_stats_update_begin(&ring->rx_syncp);
drivers/net/ethernet/intel/igc/igc_main.c
1771
ring->rx_stats.csum_err++;
drivers/net/ethernet/intel/igc/igc_main.c
1772
u64_stats_update_end(&ring->rx_syncp);
drivers/net/ethernet/intel/igc/igc_main.c
1782
netdev_dbg(ring->netdev, "cksum success: bits %08X\n",
drivers/net/ethernet/intel/igc/igc_main.c
1806
static inline void igc_rx_hash(struct igc_ring *ring,
drivers/net/ethernet/intel/igc/igc_main.c
1810
if (ring->netdev->features & NETIF_F_RXHASH) {
drivers/net/ethernet/intel/igc/igc_main.c
1923
static unsigned int igc_get_rx_frame_truesize(struct igc_ring *ring,
drivers/net/ethernet/intel/igc/igc_main.c
1929
truesize = igc_rx_pg_size(ring) / 2;
drivers/net/ethernet/intel/igc/igc_main.c
1931
truesize = ring_uses_build_skb(ring) ?
drivers/net/ethernet/intel/igc/igc_main.c
2321
static bool igc_alloc_rx_buffers_zc(struct igc_ring *ring, u16 count)
drivers/net/ethernet/intel/igc/igc_main.c
2324
u16 i = ring->next_to_use;
drivers/net/ethernet/intel/igc/igc_main.c
2334
desc = IGC_RX_DESC(ring, i);
drivers/net/ethernet/intel/igc/igc_main.c
2335
bi = &ring->rx_buffer_info[i];
drivers/net/ethernet/intel/igc/igc_main.c
2336
i -= ring->count;
drivers/net/ethernet/intel/igc/igc_main.c
2339
bi->xdp = xsk_buff_alloc(ring->xsk_pool);
drivers/net/ethernet/intel/igc/igc_main.c
2352
desc = IGC_RX_DESC(ring, 0);
drivers/net/ethernet/intel/igc/igc_main.c
2353
bi = ring->rx_buffer_info;
drivers/net/ethernet/intel/igc/igc_main.c
2354
i -= ring->count;
drivers/net/ethernet/intel/igc/igc_main.c
2363
i += ring->count;
drivers/net/ethernet/intel/igc/igc_main.c
2365
if (ring->next_to_use != i) {
drivers/net/ethernet/intel/igc/igc_main.c
2366
ring->next_to_use = i;
drivers/net/ethernet/intel/igc/igc_main.c
2374
writel(i, ring->tail);
drivers/net/ethernet/intel/igc/igc_main.c
2381
static int igc_xdp_init_tx_descriptor(struct igc_ring *ring,
drivers/net/ethernet/intel/igc/igc_main.c
2386
u16 count, index = ring->next_to_use;
drivers/net/ethernet/intel/igc/igc_main.c
2387
struct igc_tx_buffer *head = &ring->tx_buffer_info[index];
drivers/net/ethernet/intel/igc/igc_main.c
2389
union igc_adv_tx_desc *desc = IGC_TX_DESC(ring, index);
drivers/net/ethernet/intel/igc/igc_main.c
2398
if (igc_maybe_stop_tx(ring, count + 3)) {
drivers/net/ethernet/intel/igc/igc_main.c
2415
dma = dma_map_single(ring->dev, data, len, DMA_TO_DEVICE);
drivers/net/ethernet/intel/igc/igc_main.c
2416
if (dma_mapping_error(ring->dev, dma)) {
drivers/net/ethernet/intel/igc/igc_main.c
2417
netdev_err_once(ring->netdev,
drivers/net/ethernet/intel/igc/igc_main.c
2433
if (++index == ring->count)
drivers/net/ethernet/intel/igc/igc_main.c
2439
buffer = &ring->tx_buffer_info[index];
drivers/net/ethernet/intel/igc/igc_main.c
2440
desc = IGC_TX_DESC(ring, index);
drivers/net/ethernet/intel/igc/igc_main.c
2449
netdev_tx_sent_queue(txring_txq(ring), head->bytecount);
drivers/net/ethernet/intel/igc/igc_main.c
2454
ring->next_to_use = index;
drivers/net/ethernet/intel/igc/igc_main.c
2460
buffer = &ring->tx_buffer_info[index];
drivers/net/ethernet/intel/igc/igc_main.c
2462
dma_unmap_page(ring->dev,
drivers/net/ethernet/intel/igc/igc_main.c
2471
index += ring->count;
drivers/net/ethernet/intel/igc/igc_main.c
2496
struct igc_ring *ring;
drivers/net/ethernet/intel/igc/igc_main.c
2502
ring = igc_get_tx_ring(adapter, cpu);
drivers/net/ethernet/intel/igc/igc_main.c
2503
nq = txring_txq(ring);
drivers/net/ethernet/intel/igc/igc_main.c
2508
res = igc_xdp_init_tx_descriptor(ring, xdpf);
drivers/net/ethernet/intel/igc/igc_main.c
2562
void igc_flush_tx_descriptors(struct igc_ring *ring)
drivers/net/ethernet/intel/igc/igc_main.c
2569
writel(ring->next_to_use, ring->tail);
drivers/net/ethernet/intel/igc/igc_main.c
2576
struct igc_ring *ring;
drivers/net/ethernet/intel/igc/igc_main.c
2579
ring = igc_get_tx_ring(adapter, cpu);
drivers/net/ethernet/intel/igc/igc_main.c
2580
nq = txring_txq(ring);
drivers/net/ethernet/intel/igc/igc_main.c
2583
igc_flush_tx_descriptors(ring);
drivers/net/ethernet/intel/igc/igc_main.c
2594
struct igc_ring *ring = q_vector->rx.ring;
drivers/net/ethernet/intel/igc/igc_main.c
2596
u64_stats_update_begin(&ring->rx_syncp);
drivers/net/ethernet/intel/igc/igc_main.c
2597
ring->rx_stats.packets += packets;
drivers/net/ethernet/intel/igc/igc_main.c
2598
ring->rx_stats.bytes += bytes;
drivers/net/ethernet/intel/igc/igc_main.c
2599
u64_stats_update_end(&ring->rx_syncp);
drivers/net/ethernet/intel/igc/igc_main.c
2609
struct igc_ring *rx_ring = q_vector->rx.ring;
drivers/net/ethernet/intel/igc/igc_main.c
2741
static struct sk_buff *igc_construct_skb_zc(struct igc_ring *ring,
drivers/net/ethernet/intel/igc/igc_main.c
2751
skb = napi_alloc_skb(&ring->q_vector->napi, totalsize);
drivers/net/ethernet/intel/igc/igc_main.c
2775
struct igc_ring *ring = q_vector->rx.ring;
drivers/net/ethernet/intel/igc/igc_main.c
2778
skb = igc_construct_skb_zc(ring, ctx);
drivers/net/ethernet/intel/igc/igc_main.c
2780
ring->rx_stats.alloc_failed++;
drivers/net/ethernet/intel/igc/igc_main.c
2781
set_bit(IGC_RING_FLAG_RX_ALLOC_FAILED, &ring->flags);
drivers/net/ethernet/intel/igc/igc_main.c
2785
if (igc_cleanup_headers(ring, desc, skb))
drivers/net/ethernet/intel/igc/igc_main.c
2788
igc_process_skb_fields(ring, desc, skb);
drivers/net/ethernet/intel/igc/igc_main.c
2804
struct igc_ring *ring = q_vector->rx.ring;
drivers/net/ethernet/intel/igc/igc_main.c
2805
u16 cleaned_count = igc_desc_unused(ring);
drivers/net/ethernet/intel/igc/igc_main.c
2807
u16 ntc = ring->next_to_clean;
drivers/net/ethernet/intel/igc/igc_main.c
2823
desc = IGC_RX_DESC(ring, ntc);
drivers/net/ethernet/intel/igc/igc_main.c
2834
bi = &ring->rx_buffer_info[ntc];
drivers/net/ethernet/intel/igc/igc_main.c
2875
if (ntc == ring->count)
drivers/net/ethernet/intel/igc/igc_main.c
2879
ring->next_to_clean = ntc;
drivers/net/ethernet/intel/igc/igc_main.c
2883
failure = !igc_alloc_rx_buffers_zc(ring, cleaned_count);
drivers/net/ethernet/intel/igc/igc_main.c
2890
if (xsk_uses_need_wakeup(ring->xsk_pool)) {
drivers/net/ethernet/intel/igc/igc_main.c
2891
if (failure || ring->next_to_clean == ring->next_to_use)
drivers/net/ethernet/intel/igc/igc_main.c
2892
xsk_set_rx_need_wakeup(ring->xsk_pool);
drivers/net/ethernet/intel/igc/igc_main.c
2894
xsk_clear_rx_need_wakeup(ring->xsk_pool);
drivers/net/ethernet/intel/igc/igc_main.c
2904
struct igc_ring *ring = q_vector->tx.ring;
drivers/net/ethernet/intel/igc/igc_main.c
2906
u64_stats_update_begin(&ring->tx_syncp);
drivers/net/ethernet/intel/igc/igc_main.c
2907
ring->tx_stats.bytes += bytes;
drivers/net/ethernet/intel/igc/igc_main.c
2908
ring->tx_stats.packets += packets;
drivers/net/ethernet/intel/igc/igc_main.c
2909
u64_stats_update_end(&ring->tx_syncp);
drivers/net/ethernet/intel/igc/igc_main.c
3037
static void igc_xdp_xmit_zc(struct igc_ring *ring)
drivers/net/ethernet/intel/igc/igc_main.c
3039
struct xsk_buff_pool *pool = ring->xsk_pool;
drivers/net/ethernet/intel/igc/igc_main.c
3040
struct netdev_queue *nq = txring_txq(ring);
drivers/net/ethernet/intel/igc/igc_main.c
3046
if (!netif_carrier_ok(ring->netdev))
drivers/net/ethernet/intel/igc/igc_main.c
3054
ntu = ring->next_to_use;
drivers/net/ethernet/intel/igc/igc_main.c
3055
budget = igc_desc_unused(ring);
drivers/net/ethernet/intel/igc/igc_main.c
3079
bi = &ring->tx_buffer_info[ntu];
drivers/net/ethernet/intel/igc/igc_main.c
3081
meta_req.tx_ring = ring;
drivers/net/ethernet/intel/igc/igc_main.c
3089
ntu = ring->next_to_use;
drivers/net/ethernet/intel/igc/igc_main.c
3097
tx_desc = IGC_TX_DESC(ring, ntu);
drivers/net/ethernet/intel/igc/igc_main.c
3109
netdev_tx_sent_queue(txring_txq(ring), xdp_desc.len);
drivers/net/ethernet/intel/igc/igc_main.c
3112
if (ntu == ring->count)
drivers/net/ethernet/intel/igc/igc_main.c
3115
ring->next_to_use = ntu;
drivers/net/ethernet/intel/igc/igc_main.c
3120
igc_flush_tx_descriptors(ring);
drivers/net/ethernet/intel/igc/igc_main.c
3139
struct igc_ring *tx_ring = q_vector->tx.ring;
drivers/net/ethernet/intel/igc/igc_main.c
326
static void igc_disable_tx_ring_hw(struct igc_ring *ring)
drivers/net/ethernet/intel/igc/igc_main.c
328
struct igc_hw *hw = &ring->q_vector->adapter->hw;
drivers/net/ethernet/intel/igc/igc_main.c
329
u8 idx = ring->reg_idx;
drivers/net/ethernet/intel/igc/igc_main.c
4190
struct igc_ring *ring = adapter->rx_ring[i];
drivers/net/ethernet/intel/igc/igc_main.c
4192
if (ring->xsk_pool)
drivers/net/ethernet/intel/igc/igc_main.c
4193
igc_alloc_rx_buffers_zc(ring, igc_desc_unused(ring));
drivers/net/ethernet/intel/igc/igc_main.c
4195
igc_alloc_rx_buffers(ring, igc_desc_unused(ring));
drivers/net/ethernet/intel/igc/igc_main.c
4231
if (q_vector->rx.ring)
drivers/net/ethernet/intel/igc/igc_main.c
4232
rx_queue = q_vector->rx.ring->reg_idx;
drivers/net/ethernet/intel/igc/igc_main.c
4233
if (q_vector->tx.ring)
drivers/net/ethernet/intel/igc/igc_main.c
4234
tx_queue = q_vector->tx.ring->reg_idx;
drivers/net/ethernet/intel/igc/igc_main.c
4406
if (q_vector->tx.ring)
drivers/net/ethernet/intel/igc/igc_main.c
4407
adapter->tx_ring[q_vector->tx.ring->queue_index] = NULL;
drivers/net/ethernet/intel/igc/igc_main.c
4409
if (q_vector->rx.ring)
drivers/net/ethernet/intel/igc/igc_main.c
4410
adapter->rx_ring[q_vector->rx.ring->queue_index] = NULL;
drivers/net/ethernet/intel/igc/igc_main.c
450
static void igc_clean_rx_ring_xsk_pool(struct igc_ring *ring)
drivers/net/ethernet/intel/igc/igc_main.c
4548
((q_vector->rx.ring && adapter->rx_itr_setting == 3) ||
drivers/net/ethernet/intel/igc/igc_main.c
4549
(!q_vector->rx.ring && adapter->tx_itr_setting == 3)))
drivers/net/ethernet/intel/igc/igc_main.c
455
for (i = 0; i < ring->count; i++) {
drivers/net/ethernet/intel/igc/igc_main.c
456
bi = &ring->rx_buffer_info[i];
drivers/net/ethernet/intel/igc/igc_main.c
469
static void igc_clean_rx_ring(struct igc_ring *ring)
drivers/net/ethernet/intel/igc/igc_main.c
471
if (ring->xsk_pool)
drivers/net/ethernet/intel/igc/igc_main.c
472
igc_clean_rx_ring_xsk_pool(ring);
drivers/net/ethernet/intel/igc/igc_main.c
4733
((q_vector->rx.ring && adapter->rx_itr_setting == 3) ||
drivers/net/ethernet/intel/igc/igc_main.c
4734
(!q_vector->rx.ring && adapter->tx_itr_setting == 3)))
drivers/net/ethernet/intel/igc/igc_main.c
474
igc_clean_rx_ring_page_shared(ring);
drivers/net/ethernet/intel/igc/igc_main.c
4754
if ((q_vector->rx.ring && (adapter->rx_itr_setting & 3)) ||
drivers/net/ethernet/intel/igc/igc_main.c
4755
(!q_vector->rx.ring && (adapter->tx_itr_setting & 3))) {
drivers/net/ethernet/intel/igc/igc_main.c
476
clear_ring_uses_large_buffer(ring);
drivers/net/ethernet/intel/igc/igc_main.c
4770
static void igc_add_ring(struct igc_ring *ring,
drivers/net/ethernet/intel/igc/igc_main.c
4773
head->ring = ring;
drivers/net/ethernet/intel/igc/igc_main.c
478
ring->next_to_alloc = 0;
drivers/net/ethernet/intel/igc/igc_main.c
479
ring->next_to_clean = 0;
drivers/net/ethernet/intel/igc/igc_main.c
480
ring->next_to_use = 0;
drivers/net/ethernet/intel/igc/igc_main.c
4809
struct igc_ring *rx_ring = q_vector->rx.ring;
drivers/net/ethernet/intel/igc/igc_main.c
4813
if (q_vector->tx.ring)
drivers/net/ethernet/intel/igc/igc_main.c
4857
struct igc_ring *ring;
drivers/net/ethernet/intel/igc/igc_main.c
4869
q_vector = kzalloc_flex(*q_vector, ring, ring_count);
drivers/net/ethernet/intel/igc/igc_main.c
4871
memset(q_vector, 0, struct_size(q_vector, ring, ring_count));
drivers/net/ethernet/intel/igc/igc_main.c
4890
ring = q_vector->ring;
drivers/net/ethernet/intel/igc/igc_main.c
4905
ring->dev = &adapter->pdev->dev;
drivers/net/ethernet/intel/igc/igc_main.c
4906
ring->netdev = adapter->netdev;
drivers/net/ethernet/intel/igc/igc_main.c
4909
ring->q_vector = q_vector;
drivers/net/ethernet/intel/igc/igc_main.c
4912
igc_add_ring(ring, &q_vector->tx);
drivers/net/ethernet/intel/igc/igc_main.c
4915
ring->count = adapter->tx_ring_count;
drivers/net/ethernet/intel/igc/igc_main.c
4916
ring->queue_index = txr_idx;
drivers/net/ethernet/intel/igc/igc_main.c
4919
adapter->tx_ring[txr_idx] = ring;
drivers/net/ethernet/intel/igc/igc_main.c
4922
ring++;
drivers/net/ethernet/intel/igc/igc_main.c
4927
ring->dev = &adapter->pdev->dev;
drivers/net/ethernet/intel/igc/igc_main.c
4928
ring->netdev = adapter->netdev;
drivers/net/ethernet/intel/igc/igc_main.c
4931
ring->q_vector = q_vector;
drivers/net/ethernet/intel/igc/igc_main.c
4934
igc_add_ring(ring, &q_vector->rx);
drivers/net/ethernet/intel/igc/igc_main.c
4937
ring->count = adapter->rx_ring_count;
drivers/net/ethernet/intel/igc/igc_main.c
4938
ring->queue_index = rxr_idx;
drivers/net/ethernet/intel/igc/igc_main.c
4941
adapter->rx_ring[rxr_idx] = ring;
drivers/net/ethernet/intel/igc/igc_main.c
5097
if (q_vector->rx.ring)
drivers/net/ethernet/intel/igc/igc_main.c
5099
q_vector->rx.ring->queue_index,
drivers/net/ethernet/intel/igc/igc_main.c
5102
if (q_vector->tx.ring)
drivers/net/ethernet/intel/igc/igc_main.c
5104
q_vector->tx.ring->queue_index,
drivers/net/ethernet/intel/igc/igc_main.c
5173
struct igc_ring *ring = adapter->rx_ring[i];
drivers/net/ethernet/intel/igc/igc_main.c
5180
ring->rx_stats.drops += rqdpc;
drivers/net/ethernet/intel/igc/igc_main.c
5185
start = u64_stats_fetch_begin(&ring->rx_syncp);
drivers/net/ethernet/intel/igc/igc_main.c
5186
_bytes = ring->rx_stats.bytes;
drivers/net/ethernet/intel/igc/igc_main.c
5187
_packets = ring->rx_stats.packets;
drivers/net/ethernet/intel/igc/igc_main.c
5188
} while (u64_stats_fetch_retry(&ring->rx_syncp, start));
drivers/net/ethernet/intel/igc/igc_main.c
5199
struct igc_ring *ring = adapter->tx_ring[i];
drivers/net/ethernet/intel/igc/igc_main.c
5202
start = u64_stats_fetch_begin(&ring->tx_syncp);
drivers/net/ethernet/intel/igc/igc_main.c
5203
_bytes = ring->tx_stats.bytes;
drivers/net/ethernet/intel/igc/igc_main.c
5204
_packets = ring->tx_stats.packets;
drivers/net/ethernet/intel/igc/igc_main.c
5205
} while (u64_stats_fetch_retry(&ring->tx_syncp, start));
drivers/net/ethernet/intel/igc/igc_main.c
5727
if (q_vector->rx.ring && q_vector->tx.ring)
drivers/net/ethernet/intel/igc/igc_main.c
5729
q_vector->rx.ring->queue_index);
drivers/net/ethernet/intel/igc/igc_main.c
5730
else if (q_vector->tx.ring)
drivers/net/ethernet/intel/igc/igc_main.c
5732
q_vector->tx.ring->queue_index);
drivers/net/ethernet/intel/igc/igc_main.c
5733
else if (q_vector->rx.ring)
drivers/net/ethernet/intel/igc/igc_main.c
5735
q_vector->rx.ring->queue_index);
drivers/net/ethernet/intel/igc/igc_main.c
5996
if (!q_vector->rx.ring)
drivers/net/ethernet/intel/igc/igc_main.c
5999
rx_ring = adapter->rx_ring[q_vector->rx.ring->queue_index];
drivers/net/ethernet/intel/igc/igc_main.c
616
struct igc_ring *ring)
drivers/net/ethernet/intel/igc/igc_main.c
619
!test_bit(IGC_RING_FLAG_AF_XDP_ZC, &ring->flags))
drivers/net/ethernet/intel/igc/igc_main.c
622
return xsk_get_pool_from_qid(ring->netdev, ring->queue_index);
drivers/net/ethernet/intel/igc/igc_main.c
633
struct igc_ring *ring)
drivers/net/ethernet/intel/igc/igc_main.c
6333
struct igc_ring *ring;
drivers/net/ethernet/intel/igc/igc_main.c
6338
ring = adapter->tx_ring[queue];
drivers/net/ethernet/intel/igc/igc_main.c
6339
ring->launchtime_enable = enable;
drivers/net/ethernet/intel/igc/igc_main.c
637
int reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/igc/igc_main.c
639
u64 rdba = ring->dma;
drivers/net/ethernet/intel/igc/igc_main.c
642
xdp_rxq_info_unreg_mem_model(&ring->xdp_rxq);
drivers/net/ethernet/intel/igc/igc_main.c
643
ring->xsk_pool = igc_get_xsk_pool(adapter, ring);
drivers/net/ethernet/intel/igc/igc_main.c
6434
struct igc_ring *ring = adapter->tx_ring[i];
drivers/net/ethernet/intel/igc/igc_main.c
6436
ring->start_time = 0;
drivers/net/ethernet/intel/igc/igc_main.c
6437
ring->end_time = NSEC_PER_SEC;
drivers/net/ethernet/intel/igc/igc_main.c
6438
ring->max_sdu = 0;
drivers/net/ethernet/intel/igc/igc_main.c
6439
ring->preemptible = false;
drivers/net/ethernet/intel/igc/igc_main.c
644
if (ring->xsk_pool) {
drivers/net/ethernet/intel/igc/igc_main.c
6447
struct igc_ring *ring = adapter->tx_ring[i];
drivers/net/ethernet/intel/igc/igc_main.c
6449
ring->oper_gate_closed = false;
drivers/net/ethernet/intel/igc/igc_main.c
645
WARN_ON(xdp_rxq_info_reg_mem_model(&ring->xdp_rxq,
drivers/net/ethernet/intel/igc/igc_main.c
6450
ring->admin_gate_closed = false;
drivers/net/ethernet/intel/igc/igc_main.c
648
xsk_pool_set_rxq_info(ring->xsk_pool, &ring->xdp_rxq);
drivers/net/ethernet/intel/igc/igc_main.c
650
WARN_ON(xdp_rxq_info_reg_mem_model(&ring->xdp_rxq,
drivers/net/ethernet/intel/igc/igc_main.c
6543
struct igc_ring *ring = adapter->tx_ring[i];
drivers/net/ethernet/intel/igc/igc_main.c
6553
ring->start_time = start_time;
drivers/net/ethernet/intel/igc/igc_main.c
6554
ring->end_time = end_time;
drivers/net/ethernet/intel/igc/igc_main.c
6556
if (ring->start_time >= adapter->cycle_time)
drivers/net/ethernet/intel/igc/igc_main.c
656
set_ring_uses_large_buffer(ring);
drivers/net/ethernet/intel/igc/igc_main.c
6571
struct igc_ring *ring = adapter->tx_ring[i];
drivers/net/ethernet/intel/igc/igc_main.c
6574
ring->admin_gate_closed = false;
drivers/net/ethernet/intel/igc/igc_main.c
6576
ring->oper_gate_closed = false;
drivers/net/ethernet/intel/igc/igc_main.c
6577
ring->admin_gate_closed = false;
drivers/net/ethernet/intel/igc/igc_main.c
6582
ring->admin_gate_closed = true;
drivers/net/ethernet/intel/igc/igc_main.c
6584
ring->oper_gate_closed = true;
drivers/net/ethernet/intel/igc/igc_main.c
6586
ring->start_time = end_time;
drivers/net/ethernet/intel/igc/igc_main.c
6587
ring->end_time = end_time;
drivers/net/ethernet/intel/igc/igc_main.c
6594
struct igc_ring *ring = adapter->tx_ring[i];
drivers/net/ethernet/intel/igc/igc_main.c
6598
ring->max_sdu = qopt->max_sdu[i] + dev->hard_header_len - ETH_TLEN;
drivers/net/ethernet/intel/igc/igc_main.c
6600
ring->max_sdu = 0;
drivers/net/ethernet/intel/igc/igc_main.c
6646
struct igc_ring *ring;
drivers/net/ethernet/intel/igc/igc_main.c
6655
ring = adapter->tx_ring[queue];
drivers/net/ethernet/intel/igc/igc_main.c
666
ring->count * sizeof(union igc_adv_rx_desc));
drivers/net/ethernet/intel/igc/igc_main.c
6678
ring->cbs_enable = enable;
drivers/net/ethernet/intel/igc/igc_main.c
6679
ring->idleslope = idleslope;
drivers/net/ethernet/intel/igc/igc_main.c
6680
ring->sendslope = sendslope;
drivers/net/ethernet/intel/igc/igc_main.c
6681
ring->hicredit = hicredit;
drivers/net/ethernet/intel/igc/igc_main.c
6682
ring->locredit = locredit;
drivers/net/ethernet/intel/igc/igc_main.c
669
ring->tail = adapter->io_addr + IGC_RDT(reg_idx);
drivers/net/ethernet/intel/igc/igc_main.c
671
writel(0, ring->tail);
drivers/net/ethernet/intel/igc/igc_main.c
674
ring->next_to_clean = 0;
drivers/net/ethernet/intel/igc/igc_main.c
675
ring->next_to_use = 0;
drivers/net/ethernet/intel/igc/igc_main.c
677
if (ring->xsk_pool)
drivers/net/ethernet/intel/igc/igc_main.c
678
buf_size = xsk_pool_get_rx_frame_size(ring->xsk_pool);
drivers/net/ethernet/intel/igc/igc_main.c
679
else if (ring_uses_large_buffer(ring))
drivers/net/ethernet/intel/igc/igc_main.c
6877
struct igc_ring *ring;
drivers/net/ethernet/intel/igc/igc_main.c
6886
ring = igc_get_tx_ring(adapter, cpu);
drivers/net/ethernet/intel/igc/igc_main.c
6887
nq = txring_txq(ring);
drivers/net/ethernet/intel/igc/igc_main.c
6899
err = igc_xdp_init_tx_descriptor(ring, xdpf);
drivers/net/ethernet/intel/igc/igc_main.c
6906
igc_flush_tx_descriptors(ring);
drivers/net/ethernet/intel/igc/igc_main.c
6927
struct igc_ring *ring;
drivers/net/ethernet/intel/igc/igc_main.c
6939
ring = adapter->rx_ring[queue_id];
drivers/net/ethernet/intel/igc/igc_main.c
6941
if (!ring->xsk_pool)
drivers/net/ethernet/intel/igc/igc_main.c
6945
eics |= igc_sw_irq_prep(ring->q_vector);
drivers/net/ethernet/intel/igc/igc_main.c
6953
ring = adapter->tx_ring[queue_id];
drivers/net/ethernet/intel/igc/igc_main.c
6954
eics |= igc_sw_irq_prep(ring->q_vector);
drivers/net/ethernet/intel/igc/igc_main.c
698
memset(ring->rx_buffer_info, 0,
drivers/net/ethernet/intel/igc/igc_main.c
699
sizeof(struct igc_rx_buffer) * ring->count);
drivers/net/ethernet/intel/igc/igc_main.c
702
rx_desc = IGC_RX_DESC(ring, 0);
drivers/net/ethernet/intel/igc/igc_main.c
736
struct igc_ring *ring)
drivers/net/ethernet/intel/igc/igc_main.c
739
int reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/igc/igc_main.c
740
u64 tdba = ring->dma;
drivers/net/ethernet/intel/igc/igc_main.c
743
ring->xsk_pool = igc_get_xsk_pool(adapter, ring);
drivers/net/ethernet/intel/igc/igc_main.c
750
ring->count * sizeof(union igc_adv_tx_desc));
drivers/net/ethernet/intel/igc/igc_main.c
755
ring->tail = adapter->io_addr + IGC_TDT(reg_idx);
drivers/net/ethernet/intel/igc/igc_main.c
757
writel(0, ring->tail);
drivers/net/ethernet/intel/igc/igc_main.c
7799
static void igc_disable_rx_ring_hw(struct igc_ring *ring)
drivers/net/ethernet/intel/igc/igc_main.c
7801
struct igc_hw *hw = &ring->q_vector->adapter->hw;
drivers/net/ethernet/intel/igc/igc_main.c
7802
u8 idx = ring->reg_idx;
drivers/net/ethernet/intel/igc/igc_main.c
7811
void igc_disable_rx_ring(struct igc_ring *ring)
drivers/net/ethernet/intel/igc/igc_main.c
7813
igc_disable_rx_ring_hw(ring);
drivers/net/ethernet/intel/igc/igc_main.c
7814
igc_clean_rx_ring(ring);
drivers/net/ethernet/intel/igc/igc_main.c
7817
void igc_enable_rx_ring(struct igc_ring *ring)
drivers/net/ethernet/intel/igc/igc_main.c
7819
struct igc_adapter *adapter = ring->q_vector->adapter;
drivers/net/ethernet/intel/igc/igc_main.c
7821
igc_configure_rx_ring(adapter, ring);
drivers/net/ethernet/intel/igc/igc_main.c
7823
if (ring->xsk_pool)
drivers/net/ethernet/intel/igc/igc_main.c
7824
igc_alloc_rx_buffers_zc(ring, igc_desc_unused(ring));
drivers/net/ethernet/intel/igc/igc_main.c
7826
igc_alloc_rx_buffers(ring, igc_desc_unused(ring));
drivers/net/ethernet/intel/igc/igc_main.c
7829
void igc_disable_tx_ring(struct igc_ring *ring)
drivers/net/ethernet/intel/igc/igc_main.c
7831
igc_disable_tx_ring_hw(ring);
drivers/net/ethernet/intel/igc/igc_main.c
7832
igc_clean_tx_ring(ring);
drivers/net/ethernet/intel/igc/igc_main.c
7835
void igc_enable_tx_ring(struct igc_ring *ring)
drivers/net/ethernet/intel/igc/igc_main.c
7837
struct igc_adapter *adapter = ring->q_vector->adapter;
drivers/net/ethernet/intel/igc/igc_main.c
7839
igc_configure_tx_ring(adapter, ring);
drivers/net/ethernet/intel/igc/igc_tsn.c
100
ring = igc_get_tx_ring(adapter, cpu);
drivers/net/ethernet/intel/igc/igc_tsn.c
101
nq = txring_txq(ring);
drivers/net/ethernet/intel/igc/igc_tsn.c
111
err = igc_fpe_init_tx_descriptor(ring, skb, type);
drivers/net/ethernet/intel/igc/igc_tsn.c
112
igc_flush_tx_descriptors(ring);
drivers/net/ethernet/intel/igc/igc_tsn.c
210
struct igc_ring *ring = adapter->tx_ring[i];
drivers/net/ethernet/intel/igc/igc_tsn.c
212
if (ring->launchtime_enable)
drivers/net/ethernet/intel/igc/igc_tsn.c
224
struct igc_ring *ring = adapter->tx_ring[i];
drivers/net/ethernet/intel/igc/igc_tsn.c
226
if (ring->cbs_enable)
drivers/net/ethernet/intel/igc/igc_tsn.c
25
static int igc_fpe_init_smd_frame(struct igc_ring *ring,
drivers/net/ethernet/intel/igc/igc_tsn.c
29
dma_addr_t dma = dma_map_single(ring->dev, skb->data, skb->len,
drivers/net/ethernet/intel/igc/igc_tsn.c
32
if (dma_mapping_error(ring->dev, dma)) {
drivers/net/ethernet/intel/igc/igc_tsn.c
33
netdev_err_once(ring->netdev, "Failed to map DMA for TX\n");
drivers/net/ethernet/intel/igc/igc_tsn.c
469
struct igc_ring *ring = adapter->tx_ring[i];
drivers/net/ethernet/intel/igc/igc_tsn.c
470
u32 txdctl = rd32(IGC_TXDCTL(ring->reg_idx));
drivers/net/ethernet/intel/igc/igc_tsn.c
475
wr32(IGC_STQT(i), ring->start_time);
drivers/net/ethernet/intel/igc/igc_tsn.c
476
wr32(IGC_ENDQT(i), ring->end_time);
drivers/net/ethernet/intel/igc/igc_tsn.c
48
static int igc_fpe_init_tx_descriptor(struct igc_ring *ring,
drivers/net/ethernet/intel/igc/igc_tsn.c
501
if (ring->launchtime_enable)
drivers/net/ethernet/intel/igc/igc_tsn.c
508
} else if (ring->preemptible) {
drivers/net/ethernet/intel/igc/igc_tsn.c
518
wr32(IGC_TXDCTL(ring->reg_idx), txdctl);
drivers/net/ethernet/intel/igc/igc_tsn.c
524
if (ring->cbs_enable) {
drivers/net/ethernet/intel/igc/igc_tsn.c
57
if (!igc_desc_unused(ring))
drivers/net/ethernet/intel/igc/igc_tsn.c
580
cbs_value = DIV_ROUND_UP_ULL(ring->idleslope
drivers/net/ethernet/intel/igc/igc_tsn.c
589
0x80000000 + ring->hicredit * 0x7736);
drivers/net/ethernet/intel/igc/igc_tsn.c
60
buffer = &ring->tx_buffer_info[ring->next_to_use];
drivers/net/ethernet/intel/igc/igc_tsn.c
61
err = igc_fpe_init_smd_frame(ring, buffer, skb);
drivers/net/ethernet/intel/igc/igc_tsn.c
78
desc = IGC_TX_DESC(ring, ring->next_to_use);
drivers/net/ethernet/intel/igc/igc_tsn.c
83
netdev_tx_sent_queue(txring_txq(ring), skb->len);
drivers/net/ethernet/intel/igc/igc_tsn.c
86
ring->next_to_use = (ring->next_to_use + 1) % ring->count;
drivers/net/ethernet/intel/igc/igc_tsn.c
96
struct igc_ring *ring;
drivers/net/ethernet/intel/ixgbe/ixgbe.h
1054
static inline struct netdev_queue *txring_txq(const struct ixgbe_ring *ring)
drivers/net/ethernet/intel/ixgbe/ixgbe.h
1056
return netdev_get_tx_queue(ring->netdev, ring->queue_index);
drivers/net/ethernet/intel/ixgbe/ixgbe.h
327
#define ring_uses_build_skb(ring) \
drivers/net/ethernet/intel/ixgbe/ixgbe.h
328
test_bit(__IXGBE_RX_BUILD_SKB_ENABLED, &(ring)->state)
drivers/net/ethernet/intel/ixgbe/ixgbe.h
338
#define check_for_tx_hang(ring) \
drivers/net/ethernet/intel/ixgbe/ixgbe.h
339
test_bit(__IXGBE_TX_DETECT_HANG, &(ring)->state)
drivers/net/ethernet/intel/ixgbe/ixgbe.h
340
#define set_check_for_tx_hang(ring) \
drivers/net/ethernet/intel/ixgbe/ixgbe.h
341
set_bit(__IXGBE_TX_DETECT_HANG, &(ring)->state)
drivers/net/ethernet/intel/ixgbe/ixgbe.h
342
#define clear_check_for_tx_hang(ring) \
drivers/net/ethernet/intel/ixgbe/ixgbe.h
343
clear_bit(__IXGBE_TX_DETECT_HANG, &(ring)->state)
drivers/net/ethernet/intel/ixgbe/ixgbe.h
344
#define ring_is_rsc_enabled(ring) \
drivers/net/ethernet/intel/ixgbe/ixgbe.h
345
test_bit(__IXGBE_RX_RSC_ENABLED, &(ring)->state)
drivers/net/ethernet/intel/ixgbe/ixgbe.h
346
#define set_ring_rsc_enabled(ring) \
drivers/net/ethernet/intel/ixgbe/ixgbe.h
347
set_bit(__IXGBE_RX_RSC_ENABLED, &(ring)->state)
drivers/net/ethernet/intel/ixgbe/ixgbe.h
348
#define clear_ring_rsc_enabled(ring) \
drivers/net/ethernet/intel/ixgbe/ixgbe.h
349
clear_bit(__IXGBE_RX_RSC_ENABLED, &(ring)->state)
drivers/net/ethernet/intel/ixgbe/ixgbe.h
350
#define ring_is_xdp(ring) \
drivers/net/ethernet/intel/ixgbe/ixgbe.h
351
test_bit(__IXGBE_TX_XDP_RING, &(ring)->state)
drivers/net/ethernet/intel/ixgbe/ixgbe.h
352
#define set_ring_xdp(ring) \
drivers/net/ethernet/intel/ixgbe/ixgbe.h
353
set_bit(__IXGBE_TX_XDP_RING, &(ring)->state)
drivers/net/ethernet/intel/ixgbe/ixgbe.h
354
#define clear_ring_xdp(ring) \
drivers/net/ethernet/intel/ixgbe/ixgbe.h
355
clear_bit(__IXGBE_TX_XDP_RING, &(ring)->state)
drivers/net/ethernet/intel/ixgbe/ixgbe.h
454
static inline unsigned int ixgbe_rx_bufsz(struct ixgbe_ring *ring)
drivers/net/ethernet/intel/ixgbe/ixgbe.h
456
if (test_bit(__IXGBE_RX_3K_BUFFER, &ring->state))
drivers/net/ethernet/intel/ixgbe/ixgbe.h
459
if (ring_uses_build_skb(ring))
drivers/net/ethernet/intel/ixgbe/ixgbe.h
465
static inline unsigned int ixgbe_rx_pg_order(struct ixgbe_ring *ring)
drivers/net/ethernet/intel/ixgbe/ixgbe.h
468
if (test_bit(__IXGBE_RX_3K_BUFFER, &ring->state))
drivers/net/ethernet/intel/ixgbe/ixgbe.h
482
struct ixgbe_ring *ring; /* pointer to linked list of rings */
drivers/net/ethernet/intel/ixgbe/ixgbe.h
493
for (pos = (head).ring; pos != NULL; pos = pos->next)
drivers/net/ethernet/intel/ixgbe/ixgbe.h
521
struct ixgbe_ring ring[] ____cacheline_internodealigned_in_smp;
drivers/net/ethernet/intel/ixgbe/ixgbe.h
563
static inline u16 ixgbe_desc_unused(struct ixgbe_ring *ring)
drivers/net/ethernet/intel/ixgbe/ixgbe.h
565
u16 ntc = ring->next_to_clean;
drivers/net/ethernet/intel/ixgbe/ixgbe.h
566
u16 ntu = ring->next_to_use;
drivers/net/ethernet/intel/ixgbe/ixgbe.h
568
return ((ntc > ntu) ? 0 : ring->count) + ntc - ntu - 1;
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
1224
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
1232
ring->rx_max_pending = ixgbe_get_max_rxd(adapter);
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
1233
ring->tx_max_pending = ixgbe_get_max_txd(adapter);
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
1234
ring->rx_pending = rx_ring->count;
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
1235
ring->tx_pending = tx_ring->count;
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
1239
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
1248
if ((ring->rx_mini_pending) || (ring->rx_jumbo_pending))
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
1251
new_tx_count = clamp_t(u32, ring->tx_pending,
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
1255
new_rx_count = clamp_t(u32, ring->rx_pending,
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
1407
struct ixgbe_ring *ring;
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
1432
ring = adapter->tx_ring[j];
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
1433
if (!ring) {
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
1441
start = u64_stats_fetch_begin(&ring->syncp);
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
1442
data[i] = ring->stats.packets;
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
1443
data[i+1] = ring->stats.bytes;
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
1444
} while (u64_stats_fetch_retry(&ring->syncp, start));
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
1448
ring = adapter->rx_ring[j];
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
1449
if (!ring) {
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
1457
start = u64_stats_fetch_begin(&ring->syncp);
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
1458
data[i] = ring->stats.packets;
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
1459
data[i+1] = ring->stats.bytes;
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
1460
} while (u64_stats_fetch_retry(&ring->syncp, start));
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
2961
u32 ring = ethtool_get_flow_spec_ring(fsp->ring_cookie);
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
2964
if (!vf && (ring >= adapter->num_rx_queues))
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
2968
ring >= adapter->num_rx_queues_per_pool))
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
2973
queue = adapter->rx_ring[ring]->reg_idx;
drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c
2976
adapter->num_rx_queues_per_pool) + ring;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
1003
ring++;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
1021
struct ixgbe_ring *ring;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
1023
ixgbe_for_each_ring(ring, q_vector->tx) {
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
1024
if (ring_is_xdp(ring))
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
1025
WRITE_ONCE(adapter->xdp_ring[ring->queue_index], NULL);
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
1027
WRITE_ONCE(adapter->tx_ring[ring->queue_index], NULL);
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
1030
ixgbe_for_each_ring(ring, q_vector->rx)
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
1031
WRITE_ONCE(adapter->rx_ring[ring->queue_index], NULL);
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
809
static void ixgbe_add_ring(struct ixgbe_ring *ring,
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
812
ring->next = head->ring;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
813
head->ring = ring;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
840
struct ixgbe_ring *ring;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
857
q_vector = kzalloc_node(struct_size(q_vector, ring, ring_count),
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
860
q_vector = kzalloc_flex(*q_vector, ring, ring_count);
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
907
ring = q_vector->ring;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
911
ring->dev = &adapter->pdev->dev;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
912
ring->netdev = adapter->netdev;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
915
ring->q_vector = q_vector;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
918
ixgbe_add_ring(ring, &q_vector->tx);
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
921
ring->count = adapter->tx_ring_count;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
922
ring->queue_index = txr_idx;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
925
WRITE_ONCE(adapter->tx_ring[txr_idx], ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
932
ring++;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
937
ring->dev = &adapter->pdev->dev;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
938
ring->netdev = adapter->netdev;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
941
ring->q_vector = q_vector;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
944
ixgbe_add_ring(ring, &q_vector->tx);
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
947
ring->count = adapter->tx_ring_count;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
948
ring->queue_index = xdp_idx;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
949
set_ring_xdp(ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
950
spin_lock_init(&ring->tx_lock);
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
953
WRITE_ONCE(adapter->xdp_ring[xdp_idx], ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
960
ring++;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
965
ring->dev = &adapter->pdev->dev;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
966
ring->netdev = adapter->netdev;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
969
ring->q_vector = q_vector;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
972
ixgbe_add_ring(ring, &q_vector->rx);
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
979
set_bit(__IXGBE_RX_CSUM_UDP_ZERO_ERR, &ring->state);
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
987
set_bit(__IXGBE_RX_FCOE, &ring->state);
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
992
ring->count = adapter->rx_ring_count;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
993
ring->queue_index = rxr_idx;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
996
WRITE_ONCE(adapter->rx_ring[rxr_idx], ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
1026
static u64 ixgbe_get_tx_completed(struct ixgbe_ring *ring)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
1028
return ring->stats.packets;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
1031
static u64 ixgbe_get_tx_pending(struct ixgbe_ring *ring)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
1035
head = ring->next_to_clean;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
1036
tail = ring->next_to_use;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
1038
return ((head <= tail) ? tail : tail + ring->count) - head;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10833
struct ixgbe_ring *ring = adapter->rx_ring[rxbase + i];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10834
struct ixgbe_q_vector *qv = ring->q_vector;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10841
ring->netdev = NULL;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10910
struct ixgbe_ring *ring = adapter->rx_ring[i];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10912
if (ring_is_rsc_enabled(ring))
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10915
if (frame_size > ixgbe_rx_bufsz(ring))
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10983
void ixgbe_xdp_ring_update_tail(struct ixgbe_ring *ring)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10989
writel(ring->next_to_use, ring->tail);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10992
void ixgbe_xdp_ring_update_tail_locked(struct ixgbe_ring *ring)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10995
spin_lock(&ring->tx_lock);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10996
ixgbe_xdp_ring_update_tail(ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10998
spin_unlock(&ring->tx_lock);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
11005
struct ixgbe_ring *ring;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
11022
ring = adapter->xdp_prog ? ixgbe_determine_xdp_ring(adapter) : NULL;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
11023
if (unlikely(!ring))
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
11026
if (unlikely(test_bit(__IXGBE_TX_DISABLED, &ring->state)))
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
11030
spin_lock(&ring->tx_lock);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
11036
err = ixgbe_xmit_xdp_ring(ring, xdpf);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
11043
ixgbe_xdp_ring_update_tail(ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
11046
spin_unlock(&ring->tx_lock);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
11192
static void ixgbe_irq_disable_single(struct ixgbe_adapter *adapter, u32 ring)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
11195
u64 qmask = BIT_ULL(ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
11220
synchronize_irq(adapter->msix_entries[ring].vector);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
11233
void ixgbe_txrx_ring_disable(struct ixgbe_adapter *adapter, int ring)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
11237
rx_ring = adapter->rx_ring[ring];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
11238
tx_ring = adapter->tx_ring[ring];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
11239
xdp_ring = adapter->xdp_ring[ring];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
11241
ixgbe_irq_disable_single(adapter, ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
11273
void ixgbe_txrx_ring_enable(struct ixgbe_adapter *adapter, int ring)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
11277
rx_ring = adapter->rx_ring[ring];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
11278
tx_ring = adapter->tx_ring[ring];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
11279
xdp_ring = adapter->xdp_ring[ring];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
11292
ixgbe_irq_enable_queues(adapter, BIT_ULL(ring));
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
1554
struct ixgbe_ring *ring;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
1560
ixgbe_for_each_ring(ring, q_vector->tx)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
1561
ixgbe_update_tx_dca(adapter, ring, cpu);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
1563
ixgbe_for_each_ring(ring, q_vector->rx)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
1564
ixgbe_update_rx_dca(adapter, ring, cpu);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
1630
static inline void ixgbe_rx_hash(struct ixgbe_ring *ring,
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
1636
if (!(ring->netdev->features & NETIF_F_RXHASH))
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
1658
static inline bool ixgbe_rx_is_fcoe(struct ixgbe_ring *ring,
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
1663
return test_bit(__IXGBE_RX_FCOE, &ring->state) &&
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
1676
static inline void ixgbe_rx_checksum(struct ixgbe_ring *ring,
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
1686
if (!(ring->netdev->features & NETIF_F_RXCSUM))
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
1698
ring->rx_stats.csum_err++;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
1711
test_bit(__IXGBE_RX_CSUM_UDP_ZERO_ERR, &ring->state))
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
1714
ring->rx_stats.csum_err++;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
1852
static void ixgbe_set_rsc_gso_size(struct ixgbe_ring *ring,
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
2406
struct ixgbe_ring *ring;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
2425
ring = ixgbe_determine_xdp_ring(adapter);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
2427
spin_lock(&ring->tx_lock);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
2428
result = ixgbe_xmit_xdp_ring(ring, xdpf);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
2430
spin_unlock(&ring->tx_lock);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
2637
struct ixgbe_ring *ring = ixgbe_determine_xdp_ring(adapter);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
2639
ixgbe_xdp_ring_update_tail_locked(ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
2672
struct ixgbe_ring *ring;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
2675
ixgbe_for_each_ring(ring, q_vector->rx)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
2676
ixgbe_set_ivar(adapter, 0, ring->reg_idx, v_idx);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
2678
ixgbe_for_each_ring(ring, q_vector->tx)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
2679
ixgbe_set_ivar(adapter, 1, ring->reg_idx, v_idx);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
2738
if (!ring_container->ring)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3527
struct ixgbe_ring *ring = adapter->tx_ring[i];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3529
&ring->state))
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3564
if (q_vector->rx.ring || q_vector->tx.ring)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3582
struct ixgbe_ring *ring;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3591
ixgbe_for_each_ring(ring, q_vector->tx) {
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3592
bool wd = ring->xsk_pool ?
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3593
ixgbe_clean_xdp_tx_irq(q_vector, ring, budget) :
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3594
ixgbe_clean_tx_irq(q_vector, ring, budget);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3611
ixgbe_for_each_ring(ring, q_vector->rx) {
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3612
int cleaned = ring->xsk_pool ?
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3613
ixgbe_clean_rx_irq_zc(q_vector, ring,
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3615
ixgbe_clean_rx_irq(q_vector, ring,
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3656
if (q_vector->tx.ring && q_vector->rx.ring) {
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3660
} else if (q_vector->rx.ring) {
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3663
} else if (q_vector->tx.ring) {
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3831
if (!q_vector->rx.ring && !q_vector->tx.ring)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3904
struct ixgbe_ring *ring)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3907
u64 tdba = ring->dma;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3910
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3912
ring->xsk_pool = NULL;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3913
if (ring_is_xdp(ring))
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3914
ring->xsk_pool = ixgbe_xsk_pool(adapter, ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3924
ring->count * sizeof(union ixgbe_adv_tx_desc));
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3927
ring->tail = adapter->io_addr + IXGBE_TDT(reg_idx);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3939
if (!ring->q_vector || (ring->q_vector->itr < IXGBE_100K_ITR))
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3953
ring->atr_sample_rate = adapter->atr_sample_rate;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3954
ring->atr_count = 0;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3955
set_bit(__IXGBE_TX_FDIR_INIT_DONE, &ring->state);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3957
ring->atr_sample_rate = 0;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3961
if (!test_and_set_bit(__IXGBE_TX_XPS_INIT_DONE, &ring->state)) {
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3962
struct ixgbe_q_vector *q_vector = ring->q_vector;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3965
netif_set_xps_queue(ring->netdev,
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3967
ring->queue_index);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3970
clear_bit(__IXGBE_HANG_CHECK_ARMED, &ring->state);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3973
memset(ring->tx_buffer_info, 0,
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
3974
sizeof(struct ixgbe_tx_buffer) * ring->count);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4077
struct ixgbe_ring *ring)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4080
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4089
struct ixgbe_ring *ring)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4092
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4450
struct ixgbe_ring *ring)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4454
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4456
if (!ring_is_rsc_enabled(ring))
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4472
struct ixgbe_ring *ring)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4477
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4498
struct ixgbe_ring *ring)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4502
u64 rdba = ring->dma;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4504
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4506
xdp_rxq_info_unreg_mem_model(&ring->xdp_rxq);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4507
ring->xsk_pool = ixgbe_xsk_pool(adapter, ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4508
if (ring->xsk_pool) {
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4509
WARN_ON(xdp_rxq_info_reg_mem_model(&ring->xdp_rxq,
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4512
xsk_pool_set_rxq_info(ring->xsk_pool, &ring->xdp_rxq);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4514
WARN_ON(xdp_rxq_info_reg_mem_model(&ring->xdp_rxq,
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4529
ring->count * sizeof(union ixgbe_adv_rx_desc));
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4535
ring->tail = adapter->io_addr + IXGBE_RDT(reg_idx);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4537
ixgbe_configure_srrctl(adapter, ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4538
ixgbe_configure_rscctl(adapter, ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4560
if (ring_uses_build_skb(ring) &&
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4561
!test_bit(__IXGBE_RX_3K_BUFFER, &ring->state))
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4567
ring->rx_offset = ixgbe_rx_offset(ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4569
if (ring->xsk_pool && hw->mac.type != ixgbe_mac_82599EB) {
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4570
u32 xsk_buf_len = xsk_pool_get_rx_frame_size(ring->xsk_pool);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4576
ring->rx_buf_len = xsk_buf_len;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4580
memset(ring->rx_buffer_info, 0,
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4581
sizeof(struct ixgbe_rx_buffer) * ring->count);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4584
rx_desc = IXGBE_RX_DESC(ring, 0);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4591
ixgbe_rx_desc_queue_enable(adapter, ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4592
if (ring->xsk_pool)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4593
ixgbe_alloc_rx_buffers_zc(ring, ixgbe_desc_unused(ring));
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4595
ixgbe_alloc_rx_buffers(ring, ixgbe_desc_unused(ring));
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4953
struct ixgbe_ring *ring = adapter->rx_ring[i];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4955
if (!netif_is_ixgbe(ring->netdev))
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4958
j = ring->reg_idx;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4992
struct ixgbe_ring *ring = adapter->rx_ring[i];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4994
if (!netif_is_ixgbe(ring->netdev))
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4997
j = ring->reg_idx;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
567
static void ixgbe_print_buffer(struct ixgbe_ring *ring, int n)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
5706
u32 ring = ethtool_get_flow_spec_ring(filter->action);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
5709
if (!vf && (ring >= adapter->num_rx_queues)) {
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
571
tx_buffer = &ring->tx_buffer_info[ring->next_to_clean];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
5711
ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
5715
ring >= adapter->num_rx_queues_per_pool)) {
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
5717
vf, ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
5723
queue = adapter->rx_ring[ring]->reg_idx;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
5726
adapter->num_rx_queues_per_pool) + ring;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
573
n, ring->next_to_use, ring->next_to_clean,
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
589
struct ixgbe_ring *ring;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
629
ring = adapter->tx_ring[n];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
630
ixgbe_print_buffer(ring, n);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
634
ring = adapter->xdp_ring[n];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
635
ixgbe_print_buffer(ring, n);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
6365
struct ixgbe_ring *ring = adapter->rx_ring[i];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
6366
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
6408
struct ixgbe_ring *ring = adapter->rx_ring[i];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
6409
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
6434
struct ixgbe_ring *ring = adapter->tx_ring[i];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
6435
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
6442
struct ixgbe_ring *ring = adapter->xdp_ring[i];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
6443
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
6483
struct ixgbe_ring *ring = adapter->tx_ring[i];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
6484
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
6489
struct ixgbe_ring *ring = adapter->xdp_ring[i];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
6490
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
680
ring = adapter->tx_ring[n];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
682
pr_info("TX QUEUE INDEX = %d\n", ring->queue_index);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
689
for (i = 0; ring->desc && (i < ring->count); i++) {
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
690
tx_desc = IXGBE_TX_DESC(ring, i);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
691
tx_buffer = &ring->tx_buffer_info[i];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
696
if (i == ring->next_to_use &&
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
697
i == ring->next_to_clean)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
699
else if (i == ring->next_to_use)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
701
else if (i == ring->next_to_clean)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
8007
if (qv->rx.ring || qv->tx.ring)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9098
static void ixgbe_atr(struct ixgbe_ring *ring,
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9101
struct ixgbe_q_vector *q_vector = ring->q_vector;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9120
if (!ring->atr_sample_rate)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9123
ring->atr_count++;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9190
if (!th->syn && (ring->atr_count < ring->atr_sample_rate))
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9194
ring->atr_count = 0;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9242
input, common, ring->queue_index);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9292
int ixgbe_xmit_xdp_ring(struct ixgbe_ring *ring,
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9297
u16 i = 0, index = ring->next_to_use;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9298
struct ixgbe_tx_buffer *tx_head = &ring->tx_buffer_info[index];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9300
union ixgbe_adv_tx_desc *tx_desc = IXGBE_TX_DESC(ring, index);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9304
if (unlikely(ixgbe_desc_unused(ring) < 1 + nr_frags))
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9317
dma = dma_map_single(ring->dev, data, len, DMA_TO_DEVICE);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9318
if (dma_mapping_error(ring->dev, dma))
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9330
if (++index == ring->count)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9336
tx_buff = &ring->tx_buffer_info[index];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9337
tx_desc = IXGBE_TX_DESC(ring, index);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9351
ring->next_to_use = index;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9357
tx_buff = &ring->tx_buffer_info[index];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9359
dma_unmap_page(ring->dev, dma_unmap_addr(tx_buff, dma),
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9367
index += ring->count;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9528
struct ixgbe_ring *ring)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9540
tx_ring = ring ? ring : adapter->tx_ring[skb_get_queue_mapping(skb)];
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9684
struct ixgbe_ring *ring)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9689
if (ring) {
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9691
start = u64_stats_fetch_begin(&ring->syncp);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9692
packets = ring->stats.packets;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9693
bytes = ring->stats.bytes;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9694
} while (u64_stats_fetch_retry(&ring->syncp, start));
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9708
struct ixgbe_ring *ring = READ_ONCE(adapter->rx_ring[i]);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9712
if (ring) {
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9714
start = u64_stats_fetch_begin(&ring->syncp);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9715
packets = ring->stats.packets;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9716
bytes = ring->stats.bytes;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9717
} while (u64_stats_fetch_retry(&ring->syncp, start));
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9724
struct ixgbe_ring *ring = READ_ONCE(adapter->tx_ring[i]);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9726
ixgbe_get_ring_stats64(stats, ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9729
struct ixgbe_ring *ring = READ_ONCE(adapter->xdp_ring[i]);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9731
ixgbe_get_ring_stats64(stats, ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_txrx_common.h
16
int ixgbe_xmit_xdp_ring(struct ixgbe_ring *ring,
drivers/net/ethernet/intel/ixgbe/ixgbe_txrx_common.h
26
void ixgbe_xdp_ring_update_tail(struct ixgbe_ring *ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_txrx_common.h
27
void ixgbe_xdp_ring_update_tail_locked(struct ixgbe_ring *ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_txrx_common.h
30
void ixgbe_txrx_ring_disable(struct ixgbe_adapter *adapter, int ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_txrx_common.h
31
void ixgbe_txrx_ring_enable(struct ixgbe_adapter *adapter, int ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_txrx_common.h
34
struct ixgbe_ring *ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
103
struct ixgbe_ring *ring;
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
12
struct ixgbe_ring *ring)
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
128
ring = ixgbe_determine_xdp_ring(adapter);
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
130
spin_lock(&ring->tx_lock);
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
131
result = ixgbe_xmit_xdp_ring(ring, xdpf);
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
133
spin_unlock(&ring->tx_lock);
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
15
int qid = ring->ring_idx;
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
356
struct ixgbe_ring *ring = ixgbe_determine_xdp_ring(adapter);
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
358
ixgbe_xdp_ring_update_tail_locked(ring);
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
514
struct ixgbe_ring *ring;
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
525
ring = adapter->xdp_ring[qid];
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
527
if (test_bit(__IXGBE_TX_DISABLED, &ring->state))
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
530
if (!ring->xsk_pool)
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
533
if (!napi_if_scheduled_mark_missed(&ring->q_vector->napi)) {
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
534
u64 eics = BIT_ULL(ring->q_vector->v_idx);
drivers/net/ethernet/intel/ixgbevf/ethtool.c
224
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/ixgbevf/ethtool.c
230
ring->rx_max_pending = IXGBEVF_MAX_RXD;
drivers/net/ethernet/intel/ixgbevf/ethtool.c
231
ring->tx_max_pending = IXGBEVF_MAX_TXD;
drivers/net/ethernet/intel/ixgbevf/ethtool.c
232
ring->rx_pending = adapter->rx_ring_count;
drivers/net/ethernet/intel/ixgbevf/ethtool.c
233
ring->tx_pending = adapter->tx_ring_count;
drivers/net/ethernet/intel/ixgbevf/ethtool.c
237
struct ethtool_ringparam *ring,
drivers/net/ethernet/intel/ixgbevf/ethtool.c
246
if ((ring->rx_mini_pending) || (ring->rx_jumbo_pending))
drivers/net/ethernet/intel/ixgbevf/ethtool.c
249
new_tx_count = max_t(u32, ring->tx_pending, IXGBEVF_MIN_TXD);
drivers/net/ethernet/intel/ixgbevf/ethtool.c
253
new_rx_count = max_t(u32, ring->rx_pending, IXGBEVF_MIN_RXD);
drivers/net/ethernet/intel/ixgbevf/ethtool.c
426
struct ixgbevf_ring *ring;
drivers/net/ethernet/intel/ixgbevf/ethtool.c
453
ring = adapter->tx_ring[j];
drivers/net/ethernet/intel/ixgbevf/ethtool.c
454
if (!ring) {
drivers/net/ethernet/intel/ixgbevf/ethtool.c
461
start = u64_stats_fetch_begin(&ring->syncp);
drivers/net/ethernet/intel/ixgbevf/ethtool.c
462
data[i] = ring->stats.packets;
drivers/net/ethernet/intel/ixgbevf/ethtool.c
463
data[i + 1] = ring->stats.bytes;
drivers/net/ethernet/intel/ixgbevf/ethtool.c
464
} while (u64_stats_fetch_retry(&ring->syncp, start));
drivers/net/ethernet/intel/ixgbevf/ethtool.c
470
ring = adapter->xdp_ring[j];
drivers/net/ethernet/intel/ixgbevf/ethtool.c
471
if (!ring) {
drivers/net/ethernet/intel/ixgbevf/ethtool.c
478
start = u64_stats_fetch_begin(&ring->syncp);
drivers/net/ethernet/intel/ixgbevf/ethtool.c
479
data[i] = ring->stats.packets;
drivers/net/ethernet/intel/ixgbevf/ethtool.c
480
data[i + 1] = ring->stats.bytes;
drivers/net/ethernet/intel/ixgbevf/ethtool.c
481
} while (u64_stats_fetch_retry(&ring->syncp, start));
drivers/net/ethernet/intel/ixgbevf/ethtool.c
487
ring = adapter->rx_ring[j];
drivers/net/ethernet/intel/ixgbevf/ethtool.c
488
if (!ring) {
drivers/net/ethernet/intel/ixgbevf/ethtool.c
495
start = u64_stats_fetch_begin(&ring->syncp);
drivers/net/ethernet/intel/ixgbevf/ethtool.c
496
data[i] = ring->stats.packets;
drivers/net/ethernet/intel/ixgbevf/ethtool.c
497
data[i + 1] = ring->stats.bytes;
drivers/net/ethernet/intel/ixgbevf/ethtool.c
498
} while (u64_stats_fetch_retry(&ring->syncp, start));
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
172
#define ring_uses_large_buffer(ring) \
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
173
test_bit(__IXGBEVF_RX_3K_BUFFER, &(ring)->state)
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
174
#define set_ring_uses_large_buffer(ring) \
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
175
set_bit(__IXGBEVF_RX_3K_BUFFER, &(ring)->state)
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
176
#define clear_ring_uses_large_buffer(ring) \
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
177
clear_bit(__IXGBEVF_RX_3K_BUFFER, &(ring)->state)
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
179
#define ring_uses_build_skb(ring) \
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
180
test_bit(__IXGBEVF_RX_BUILD_SKB_ENABLED, &(ring)->state)
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
181
#define set_ring_build_skb_enabled(ring) \
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
182
set_bit(__IXGBEVF_RX_BUILD_SKB_ENABLED, &(ring)->state)
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
183
#define clear_ring_build_skb_enabled(ring) \
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
184
clear_bit(__IXGBEVF_RX_BUILD_SKB_ENABLED, &(ring)->state)
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
186
static inline unsigned int ixgbevf_rx_bufsz(struct ixgbevf_ring *ring)
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
189
if (ring_uses_large_buffer(ring))
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
192
if (ring_uses_build_skb(ring))
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
198
static inline unsigned int ixgbevf_rx_pg_order(struct ixgbevf_ring *ring)
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
201
if (ring_uses_large_buffer(ring))
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
209
#define check_for_tx_hang(ring) \
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
210
test_bit(__IXGBEVF_TX_DETECT_HANG, &(ring)->state)
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
211
#define set_check_for_tx_hang(ring) \
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
212
set_bit(__IXGBEVF_TX_DETECT_HANG, &(ring)->state)
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
213
#define clear_check_for_tx_hang(ring) \
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
214
clear_bit(__IXGBEVF_TX_DETECT_HANG, &(ring)->state)
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
217
struct ixgbevf_ring *ring; /* pointer to linked list of rings */
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
226
for (pos = (head).ring; pos != NULL; pos = pos->next)
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
244
struct ixgbevf_ring ring[] ____cacheline_internodealigned_in_smp;
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
270
static inline u16 ixgbevf_desc_unused(struct ixgbevf_ring *ring)
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
272
u16 ntc = ring->next_to_clean;
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
273
u16 ntu = ring->next_to_use;
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
275
return ((ntc > ntu) ? 0 : ring->count) + ntc - ntu - 1;
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
278
static inline void ixgbevf_write_tail(struct ixgbevf_ring *ring, u32 value)
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
280
writel(value, ring->tail);
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
83
#define ring_is_xdp(ring) \
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
84
test_bit(__IXGBEVF_TX_XDP_RING, &(ring)->state)
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
85
#define set_ring_xdp(ring) \
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
86
set_bit(__IXGBEVF_TX_XDP_RING, &(ring)->state)
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
87
#define clear_ring_xdp(ring) \
drivers/net/ethernet/intel/ixgbevf/ixgbevf.h
88
clear_bit(__IXGBEVF_TX_XDP_RING, &(ring)->state)
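
The ixgbevf.h hits above combine per-ring state bits (test_bit/set_bit/clear_bit on ring->state) with a free-descriptor helper that keeps one slot unused so "full" and "empty" stay distinguishable. A hedged sketch with hypothetical names:

    #include <linux/bitops.h>
    #include <linux/types.h>

    enum { EXAMPLE_RING_XDP };      /* one flag bit per ring property */

    struct example_desc_ring {
            unsigned long state;    /* atomic flag bits */
            u16 next_to_use;        /* producer index */
            u16 next_to_clean;      /* consumer index */
            u16 count;              /* number of descriptors */
    };

    #define example_ring_is_xdp(ring)       \
            test_bit(EXAMPLE_RING_XDP, &(ring)->state)
    #define example_set_ring_xdp(ring)      \
            set_bit(EXAMPLE_RING_XDP, &(ring)->state)
    #define example_clear_ring_xdp(ring)    \
            clear_bit(EXAMPLE_RING_XDP, &(ring)->state)

    /* Free slots between consumer and producer; one slot stays unused so
     * next_to_use == next_to_clean always means "empty", never "full".
     */
    static inline u16 example_desc_unused(const struct example_desc_ring *ring)
    {
            u16 ntc = ring->next_to_clean;
            u16 ntu = ring->next_to_use;

            return ((ntc > ntu) ? 0 : ring->count) + ntc - ntu - 1;
    }
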
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1003
if (!test_bit(__IXGBEVF_TX_XDP_RING_PRIMED, &ring->state)) {
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1006
set_bit(__IXGBEVF_TX_XDP_RING_PRIMED, &ring->state);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1008
context_desc = IXGBEVF_TX_CTXTDESC(ring, 0);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1026
tx_desc = IXGBEVF_TX_DESC(ring, i);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1039
if (i == ring->count)
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1043
ring->next_to_use = i;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1276
struct ixgbevf_ring *ring;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1280
ixgbevf_for_each_ring(ring, q_vector->tx) {
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1281
if (!ixgbevf_clean_tx_irq(q_vector, ring, budget))
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1296
ixgbevf_for_each_ring(ring, q_vector->rx) {
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1297
int cleaned = ixgbevf_clean_rx_irq(q_vector, ring,
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1361
struct ixgbevf_ring *ring;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1365
ixgbevf_for_each_ring(ring, q_vector->rx)
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1366
ixgbevf_set_ivar(adapter, 0, ring->reg_idx, v_idx);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1368
ixgbevf_for_each_ring(ring, q_vector->tx)
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1369
ixgbevf_set_ivar(adapter, 1, ring->reg_idx, v_idx);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1371
if (q_vector->tx.ring && !q_vector->rx.ring) {
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1527
if (q_vector->rx.ring || q_vector->tx.ring)
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1551
if (q_vector->tx.ring && q_vector->rx.ring) {
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1555
} else if (q_vector->rx.ring) {
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1558
} else if (q_vector->tx.ring) {
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1637
if (!adapter->q_vector[i]->rx.ring &&
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1638
!adapter->q_vector[i]->tx.ring)
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1686
struct ixgbevf_ring *ring)
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1689
u64 tdba = ring->dma;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1692
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1701
ring->count * sizeof(union ixgbe_adv_tx_desc));
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1715
ring->tail = adapter->io_addr + IXGBE_VFTDT(reg_idx);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1718
ring->next_to_clean = 0;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1719
ring->next_to_use = 0;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1732
memset(ring->tx_buffer_info, 0,
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1733
sizeof(struct ixgbevf_tx_buffer) * ring->count);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1735
clear_bit(__IXGBEVF_HANG_CHECK_ARMED, &ring->state);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1736
clear_bit(__IXGBEVF_TX_XDP_RING_PRIMED, &ring->state);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1769
struct ixgbevf_ring *ring, int index)
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1777
if (ring_uses_large_buffer(ring))
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1803
struct ixgbevf_ring *ring)
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1808
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1830
struct ixgbevf_ring *ring)
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1835
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1907
struct ixgbevf_ring *ring)
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1911
u64 rdba = ring->dma;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1913
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1917
ixgbevf_disable_rx_queue(adapter, ring);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1922
ring->count * sizeof(union ixgbe_adv_rx_desc));
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1937
ring->tail = adapter->io_addr + IXGBE_VFRDT(reg_idx);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
194
static u64 ixgbevf_get_tx_completed(struct ixgbevf_ring *ring)
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1940
memset(ring->rx_buffer_info, 0,
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1941
sizeof(struct ixgbevf_rx_buffer) * ring->count);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1944
rx_desc = IXGBEVF_RX_DESC(ring, 0);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1948
ring->next_to_clean = 0;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1949
ring->next_to_use = 0;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1950
ring->next_to_alloc = 0;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1952
ixgbevf_configure_srrctl(adapter, ring, reg_idx);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
196
return ring->stats.packets;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1961
if (ring_uses_build_skb(ring) &&
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1962
!ring_uses_large_buffer(ring))
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1971
ixgbevf_rx_desc_queue_enable(adapter, ring);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
1972
ixgbevf_alloc_rx_buffers(ring, ixgbevf_desc_unused(ring));
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
199
static u32 ixgbevf_get_tx_pending(struct ixgbevf_ring *ring)
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
201
struct ixgbevf_adapter *adapter = netdev_priv(ring->netdev);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
204
u32 head = IXGBE_READ_REG(hw, IXGBE_VFTDH(ring->reg_idx));
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
205
u32 tail = IXGBE_READ_REG(hw, IXGBE_VFTDT(ring->reg_idx));
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
209
tail - head : (tail + ring->count - head);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2733
static void ixgbevf_add_ring(struct ixgbevf_ring *ring,
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2736
ring->next = head->ring;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2737
head->ring = ring;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2761
struct ixgbevf_ring *ring;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2765
size = sizeof(*q_vector) + (sizeof(*ring) * ring_count);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2781
ring = q_vector->ring;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2785
ring->dev = &adapter->pdev->dev;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2786
ring->netdev = adapter->netdev;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2789
ring->q_vector = q_vector;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2792
ixgbevf_add_ring(ring, &q_vector->tx);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2795
ring->count = adapter->tx_ring_count;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2796
ring->queue_index = txr_idx;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2797
ring->reg_idx = reg_idx;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2800
adapter->tx_ring[txr_idx] = ring;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2808
ring++;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2813
ring->dev = &adapter->pdev->dev;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2814
ring->netdev = adapter->netdev;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2817
ring->q_vector = q_vector;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2820
ixgbevf_add_ring(ring, &q_vector->tx);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2823
ring->count = adapter->tx_ring_count;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2824
ring->queue_index = xdp_idx;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2825
ring->reg_idx = reg_idx;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2826
set_ring_xdp(ring);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2829
adapter->xdp_ring[xdp_idx] = ring;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2837
ring++;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2842
ring->dev = &adapter->pdev->dev;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2843
ring->netdev = adapter->netdev;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2846
ring->q_vector = q_vector;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2849
ixgbevf_add_ring(ring, &q_vector->rx);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2852
ring->count = adapter->rx_ring_count;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2853
ring->queue_index = rxr_idx;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2854
ring->reg_idx = rxr_idx;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2857
adapter->rx_ring[rxr_idx] = ring;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2864
ring++;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2882
struct ixgbevf_ring *ring;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2884
ixgbevf_for_each_ring(ring, q_vector->tx) {
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2885
if (ring_is_xdp(ring))
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2886
adapter->xdp_ring[ring->queue_index] = NULL;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2888
adapter->tx_ring[ring->queue_index] = NULL;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2891
ixgbevf_for_each_ring(ring, q_vector->rx)
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
2892
adapter->rx_ring[ring->queue_index] = NULL;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
3269
if (qv->rx.ring || qv->tx.ring)
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
4383
const struct ixgbevf_ring *ring)
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
4388
if (ring) {
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
4390
start = u64_stats_fetch_begin(&ring->syncp);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
4391
bytes = ring->stats.bytes;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
4392
packets = ring->stats.packets;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
4393
} while (u64_stats_fetch_retry(&ring->syncp, start));
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
4405
const struct ixgbevf_ring *ring;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
4414
ring = adapter->rx_ring[i];
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
4416
start = u64_stats_fetch_begin(&ring->syncp);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
4417
bytes = ring->stats.bytes;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
4418
packets = ring->stats.packets;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
4419
} while (u64_stats_fetch_retry(&ring->syncp, start));
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
4425
ring = adapter->tx_ring[i];
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
4426
ixgbevf_get_tx_ring_stats(stats, ring);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
4430
ring = adapter->xdp_ring[i];
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
4431
ixgbevf_get_tx_ring_stats(stats, ring);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
445
static inline void ixgbevf_rx_hash(struct ixgbevf_ring *ring,
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
4478
struct ixgbevf_ring *ring = adapter->rx_ring[i];
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
4480
if (frame_size > ixgbevf_rx_bufsz(ring))
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
451
if (!(ring->netdev->features & NETIF_F_RXHASH))
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
471
static inline void ixgbevf_rx_checksum(struct ixgbevf_ring *ring,
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
478
if (!(ring->netdev->features & NETIF_F_RXCSUM))
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
484
ring->rx_stats.csum_err++;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
492
ring->rx_stats.csum_err++;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
971
static int ixgbevf_xmit_xdp_ring(struct ixgbevf_ring *ring,
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
982
if (unlikely(!ixgbevf_desc_unused(ring)))
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
985
dma = dma_map_single(ring->dev, xdp->data, len, DMA_TO_DEVICE);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
986
if (dma_mapping_error(ring->dev, dma))
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
990
i = ring->next_to_use;
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c
991
tx_buffer = &ring->tx_buffer_info[i];
drivers/net/ethernet/intel/libie/fwlog.c
101
ring->data_size = LIBIE_AQ_MAX_BUF_LEN;
drivers/net/ethernet/intel/libie/fwlog.c
102
ring->data = mem;
drivers/net/ethernet/intel/libie/fwlog.c
1038
fwlog->ring.rings = kzalloc_objs(*fwlog->ring.rings,
drivers/net/ethernet/intel/libie/fwlog.c
1040
if (!fwlog->ring.rings) {
drivers/net/ethernet/intel/libie/fwlog.c
1045
fwlog->ring.size = LIBIE_FWLOG_RING_SIZE_DFLT;
drivers/net/ethernet/intel/libie/fwlog.c
1046
fwlog->ring.index = LIBIE_FWLOG_RING_SIZE_INDEX_DFLT;
drivers/net/ethernet/intel/libie/fwlog.c
1048
status = libie_fwlog_alloc_ring_buffs(&fwlog->ring);
drivers/net/ethernet/intel/libie/fwlog.c
1051
libie_fwlog_free_ring_buffs(&fwlog->ring);
drivers/net/ethernet/intel/libie/fwlog.c
1052
kfree(fwlog->ring.rings);
drivers/net/ethernet/intel/libie/fwlog.c
1097
if (fwlog->ring.rings) {
drivers/net/ethernet/intel/libie/fwlog.c
1098
libie_fwlog_free_ring_buffs(&fwlog->ring);
drivers/net/ethernet/intel/libie/fwlog.c
1099
kfree(fwlog->ring.rings);
drivers/net/ethernet/intel/libie/fwlog.c
1114
log = &fwlog->ring.rings[fwlog->ring.tail];
drivers/net/ethernet/intel/libie/fwlog.c
1120
libie_fwlog_ring_increment(&fwlog->ring.tail, fwlog->ring.size);
drivers/net/ethernet/intel/libie/fwlog.c
1122
if (libie_fwlog_ring_full(&fwlog->ring)) {
drivers/net/ethernet/intel/libie/fwlog.c
1124
libie_fwlog_ring_increment(&fwlog->ring.head, fwlog->ring.size);
drivers/net/ethernet/intel/libie/fwlog.c
114
struct libie_fwlog_data *ring = &rings->rings[i];
drivers/net/ethernet/intel/libie/fwlog.c
120
vfree(ring->data);
drivers/net/ethernet/intel/libie/fwlog.c
122
ring->data = NULL;
drivers/net/ethernet/intel/libie/fwlog.c
123
ring->data_size = 0;
drivers/net/ethernet/intel/libie/fwlog.c
136
struct libie_fwlog_ring ring;
drivers/net/ethernet/intel/libie/fwlog.c
149
if (ring_size == fwlog->ring.size)
drivers/net/ethernet/intel/libie/fwlog.c
156
ring.rings = kzalloc_objs(*ring.rings, ring_size);
drivers/net/ethernet/intel/libie/fwlog.c
157
if (!ring.rings)
drivers/net/ethernet/intel/libie/fwlog.c
160
ring.size = ring_size;
drivers/net/ethernet/intel/libie/fwlog.c
162
status = libie_fwlog_alloc_ring_buffs(&ring);
drivers/net/ethernet/intel/libie/fwlog.c
165
libie_fwlog_free_ring_buffs(&ring);
drivers/net/ethernet/intel/libie/fwlog.c
166
kfree(ring.rings);
drivers/net/ethernet/intel/libie/fwlog.c
170
libie_fwlog_free_ring_buffs(&fwlog->ring);
drivers/net/ethernet/intel/libie/fwlog.c
171
kfree(fwlog->ring.rings);
drivers/net/ethernet/intel/libie/fwlog.c
173
fwlog->ring.rings = ring.rings;
drivers/net/ethernet/intel/libie/fwlog.c
174
fwlog->ring.size = ring.size;
drivers/net/ethernet/intel/libie/fwlog.c
175
fwlog->ring.index = index;
drivers/net/ethernet/intel/libie/fwlog.c
176
fwlog->ring.head = 0;
drivers/net/ethernet/intel/libie/fwlog.c
177
fwlog->ring.tail = 0;
drivers/net/ethernet/intel/libie/fwlog.c
672
index = fwlog->ring.index;
drivers/net/ethernet/intel/libie/fwlog.c
763
if (libie_fwlog_ring_empty(&fwlog->ring))
drivers/net/ethernet/intel/libie/fwlog.c
766
while (!libie_fwlog_ring_empty(&fwlog->ring) && !done) {
drivers/net/ethernet/intel/libie/fwlog.c
770
log = &fwlog->ring.rings[fwlog->ring.head];
drivers/net/ethernet/intel/libie/fwlog.c
789
libie_fwlog_ring_increment(&fwlog->ring.head, fwlog->ring.size);
drivers/net/ethernet/intel/libie/fwlog.c
818
fwlog->ring.head = 0;
drivers/net/ethernet/intel/libie/fwlog.c
819
fwlog->ring.tail = 0;
drivers/net/ethernet/intel/libie/fwlog.c
99
struct libie_fwlog_data *ring = &rings->rings[i];
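
The libie fwlog hits above manage a head/tail ring of log buffers (ring_increment, ring_full, ring_empty). Assuming only what those lines show, the circular-index bookkeeping looks roughly like this; the names and the power-of-two size are assumptions:

    #include <linux/types.h>

    /* Hypothetical head/tail bookkeeping for a ring of log buffers. */
    struct example_log_ring {
            u16 head;       /* oldest entry not yet consumed */
            u16 tail;       /* next slot to be written */
            u16 size;       /* number of slots, assumed power of two */
    };

    static void example_ring_increment(u16 *item, u16 size)
    {
            *item = (*item + 1) & (size - 1);
    }

    static bool example_ring_empty(const struct example_log_ring *ring)
    {
            return ring->head == ring->tail;
    }

    static bool example_ring_full(const struct example_log_ring *ring)
    {
            /* Full when advancing tail once more would land on head. */
            return ((ring->tail + 1) & (ring->size - 1)) == ring->head;
    }

When the producer finds the ring full it advances head as well, dropping the oldest entry, which matches the paired tail/head increments visible in the hits above.
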
drivers/net/ethernet/marvell/mvneta.c
4783
struct ethtool_ringparam *ring,
drivers/net/ethernet/marvell/mvneta.c
4789
ring->rx_max_pending = MVNETA_MAX_RXD;
drivers/net/ethernet/marvell/mvneta.c
4790
ring->tx_max_pending = MVNETA_MAX_TXD;
drivers/net/ethernet/marvell/mvneta.c
4791
ring->rx_pending = pp->rx_ring_size;
drivers/net/ethernet/marvell/mvneta.c
4792
ring->tx_pending = pp->tx_ring_size;
drivers/net/ethernet/marvell/mvneta.c
4797
struct ethtool_ringparam *ring,
drivers/net/ethernet/marvell/mvneta.c
4803
if ((ring->rx_pending == 0) || (ring->tx_pending == 0))
drivers/net/ethernet/marvell/mvneta.c
4805
pp->rx_ring_size = ring->rx_pending < MVNETA_MAX_RXD ?
drivers/net/ethernet/marvell/mvneta.c
4806
ring->rx_pending : MVNETA_MAX_RXD;
drivers/net/ethernet/marvell/mvneta.c
4808
pp->tx_ring_size = clamp_t(u16, ring->tx_pending,
drivers/net/ethernet/marvell/mvneta.c
4810
if (pp->tx_ring_size != ring->tx_pending)
drivers/net/ethernet/marvell/mvneta.c
4812
pp->tx_ring_size, ring->tx_pending);
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
4651
struct ethtool_ringparam *ring)
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
4653
u16 new_rx_pending = ring->rx_pending;
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
4654
u16 new_tx_pending = ring->tx_pending;
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
4656
if (ring->rx_pending == 0 || ring->tx_pending == 0)
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
4659
if (ring->rx_pending > MVPP2_MAX_RXD_MAX)
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
4661
else if (ring->rx_pending < MSS_THRESHOLD_START)
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
4663
else if (!IS_ALIGNED(ring->rx_pending, 16))
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
4664
new_rx_pending = ALIGN(ring->rx_pending, 16);
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
4666
if (ring->tx_pending > MVPP2_MAX_TXD_MAX)
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
4668
else if (!IS_ALIGNED(ring->tx_pending, 32))
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
4669
new_tx_pending = ALIGN(ring->tx_pending, 32);
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
4677
if (ring->rx_pending != new_rx_pending) {
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
4679
ring->rx_pending, new_rx_pending);
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
4680
ring->rx_pending = new_rx_pending;
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
4683
if (ring->tx_pending != new_tx_pending) {
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
4685
ring->tx_pending, new_tx_pending);
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
4686
ring->tx_pending = new_tx_pending;
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
5462
struct ethtool_ringparam *ring,
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
5468
ring->rx_max_pending = MVPP2_MAX_RXD_MAX;
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
5469
ring->tx_max_pending = MVPP2_MAX_TXD_MAX;
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
5470
ring->rx_pending = port->rx_ring_size;
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
5471
ring->tx_pending = port->tx_ring_size;
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
5476
struct ethtool_ringparam *ring,
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
5485
err = mvpp2_check_ringparam_valid(dev, ring);
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
5490
port->rx_ring_size = ring->rx_pending;
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
5491
port->tx_ring_size = ring->tx_pending;
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
5502
port->rx_ring_size = ring->rx_pending;
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
5503
port->tx_ring_size = ring->tx_pending;
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
5509
ring->rx_pending = prev_rx_ring_size;
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
5518
ring->tx_pending = prev_tx_ring_size;
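
The mvneta and mvpp2 ringparam hits above follow the usual .set_ringparam validation steps: reject zero, clamp to the hardware maximum, and round to the alignment the DMA engine needs, warning when the value is adjusted. A driver-agnostic sketch with invented limits:

    #include <linux/align.h>
    #include <linux/minmax.h>
    #include <linux/types.h>

    /* Invented limits: the real values are hardware specific. */
    #define EXAMPLE_MIN_RXD         16U
    #define EXAMPLE_MAX_RXD         4096U
    #define EXAMPLE_RXD_ALIGN       16U     /* descriptors per DMA burst */

    /* Return the RX ring size that would actually be programmed, or 0 if
     * the request is invalid.  Real drivers also warn when the requested
     * value had to be adjusted.
     */
    static u32 example_fix_rx_pending(u32 requested)
    {
            u32 fixed;

            if (!requested)
                    return 0;

            fixed = clamp_t(u32, requested, EXAMPLE_MIN_RXD, EXAMPLE_MAX_RXD);
            return ALIGN(fixed, EXAMPLE_RXD_ALIGN);
    }
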
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
265
int ring = 0;
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
271
ring = rings_per_vf * i;
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
272
oct->mbox[ring] = vzalloc(sizeof(*oct->mbox[ring]));
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
274
if (!oct->mbox[ring])
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
277
memset(oct->mbox[ring], 0, sizeof(struct octep_mbox));
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
279
mutex_init(&oct->mbox[ring]->lock);
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
280
INIT_WORK(&oct->mbox[ring]->wk.work, octep_pfvf_mbox_work);
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
281
oct->mbox[ring]->wk.ctxptr = oct->mbox[ring];
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
282
oct->mbox[ring]->oct = oct;
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
283
oct->mbox[ring]->vf_id = i;
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
284
oct->hw_ops.setup_mbox_regs(oct, ring);
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
291
ring = rings_per_vf * i;
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
292
cancel_work_sync(&oct->mbox[ring]->wk.work);
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
293
mutex_destroy(&oct->mbox[ring]->lock);
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
294
vfree(oct->mbox[ring]);
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
295
oct->mbox[ring] = NULL;
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
304
int i = 0, ring = 0, vf_srn = 0;
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
307
ring = vf_srn + rings_per_vf * i;
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
308
if (!oct->mbox[ring])
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
311
if (work_pending(&oct->mbox[ring]->wk.work))
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
312
cancel_work_sync(&oct->mbox[ring]->wk.work);
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
314
mutex_destroy(&oct->mbox[ring]->lock);
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
315
vfree(oct->mbox[ring]);
drivers/net/ethernet/marvell/octeon_ep/octep_pfvf_mbox.c
316
oct->mbox[ring] = NULL;
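
The octep_pfvf_mbox.c hits above allocate one mailbox per VF, indexed by that VF's first ring, and tear it down in reverse order. A condensed, hypothetical version of that setup/teardown pairing:

    #include <linux/errno.h>
    #include <linux/mutex.h>
    #include <linux/vmalloc.h>
    #include <linux/workqueue.h>

    #define EXAMPLE_MAX_RINGS 64

    struct example_mbox {
            struct mutex lock;
            struct work_struct work;
    };

    struct example_dev {
            struct example_mbox *mbox[EXAMPLE_MAX_RINGS];
    };

    static void example_mbox_work(struct work_struct *work)
    {
            /* handle one queued PF<->VF message (omitted) */
    }

    static int example_setup_vf_mbox(struct example_dev *dev, int vf,
                                     int rings_per_vf)
    {
            int ring = rings_per_vf * vf;   /* keyed by the VF's first ring */

            dev->mbox[ring] = vzalloc(sizeof(*dev->mbox[ring]));
            if (!dev->mbox[ring])
                    return -ENOMEM;

            mutex_init(&dev->mbox[ring]->lock);
            INIT_WORK(&dev->mbox[ring]->work, example_mbox_work);
            return 0;
    }

    static void example_free_vf_mbox(struct example_dev *dev, int vf,
                                     int rings_per_vf)
    {
            int ring = rings_per_vf * vf;

            if (!dev->mbox[ring])
                    return;

            cancel_work_sync(&dev->mbox[ring]->work);
            mutex_destroy(&dev->mbox[ring]->lock);
            vfree(dev->mbox[ring]);
            dev->mbox[ring] = NULL;
    }
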
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
131
#define CN93_SDP_R_OUT_CONTROL(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
132
(CN93_SDP_R_OUT_CONTROL_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
134
#define CN93_SDP_R_OUT_ENABLE(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
135
(CN93_SDP_R_OUT_ENABLE_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
137
#define CN93_SDP_R_OUT_SLIST_BADDR(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
138
(CN93_SDP_R_OUT_SLIST_BADDR_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
140
#define CN93_SDP_R_OUT_SLIST_RSIZE(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
141
(CN93_SDP_R_OUT_SLIST_RSIZE_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
143
#define CN93_SDP_R_OUT_SLIST_DBELL(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
144
(CN93_SDP_R_OUT_SLIST_DBELL_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
146
#define CN93_SDP_R_OUT_CNTS(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
147
(CN93_SDP_R_OUT_CNTS_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
149
#define CN93_SDP_R_OUT_INT_LEVELS(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
150
(CN93_SDP_R_OUT_INT_LEVELS_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
152
#define CN93_SDP_R_OUT_PKT_CNT(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
153
(CN93_SDP_R_OUT_PKT_CNT_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
155
#define CN93_SDP_R_OUT_BYTE_CNT(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
156
(CN93_SDP_R_OUT_BYTE_CNT_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
183
#define CN93_SDP_R_IN_INT_MDRT_CTL0(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
184
(CN93_SDP_R_IN_INT_MDRT_CTL0_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
186
#define CN93_SDP_R_IN_INT_MDRT_CTL1(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
187
(CN93_SDP_R_IN_INT_MDRT_CTL1_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
189
#define CN93_SDP_R_IN_INT_MDRT_DBG(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
190
(CN93_SDP_R_IN_INT_MDRT_DBG_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
192
#define CN93_SDP_R_OUT_INT_MDRT_CTL0(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
193
(CN93_SDP_R_OUT_INT_MDRT_CTL0_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
195
#define CN93_SDP_R_OUT_INT_MDRT_CTL1(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
196
(CN93_SDP_R_OUT_INT_MDRT_CTL1_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
198
#define CN93_SDP_R_OUT_INT_MDRT_DBG(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
199
(CN93_SDP_R_OUT_INT_MDRT_DBG_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
216
#define CN93_SDP_R_MBOX_PF_VF_DATA(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
217
(CN93_SDP_R_MBOX_PF_VF_DATA_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
219
#define CN93_SDP_R_MBOX_PF_VF_INT(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
220
(CN93_SDP_R_MBOX_PF_VF_INT_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
222
#define CN93_SDP_R_MBOX_VF_PF_DATA(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
223
(CN93_SDP_R_MBOX_VF_PF_DATA_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
225
#define CN93_SDP_MBOX_VF_PF_DATA(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
226
(CN93_SDP_MBOX_VF_PF_DATA_START + ((ring) * CN93_EPVF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
228
#define CN93_SDP_MBOX_PF_VF_DATA(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
229
(CN93_SDP_MBOX_PF_VF_DATA_START + ((ring) * CN93_EPVF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
234
#define CN93_SDP_R_ERR_TYPE(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
235
(CN93_SDP_R_ERR_TYPE_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
241
#define CN93_SDP_R_MBOX_ISM(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
242
(CN93_SDP_R_MBOX_ISM_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
244
#define CN93_SDP_R_OUT_CNTS_ISM(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
245
(CN93_SDP_R_OUT_CNTS_ISM_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
247
#define CN93_SDP_R_IN_CNTS_ISM(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
248
(CN93_SDP_R_IN_CNTS_ISM_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
363
#define CN93_SDP_EPVF_RING(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
364
(CN93_SDP_EPVF_RING_START + ((ring) * CN93_EPVF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
365
#define CN93_SDP_IN_RING_TB_MAP(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
366
(CN93_SDP_N_RING_TB_MAP_START + ((ring) * CN93_EPVF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
367
#define CN93_SDP_IN_RATE_LIMIT(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
368
(CN93_SDP_IN_RATE_LIMIT_START + ((ring) * CN93_EPVF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
74
#define CN93_SDP_R_IN_CONTROL(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
75
(CN93_SDP_R_IN_CONTROL_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
77
#define CN93_SDP_R_IN_ENABLE(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
78
(CN93_SDP_R_IN_ENABLE_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
80
#define CN93_SDP_R_IN_INSTR_BADDR(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
81
(CN93_SDP_R_IN_INSTR_BADDR_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
83
#define CN93_SDP_R_IN_INSTR_RSIZE(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
84
(CN93_SDP_R_IN_INSTR_RSIZE_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
86
#define CN93_SDP_R_IN_INSTR_DBELL(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
87
(CN93_SDP_R_IN_INSTR_DBELL_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
89
#define CN93_SDP_R_IN_CNTS(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
90
(CN93_SDP_R_IN_CNTS_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
92
#define CN93_SDP_R_IN_INT_LEVELS(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
93
(CN93_SDP_R_IN_INT_LEVELS_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
95
#define CN93_SDP_R_IN_PKT_CNT(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
96
(CN93_SDP_R_IN_PKT_CNT_START + ((ring) * CN93_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
98
#define CN93_SDP_R_IN_BYTE_CNT(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cn9k_pf.h
99
(CN93_SDP_R_IN_BYTE_CNT_START + ((ring) * CN93_RING_OFFSET))
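
Every CN93_SDP_R_* macro above has the same shape: a register base plus the ring index times a fixed per-ring stride. A generic sketch of that pattern, with register names and offsets invented for illustration:

    #include <linux/io.h>
    #include <linux/types.h>

    /* Invented offsets: each ring owns a fixed-size register window, so
     * a per-ring register is its base plus ring * stride.  Assumes a
     * platform with 64-bit MMIO (writeq).
     */
    #define EXAMPLE_RING_OFFSET             0x10000ULL
    #define EXAMPLE_R_IN_ENABLE_START       0x10010ULL
    #define EXAMPLE_R_IN_ENABLE(ring) \
            (EXAMPLE_R_IN_ENABLE_START + ((ring) * EXAMPLE_RING_OFFSET))

    /* Enable the input queue of one ring through its per-ring register. */
    static void example_enable_in_ring(void __iomem *base, int ring)
    {
            writeq(1ULL, base + EXAMPLE_R_IN_ENABLE(ring));
    }
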
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
131
#define CNXK_SDP_R_OUT_CONTROL(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
132
(CNXK_SDP_R_OUT_CONTROL_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
134
#define CNXK_SDP_R_OUT_ENABLE(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
135
(CNXK_SDP_R_OUT_ENABLE_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
137
#define CNXK_SDP_R_OUT_SLIST_BADDR(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
138
(CNXK_SDP_R_OUT_SLIST_BADDR_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
140
#define CNXK_SDP_R_OUT_SLIST_RSIZE(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
141
(CNXK_SDP_R_OUT_SLIST_RSIZE_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
143
#define CNXK_SDP_R_OUT_SLIST_DBELL(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
144
(CNXK_SDP_R_OUT_SLIST_DBELL_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
146
#define CNXK_SDP_R_OUT_WMARK(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
147
(CNXK_SDP_R_OUT_WMARK_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
149
#define CNXK_SDP_R_OUT_CNTS(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
150
(CNXK_SDP_R_OUT_CNTS_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
152
#define CNXK_SDP_R_OUT_INT_LEVELS(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
153
(CNXK_SDP_R_OUT_INT_LEVELS_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
155
#define CNXK_SDP_R_OUT_PKT_CNT(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
156
(CNXK_SDP_R_OUT_PKT_CNT_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
158
#define CNXK_SDP_R_OUT_BYTE_CNT(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
159
(CNXK_SDP_R_OUT_BYTE_CNT_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
190
#define CNXK_SDP_R_IN_INT_MDRT_CTL0(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
191
(CNXK_SDP_R_IN_INT_MDRT_CTL0_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
193
#define CNXK_SDP_R_IN_INT_MDRT_CTL1(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
194
(CNXK_SDP_R_IN_INT_MDRT_CTL1_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
196
#define CNXK_SDP_R_IN_INT_MDRT_DBG(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
197
(CNXK_SDP_R_IN_INT_MDRT_DBG_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
199
#define CNXK_SDP_R_OUT_INT_MDRT_CTL0(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
200
(CNXK_SDP_R_OUT_INT_MDRT_CTL0_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
202
#define CNXK_SDP_R_OUT_INT_MDRT_CTL1(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
203
(CNXK_SDP_R_OUT_INT_MDRT_CTL1_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
205
#define CNXK_SDP_R_OUT_INT_MDRT_DBG(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
206
(CNXK_SDP_R_OUT_INT_MDRT_DBG_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
208
#define CNXK_SDP_R_MBOX_ISM(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
209
(CNXK_SDP_R_MBOX_ISM_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
211
#define CNXK_SDP_R_OUT_CNTS_ISM(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
212
(CNXK_SDP_R_OUT_CNTS_ISM_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
214
#define CNXK_SDP_R_IN_CNTS_ISM(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
215
(CNXK_SDP_R_IN_CNTS_ISM_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
232
#define CNXK_SDP_R_MBOX_PF_VF_DATA(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
233
(CNXK_SDP_R_MBOX_PF_VF_DATA_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
235
#define CNXK_SDP_R_MBOX_PF_VF_INT(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
236
(CNXK_SDP_R_MBOX_PF_VF_INT_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
238
#define CNXK_SDP_R_MBOX_VF_PF_DATA(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
239
(CNXK_SDP_R_MBOX_VF_PF_DATA_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
241
#define CNXK_SDP_MBOX_VF_PF_DATA(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
242
(CNXK_SDP_MBOX_VF_PF_DATA_START + ((ring) * CNXK_EPVF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
244
#define CNXK_SDP_MBOX_PF_VF_DATA(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
245
(CNXK_SDP_MBOX_PF_VF_DATA_START + ((ring) * CNXK_EPVF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
250
#define CNXK_SDP_R_ERR_TYPE(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
251
(CNXK_SDP_R_ERR_TYPE_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
257
#define CNXK_SDP_R_MBOX_ISM(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
258
(CNXK_SDP_R_MBOX_ISM_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
260
#define CNXK_SDP_R_OUT_CNTS_ISM(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
261
(CNXK_SDP_R_OUT_CNTS_ISM_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
263
#define CNXK_SDP_R_IN_CNTS_ISM(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
264
(CNXK_SDP_R_IN_CNTS_ISM_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
379
#define CNXK_SDP_EPVF_RING(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
380
(CNXK_SDP_EPVF_RING_START + ((ring) * CNXK_EPVF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
381
#define CNXK_SDP_IN_RING_TB_MAP(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
382
(CNXK_SDP_N_RING_TB_MAP_START + ((ring) * CNXK_EPVF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
383
#define CNXK_SDP_IN_RATE_LIMIT(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
384
(CNXK_SDP_IN_RATE_LIMIT_START + ((ring) * CNXK_EPVF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
73
#define CNXK_SDP_R_IN_CONTROL(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
74
(CNXK_SDP_R_IN_CONTROL_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
76
#define CNXK_SDP_R_IN_ENABLE(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
77
(CNXK_SDP_R_IN_ENABLE_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
79
#define CNXK_SDP_R_IN_INSTR_BADDR(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
80
(CNXK_SDP_R_IN_INSTR_BADDR_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
82
#define CNXK_SDP_R_IN_INSTR_RSIZE(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
83
(CNXK_SDP_R_IN_INSTR_RSIZE_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
85
#define CNXK_SDP_R_IN_INSTR_DBELL(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
86
(CNXK_SDP_R_IN_INSTR_DBELL_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
88
#define CNXK_SDP_R_IN_CNTS(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
89
(CNXK_SDP_R_IN_CNTS_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
91
#define CNXK_SDP_R_IN_INT_LEVELS(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
92
(CNXK_SDP_R_IN_INT_LEVELS_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
94
#define CNXK_SDP_R_IN_PKT_CNT(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
95
(CNXK_SDP_R_IN_PKT_CNT_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
97
#define CNXK_SDP_R_IN_BYTE_CNT(ring) \
drivers/net/ethernet/marvell/octeon_ep/octep_regs_cnxk_pf.h
98
(CNXK_SDP_R_IN_BYTE_CNT_START + ((ring) * CNXK_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_mbox.c
25
int ring = 0;
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_mbox.c
33
oct->hw_ops.setup_mbox_regs(oct, ring);
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
101
#define CN93_VF_SDP_R_OUT_SLIST_RSIZE(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
102
(CN93_VF_SDP_R_OUT_SLIST_RSIZE_START + ((ring) * CN93_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
104
#define CN93_VF_SDP_R_OUT_SLIST_DBELL(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
105
(CN93_VF_SDP_R_OUT_SLIST_DBELL_START + ((ring) * CN93_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
107
#define CN93_VF_SDP_R_OUT_CNTS(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
108
(CN93_VF_SDP_R_OUT_CNTS_START + ((ring) * CN93_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
110
#define CN93_VF_SDP_R_OUT_INT_LEVELS(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
111
(CN93_VF_SDP_R_OUT_INT_LEVELS_START + ((ring) * CN93_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
113
#define CN93_VF_SDP_R_OUT_PKT_CNT(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
114
(CN93_VF_SDP_R_OUT_PKT_CNT_START + ((ring) * CN93_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
116
#define CN93_VF_SDP_R_OUT_BYTE_CNT(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
117
(CN93_VF_SDP_R_OUT_BYTE_CNT_START + ((ring) * CN93_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
146
#define CN93_VF_SDP_R_MBOX_PF_VF_DATA(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
147
(CN93_VF_SDP_R_MBOX_PF_VF_DATA_START + ((ring) * CN93_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
149
#define CN93_VF_SDP_R_MBOX_PF_VF_INT(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
150
(CN93_VF_SDP_R_MBOX_PF_VF_INT_START + ((ring) * CN93_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
152
#define CN93_VF_SDP_R_MBOX_VF_PF_DATA(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
153
(CN93_VF_SDP_R_MBOX_VF_PF_DATA_START + ((ring) * CN93_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
33
#define CN93_VF_SDP_R_IN_CONTROL(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
34
(CN93_VF_SDP_R_IN_CONTROL_START + ((ring) * CN93_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
36
#define CN93_VF_SDP_R_IN_ENABLE(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
37
(CN93_VF_SDP_R_IN_ENABLE_START + ((ring) * CN93_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
39
#define CN93_VF_SDP_R_IN_INSTR_BADDR(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
40
(CN93_VF_SDP_R_IN_INSTR_BADDR_START + ((ring) * CN93_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
42
#define CN93_VF_SDP_R_IN_INSTR_RSIZE(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
43
(CN93_VF_SDP_R_IN_INSTR_RSIZE_START + ((ring) * CN93_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
45
#define CN93_VF_SDP_R_IN_INSTR_DBELL(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
46
(CN93_VF_SDP_R_IN_INSTR_DBELL_START + ((ring) * CN93_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
48
#define CN93_VF_SDP_R_IN_CNTS(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
49
(CN93_VF_SDP_R_IN_CNTS_START + ((ring) * CN93_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
51
#define CN93_VF_SDP_R_IN_INT_LEVELS(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
52
(CN93_VF_SDP_R_IN_INT_LEVELS_START + ((ring) * CN93_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
54
#define CN93_VF_SDP_R_IN_PKT_CNT(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
55
(CN93_VF_SDP_R_IN_PKT_CNT_START + ((ring) * CN93_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
57
#define CN93_VF_SDP_R_IN_BYTE_CNT(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
58
(CN93_VF_SDP_R_IN_BYTE_CNT_START + ((ring) * CN93_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
92
#define CN93_VF_SDP_R_OUT_CONTROL(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
93
(CN93_VF_SDP_R_OUT_CONTROL_START + ((ring) * CN93_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
95
#define CN93_VF_SDP_R_OUT_ENABLE(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
96
(CN93_VF_SDP_R_OUT_ENABLE_START + ((ring) * CN93_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
98
#define CN93_VF_SDP_R_OUT_SLIST_BADDR(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cn9k.h
99
(CN93_VF_SDP_R_OUT_SLIST_BADDR_START + ((ring) * CN93_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
100
#define CNXK_VF_SDP_R_OUT_ENABLE(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
101
(CNXK_VF_SDP_R_OUT_ENABLE_START + ((ring) * CNXK_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
103
#define CNXK_VF_SDP_R_OUT_SLIST_BADDR(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
104
(CNXK_VF_SDP_R_OUT_SLIST_BADDR_START + ((ring) * CNXK_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
106
#define CNXK_VF_SDP_R_OUT_SLIST_RSIZE(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
107
(CNXK_VF_SDP_R_OUT_SLIST_RSIZE_START + ((ring) * CNXK_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
109
#define CNXK_VF_SDP_R_OUT_SLIST_DBELL(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
110
(CNXK_VF_SDP_R_OUT_SLIST_DBELL_START + ((ring) * CNXK_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
112
#define CNXK_VF_SDP_R_OUT_WMARK(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
113
(CNXK_VF_SDP_R_OUT_WMARK_START + ((ring) * CNXK_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
115
#define CNXK_VF_SDP_R_OUT_CNTS(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
116
(CNXK_VF_SDP_R_OUT_CNTS_START + ((ring) * CNXK_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
118
#define CNXK_VF_SDP_R_OUT_INT_LEVELS(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
119
(CNXK_VF_SDP_R_OUT_INT_LEVELS_START + ((ring) * CNXK_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
121
#define CNXK_VF_SDP_R_OUT_PKT_CNT(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
122
(CNXK_VF_SDP_R_OUT_PKT_CNT_START + ((ring) * CNXK_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
124
#define CNXK_VF_SDP_R_OUT_BYTE_CNT(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
125
(CNXK_VF_SDP_R_OUT_BYTE_CNT_START + ((ring) * CNXK_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
154
#define CNXK_VF_SDP_R_MBOX_PF_VF_DATA(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
155
(CNXK_VF_SDP_R_MBOX_PF_VF_DATA_START + ((ring) * CNXK_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
157
#define CNXK_VF_SDP_R_MBOX_PF_VF_INT(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
158
(CNXK_VF_SDP_R_MBOX_PF_VF_INT_START + ((ring) * CNXK_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
160
#define CNXK_VF_SDP_R_MBOX_VF_PF_DATA(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
161
(CNXK_VF_SDP_R_MBOX_VF_PF_DATA_START + ((ring) * CNXK_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
34
#define CNXK_VF_SDP_R_ERR_TYPE(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
35
(CNXK_VF_SDP_R_ERR_TYPE_START + ((ring) * CNXK_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
37
#define CNXK_VF_SDP_R_IN_CONTROL(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
38
(CNXK_VF_SDP_R_IN_CONTROL_START + ((ring) * CNXK_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
40
#define CNXK_VF_SDP_R_IN_ENABLE(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
41
(CNXK_VF_SDP_R_IN_ENABLE_START + ((ring) * CNXK_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
43
#define CNXK_VF_SDP_R_IN_INSTR_BADDR(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
44
(CNXK_VF_SDP_R_IN_INSTR_BADDR_START + ((ring) * CNXK_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
46
#define CNXK_VF_SDP_R_IN_INSTR_RSIZE(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
47
(CNXK_VF_SDP_R_IN_INSTR_RSIZE_START + ((ring) * CNXK_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
49
#define CNXK_VF_SDP_R_IN_INSTR_DBELL(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
50
(CNXK_VF_SDP_R_IN_INSTR_DBELL_START + ((ring) * CNXK_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
52
#define CNXK_VF_SDP_R_IN_CNTS(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
53
(CNXK_VF_SDP_R_IN_CNTS_START + ((ring) * CNXK_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
55
#define CNXK_VF_SDP_R_IN_INT_LEVELS(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
56
(CNXK_VF_SDP_R_IN_INT_LEVELS_START + ((ring) * CNXK_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
58
#define CNXK_VF_SDP_R_IN_PKT_CNT(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
59
(CNXK_VF_SDP_R_IN_PKT_CNT_START + ((ring) * CNXK_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
61
#define CNXK_VF_SDP_R_IN_BYTE_CNT(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
62
(CNXK_VF_SDP_R_IN_BYTE_CNT_START + ((ring) * CNXK_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
97
#define CNXK_VF_SDP_R_OUT_CONTROL(ring) \
drivers/net/ethernet/marvell/octeon_ep_vf/octep_vf_regs_cnxk.h
98
(CNXK_VF_SDP_R_OUT_CONTROL_START + ((ring) * CNXK_VF_RING_OFFSET))
drivers/net/ethernet/marvell/octeontx2/nic/otx2_ethtool.c
370
struct ethtool_ringparam *ring,
drivers/net/ethernet/marvell/octeontx2/nic/otx2_ethtool.c
377
ring->rx_max_pending = Q_COUNT(Q_SIZE_MAX);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_ethtool.c
378
ring->rx_pending = qs->rqe_cnt ? qs->rqe_cnt : Q_COUNT(Q_SIZE_256);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_ethtool.c
379
ring->tx_max_pending = Q_COUNT(Q_SIZE_MAX);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_ethtool.c
380
ring->tx_pending = qs->sqe_cnt ? qs->sqe_cnt : Q_COUNT(Q_SIZE_4K);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_ethtool.c
386
struct ethtool_ringparam *ring,
drivers/net/ethernet/marvell/octeontx2/nic/otx2_ethtool.c
398
if (ring->rx_mini_pending || ring->rx_jumbo_pending)
drivers/net/ethernet/marvell/octeontx2/nic/otx2_ethtool.c
417
rx_count = ring->rx_pending;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_ethtool.c
424
if (ring->rx_pending < 16) {
drivers/net/ethernet/marvell/octeontx2/nic/otx2_ethtool.c
427
ring->rx_pending);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_ethtool.c
437
tx_count = clamp_t(u32, ring->tx_pending,
drivers/net/ethernet/marvell/octeontx2/nic/otx2_flows.c
1100
u32 ring;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_flows.c
1108
ring = ethtool_get_flow_spec_ring(fsp->ring_cookie);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_flows.c
1126
ring >= pfvf->hw.rx_queues && fsp->ring_cookie != RX_CLS_FLOW_DISC)
drivers/net/ethernet/marvell/prestera/prestera_rxtx.c
291
struct prestera_rx_ring *ring = &sdma->rx_ring[q];
drivers/net/ethernet/marvell/prestera/prestera_rxtx.c
294
int buf_idx = ring->next_rx;
drivers/net/ethernet/marvell/prestera/prestera_rxtx.c
297
buf = &ring->bufs[buf_idx];
drivers/net/ethernet/marvell/prestera/prestera_rxtx.c
320
ring->next_rx = prestera_sdma_next_rx_buf_idx(buf_idx);
drivers/net/ethernet/marvell/prestera/prestera_rxtx.c
343
struct prestera_rx_ring *ring = &sdma->rx_ring[q];
drivers/net/ethernet/marvell/prestera/prestera_rxtx.c
345
if (!ring->bufs)
drivers/net/ethernet/marvell/prestera/prestera_rxtx.c
349
struct prestera_sdma_buf *buf = &ring->bufs[b];
drivers/net/ethernet/marvell/prestera/prestera_rxtx.c
380
struct prestera_rx_ring *ring = &sdma->rx_ring[q];
drivers/net/ethernet/marvell/prestera/prestera_rxtx.c
382
ring->bufs = kmalloc_objs(*head, bnum);
drivers/net/ethernet/marvell/prestera/prestera_rxtx.c
383
if (!ring->bufs)
drivers/net/ethernet/marvell/prestera/prestera_rxtx.c
386
ring->next_rx = 0;
drivers/net/ethernet/marvell/prestera/prestera_rxtx.c
388
tail = &ring->bufs[bnum - 1];
drivers/net/ethernet/marvell/prestera/prestera_rxtx.c
389
head = &ring->bufs[0];
drivers/net/ethernet/marvell/prestera/prestera_rxtx.c
575
struct prestera_tx_ring *ring = &sdma->tx_ring;
drivers/net/ethernet/marvell/prestera/prestera_rxtx.c
581
if (!ring->bufs)
drivers/net/ethernet/marvell/prestera/prestera_rxtx.c
585
struct prestera_sdma_buf *buf = &ring->bufs[b];
drivers/net/ethernet/marvell/skge.c
1005
} while ((e = e->next) != ring->start);
drivers/net/ethernet/marvell/skge.c
1015
struct skge_ring *ring = &skge->rx_ring;
drivers/net/ethernet/marvell/skge.c
1018
e = ring->start;
drivers/net/ethernet/marvell/skge.c
1032
} while ((e = e->next) != ring->start);
drivers/net/ethernet/marvell/skge.c
1034
ring->to_clean = ring->start;
drivers/net/ethernet/marvell/skge.c
2731
static inline int skge_avail(const struct skge_ring *ring)
drivers/net/ethernet/marvell/skge.c
2734
return ((ring->to_clean > ring->to_use) ? 0 : ring->count)
drivers/net/ethernet/marvell/skge.c
2735
+ (ring->to_clean - ring->to_use) - 1;
drivers/net/ethernet/marvell/skge.c
3152
struct skge_ring *ring = &skge->tx_ring;
drivers/net/ethernet/marvell/skge.c
3158
for (e = ring->to_clean; e != ring->to_use; e = e->next) {
drivers/net/ethernet/marvell/skge.c
3200
struct skge_ring *ring = &skge->rx_ring;
drivers/net/ethernet/marvell/skge.c
3208
for (e = ring->to_clean; prefetch(e->next), work_done < budget; e = e->next) {
drivers/net/ethernet/marvell/skge.c
3224
ring->to_clean = e;
drivers/net/ethernet/marvell/skge.c
915
static int skge_ring_alloc(struct skge_ring *ring, void *vaddr, u32 base)
drivers/net/ethernet/marvell/skge.c
921
ring->start = kzalloc_objs(*e, ring->count);
drivers/net/ethernet/marvell/skge.c
922
if (!ring->start)
drivers/net/ethernet/marvell/skge.c
925
for (i = 0, e = ring->start, d = vaddr; i < ring->count; i++, e++, d++) {
drivers/net/ethernet/marvell/skge.c
927
if (i == ring->count - 1) {
drivers/net/ethernet/marvell/skge.c
928
e->next = ring->start;
drivers/net/ethernet/marvell/skge.c
935
ring->to_use = ring->to_clean = ring->start;
drivers/net/ethernet/marvell/skge.c
990
struct skge_ring *ring = &skge->rx_ring;
drivers/net/ethernet/marvell/skge.c
993
e = ring->start;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1361
static void *mtk_qdma_phys_to_virt(struct mtk_tx_ring *ring, u32 desc)
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1363
return ring->dma + (desc - ring->phys);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1366
static struct mtk_tx_buf *mtk_desc_to_tx_buf(struct mtk_tx_ring *ring,
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1369
int idx = (txd - ring->dma) / txd_size;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1371
return &ring->buf[idx];
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1374
static struct mtk_tx_dma *qdma_to_pdma(struct mtk_tx_ring *ring,
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1377
return ring->dma_pdma - (struct mtk_tx_dma *)ring->dma + dma;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1380
static int txd_to_idx(struct mtk_tx_ring *ring, void *dma, u32 txd_size)
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1382
return (dma - ring->dma) / txd_size;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1561
int tx_num, struct mtk_tx_ring *ring, bool gso)
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1585
itxd = ring->next_free;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1586
itxd_pdma = qdma_to_pdma(ring, itxd);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1587
if (itxd == ring->last_free)
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1590
itx_buf = mtk_desc_to_tx_buf(ring, itxd, soc->tx.desc_size);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1607
txd_pdma = qdma_to_pdma(ring, txd);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1619
txd = mtk_qdma_phys_to_virt(ring, txd->txd2);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1620
txd_pdma = qdma_to_pdma(ring, txd);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1621
if (txd == ring->last_free)
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1643
tx_buf = mtk_desc_to_tx_buf(ring, txd,
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1673
ring->next_free = mtk_qdma_phys_to_virt(ring, txd->txd2);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1674
atomic_sub(n_desc, &ring->free_count);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1687
next_idx = NEXT_DESP_IDX(txd_to_idx(ring, txd, soc->tx.desc_size),
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1688
ring->dma_size);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1696
tx_buf = mtk_desc_to_tx_buf(ring, itxd, soc->tx.desc_size);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1705
itxd = mtk_qdma_phys_to_virt(ring, itxd->txd2);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1706
itxd_pdma = qdma_to_pdma(ring, itxd);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1759
struct mtk_tx_ring *ring = &eth->tx_ring;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1781
if (unlikely(atomic_read(&ring->free_count) <= tx_num)) {
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1804
if (mtk_tx_map(skb, dev, tx_num, ring, gso) < 0)
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1807
if (unlikely(atomic_read(&ring->free_count) <= ring->thresh))
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1825
struct mtk_rx_ring *ring;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1834
ring = &eth->rx_ring[i];
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1835
idx = NEXT_DESP_IDX(ring->calc_idx, ring->dma_size);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1836
rxd = ring->dma + idx * eth->soc->rx.desc_size;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1838
ring->calc_idx_update = true;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1839
return ring;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1848
struct mtk_rx_ring *ring;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1852
ring = &eth->rx_ring[0];
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1853
mtk_w32(eth, ring->calc_idx, ring->crx_idx_reg);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1856
ring = &eth->rx_ring[i];
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1857
if (ring->calc_idx_update) {
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1858
ring->calc_idx_update = false;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1859
mtk_w32(eth, ring->calc_idx, ring->crx_idx_reg);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1924
static void mtk_rx_put_buff(struct mtk_rx_ring *ring, void *data, bool napi)
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1926
if (ring->page_pool)
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1927
page_pool_put_full_page(ring->page_pool,
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1938
struct mtk_tx_ring *ring = &eth->tx_ring;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1963
txd_pdma = qdma_to_pdma(ring, txd);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1975
struct mtk_tx_ring *ring = &eth->tx_ring;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1992
if (unlikely(atomic_read(&ring->free_count) <= 1 + nr_frags))
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1997
txd = ring->next_free;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1998
if (txd == ring->last_free) {
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2004
tx_buf = mtk_desc_to_tx_buf(ring, txd, soc->tx.desc_size);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2018
txd = mtk_qdma_phys_to_virt(ring, txd->txd2);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2019
if (txd == ring->last_free)
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2022
tx_buf = mtk_desc_to_tx_buf(ring, txd,
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2040
struct mtk_tx_dma *txd_pdma = qdma_to_pdma(ring, txd);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2048
ring->next_free = mtk_qdma_phys_to_virt(ring, txd->txd2);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2049
atomic_sub(n_desc, &ring->free_count);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2061
idx = txd_to_idx(ring, txd, soc->tx.desc_size);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2062
mtk_w32(eth, NEXT_DESP_IDX(idx, ring->dma_size),
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2072
tx_buf = mtk_desc_to_tx_buf(ring, htxd, soc->tx.desc_size);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2077
struct mtk_tx_dma *txd_pdma = qdma_to_pdma(ring, htxd);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2082
htxd = mtk_qdma_phys_to_virt(ring, htxd->txd2);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2115
static u32 mtk_xdp_run(struct mtk_eth *eth, struct mtk_rx_ring *ring,
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2165
page_pool_put_full_page(ring->page_pool,
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2182
struct mtk_rx_ring *ring;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2199
ring = mtk_get_rx_ring(eth);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2200
if (unlikely(!ring))
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2203
idx = NEXT_DESP_IDX(ring->calc_idx, ring->dma_size);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2204
rxd = ring->dma + idx * eth->soc->rx.desc_size;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2205
data = ring->data[idx];
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2243
if (ring->page_pool) {
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2248
new_data = mtk_page_pool_get_buff(ring->page_pool,
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2258
pktlen, page_pool_get_dma_dir(ring->page_pool));
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2260
xdp_init_buff(&xdp, PAGE_SIZE, &ring->xdp_q);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2265
ret = mtk_xdp_run(eth, ring, &xdp, netdev);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2274
page_pool_put_full_page(ring->page_pool,
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2287
if (ring->frag_size <= PAGE_SIZE)
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2288
new_data = napi_alloc_frag(ring->frag_size);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2299
ring->buf_size, DMA_FROM_DEVICE);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2311
ring->buf_size, DMA_FROM_DEVICE);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2313
skb = build_skb(data, ring->frag_size);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2368
ring->data[idx] = new_data;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2382
rxd->rxd2 = RX_DMA_PREP_PLEN0(ring->buf_size) | addr64;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2384
ring->calc_idx = idx;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2451
struct mtk_tx_ring *ring = &eth->tx_ring;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2457
cpu = ring->last_free_ptr;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2460
desc = mtk_qdma_phys_to_virt(ring, cpu);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2466
desc = mtk_qdma_phys_to_virt(ring, desc->txd2);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2470
tx_buf = mtk_desc_to_tx_buf(ring, desc,
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2484
ring->last_free = desc;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2485
atomic_inc(&ring->free_count);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2491
ring->last_free_ptr = cpu;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2500
struct mtk_tx_ring *ring = &eth->tx_ring;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2506
cpu = ring->cpu_idx;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2511
tx_buf = &ring->buf[cpu];
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2522
desc = ring->dma + cpu * eth->soc->tx.desc_size;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2523
ring->last_free = desc;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2524
atomic_inc(&ring->free_count);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2526
cpu = NEXT_DESP_IDX(cpu, ring->dma_size);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2530
ring->cpu_idx = cpu;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2537
struct mtk_tx_ring *ring = &eth->tx_ring;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2554
(atomic_read(&ring->free_count) > ring->thresh))
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2639
struct mtk_tx_ring *ring = &eth->tx_ring;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2650
ring->buf = kzalloc_objs(*ring->buf, ring_size);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2651
if (!ring->buf)
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2654
ring->dma = mtk_dma_ring_alloc(eth, ring_size * sz, &ring->phys, true);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2655
if (!ring->dma)
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2660
u32 next_ptr = ring->phys + next * sz;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2662
txd = ring->dma + i * sz;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2679
ring->dma_pdma = dma_alloc_coherent(eth->dma_dev, ring_size * sz,
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2680
&ring->phys_pdma, GFP_KERNEL);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2681
if (!ring->dma_pdma)
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2685
ring->dma_pdma[i].txd2 = TX_DMA_DESP2_DEF;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2686
ring->dma_pdma[i].txd4 = 0;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2690
ring->dma_size = ring_size;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2691
atomic_set(&ring->free_count, ring_size - 2);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2692
ring->next_free = ring->dma;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2693
ring->last_free = (void *)txd;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2694
ring->last_free_ptr = (u32)(ring->phys + ((ring_size - 1) * sz));
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2695
ring->thresh = MAX_SKB_FRAGS;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2703
mtk_w32(eth, ring->phys, soc->reg_map->qdma.ctx_ptr);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2704
mtk_w32(eth, ring->phys, soc->reg_map->qdma.dtx_ptr);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2706
ring->phys + ((ring_size - 1) * sz),
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2708
mtk_w32(eth, ring->last_free_ptr, soc->reg_map->qdma.drx_ptr);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2729
mtk_w32(eth, ring->phys_pdma, MT7628_TX_BASE_PTR0);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2744
struct mtk_tx_ring *ring = &eth->tx_ring;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2747
if (ring->buf) {
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2748
for (i = 0; i < ring->dma_size; i++)
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2749
mtk_tx_unmap(eth, &ring->buf[i], NULL, false);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2750
kfree(ring->buf);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2751
ring->buf = NULL;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2754
if (ring->dma) {
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2755
mtk_dma_ring_free(eth, ring->dma_size * soc->tx.desc_size,
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2756
ring->dma, ring->phys, true);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2757
ring->dma = NULL;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2760
if (ring->dma_pdma) {
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2762
ring->dma_size * soc->tx.desc_size,
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2763
ring->dma_pdma, ring->phys_pdma);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2764
ring->dma_pdma = NULL;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2772
struct mtk_rx_ring *ring;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2779
ring = &eth->rx_ring_qdma;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2781
ring = &eth->rx_ring[ring_no];
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2792
ring->frag_size = mtk_max_frag_size(rx_data_len);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2793
ring->buf_size = mtk_max_buf_size(ring->frag_size);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2794
ring->data = kcalloc(rx_dma_size, sizeof(*ring->data),
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2796
if (!ring->data)
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2802
pp = mtk_create_page_pool(eth, &ring->xdp_q, ring_no,
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2807
ring->page_pool = pp;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2810
ring->dma = mtk_dma_ring_alloc(eth,
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2812
&ring->phys,
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2814
if (!ring->dma)
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2822
rxd = ring->dma + i * eth->soc->rx.desc_size;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2823
if (ring->page_pool) {
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2824
data = mtk_page_pool_get_buff(ring->page_pool,
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2829
if (ring->frag_size <= PAGE_SIZE)
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2830
data = netdev_alloc_frag(ring->frag_size);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2839
ring->buf_size, DMA_FROM_DEVICE);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2847
ring->data[i] = data;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2852
rxd->rxd2 = RX_DMA_PREP_PLEN0(ring->buf_size);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2867
ring->dma_size = rx_dma_size;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2868
ring->calc_idx_update = false;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2869
ring->calc_idx = rx_dma_size - 1;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2871
ring->crx_idx_reg = reg_map->qdma.qcrx_ptr +
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2874
ring->crx_idx_reg = reg_map->pdma.pcrx_ptr +
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2882
mtk_w32(eth, ring->phys,
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2889
mtk_w32(eth, ring->phys,
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2896
mtk_w32(eth, ring->calc_idx, ring->crx_idx_reg);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2901
static void mtk_rx_clean(struct mtk_eth *eth, struct mtk_rx_ring *ring, bool in_sram)
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2906
if (ring->data && ring->dma) {
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2907
for (i = 0; i < ring->dma_size; i++) {
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2910
if (!ring->data[i])
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2913
rxd = ring->dma + i * eth->soc->rx.desc_size;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2921
ring->buf_size, DMA_FROM_DEVICE);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2922
mtk_rx_put_buff(ring, ring->data[i], false);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2924
kfree(ring->data);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2925
ring->data = NULL;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2928
if (ring->dma) {
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2929
mtk_dma_ring_free(eth, ring->dma_size * eth->soc->rx.desc_size,
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2930
ring->dma, ring->phys, in_sram);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2931
ring->dma = NULL;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2934
if (ring->page_pool) {
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2935
if (xdp_rxq_info_is_reg(&ring->xdp_q))
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2936
xdp_rxq_info_unreg(&ring->xdp_q);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2937
page_pool_destroy(ring->page_pool);
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2938
ring->page_pool = NULL;
drivers/net/ethernet/mediatek/mtk_eth_soc.c
4594
struct mtk_rx_ring *ring = &eth->rx_ring[i];
drivers/net/ethernet/mediatek/mtk_eth_soc.c
4596
if (!ring->page_pool)
drivers/net/ethernet/mediatek/mtk_eth_soc.c
4599
page_pool_get_stats(ring->page_pool, &stats);
drivers/net/ethernet/mediatek/mtk_star_emac.c
1101
struct mtk_star_ring *ring = &priv->tx_ring;
drivers/net/ethernet/mediatek/mtk_star_emac.c
1106
if (unlikely(mtk_star_tx_ring_avail(ring) < nfrags + 1)) {
drivers/net/ethernet/mediatek/mtk_star_emac.c
1121
mtk_star_ring_push_head_tx(ring, &desc_data);
drivers/net/ethernet/mediatek/mtk_star_emac.c
1142
struct mtk_star_ring *ring = &priv->tx_ring;
drivers/net/ethernet/mediatek/mtk_star_emac.c
1146
ret = mtk_star_ring_pop_tail(ring, &desc_data);
drivers/net/ethernet/mediatek/mtk_star_emac.c
1162
struct mtk_star_ring *ring = &priv->tx_ring;
drivers/net/ethernet/mediatek/mtk_star_emac.c
1164
unsigned int head = ring->head;
drivers/net/ethernet/mediatek/mtk_star_emac.c
1165
unsigned int entry = ring->tail;
drivers/net/ethernet/mediatek/mtk_star_emac.c
1176
entry = ring->tail;
drivers/net/ethernet/mediatek/mtk_star_emac.c
1182
(mtk_star_tx_ring_avail(ring) > MTK_STAR_TX_THRESH))
drivers/net/ethernet/mediatek/mtk_star_emac.c
1273
struct mtk_star_ring *ring = &priv->rx_ring;
drivers/net/ethernet/mediatek/mtk_star_emac.c
1282
ret = mtk_star_ring_pop_tail(ring, &desc_data);
drivers/net/ethernet/mediatek/mtk_star_emac.c
1334
mtk_star_ring_push_head_rx(ring, &desc_data);
drivers/net/ethernet/mediatek/mtk_star_emac.c
301
static void mtk_star_ring_init(struct mtk_star_ring *ring,
drivers/net/ethernet/mediatek/mtk_star_emac.c
304
memset(ring, 0, sizeof(*ring));
drivers/net/ethernet/mediatek/mtk_star_emac.c
305
ring->descs = descs;
drivers/net/ethernet/mediatek/mtk_star_emac.c
306
ring->head = 0;
drivers/net/ethernet/mediatek/mtk_star_emac.c
307
ring->tail = 0;
drivers/net/ethernet/mediatek/mtk_star_emac.c
310
static int mtk_star_ring_pop_tail(struct mtk_star_ring *ring,
drivers/net/ethernet/mediatek/mtk_star_emac.c
313
struct mtk_star_ring_desc *desc = &ring->descs[ring->tail];
drivers/net/ethernet/mediatek/mtk_star_emac.c
324
desc_data->dma_addr = ring->dma_addrs[ring->tail];
drivers/net/ethernet/mediatek/mtk_star_emac.c
325
desc_data->skb = ring->skbs[ring->tail];
drivers/net/ethernet/mediatek/mtk_star_emac.c
327
ring->dma_addrs[ring->tail] = 0;
drivers/net/ethernet/mediatek/mtk_star_emac.c
328
ring->skbs[ring->tail] = NULL;
drivers/net/ethernet/mediatek/mtk_star_emac.c
335
ring->tail = (ring->tail + 1) % MTK_STAR_RING_NUM_DESCS;
drivers/net/ethernet/mediatek/mtk_star_emac.c
340
static void mtk_star_ring_push_head(struct mtk_star_ring *ring,
drivers/net/ethernet/mediatek/mtk_star_emac.c
344
struct mtk_star_ring_desc *desc = &ring->descs[ring->head];
drivers/net/ethernet/mediatek/mtk_star_emac.c
349
ring->skbs[ring->head] = desc_data->skb;
drivers/net/ethernet/mediatek/mtk_star_emac.c
350
ring->dma_addrs[ring->head] = desc_data->dma_addr;
drivers/net/ethernet/mediatek/mtk_star_emac.c
363
ring->head = (ring->head + 1) % MTK_STAR_RING_NUM_DESCS;
drivers/net/ethernet/mediatek/mtk_star_emac.c
367
mtk_star_ring_push_head_rx(struct mtk_star_ring *ring,
drivers/net/ethernet/mediatek/mtk_star_emac.c
370
mtk_star_ring_push_head(ring, desc_data, 0);
drivers/net/ethernet/mediatek/mtk_star_emac.c
374
mtk_star_ring_push_head_tx(struct mtk_star_ring *ring,
drivers/net/ethernet/mediatek/mtk_star_emac.c
381
mtk_star_ring_push_head(ring, desc_data, flags);
drivers/net/ethernet/mediatek/mtk_star_emac.c
384
static unsigned int mtk_star_tx_ring_avail(struct mtk_star_ring *ring)
drivers/net/ethernet/mediatek/mtk_star_emac.c
388
if (ring->tail > ring->head)
drivers/net/ethernet/mediatek/mtk_star_emac.c
389
avail = ring->tail - ring->head - 1;
drivers/net/ethernet/mediatek/mtk_star_emac.c
391
avail = MTK_STAR_RING_NUM_DESCS - ring->head + ring->tail - 1;
drivers/net/ethernet/mediatek/mtk_star_emac.c
696
struct mtk_star_ring *ring = &priv->rx_ring;
drivers/net/ethernet/mediatek/mtk_star_emac.c
714
desc = &ring->descs[i];
drivers/net/ethernet/mediatek/mtk_star_emac.c
718
ring->skbs[i] = skb;
drivers/net/ethernet/mediatek/mtk_star_emac.c
719
ring->dma_addrs[i] = dma_addr;
drivers/net/ethernet/mediatek/mtk_star_emac.c
726
mtk_star_ring_free_skbs(struct mtk_star_priv *priv, struct mtk_star_ring *ring,
drivers/net/ethernet/mediatek/mtk_star_emac.c
734
if (!ring->dma_addrs[i])
drivers/net/ethernet/mediatek/mtk_star_emac.c
737
desc_data.dma_addr = ring->dma_addrs[i];
drivers/net/ethernet/mediatek/mtk_star_emac.c
738
desc_data.skb = ring->skbs[i];
drivers/net/ethernet/mediatek/mtk_star_emac.c
747
struct mtk_star_ring *ring = &priv->rx_ring;
drivers/net/ethernet/mediatek/mtk_star_emac.c
749
mtk_star_ring_free_skbs(priv, ring, mtk_star_dma_unmap_rx);
drivers/net/ethernet/mediatek/mtk_star_emac.c
754
struct mtk_star_ring *ring = &priv->tx_ring;
drivers/net/ethernet/mediatek/mtk_star_emac.c
756
mtk_star_ring_free_skbs(priv, ring, mtk_star_dma_unmap_tx);
drivers/net/ethernet/mediatek/mtk_wed.c
1016
struct mtk_wed_ring *ring)
drivers/net/ethernet/mediatek/mtk_wed.c
1021
u32 cur_idx = readl(ring->wpdma + MTK_WED_RING_OFS_CPU_IDX);
drivers/net/ethernet/mediatek/mtk_wed.c
1313
mtk_wed_rro_ring_alloc(struct mtk_wed_device *dev, struct mtk_wed_ring *ring,
drivers/net/ethernet/mediatek/mtk_wed.c
1316
ring->desc = dma_alloc_coherent(dev->hw->dev,
drivers/net/ethernet/mediatek/mtk_wed.c
1317
size * sizeof(*ring->desc),
drivers/net/ethernet/mediatek/mtk_wed.c
1318
&ring->desc_phys, GFP_KERNEL);
drivers/net/ethernet/mediatek/mtk_wed.c
1319
if (!ring->desc)
drivers/net/ethernet/mediatek/mtk_wed.c
1322
ring->desc_size = sizeof(*ring->desc);
drivers/net/ethernet/mediatek/mtk_wed.c
1323
ring->size = size;
drivers/net/ethernet/mediatek/mtk_wed.c
1342
return mtk_wed_rro_ring_alloc(dev, &dev->rro.ring,
drivers/net/ethernet/mediatek/mtk_wed.c
1355
} ring[2];
drivers/net/ethernet/mediatek/mtk_wed.c
1359
.ring[0] = {
drivers/net/ethernet/mediatek/mtk_wed.c
1364
.ring[1] = {
drivers/net/ethernet/mediatek/mtk_wed.c
1393
wed_w32(dev, MTK_WED_RROQ_BASE_L, dev->rro.ring.desc_phys);
drivers/net/ethernet/mediatek/mtk_wed.c
1541
mtk_wed_ring_reset(struct mtk_wed_ring *ring, int size, bool tx)
drivers/net/ethernet/mediatek/mtk_wed.c
1543
void *head = (void *)ring->desc;
drivers/net/ethernet/mediatek/mtk_wed.c
1549
desc = (struct mtk_wdma_desc *)(head + i * ring->desc_size);
drivers/net/ethernet/mediatek/mtk_wed.c
1859
mtk_wed_ring_alloc(struct mtk_wed_device *dev, struct mtk_wed_ring *ring,
drivers/net/ethernet/mediatek/mtk_wed.c
1862
ring->desc = dma_alloc_coherent(dev->hw->dev, size * desc_size,
drivers/net/ethernet/mediatek/mtk_wed.c
1863
&ring->desc_phys, GFP_KERNEL);
drivers/net/ethernet/mediatek/mtk_wed.c
1864
if (!ring->desc)
drivers/net/ethernet/mediatek/mtk_wed.c
1867
ring->desc_size = desc_size;
drivers/net/ethernet/mediatek/mtk_wed.c
1868
ring->size = size;
drivers/net/ethernet/mediatek/mtk_wed.c
1869
mtk_wed_ring_reset(ring, size, tx);
drivers/net/ethernet/mediatek/mtk_wed.c
2139
struct mtk_wed_ring *ring = &dev->rx_ring[i];
drivers/net/ethernet/mediatek/mtk_wed.c
2142
if (!(ring->flags & MTK_WED_RING_CONFIGURED))
drivers/net/ethernet/mediatek/mtk_wed.c
2145
if (mtk_wed_check_wfdma_rx_fill(dev, ring)) {
drivers/net/ethernet/mediatek/mtk_wed.c
2212
struct mtk_wed_ring *ring = &dev->rx_rro_ring[i];
drivers/net/ethernet/mediatek/mtk_wed.c
2214
if (!(ring->flags & MTK_WED_RING_CONFIGURED))
drivers/net/ethernet/mediatek/mtk_wed.c
2217
if (mtk_wed_check_wfdma_rx_fill(dev, ring))
drivers/net/ethernet/mediatek/mtk_wed.c
2223
struct mtk_wed_ring *ring = &dev->rx_page_ring[i];
drivers/net/ethernet/mediatek/mtk_wed.c
2225
if (!(ring->flags & MTK_WED_RING_CONFIGURED))
drivers/net/ethernet/mediatek/mtk_wed.c
2228
if (mtk_wed_check_wfdma_rx_fill(dev, ring))
drivers/net/ethernet/mediatek/mtk_wed.c
2238
struct mtk_wed_ring *ring = &dev->rx_rro_ring[idx];
drivers/net/ethernet/mediatek/mtk_wed.c
2240
ring->wpdma = regs;
drivers/net/ethernet/mediatek/mtk_wed.c
2245
ring->flags |= MTK_WED_RING_CONFIGURED;
drivers/net/ethernet/mediatek/mtk_wed.c
2251
struct mtk_wed_ring *ring = &dev->rx_page_ring[idx];
drivers/net/ethernet/mediatek/mtk_wed.c
2253
ring->wpdma = regs;
drivers/net/ethernet/mediatek/mtk_wed.c
2258
ring->flags |= MTK_WED_RING_CONFIGURED;
drivers/net/ethernet/mediatek/mtk_wed.c
2264
struct mtk_wed_ring *ring = &dev->ind_cmd_ring;
drivers/net/ethernet/mediatek/mtk_wed.c
2268
ring->wpdma = regs;
drivers/net/ethernet/mediatek/mtk_wed.c
2475
struct mtk_wed_ring *ring = &dev->tx_ring[idx];
drivers/net/ethernet/mediatek/mtk_wed.c
2492
if (!reset && mtk_wed_ring_alloc(dev, ring, MTK_WED_TX_RING_SIZE,
drivers/net/ethernet/mediatek/mtk_wed.c
2493
sizeof(*ring->desc), true))
drivers/net/ethernet/mediatek/mtk_wed.c
2500
ring->reg_base = MTK_WED_RING_TX(idx);
drivers/net/ethernet/mediatek/mtk_wed.c
2501
ring->wpdma = regs;
drivers/net/ethernet/mediatek/mtk_wed.c
2521
wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_BASE, ring->desc_phys);
drivers/net/ethernet/mediatek/mtk_wed.c
2526
ring->desc_phys);
drivers/net/ethernet/mediatek/mtk_wed.c
2537
struct mtk_wed_ring *ring = &dev->txfree_ring;
drivers/net/ethernet/mediatek/mtk_wed.c
2545
ring->reg_base = MTK_WED_RING_RX(index);
drivers/net/ethernet/mediatek/mtk_wed.c
2546
ring->wpdma = regs;
drivers/net/ethernet/mediatek/mtk_wed.c
2562
struct mtk_wed_ring *ring = &dev->rx_ring[idx];
drivers/net/ethernet/mediatek/mtk_wed.c
2567
if (!reset && mtk_wed_ring_alloc(dev, ring, MTK_WED_RX_RING_SIZE,
drivers/net/ethernet/mediatek/mtk_wed.c
2568
sizeof(*ring->desc), false))
drivers/net/ethernet/mediatek/mtk_wed.c
2575
ring->reg_base = MTK_WED_RING_RX_DATA(idx);
drivers/net/ethernet/mediatek/mtk_wed.c
2576
ring->wpdma = regs;
drivers/net/ethernet/mediatek/mtk_wed.c
2577
ring->flags |= MTK_WED_RING_CONFIGURED;
drivers/net/ethernet/mediatek/mtk_wed.c
2580
wpdma_rx_w32(dev, idx, MTK_WED_RING_OFS_BASE, ring->desc_phys);
drivers/net/ethernet/mediatek/mtk_wed.c
2584
ring->desc_phys);
drivers/net/ethernet/mediatek/mtk_wed.c
940
mtk_wed_free_ring(struct mtk_wed_device *dev, struct mtk_wed_ring *ring)
drivers/net/ethernet/mediatek/mtk_wed.c
942
if (!ring->desc)
drivers/net/ethernet/mediatek/mtk_wed.c
945
dma_free_coherent(dev->hw->dev, ring->size * ring->desc_size,
drivers/net/ethernet/mediatek/mtk_wed.c
946
ring->desc, ring->desc_phys);
drivers/net/ethernet/mediatek/mtk_wed.c
953
mtk_wed_free_ring(dev, &dev->rro.ring);
drivers/net/ethernet/mediatek/mtk_wed.h
114
wpdma_tx_r32(struct mtk_wed_device *dev, int ring, u32 reg)
drivers/net/ethernet/mediatek/mtk_wed.h
116
if (!dev->tx_ring[ring].wpdma)
drivers/net/ethernet/mediatek/mtk_wed.h
119
return readl(dev->tx_ring[ring].wpdma + reg);
drivers/net/ethernet/mediatek/mtk_wed.h
123
wpdma_tx_w32(struct mtk_wed_device *dev, int ring, u32 reg, u32 val)
drivers/net/ethernet/mediatek/mtk_wed.h
125
if (!dev->tx_ring[ring].wpdma)
drivers/net/ethernet/mediatek/mtk_wed.h
128
writel(val, dev->tx_ring[ring].wpdma + reg);
drivers/net/ethernet/mediatek/mtk_wed.h
132
wpdma_rx_r32(struct mtk_wed_device *dev, int ring, u32 reg)
drivers/net/ethernet/mediatek/mtk_wed.h
134
if (!dev->rx_ring[ring].wpdma)
drivers/net/ethernet/mediatek/mtk_wed.h
137
return readl(dev->rx_ring[ring].wpdma + reg);
drivers/net/ethernet/mediatek/mtk_wed.h
141
wpdma_rx_w32(struct mtk_wed_device *dev, int ring, u32 reg, u32 val)
drivers/net/ethernet/mediatek/mtk_wed.h
143
if (!dev->rx_ring[ring].wpdma)
drivers/net/ethernet/mediatek/mtk_wed.h
146
writel(val, dev->rx_ring[ring].wpdma + reg);
drivers/net/ethernet/mellanox/mlx4/en_cq.c
107
cq->vector = cpumask_first(priv->rx_ring[cq->ring]->affinity_mask);
drivers/net/ethernet/mellanox/mlx4/en_cq.c
133
cq->size = priv->rx_ring[cq->ring]->actual_size;
drivers/net/ethernet/mellanox/mlx4/en_cq.c
48
int entries, int ring, enum cq_type mode,
drivers/net/ethernet/mellanox/mlx4/en_cq.c
64
cq->ring = ring;
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
1464
int ring, err;
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
1472
for (ring = 0; ring < priv->rx_ring_num; ring++) {
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
1473
rx_packets = READ_ONCE(priv->rx_ring[ring]->packets);
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
1474
rx_bytes = READ_ONCE(priv->rx_ring[ring]->bytes);
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
1476
rx_pkt_diff = rx_packets - priv->last_moder_packets[ring];
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
1480
priv->last_moder_bytes[ring]) / packets : 0;
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
1499
cq = priv->rx_cq[ring];
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
1500
if (moder_time != priv->last_moder_time[ring] ||
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
1502
priv->last_moder_time[ring] = moder_time;
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
1508
ring);
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
1510
priv->last_moder_packets[ring] = rx_packets;
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
1511
priv->last_moder_bytes[ring] = rx_bytes;
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
1601
struct mlx4_en_rx_ring *ring = priv->rx_ring[ring_idx];
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
1604
if (!zalloc_cpumask_var(&ring->affinity_mask, GFP_KERNEL))
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
1608
ring->affinity_mask);
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
3094
const struct mlx4_en_rx_ring *ring;
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
3101
ring = priv->rx_ring[i];
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
3102
stats->packets = READ_ONCE(ring->packets);
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
3103
stats->bytes = READ_ONCE(ring->bytes);
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
3104
stats->alloc_fail = READ_ONCE(ring->alloc_fail);
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
3114
const struct mlx4_en_tx_ring *ring;
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
3121
ring = priv->tx_ring[TX][i];
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
3122
stats->packets = READ_ONCE(ring->packets);
drivers/net/ethernet/mellanox/mlx4/en_netdev.c
3123
stats->bytes = READ_ONCE(ring->bytes);
drivers/net/ethernet/mellanox/mlx4/en_port.c
163
const struct mlx4_en_rx_ring *ring = priv->rx_ring[i];
drivers/net/ethernet/mellanox/mlx4/en_port.c
165
packets += READ_ONCE(ring->packets);
drivers/net/ethernet/mellanox/mlx4/en_port.c
166
bytes += READ_ONCE(ring->bytes);
drivers/net/ethernet/mellanox/mlx4/en_port.c
174
const struct mlx4_en_tx_ring *ring = priv->tx_ring[TX][i];
drivers/net/ethernet/mellanox/mlx4/en_port.c
176
packets += READ_ONCE(ring->packets);
drivers/net/ethernet/mellanox/mlx4/en_port.c
177
bytes += READ_ONCE(ring->bytes);
drivers/net/ethernet/mellanox/mlx4/en_port.c
252
const struct mlx4_en_rx_ring *ring = priv->rx_ring[i];
drivers/net/ethernet/mellanox/mlx4/en_port.c
254
sw_rx_dropped += READ_ONCE(ring->dropped);
drivers/net/ethernet/mellanox/mlx4/en_port.c
255
priv->port_stats.rx_chksum_good += READ_ONCE(ring->csum_ok);
drivers/net/ethernet/mellanox/mlx4/en_port.c
256
priv->port_stats.rx_chksum_none += READ_ONCE(ring->csum_none);
drivers/net/ethernet/mellanox/mlx4/en_port.c
257
priv->port_stats.rx_chksum_complete += READ_ONCE(ring->csum_complete);
drivers/net/ethernet/mellanox/mlx4/en_port.c
258
priv->port_stats.rx_alloc_pages += READ_ONCE(ring->rx_alloc_pages);
drivers/net/ethernet/mellanox/mlx4/en_port.c
259
priv->xdp_stats.rx_xdp_drop += READ_ONCE(ring->xdp_drop);
drivers/net/ethernet/mellanox/mlx4/en_port.c
260
priv->xdp_stats.rx_xdp_redirect += READ_ONCE(ring->xdp_redirect);
drivers/net/ethernet/mellanox/mlx4/en_port.c
261
priv->xdp_stats.rx_xdp_redirect_fail += READ_ONCE(ring->xdp_redirect_fail);
drivers/net/ethernet/mellanox/mlx4/en_port.c
262
priv->xdp_stats.rx_xdp_tx += READ_ONCE(ring->xdp_tx);
drivers/net/ethernet/mellanox/mlx4/en_port.c
263
priv->xdp_stats.rx_xdp_tx_full += READ_ONCE(ring->xdp_tx_full);
drivers/net/ethernet/mellanox/mlx4/en_port.c
272
const struct mlx4_en_tx_ring *ring = priv->tx_ring[TX][i];
drivers/net/ethernet/mellanox/mlx4/en_port.c
274
sw_tx_dropped += READ_ONCE(ring->tx_dropped);
drivers/net/ethernet/mellanox/mlx4/en_port.c
275
priv->port_stats.tx_chksum_offload += READ_ONCE(ring->tx_csum);
drivers/net/ethernet/mellanox/mlx4/en_port.c
276
priv->port_stats.queue_stopped += READ_ONCE(ring->queue_stopped);
drivers/net/ethernet/mellanox/mlx4/en_port.c
277
priv->port_stats.wake_queue += READ_ONCE(ring->wake_queue);
drivers/net/ethernet/mellanox/mlx4/en_port.c
278
priv->port_stats.tso_packets += READ_ONCE(ring->tso_packets);
drivers/net/ethernet/mellanox/mlx4/en_port.c
279
priv->port_stats.xmit_more += READ_ONCE(ring->xmit_more);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
1089
struct mlx4_en_rx_ring *ring,
drivers/net/ethernet/mellanox/mlx4/en_rx.c
1108
mlx4_en_fill_qp_context(priv, ring->actual_size, ring->stride, 0, 0,
drivers/net/ethernet/mellanox/mlx4/en_rx.c
1109
qpn, ring->cqn, -1, context);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
111
possible_frags = (ring->stride - sizeof(struct mlx4_en_rx_desc)) / DS_SIZE;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
1110
context->db_rec_addr = cpu_to_be64(ring->wqres.db.dma);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
1116
ring->fcs_del = 0;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
1118
ring->fcs_del = ETH_FCS_LEN;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
1120
ring->fcs_del = 0;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
1122
err = mlx4_qp_to_ready(mdev->dev, &ring->wqres.mtt, context, qp, state);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
1127
mlx4_en_update_rx_prod_db(ring);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
120
struct mlx4_en_rx_ring *ring, int index,
drivers/net/ethernet/mellanox/mlx4/en_rx.c
123
struct mlx4_en_rx_desc *rx_desc = ring->buf +
drivers/net/ethernet/mellanox/mlx4/en_rx.c
124
(index << ring->log_stride);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
125
struct mlx4_en_rx_alloc *frags = ring->rx_info +
drivers/net/ethernet/mellanox/mlx4/en_rx.c
128
return mlx4_en_alloc_frags(priv, ring, rx_desc, frags, gfp);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
131
static bool mlx4_en_is_ring_empty(const struct mlx4_en_rx_ring *ring)
drivers/net/ethernet/mellanox/mlx4/en_rx.c
133
return ring->prod == ring->cons;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
136
static inline void mlx4_en_update_rx_prod_db(struct mlx4_en_rx_ring *ring)
drivers/net/ethernet/mellanox/mlx4/en_rx.c
138
*ring->wqres.db.db = cpu_to_be32(ring->prod & 0xffff);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
143
struct mlx4_en_rx_ring *ring,
drivers/net/ethernet/mellanox/mlx4/en_rx.c
149
frags = ring->rx_info + (index << priv->log_rx_info);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
152
mlx4_en_free_frag(priv, ring, frags + nr);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
159
struct mlx4_en_rx_ring *ring;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
166
ring = priv->rx_ring[ring_ind];
drivers/net/ethernet/mellanox/mlx4/en_rx.c
168
if (mlx4_en_prepare_rx_desc(priv, ring,
drivers/net/ethernet/mellanox/mlx4/en_rx.c
169
ring->actual_size,
drivers/net/ethernet/mellanox/mlx4/en_rx.c
171
if (ring->actual_size < MLX4_EN_MIN_RX_SIZE) {
drivers/net/ethernet/mellanox/mlx4/en_rx.c
175
new_size = rounddown_pow_of_two(ring->actual_size);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
177
ring->actual_size, new_size);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
181
ring->actual_size++;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
182
ring->prod++;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
189
ring = priv->rx_ring[ring_ind];
drivers/net/ethernet/mellanox/mlx4/en_rx.c
190
while (ring->actual_size > new_size) {
drivers/net/ethernet/mellanox/mlx4/en_rx.c
191
ring->actual_size--;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
192
ring->prod--;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
193
mlx4_en_free_rx_desc(priv, ring, ring->actual_size);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
201
struct mlx4_en_rx_ring *ring)
drivers/net/ethernet/mellanox/mlx4/en_rx.c
206
ring->cons, ring->prod);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
209
for (index = 0; index < ring->size; index++) {
drivers/net/ethernet/mellanox/mlx4/en_rx.c
211
mlx4_en_free_rx_desc(priv, ring, index);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
213
ring->cons = 0;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
214
ring->prod = 0;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
243
struct mlx4_en_rx_ring *ring;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
247
ring = kzalloc_node(sizeof(*ring), GFP_KERNEL, node);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
248
if (!ring) {
drivers/net/ethernet/mellanox/mlx4/en_rx.c
253
ring->prod = 0;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
254
ring->cons = 0;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
255
ring->size = size;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
256
ring->size_mask = size - 1;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
257
ring->stride = stride;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
258
ring->log_stride = ffs(ring->stride) - 1;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
259
ring->buf_size = ring->size * ring->stride + TXBB_SIZE;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
269
ring->pp = page_pool_create(&pp);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
270
if (IS_ERR(ring->pp)) {
drivers/net/ethernet/mellanox/mlx4/en_rx.c
271
err = PTR_ERR(ring->pp);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
275
if (xdp_rxq_info_reg(&ring->xdp_rxq, priv->dev, queue_index, 0) < 0)
drivers/net/ethernet/mellanox/mlx4/en_rx.c
278
err = xdp_rxq_info_reg_mem_model(&ring->xdp_rxq, MEM_TYPE_PAGE_POOL,
drivers/net/ethernet/mellanox/mlx4/en_rx.c
279
ring->pp);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
285
ring->rx_info = kvzalloc_node(tmp, GFP_KERNEL, node);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
286
if (!ring->rx_info) {
drivers/net/ethernet/mellanox/mlx4/en_rx.c
292
ring->rx_info, tmp);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
296
err = mlx4_alloc_hwq_res(mdev->dev, &ring->wqres, ring->buf_size);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
301
ring->buf = ring->wqres.buf.direct.buf;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
303
ring->hwtstamp_rx_filter = priv->hwtstamp_config.rx_filter;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
305
*pring = ring;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
309
kvfree(ring->rx_info);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
310
ring->rx_info = NULL;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
312
xdp_rxq_info_unreg(&ring->xdp_rxq);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
314
page_pool_destroy(ring->pp);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
316
kfree(ring);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
324
struct mlx4_en_rx_ring *ring;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
332
ring = priv->rx_ring[ring_ind];
drivers/net/ethernet/mellanox/mlx4/en_rx.c
334
ring->prod = 0;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
335
ring->cons = 0;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
336
ring->actual_size = 0;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
337
ring->cqn = priv->rx_cq[ring_ind]->mcq.cqn;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
339
ring->stride = stride;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
340
if (ring->stride <= TXBB_SIZE) {
drivers/net/ethernet/mellanox/mlx4/en_rx.c
342
__be32 *ptr = (__be32 *)ring->buf;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
346
ring->buf += TXBB_SIZE;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
349
ring->log_stride = ffs(ring->stride) - 1;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
350
ring->buf_size = ring->size * ring->stride;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
352
memset(ring->buf, 0, ring->buf_size);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
353
mlx4_en_update_rx_prod_db(ring);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
356
for (i = 0; i < ring->size; i++)
drivers/net/ethernet/mellanox/mlx4/en_rx.c
357
mlx4_en_init_rx_desc(priv, ring, i);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
364
ring = priv->rx_ring[ring_ind];
drivers/net/ethernet/mellanox/mlx4/en_rx.c
366
ring->size_mask = ring->actual_size - 1;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
367
mlx4_en_update_rx_prod_db(ring);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
391
int ring;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
396
for (ring = 0; ring < priv->rx_ring_num; ring++) {
drivers/net/ethernet/mellanox/mlx4/en_rx.c
397
if (mlx4_en_is_ring_empty(priv->rx_ring[ring])) {
drivers/net/ethernet/mellanox/mlx4/en_rx.c
399
napi_schedule(&priv->rx_cq[ring]->napi);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
410
struct mlx4_en_rx_ring *ring = *pring;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
414
ring->xdp_prog,
drivers/net/ethernet/mellanox/mlx4/en_rx.c
418
xdp_rxq_info_unreg(&ring->xdp_rxq);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
419
mlx4_free_hwq_res(mdev->dev, &ring->wqres, size * stride + TXBB_SIZE);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
420
kvfree(ring->rx_info);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
421
page_pool_destroy(ring->pp);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
422
ring->rx_info = NULL;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
423
kfree(ring);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
428
struct mlx4_en_rx_ring *ring)
drivers/net/ethernet/mellanox/mlx4/en_rx.c
430
mlx4_en_free_rx_buf(priv, ring);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
431
if (ring->stride <= TXBB_SIZE)
drivers/net/ethernet/mellanox/mlx4/en_rx.c
432
ring->buf -= TXBB_SIZE;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
519
struct mlx4_en_rx_ring *ring)
drivers/net/ethernet/mellanox/mlx4/en_rx.c
521
u32 missing = ring->actual_size - (ring->prod - ring->cons);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
527
if (mlx4_en_prepare_rx_desc(priv, ring,
drivers/net/ethernet/mellanox/mlx4/en_rx.c
528
ring->prod & ring->size_mask,
drivers/net/ethernet/mellanox/mlx4/en_rx.c
531
ring->prod++;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
534
mlx4_en_update_rx_prod_db(ring);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
56
struct mlx4_en_rx_ring *ring,
drivers/net/ethernet/mellanox/mlx4/en_rx.c
639
struct mlx4_en_rx_ring *ring;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
647
if (unlikely(_ctx->ring->hwtstamp_rx_filter != HWTSTAMP_FILTER_ALL))
drivers/net/ethernet/mellanox/mlx4/en_rx.c
66
frags->page = page_pool_alloc_pages(ring->pp, gfp);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
68
ring->alloc_fail++;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
689
struct mlx4_en_rx_ring *ring;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
691
int cq_ring = cq->ring;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
701
ring = priv->rx_ring[cq_ring];
drivers/net/ethernet/mellanox/mlx4/en_rx.c
703
xdp_prog = rcu_dereference_bh(ring->xdp_prog);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
704
xdp_init_buff(&mxbuf.xdp, priv->frag_info[0].frag_stride, &ring->xdp_rxq);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
711
index = cq->mcq.cons_index & ring->size_mask;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
725
frags = ring->rx_info + (index << priv->log_rx_info);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
74
ring->rx_alloc_pages++;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
785
length -= ring->fcs_del;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
806
mxbuf.ring = ring;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
823
ring->xdp_redirect++;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
828
ring->xdp_redirect_fail++;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
83
struct mlx4_en_rx_ring *ring,
drivers/net/ethernet/mellanox/mlx4/en_rx.c
832
if (likely(!mlx4_en_xmit_frame(ring, frags, priv,
drivers/net/ethernet/mellanox/mlx4/en_rx.c
847
ring->xdp_drop++;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
853
ring->bytes += length;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
854
ring->packets++;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
861
if (unlikely(ring->hwtstamp_rx_filter == HWTSTAMP_FILTER_ALL)) {
drivers/net/ethernet/mellanox/mlx4/en_rx.c
87
page_pool_put_full_page(ring->pp, frag->page, false);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
887
ring->csum_ok++;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
896
ring->csum_complete++;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
902
ring->csum_none++;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
933
index = (cq->mcq.cons_index) & ring->size_mask;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
95
struct mlx4_en_rx_ring *ring, int index)
drivers/net/ethernet/mellanox/mlx4/en_rx.c
950
ring->cons = cq->mcq.cons_index;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
953
mlx4_en_refill_rx_buffers(priv, ring);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
97
struct mlx4_en_rx_desc *rx_desc = ring->buf + ring->stride * index;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
984
xdp_tx_cq = priv->tx_cq[TX_XDP][cq->ring];
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1011
((ring->prod & ring->size) ?
drivers/net/ethernet/mellanox/mlx4/en_tx.c
102
ring->buf = ring->sp_wqres.buf.direct.buf;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1023
ring->tso_packets++;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1027
ring->packets += i;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1031
((ring->prod & ring->size) ?
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1034
ring->packets++;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1036
ring->bytes += tx_info->nr_bytes;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
105
ring, ring->buf, ring->size, ring->buf_size,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1059
WRITE_ONCE(ring->prod, ring->prod + nr_txbb);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
106
(unsigned long long) ring->sp_wqres.buf.direct.map);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1063
tx_desc = mlx4_en_bounce_to_desc(priv, ring, index, desc_size);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1068
stop_queue = mlx4_en_is_tx_ring_full(ring);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1070
netif_tx_stop_queue(ring->tx_queue);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1071
ring->queue_stopped++;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1074
send_doorbell = __netdev_tx_sent_queue(ring->tx_queue,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
108
err = mlx4_qp_reserve_range(mdev->dev, 1, 1, &ring->qpn,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1083
qpn_vlan.bf_qpn = ring->doorbell_qpn | cpu_to_be32(real_size);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1087
mlx4_en_tx_write_desc(ring, tx_desc, qpn_vlan, desc_size, bf_index,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1099
if (unlikely(!mlx4_en_is_tx_ring_full(ring))) {
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1100
netif_tx_wake_queue(ring->tx_queue);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1101
ring->wake_queue++;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1107
ring->tx_dropped++;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1118
struct mlx4_en_tx_ring *ring)
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1122
for (i = 0; i < ring->size; i++) {
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1123
struct mlx4_en_tx_info *tx_info = &ring->tx_info[i];
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1124
struct mlx4_en_tx_desc *tx_desc = ring->buf +
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1135
tx_desc->data.lkey = ring->mr_key;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1149
struct mlx4_en_tx_ring *ring;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1157
ring = priv->tx_ring[TX_XDP][tx_ind];
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1159
if (unlikely(mlx4_en_is_tx_ring_full(ring)))
drivers/net/ethernet/mellanox/mlx4/en_tx.c
116
err = mlx4_qp_alloc(mdev->dev, ring->qpn, &ring->sp_qp);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1162
index = ring->prod & ring->size_mask;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1163
tx_info = &ring->tx_info[index];
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1165
tx_desc = ring->buf + (index << LOG_TXBB_SIZE);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
118
en_err(priv, "Failed allocating qp %d\n", ring->qpn);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1185
((ring->prod & ring->size) ?
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1190
WRITE_ONCE(ring->prod, ring->prod + MLX4_EN_XDP_TX_NRTXBB);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
1197
ring->xmit_more++;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
121
ring->sp_qp.event = mlx4_en_sqp_event;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
123
err = mlx4_bf_alloc(mdev->dev, &ring->bf, node);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
126
ring->bf.uar = &mdev->priv_uar;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
127
ring->bf.uar->map = mdev->uar_map;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
128
ring->bf_enabled = false;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
129
ring->bf_alloced = false;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
132
ring->bf_alloced = true;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
133
ring->bf_enabled = !!(priv->pflags &
drivers/net/ethernet/mellanox/mlx4/en_tx.c
136
ring->doorbell_address = ring->bf.uar->map + MLX4_SEND_DOORBELL;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
138
ring->hwtstamp_tx_type = priv->hwtstamp_config.tx_type;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
139
ring->queue_index = queue_index;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
144
&ring->sp_affinity_mask);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
146
*pring = ring;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
150
mlx4_qp_release_range(mdev->dev, ring->qpn, 1);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
152
mlx4_free_hwq_res(mdev->dev, &ring->sp_wqres, ring->buf_size);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
154
kfree(ring->bounce_buf);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
155
ring->bounce_buf = NULL;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
157
kvfree(ring->tx_info);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
158
ring->tx_info = NULL;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
160
kfree(ring);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
169
struct mlx4_en_tx_ring *ring = *pring;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
170
en_dbg(DRV, priv, "Destroying tx ring, qpn: %d\n", ring->qpn);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
172
if (ring->bf_alloced)
drivers/net/ethernet/mellanox/mlx4/en_tx.c
173
mlx4_bf_free(mdev->dev, &ring->bf);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
174
mlx4_qp_remove(mdev->dev, &ring->sp_qp);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
175
mlx4_qp_free(mdev->dev, &ring->sp_qp);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
176
mlx4_qp_release_range(priv->mdev->dev, ring->qpn, 1);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
177
mlx4_free_hwq_res(mdev->dev, &ring->sp_wqres, ring->buf_size);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
178
kfree(ring->bounce_buf);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
179
ring->bounce_buf = NULL;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
180
kvfree(ring->tx_info);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
181
ring->tx_info = NULL;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
182
kfree(ring);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
187
struct mlx4_en_tx_ring *ring,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
193
ring->sp_cqn = cq;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
194
ring->prod = 0;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
195
ring->cons = 0xffffffff;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
196
ring->last_nr_txbb = 1;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
197
memset(ring->tx_info, 0, ring->size * sizeof(struct mlx4_en_tx_info));
drivers/net/ethernet/mellanox/mlx4/en_tx.c
198
memset(ring->buf, 0, ring->buf_size);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
199
ring->free_tx_desc = mlx4_en_free_tx_desc;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
201
ring->sp_qp_state = MLX4_QP_STATE_RST;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
202
ring->doorbell_qpn = cpu_to_be32(ring->sp_qp.qpn << 8);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
203
ring->mr_key = cpu_to_be32(mdev->mr.key);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
205
mlx4_en_fill_qp_context(priv, ring->size, ring->sp_stride, 1, 0, ring->qpn,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
206
ring->sp_cqn, user_prio, &ring->sp_context);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
207
if (ring->bf_alloced)
drivers/net/ethernet/mellanox/mlx4/en_tx.c
208
ring->sp_context.usr_page =
drivers/net/ethernet/mellanox/mlx4/en_tx.c
210
ring->bf.uar->index));
drivers/net/ethernet/mellanox/mlx4/en_tx.c
212
err = mlx4_qp_to_ready(mdev->dev, &ring->sp_wqres.mtt, &ring->sp_context,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
213
&ring->sp_qp, &ring->sp_qp_state);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
214
if (!cpumask_empty(&ring->sp_affinity_mask))
drivers/net/ethernet/mellanox/mlx4/en_tx.c
215
netif_set_xps_queue(priv->dev, &ring->sp_affinity_mask,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
216
ring->queue_index);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
222
struct mlx4_en_tx_ring *ring)
drivers/net/ethernet/mellanox/mlx4/en_tx.c
226
mlx4_qp_modify(mdev->dev, NULL, ring->sp_qp_state,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
227
MLX4_QP_STATE_RST, NULL, 0, 0, &ring->sp_qp);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
230
static inline bool mlx4_en_is_tx_ring_full(struct mlx4_en_tx_ring *ring)
drivers/net/ethernet/mellanox/mlx4/en_tx.c
232
u32 used = READ_ONCE(ring->prod) - READ_ONCE(ring->cons);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
234
return used > ring->full_size;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
238
struct mlx4_en_tx_ring *ring, int index,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
242
struct mlx4_en_tx_desc *tx_desc = ring->buf + (index << LOG_TXBB_SIZE);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
243
struct mlx4_en_tx_info *tx_info = &ring->tx_info[index];
drivers/net/ethernet/mellanox/mlx4/en_tx.c
244
void *end = ring->buf + ring->buf_size;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
264
ptr = ring->buf;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
272
struct mlx4_en_tx_ring *ring,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
277
struct mlx4_en_tx_ring *ring,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
281
struct mlx4_en_tx_info *tx_info = &ring->tx_info[index];
drivers/net/ethernet/mellanox/mlx4/en_tx.c
282
struct mlx4_en_tx_desc *tx_desc = ring->buf + (index << LOG_TXBB_SIZE);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
284
void *end = ring->buf + ring->buf_size;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
324
data = ring->buf + ((void *)data - end);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
330
data = ring->buf;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
344
struct mlx4_en_tx_ring *ring,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
349
struct mlx4_en_tx_ring *ring,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
353
struct mlx4_en_tx_info *tx_info = &ring->tx_info[index];
drivers/net/ethernet/mellanox/mlx4/en_tx.c
354
struct page_pool *pool = ring->recycle_ring->pp;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
362
int mlx4_en_free_tx_buf(struct net_device *dev, struct mlx4_en_tx_ring *ring)
drivers/net/ethernet/mellanox/mlx4/en_tx.c
368
ring->cons += ring->last_nr_txbb;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
370
ring->cons, ring->prod);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
372
if ((u32) (ring->prod - ring->cons) > ring->size) {
drivers/net/ethernet/mellanox/mlx4/en_tx.c
378
while (ring->cons != ring->prod) {
drivers/net/ethernet/mellanox/mlx4/en_tx.c
379
ring->last_nr_txbb = ring->free_tx_desc(priv, ring,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
380
ring->cons & ring->size_mask,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
382
ring->cons += ring->last_nr_txbb;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
386
if (ring->tx_queue)
drivers/net/ethernet/mellanox/mlx4/en_tx.c
387
netdev_tx_reset_queue(ring->tx_queue);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
396
u16 cqe_index, struct mlx4_en_tx_ring *ring)
drivers/net/ethernet/mellanox/mlx4/en_tx.c
405
ring->sp_cqn, cqe_index, err_cqe->vendor_err_syndrome, err_cqe->syndrome);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
409
wqe_index = be16_to_cpu(err_cqe->wqe_index) & ring->size_mask;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
410
tx_info = &ring->tx_info[wqe_index];
drivers/net/ethernet/mellanox/mlx4/en_tx.c
412
en_err(priv, "Related WQE - qpn 0x%x, wqe index 0x%x, wqe size 0x%x\n", ring->qpn,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
414
tx_desc = ring->buf + (wqe_index << LOG_TXBB_SIZE);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
429
struct mlx4_en_tx_ring *ring = priv->tx_ring[cq->type][cq->ring];
drivers/net/ethernet/mellanox/mlx4/en_tx.c
436
u32 size_mask = ring->size_mask;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
451
netdev_txq_bql_complete_prefetchw(ring->tx_queue);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
455
last_nr_txbb = READ_ONCE(ring->last_nr_txbb);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
456
ring_cons = READ_ONCE(ring->cons);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
473
if (!test_and_set_bit(MLX4_EN_TX_RING_STATE_RECOVERING, &ring->state))
drivers/net/ethernet/mellanox/mlx4/en_tx.c
475
ring);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
486
if (unlikely(ring->tx_info[ring_index].ts_requested))
drivers/net/ethernet/mellanox/mlx4/en_tx.c
490
last_nr_txbb = INDIRECT_CALL_2(ring->free_tx_desc,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
493
priv, ring, ring_index,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
496
mlx4_en_stamp_wqe(priv, ring, stamp_index,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
498
ring->size));
drivers/net/ethernet/mellanox/mlx4/en_tx.c
502
bytes += ring->tx_info[ring_index].nr_bytes;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
519
WRITE_ONCE(ring->last_nr_txbb, last_nr_txbb);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
520
WRITE_ONCE(ring->cons, ring_cons + txbbs_skipped);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
525
netdev_tx_completed_queue(ring->tx_queue, packets, bytes);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
529
if (netif_tx_queue_stopped(ring->tx_queue) &&
drivers/net/ethernet/mellanox/mlx4/en_tx.c
530
!mlx4_en_is_tx_ring_full(ring)) {
drivers/net/ethernet/mellanox/mlx4/en_tx.c
531
netif_tx_wake_queue(ring->tx_queue);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
532
ring->wake_queue++;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
56
struct mlx4_en_tx_ring *ring;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
568
struct mlx4_en_tx_ring *ring,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
572
u32 copy = (ring->size - index) << LOG_TXBB_SIZE;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
579
*((u32 *) (ring->buf + i)) =
drivers/net/ethernet/mellanox/mlx4/en_tx.c
580
*((u32 *) (ring->bounce_buf + copy + i));
drivers/net/ethernet/mellanox/mlx4/en_tx.c
587
*((u32 *)(ring->buf + (index << LOG_TXBB_SIZE) + i)) =
drivers/net/ethernet/mellanox/mlx4/en_tx.c
588
*((u32 *) (ring->bounce_buf + i));
drivers/net/ethernet/mellanox/mlx4/en_tx.c
592
return ring->buf + (index << LOG_TXBB_SIZE);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
60
ring = kzalloc_node(sizeof(*ring), GFP_KERNEL, node);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
61
if (!ring) {
drivers/net/ethernet/mellanox/mlx4/en_tx.c
66
ring->size = size;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
67
ring->size_mask = size - 1;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
68
ring->sp_stride = stride;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
69
ring->full_size = ring->size - HEADROOM - MLX4_MAX_DESC_TXBBS;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
72
ring->tx_info = kvmalloc_node(tmp, GFP_KERNEL, node);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
73
if (!ring->tx_info) {
drivers/net/ethernet/mellanox/mlx4/en_tx.c
746
void mlx4_en_xmit_doorbell(struct mlx4_en_tx_ring *ring)
drivers/net/ethernet/mellanox/mlx4/en_tx.c
759
(__force u32)ring->doorbell_qpn, ring->doorbell_address);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
762
static void mlx4_en_tx_write_desc(struct mlx4_en_tx_ring *ring,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
781
mlx4_bf_copy(ring->bf.reg + ring->bf.offset, &tx_desc->ctrl,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
786
ring->bf.offset ^= ring->bf.buf_size;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
79
ring->tx_info, tmp);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
794
mlx4_en_xmit_doorbell(ring);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
796
ring->xmit_more++;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
81
ring->bounce_buf = kmalloc_node(MLX4_TX_BOUNCE_BUFFER_SIZE,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
83
if (!ring->bounce_buf) {
drivers/net/ethernet/mellanox/mlx4/en_tx.c
84
ring->bounce_buf = kmalloc(MLX4_TX_BOUNCE_BUFFER_SIZE,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
86
if (!ring->bounce_buf) {
drivers/net/ethernet/mellanox/mlx4/en_tx.c
869
struct mlx4_en_tx_ring *ring;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
890
ring = priv->tx_ring[TX][tx_ind];
drivers/net/ethernet/mellanox/mlx4/en_tx.c
904
bf_ok = ring->bf_enabled;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
91
ring->buf_size = ALIGN(size * ring->sp_stride, MLX4_EN_PAGE_SIZE);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
919
netdev_txq_bql_enqueue_prefetchw(ring->tx_queue);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
922
index = ring->prod & ring->size_mask;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
923
bf_index = ring->prod;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
927
if (likely(index + nr_txbb <= ring->size))
drivers/net/ethernet/mellanox/mlx4/en_tx.c
928
tx_desc = ring->buf + (index << LOG_TXBB_SIZE);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
935
tx_desc = (struct mlx4_en_tx_desc *) ring->bounce_buf;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
941
tx_info = &ring->tx_info[index];
drivers/net/ethernet/mellanox/mlx4/en_tx.c
95
err = mlx4_alloc_hwq_res(mdev->dev, &ring->sp_wqres, ring->buf_size);
drivers/net/ethernet/mellanox/mlx4/en_tx.c
967
lso_header_size, ring->mr_key,
drivers/net/ethernet/mellanox/mlx4/en_tx.c
976
if (unlikely(ring->hwtstamp_tx_type == HWTSTAMP_TX_ON &&
drivers/net/ethernet/mellanox/mlx4/en_tx.c
991
ring->tx_csum++;
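The mlx4_en TX entries above track occupancy with free-running producer/consumer counters (mlx4_en_is_tx_ring_full, and the prod - cons > size sanity check in mlx4_en_free_tx_buf). A minimal userspace sketch of that wraparound-safe accounting follows; struct tx_ring and ring_full() are illustrative names, not the driver's types.

#include <assert.h>
#include <stdbool.h>
#include <stdint.h>

/* Hypothetical stand-in for a TX ring: prod and cons are free-running
 * 32-bit counters, so their unsigned difference stays correct even
 * after either counter wraps past UINT32_MAX. */
struct tx_ring {
	uint32_t prod;		/* advanced by the transmit path */
	uint32_t cons;		/* advanced by the completion path */
	uint32_t size;		/* number of slots, power of two */
	uint32_t full_size;	/* size minus headroom for a worst-case frame */
};

static bool ring_full(const struct tx_ring *r)
{
	uint32_t used = r->prod - r->cons;	/* wraparound-safe occupancy */

	return used > r->full_size;
}

int main(void)
{
	struct tx_ring r = { .size = 1024, .full_size = 1000 };

	/* Counters may have wrapped; only the difference matters. */
	r.cons = UINT32_MAX - 5;
	r.prod = r.cons + 1001;
	assert(ring_full(&r));

	r.prod = r.cons + 10;
	assert(!ring_full(&r));
	return 0;
}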
drivers/net/ethernet/mellanox/mlx4/mlx4_en.h
270
struct mlx4_en_tx_ring *ring,
drivers/net/ethernet/mellanox/mlx4/mlx4_en.h
358
int ring;
drivers/net/ethernet/mellanox/mlx4/mlx4_en.h
684
int entries, int ring, enum cq_type mode, int node);
drivers/net/ethernet/mellanox/mlx4/mlx4_en.h
700
void mlx4_en_xmit_doorbell(struct mlx4_en_tx_ring *ring);
drivers/net/ethernet/mellanox/mlx4/mlx4_en.h
709
struct mlx4_en_tx_ring *ring);
drivers/net/ethernet/mellanox/mlx4/mlx4_en.h
711
struct mlx4_en_tx_ring *ring,
drivers/net/ethernet/mellanox/mlx4/mlx4_en.h
714
struct mlx4_en_tx_ring *ring);
drivers/net/ethernet/mellanox/mlx4/mlx4_en.h
725
struct mlx4_en_rx_ring *ring);
drivers/net/ethernet/mellanox/mlx4/mlx4_en.h
734
struct mlx4_en_tx_ring *ring,
drivers/net/ethernet/mellanox/mlx4/mlx4_en.h
738
struct mlx4_en_tx_ring *ring,
drivers/net/ethernet/mellanox/mlx4/mlx4_en.h
752
int mlx4_en_free_tx_buf(struct net_device *dev, struct mlx4_en_tx_ring *ring);
drivers/net/ethernet/mellanox/mlx5/core/en_stats.c
511
rq_stats->pp_recycle_ring = stats.recycle_stats.ring;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/send.c
1000
err = hws_send_ring_open_cq(ctx->mdev, queue, numa_node, &ring->send_cq);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/send.c
1004
err = hws_send_ring_open_sq(ctx, numa_node, queue, &ring->send_sq,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/send.c
1005
&ring->send_cq);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/send.c
1012
hws_send_ring_close_cq(&ring->send_cq);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/send.c
997
struct mlx5hws_send_ring *ring = &queue->send_ring;
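The hws_send_ring_open_cq()/hws_send_ring_open_sq()/hws_send_ring_close_cq() entries above pair resource opens with a reverse-order unwind when a later open fails. A hedged sketch of that open-in-order, unwind-on-error shape, using stand-in cq/sq types rather than the mlx5hws structures:

#include <stdio.h>

/* Hypothetical sub-resources standing in for the CQ and SQ that the
 * hws_send_ring_open_* entries above pair together. */
struct cq { int opened; };
struct sq { int opened; };

static int open_cq(struct cq *cq)   { cq->opened = 1; return 0; }
static void close_cq(struct cq *cq) { cq->opened = 0; }

/* Force a failure so the unwind path below is exercised. */
static int open_sq(struct sq *sq)   { (void)sq; return -1; }

/* Open in order, unwind in reverse on failure: the same general shape
 * as closing the already-opened CQ when opening the SQ fails. */
static int ring_open(struct cq *cq, struct sq *sq)
{
	int err;

	err = open_cq(cq);
	if (err)
		return err;

	err = open_sq(sq);
	if (err)
		goto err_close_cq;

	return 0;

err_close_cq:
	close_cq(cq);
	return err;
}

int main(void)
{
	struct cq cq = { 0 };
	struct sq sq = { 0 };

	if (ring_open(&cq, &sq))
		printf("open failed, CQ unwound: opened=%d\n", cq.opened);
	return 0;
}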
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_dbg.c
885
struct mlx5dr_send_ring *ring,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_dbg.c
893
DR_DBG_PTR_TO_ID(ring), domain_id,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_dbg.c
894
ring->cq->mcq.cqn, ring->qp->qpn);
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
109
struct fbnic_ring *ring = s->private;
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
121
if (!ring->desc) {
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
126
for (i = 0; i <= ring->size_mask; i++)
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
134
struct fbnic_ring *ring = s->private;
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
146
if (!ring->desc) {
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
151
for (i = 0; i <= ring->size_mask; i++) {
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
152
u64 tcd = le64_to_cpu(ring->desc[i]);
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
183
struct fbnic_ring *ring = s->private;
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
195
if (!ring->desc) {
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
200
for (i = 0; i < (ring->size_mask + 1) * FBNIC_BD_FRAG_COUNT; i++) {
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
201
u64 bd = le64_to_cpu(ring->desc[i]);
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
213
struct fbnic_ring *ring = s->private;
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
214
u64 rcd = le64_to_cpu(ring->desc[i]);
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
256
struct fbnic_ring *ring = s->private;
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
26
struct fbnic_ring *ring = s->private;
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
272
if (!ring->desc) {
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
277
for (i = 0; i <= ring->size_mask; i++)
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
285
struct fbnic_ring *ring = inode->i_private;
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
288
switch (ring->doorbell - fbnic_ring_csr_base(ring)) {
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
307
return single_open(file, show, ring);
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
31
csr_base = fbnic_ring_csr_base(ring);
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
32
doorbell_offset = ring->doorbell - csr_base;
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
35
doorbell_offset, ring->q_idx);
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
37
ring->size_mask, ring->size, ring->flags);
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
39
ring->head, ring->tail);
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
78
struct fbnic_ring *ring = s->private;
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
79
u64 twd = le64_to_cpu(ring->desc[i]);
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
1626
struct fbnic_ring *ring;
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
1633
ring = fbn->tx[i];
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
1635
start = u64_stats_fetch_begin(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
1636
ts_packets = ring->stats.twq.ts_packets;
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
1637
ts_lost = ring->stats.twq.ts_lost;
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
1638
} while (u64_stats_fetch_retry(&ring->stats.syncp, start));
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
281
fbnic_get_ringparam(struct net_device *netdev, struct ethtool_ringparam *ring,
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
287
ring->rx_max_pending = FBNIC_QUEUE_SIZE_MAX;
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
288
ring->rx_mini_max_pending = FBNIC_QUEUE_SIZE_MAX;
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
289
ring->rx_jumbo_max_pending = FBNIC_QUEUE_SIZE_MAX;
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
290
ring->tx_max_pending = FBNIC_QUEUE_SIZE_MAX;
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
292
ring->rx_pending = fbn->rcq_size;
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
293
ring->rx_mini_pending = fbn->hpq_size;
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
294
ring->rx_jumbo_pending = fbn->ppq_size;
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
295
ring->tx_pending = fbn->txq_size;
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
303
struct ethtool_ringparam *ring,
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
306
fbn->rcq_size = ring->rx_pending;
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
307
fbn->hpq_size = ring->rx_mini_pending;
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
308
fbn->ppq_size = ring->rx_jumbo_pending;
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
309
fbn->txq_size = ring->tx_pending;
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
314
fbnic_set_ringparam(struct net_device *netdev, struct ethtool_ringparam *ring,
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
323
ring->rx_pending = roundup_pow_of_two(ring->rx_pending);
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
324
ring->rx_mini_pending = roundup_pow_of_two(ring->rx_mini_pending);
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
325
ring->rx_jumbo_pending = roundup_pow_of_two(ring->rx_jumbo_pending);
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
326
ring->tx_pending = roundup_pow_of_two(ring->tx_pending);
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
333
if (ring->rx_pending < max(FBNIC_QUEUE_SIZE_MIN, FBNIC_RX_DESC_MIN) ||
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
334
ring->rx_mini_pending < FBNIC_QUEUE_SIZE_MIN ||
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
335
ring->rx_jumbo_pending < FBNIC_QUEUE_SIZE_MIN ||
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
336
ring->tx_pending < max(FBNIC_QUEUE_SIZE_MIN, FBNIC_TX_DESC_MIN)) {
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
358
fbnic_set_rings(fbn, ring, kernel_ring);
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
366
fbnic_set_rings(clone, ring, kernel_ring);
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
494
static void fbnic_get_xdp_queue_stats(struct fbnic_ring *ring, u64 **data)
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
499
if (!ring) {
drivers/net/ethernet/meta/fbnic/fbnic_ethtool.c
506
u8 *p = (u8 *)ring + stat->offset;
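Several of the set_ringparam handlers listed here (fbnic, nfp, mana) round the requested descriptor counts up with roundup_pow_of_two() and then check minimum/maximum bounds before applying them. A small stand-alone sketch of that validation, with made-up QUEUE_SIZE_MIN/MAX limits rather than any driver's real constants:

#include <stdint.h>
#include <stdio.h>

/* Illustrative limits; the real drivers use their own
 * FBNIC_QUEUE_SIZE_MIN/MAX style constants. */
#define QUEUE_SIZE_MIN 16U
#define QUEUE_SIZE_MAX 32768U

/* Round v (> 0) up to the next power of two, the same effect the
 * kernel's roundup_pow_of_two() has on these requested counts. */
static uint32_t roundup_pow2(uint32_t v)
{
	v--;
	v |= v >> 1;
	v |= v >> 2;
	v |= v >> 4;
	v |= v >> 8;
	v |= v >> 16;
	return v + 1;
}

/* Round the request up, then reject values outside the window,
 * in the spirit of a set_ringparam handler. */
static int validate_ring_size(uint32_t requested, uint32_t *out)
{
	uint32_t v = roundup_pow2(requested);

	if (v < QUEUE_SIZE_MIN || v > QUEUE_SIZE_MAX)
		return -1;
	*out = v;
	return 0;
}

int main(void)
{
	uint32_t v;

	if (validate_ring_size(600, &v) == 0)
		printf("600 -> %u descriptors\n", (unsigned)v);	/* 1024 */
	if (validate_ring_size(7, &v) != 0)
		printf("7 rejected: below minimum\n");
	return 0;
}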
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
105
static unsigned int fbnic_desc_unused(struct fbnic_ring *ring)
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
107
return (ring->head - ring->tail - 1) & ring->size_mask;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1072
struct fbnic_ring *ring = &nv->qt[0].sub1;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1074
unsigned int tail = ring->tail;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1088
if (fbnic_desc_unused(ring) < nsegs) {
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1089
u64_stats_update_begin(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1090
ring->stats.dropped++;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1091
u64_stats_update_end(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
110
static unsigned int fbnic_desc_used(struct fbnic_ring *ring)
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1106
ring->tx_buf[tail] = page;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1108
twd = &ring->desc[tail];
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1115
tail &= ring->size_mask;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
112
return (ring->tail - ring->head) & ring->size_mask;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1131
ring->tail = tail;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1139
struct fbnic_ring *ring = &nv->qt[0].sub1;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1144
writel(pkt_tail, ring->doorbell);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
116
const struct fbnic_ring *ring)
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
118
return netdev_get_tx_queue(dev, ring->q_idx);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
122
struct fbnic_ring *ring,
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
125
struct netdev_queue *txq = txring_txq(dev, ring);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
128
res = netif_txq_maybe_stop(txq, fbnic_desc_unused(ring), size,
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
131
u64_stats_update_begin(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
132
ring->stats.twq.stop++;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
133
u64_stats_update_end(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
139
static bool fbnic_tx_sent_queue(struct sk_buff *skb, struct fbnic_ring *ring)
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
141
struct netdev_queue *dev_queue = txring_txq(skb->dev, ring);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1605
static void fbnic_ring_init(struct fbnic_ring *ring, u32 __iomem *doorbell,
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1608
u64_stats_init(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1609
ring->doorbell = doorbell;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1610
ring->q_idx = q_idx;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1611
ring->flags = flags;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1612
ring->deferred_head = -1;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1815
struct fbnic_ring *ring)
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1817
kvfree(ring->buffer);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1818
ring->buffer = NULL;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1821
if (!ring->size)
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1824
dma_free_coherent(dev, ring->size, ring->desc, ring->dma);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1825
ring->size_mask = 0;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
1826
ring->size = 0;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
196
fbnic_tx_lso(struct fbnic_ring *ring, struct sk_buff *skb,
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
251
u64_stats_update_begin(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
252
ring->stats.twq.lso += shinfo->gso_segs;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
253
u64_stats_update_end(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
259
fbnic_tx_offloads(struct fbnic_ring *ring, struct sk_buff *skb, __le64 *meta)
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
277
if (fbnic_tx_lso(ring, skb, shinfo, meta, &l2len, &i3len))
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
281
u64_stats_update_begin(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
282
ring->stats.twq.csum_partial++;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
283
u64_stats_update_end(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
314
fbnic_tx_map(struct fbnic_ring *ring, struct sk_buff *skb, __le64 *meta)
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
317
unsigned int tail = ring->tail, first;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
324
ring->tx_buf[tail] = skb;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
327
tail &= ring->size_mask;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
340
twd = &ring->desc[tail];
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
350
ring->tx_buf[tail] = FBNIC_XMIT_NOUNMAP;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
353
tail &= ring->size_mask;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
370
FBNIC_XMIT_CB(skb)->desc_count = ((twd - meta) + 1) & ring->size_mask;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
372
ring->tail = tail;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
378
fbnic_maybe_stop_tx(skb->dev, ring, FBNIC_MAX_SKB_DESC);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
380
if (fbnic_tx_sent_queue(skb, ring)) {
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
386
writel(tail, ring->doorbell);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
396
tail &= ring->size_mask;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
397
twd = &ring->desc[tail];
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
400
else if (ring->tx_buf[tail] == FBNIC_XMIT_NOUNMAP)
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
401
ring->tx_buf[tail] = NULL;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
412
fbnic_xmit_frame_ring(struct sk_buff *skb, struct fbnic_ring *ring)
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
414
__le64 *meta = &ring->desc[ring->tail];
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
42
u32 __iomem *fbnic_ring_csr_base(const struct fbnic_ring *ring)
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
427
if (fbnic_maybe_stop_tx(skb->dev, ring, desc_needed))
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
438
if (fbnic_tx_offloads(ring, skb, meta))
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
44
unsigned long csr_base = (unsigned long)ring->doorbell;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
441
if (fbnic_tx_map(ring, skb, meta))
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
449
u64_stats_update_begin(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
450
ring->stats.dropped++;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
451
u64_stats_update_end(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
51
static u32 fbnic_ring_rd32(struct fbnic_ring *ring, unsigned int csr)
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
53
u32 __iomem *csr_base = fbnic_ring_csr_base(ring);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
545
struct fbnic_ring *ring, bool discard,
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
549
unsigned int head = ring->head;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
553
clean_desc = (hw_head - head) & ring->size_mask;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
556
struct sk_buff *skb = ring->tx_buf[head];
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
570
ring->tx_buf[head] = NULL;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
574
while (!(ring->desc[head] & FBNIC_TWD_TYPE(AL))) {
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
576
head &= ring->size_mask;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
58
static void fbnic_ring_wr32(struct fbnic_ring *ring, unsigned int csr, u32 val)
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
580
fbnic_unmap_single_twd(nv->dev, &ring->desc[head]);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
582
head &= ring->size_mask;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
586
if (ring->tx_buf[head] != FBNIC_XMIT_NOUNMAP)
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
588
&ring->desc[head]);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
590
ring->tx_buf[head] = NULL;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
592
head &= ring->size_mask;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
60
u32 __iomem *csr_base = fbnic_ring_csr_base(ring);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
604
ring->head = head;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
606
txq = txring_txq(nv->napi.dev, ring);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
609
u64_stats_update_begin(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
610
ring->stats.dropped += total_packets;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
611
ring->stats.twq.ts_lost += ts_lost;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
612
u64_stats_update_end(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
618
u64_stats_update_begin(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
619
ring->stats.bytes += total_bytes;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
620
ring->stats.packets += total_packets;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
621
u64_stats_update_end(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
624
fbnic_desc_unused(ring),
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
626
u64_stats_update_begin(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
627
ring->stats.twq.wake++;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
628
u64_stats_update_end(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
633
struct fbnic_ring *ring, bool discard,
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
637
unsigned int head = ring->head;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
643
if (unlikely(!(ring->desc[head] & FBNIC_TWD_TYPE(AL))))
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
646
twd = le64_to_cpu(ring->desc[head]);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
647
page = ring->tx_buf[head];
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
660
head &= ring->size_mask;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
666
ring->head = head;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
669
u64_stats_update_begin(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
670
ring->stats.dropped += total_packets;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
671
u64_stats_update_end(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
675
u64_stats_update_begin(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
676
ring->stats.bytes += total_bytes;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
677
ring->stats.packets += total_packets;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
678
u64_stats_update_end(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
682
struct fbnic_ring *ring,
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
691
head = (*ts_head < 0) ? ring->head : *ts_head;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
696
if (head == ring->tail) {
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
703
skb = ring->tx_buf[head];
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
707
head &= ring->size_mask;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
726
u64_stats_update_begin(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
727
ring->stats.twq.ts_packets++;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
728
u64_stats_update_end(&ring->stats.syncp);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
731
static void fbnic_page_pool_init(struct fbnic_ring *ring, unsigned int idx,
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
734
struct fbnic_rx_buf *rx_buf = &ring->rx_buf[idx];
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
762
static void fbnic_page_pool_drain(struct fbnic_ring *ring, unsigned int idx,
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
765
struct fbnic_rx_buf *rx_buf = &ring->rx_buf[idx];
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
769
page_pool_put_unrefed_netmem(ring->page_pool, netmem, -1,
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
855
static void fbnic_clean_bdq(struct fbnic_ring *ring, unsigned int hw_head,
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
858
unsigned int head = ring->head;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
864
fbnic_page_pool_drain(ring, head, napi_budget);
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
867
head &= ring->size_mask;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.c
870
ring->head = head;
drivers/net/ethernet/meta/fbnic/fbnic_txrx.h
196
u32 __iomem *fbnic_ring_csr_base(const struct fbnic_ring *ring);
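fbnic_desc_unused()/fbnic_desc_used() above (and the pasemi RING_USED/RING_AVAIL macros later in this listing) compute occupancy of a power-of-two ring by masking the head/tail difference, keeping one slot empty so a full ring stays distinguishable from an empty one. A self-contained sketch with a hypothetical struct desc_ring:

#include <assert.h>
#include <stdint.h>

/* Hypothetical descriptor ring: head (cleaned) and tail (filled) are
 * kept masked to the ring, and size_mask = size - 1 with size a power
 * of two, mirroring the fbnic_desc_unused()/fbnic_desc_used() idiom. */
struct desc_ring {
	uint32_t head;
	uint32_t tail;
	uint32_t size_mask;
};

/* Slots still available to software; one slot is reserved so that
 * head == tail always means "empty". */
static uint32_t desc_unused(const struct desc_ring *r)
{
	return (r->head - r->tail - 1) & r->size_mask;
}

static uint32_t desc_used(const struct desc_ring *r)
{
	return (r->tail - r->head) & r->size_mask;
}

int main(void)
{
	struct desc_ring r = { .head = 0, .tail = 0, .size_mask = 255 };

	assert(desc_used(&r) == 0);
	assert(desc_unused(&r) == 255);	/* one slot reserved */

	/* Post three descriptors, wrapping the tail with the mask. */
	for (int i = 0; i < 3; i++)
		r.tail = (r.tail + 1) & r.size_mask;

	assert(desc_used(&r) == 3);
	assert(desc_unused(&r) == 252);
	return 0;
}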
drivers/net/ethernet/micrel/ksz884x.c
1534
*desc = &info->ring[info->last];
drivers/net/ethernet/micrel/ksz884x.c
1554
*desc = &info->ring[info->next];
drivers/net/ethernet/micrel/ksz884x.c
3461
struct ksz_desc *cur = desc_info->ring;
drivers/net/ethernet/micrel/ksz884x.c
3477
desc_info->cur = desc_info->ring;
drivers/net/ethernet/micrel/ksz884x.c
3497
info->cur = info->ring;
drivers/net/ethernet/micrel/ksz884x.c
3994
desc_info->ring = kzalloc_objs(struct ksz_desc, desc_info->alloc);
drivers/net/ethernet/micrel/ksz884x.c
3995
if (!desc_info->ring)
drivers/net/ethernet/micrel/ksz884x.c
4186
kfree(hw->rx_desc_info.ring);
drivers/net/ethernet/micrel/ksz884x.c
4187
hw->rx_desc_info.ring = NULL;
drivers/net/ethernet/micrel/ksz884x.c
4188
kfree(hw->tx_desc_info.ring);
drivers/net/ethernet/micrel/ksz884x.c
4189
hw->tx_desc_info.ring = NULL;
drivers/net/ethernet/micrel/ksz884x.c
4205
struct ksz_desc *desc = desc_info->ring;
drivers/net/ethernet/micrel/ksz884x.c
4376
desc = &info->ring[last];
drivers/net/ethernet/micrel/ksz884x.c
4669
desc = &info->ring[next];
drivers/net/ethernet/micrel/ksz884x.c
4705
desc = &info->ring[next];
drivers/net/ethernet/micrel/ksz884x.c
4750
desc = &info->ring[next];
drivers/net/ethernet/micrel/ksz884x.c
6026
struct ethtool_ringparam *ring,
drivers/net/ethernet/micrel/ksz884x.c
6034
ring->tx_max_pending = (1 << 9);
drivers/net/ethernet/micrel/ksz884x.c
6035
ring->tx_pending = hw->tx_desc_info.alloc;
drivers/net/ethernet/micrel/ksz884x.c
6036
ring->rx_max_pending = (1 << 9);
drivers/net/ethernet/micrel/ksz884x.c
6037
ring->rx_pending = hw->rx_desc_info.alloc;
drivers/net/ethernet/micrel/ksz884x.c
933
struct ksz_desc *ring;
drivers/net/ethernet/microsoft/mana/mana_ethtool.c
426
struct ethtool_ringparam *ring,
drivers/net/ethernet/microsoft/mana/mana_ethtool.c
432
ring->rx_pending = apc->rx_queue_size;
drivers/net/ethernet/microsoft/mana/mana_ethtool.c
433
ring->tx_pending = apc->tx_queue_size;
drivers/net/ethernet/microsoft/mana/mana_ethtool.c
434
ring->rx_max_pending = MAX_RX_BUFFERS_PER_QUEUE;
drivers/net/ethernet/microsoft/mana/mana_ethtool.c
435
ring->tx_max_pending = MAX_TX_BUFFERS_PER_QUEUE;
drivers/net/ethernet/microsoft/mana/mana_ethtool.c
439
struct ethtool_ringparam *ring,
drivers/net/ethernet/microsoft/mana/mana_ethtool.c
451
if (ring->tx_pending < MIN_TX_BUFFERS_PER_QUEUE) {
drivers/net/ethernet/microsoft/mana/mana_ethtool.c
452
NL_SET_ERR_MSG_FMT(extack, "tx:%d less than the min:%d", ring->tx_pending,
drivers/net/ethernet/microsoft/mana/mana_ethtool.c
457
if (ring->rx_pending < MIN_RX_BUFFERS_PER_QUEUE) {
drivers/net/ethernet/microsoft/mana/mana_ethtool.c
458
NL_SET_ERR_MSG_FMT(extack, "rx:%d less than the min:%d", ring->rx_pending,
drivers/net/ethernet/microsoft/mana/mana_ethtool.c
463
new_rx = roundup_pow_of_two(ring->rx_pending);
drivers/net/ethernet/microsoft/mana/mana_ethtool.c
464
new_tx = roundup_pow_of_two(ring->tx_pending);
drivers/net/ethernet/myricom/myri10ge/myri10ge.c
1717
struct ethtool_ringparam *ring,
drivers/net/ethernet/myricom/myri10ge/myri10ge.c
1723
ring->rx_mini_max_pending = mgp->ss[0].rx_small.mask + 1;
drivers/net/ethernet/myricom/myri10ge/myri10ge.c
1724
ring->rx_max_pending = mgp->ss[0].rx_big.mask + 1;
drivers/net/ethernet/myricom/myri10ge/myri10ge.c
1725
ring->rx_jumbo_max_pending = 0;
drivers/net/ethernet/myricom/myri10ge/myri10ge.c
1726
ring->tx_max_pending = mgp->ss[0].tx.mask + 1;
drivers/net/ethernet/myricom/myri10ge/myri10ge.c
1727
ring->rx_mini_pending = ring->rx_mini_max_pending;
drivers/net/ethernet/myricom/myri10ge/myri10ge.c
1728
ring->rx_pending = ring->rx_max_pending;
drivers/net/ethernet/myricom/myri10ge/myri10ge.c
1729
ring->rx_jumbo_pending = ring->rx_jumbo_max_pending;
drivers/net/ethernet/myricom/myri10ge/myri10ge.c
1730
ring->tx_pending = ring->tx_max_pending;
drivers/net/ethernet/netronome/nfp/flower/metadata.c
140
struct circ_buf *ring;
drivers/net/ethernet/netronome/nfp/flower/metadata.c
142
ring = &priv->mask_ids.mask_id_free_list;
drivers/net/ethernet/netronome/nfp/flower/metadata.c
146
if (CIRC_SPACE(ring->head, ring->tail, NFP_FLOWER_MASK_ENTRY_RS) == 0)
drivers/net/ethernet/netronome/nfp/flower/metadata.c
152
memcpy(&ring->buf[ring->head * NFP_FLOWER_MASK_ELEMENT_RS], &mask_id,
drivers/net/ethernet/netronome/nfp/flower/metadata.c
154
ring->head = (ring->head + 1) & (NFP_FLOWER_MASK_ENTRY_RS - 1);
drivers/net/ethernet/netronome/nfp/flower/metadata.c
165
struct circ_buf *ring;
drivers/net/ethernet/netronome/nfp/flower/metadata.c
168
ring = &priv->mask_ids.mask_id_free_list;
drivers/net/ethernet/netronome/nfp/flower/metadata.c
178
if (ring->head == ring->tail)
drivers/net/ethernet/netronome/nfp/flower/metadata.c
184
memcpy(&temp_id, &ring->buf[ring->tail * NFP_FLOWER_MASK_ELEMENT_RS],
drivers/net/ethernet/netronome/nfp/flower/metadata.c
194
memcpy(&ring->buf[ring->tail * NFP_FLOWER_MASK_ELEMENT_RS], &freed_id,
drivers/net/ethernet/netronome/nfp/flower/metadata.c
197
ring->tail = (ring->tail + 1) & (NFP_FLOWER_MASK_ENTRY_RS - 1);
drivers/net/ethernet/netronome/nfp/flower/metadata.c
43
struct circ_buf *ring;
drivers/net/ethernet/netronome/nfp/flower/metadata.c
45
ring = &priv->stats_ids.free_list;
drivers/net/ethernet/netronome/nfp/flower/metadata.c
47
if (!CIRC_SPACE(ring->head, ring->tail, priv->stats_ring_size))
drivers/net/ethernet/netronome/nfp/flower/metadata.c
51
memcpy(&ring->buf[ring->head * NFP_FL_STATS_ELEM_RS],
drivers/net/ethernet/netronome/nfp/flower/metadata.c
53
ring->head = (ring->head + 1) & (priv->stats_ring_size - 1);
drivers/net/ethernet/netronome/nfp/flower/metadata.c
62
struct circ_buf *ring;
drivers/net/ethernet/netronome/nfp/flower/metadata.c
64
ring = &priv->stats_ids.free_list;
drivers/net/ethernet/netronome/nfp/flower/metadata.c
83
if (ring->head == ring->tail) {
drivers/net/ethernet/netronome/nfp/flower/metadata.c
89
memcpy(&temp_stats_id, &ring->buf[ring->tail * NFP_FL_STATS_ELEM_RS],
drivers/net/ethernet/netronome/nfp/flower/metadata.c
92
memcpy(&ring->buf[ring->tail * NFP_FL_STATS_ELEM_RS], &freed_stats_id,
drivers/net/ethernet/netronome/nfp/flower/metadata.c
95
ring->tail = (ring->tail + 1) & (priv->stats_ring_size - 1);
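The nfp flower mask-id and stats-id entries above recycle IDs through a circ_buf whose head and tail are advanced under a power-of-two mask, with CIRC_SPACE() guarding the push side. An illustrative userspace free list in the same style (the free_list_* names are hypothetical, not the driver's API):

#include <assert.h>
#include <stdint.h>

/* Illustrative capacity; power of two so head/tail can be masked. */
#define FREE_LIST_SIZE 256U

/* Hypothetical ID free list: releases push at head, allocations pop
 * at tail, and one slot stays unused as the full/empty sentinel,
 * matching the kernel's CIRC_SPACE() convention. */
struct id_free_list {
	uint32_t head;
	uint32_t tail;
	uint8_t buf[FREE_LIST_SIZE];
};

/* Free slots left for pushes: (tail - head - 1) & (size - 1). */
static uint32_t free_list_space(const struct id_free_list *l)
{
	return (l->tail - l->head - 1) & (FREE_LIST_SIZE - 1);
}

static int free_list_push(struct id_free_list *l, uint8_t id)
{
	if (free_list_space(l) == 0)
		return -1;			/* list full */
	l->buf[l->head] = id;
	l->head = (l->head + 1) & (FREE_LIST_SIZE - 1);
	return 0;
}

static int free_list_pop(struct id_free_list *l, uint8_t *id)
{
	if (l->head == l->tail)
		return -1;			/* nothing released yet */
	*id = l->buf[l->tail];
	l->tail = (l->tail + 1) & (FREE_LIST_SIZE - 1);
	return 0;
}

int main(void)
{
	struct id_free_list l = { 0 };
	uint8_t id;

	assert(free_list_pop(&l, &id) == -1);	/* empty at first */
	assert(free_list_push(&l, 42) == 0);
	assert(free_list_pop(&l, &id) == 0 && id == 42);
	return 0;
}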
drivers/net/ethernet/netronome/nfp/nfp_net.h
118
#define D_IDX(ring, idx) ((idx) & ((ring)->cnt - 1))
drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c
623
struct ethtool_ringparam *ring,
drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c
630
ring->rx_max_pending = qc_max;
drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c
631
ring->tx_max_pending = qc_max / nn->dp.ops->tx_min_desc_per_pkt;
drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c
632
ring->rx_pending = nn->dp.rxd_cnt;
drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c
633
ring->tx_pending = nn->dp.txd_cnt;
drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c
652
struct ethtool_ringparam *ring,
drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c
660
if (ring->rx_mini_pending || ring->rx_jumbo_pending)
drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c
667
rxd_cnt = roundup_pow_of_two(ring->rx_pending);
drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c
668
txd_cnt = roundup_pow_of_two(ring->tx_pending);
drivers/net/ethernet/nvidia/forcedeth.c
4641
struct ethtool_ringparam *ring,
drivers/net/ethernet/nvidia/forcedeth.c
4647
ring->rx_max_pending = (np->desc_ver == DESC_VER_1) ? RING_MAX_DESC_VER_1 : RING_MAX_DESC_VER_2_3;
drivers/net/ethernet/nvidia/forcedeth.c
4648
ring->tx_max_pending = (np->desc_ver == DESC_VER_1) ? RING_MAX_DESC_VER_1 : RING_MAX_DESC_VER_2_3;
drivers/net/ethernet/nvidia/forcedeth.c
4650
ring->rx_pending = np->rx_ring_size;
drivers/net/ethernet/nvidia/forcedeth.c
4651
ring->tx_pending = np->tx_ring_size;
drivers/net/ethernet/nvidia/forcedeth.c
4655
struct ethtool_ringparam *ring,
drivers/net/ethernet/nvidia/forcedeth.c
4664
if (ring->rx_pending < RX_RING_MIN ||
drivers/net/ethernet/nvidia/forcedeth.c
4665
ring->tx_pending < TX_RING_MIN ||
drivers/net/ethernet/nvidia/forcedeth.c
4666
ring->rx_mini_pending != 0 ||
drivers/net/ethernet/nvidia/forcedeth.c
4667
ring->rx_jumbo_pending != 0 ||
drivers/net/ethernet/nvidia/forcedeth.c
4669
(ring->rx_pending > RING_MAX_DESC_VER_1 ||
drivers/net/ethernet/nvidia/forcedeth.c
4670
ring->tx_pending > RING_MAX_DESC_VER_1)) ||
drivers/net/ethernet/nvidia/forcedeth.c
4672
(ring->rx_pending > RING_MAX_DESC_VER_2_3 ||
drivers/net/ethernet/nvidia/forcedeth.c
4673
ring->tx_pending > RING_MAX_DESC_VER_2_3))) {
drivers/net/ethernet/nvidia/forcedeth.c
4681
(ring->rx_pending +
drivers/net/ethernet/nvidia/forcedeth.c
4682
ring->tx_pending),
drivers/net/ethernet/nvidia/forcedeth.c
4687
(ring->rx_pending +
drivers/net/ethernet/nvidia/forcedeth.c
4688
ring->tx_pending),
drivers/net/ethernet/nvidia/forcedeth.c
4691
rx_skbuff = kmalloc_array(ring->rx_pending, sizeof(struct nv_skb_map),
drivers/net/ethernet/nvidia/forcedeth.c
4693
tx_skbuff = kmalloc_array(ring->tx_pending, sizeof(struct nv_skb_map),
drivers/net/ethernet/nvidia/forcedeth.c
4701
(ring->rx_pending +
drivers/net/ethernet/nvidia/forcedeth.c
4702
ring->tx_pending),
drivers/net/ethernet/nvidia/forcedeth.c
4708
(ring->rx_pending +
drivers/net/ethernet/nvidia/forcedeth.c
4709
ring->tx_pending),
drivers/net/ethernet/nvidia/forcedeth.c
4734
np->rx_ring_size = ring->rx_pending;
drivers/net/ethernet/nvidia/forcedeth.c
4735
np->tx_ring_size = ring->tx_pending;
drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_ethtool.c
277
struct ethtool_ringparam *ring,
drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_ethtool.c
285
ring->rx_max_pending = PCH_GBE_MAX_RXD;
drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_ethtool.c
286
ring->tx_max_pending = PCH_GBE_MAX_TXD;
drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_ethtool.c
287
ring->rx_pending = rxdr->count;
drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_ethtool.c
288
ring->tx_pending = txdr->count;
drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_ethtool.c
302
struct ethtool_ringparam *ring,
drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_ethtool.c
312
if ((ring->rx_mini_pending) || (ring->rx_jumbo_pending))
drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_ethtool.c
336
clamp_val(ring->rx_pending, PCH_GBE_MIN_RXD, PCH_GBE_MAX_RXD);
drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_ethtool.c
340
clamp_val(ring->tx_pending, PCH_GBE_MIN_RXD, PCH_GBE_MAX_RXD);
drivers/net/ethernet/pasemi/pasemi_mac.c
269
struct pasemi_mac_csring *ring;
drivers/net/ethernet/pasemi/pasemi_mac.c
274
ring = pasemi_dma_alloc_chan(TXCHAN, sizeof(struct pasemi_mac_csring),
drivers/net/ethernet/pasemi/pasemi_mac.c
277
if (!ring) {
drivers/net/ethernet/pasemi/pasemi_mac.c
282
chno = ring->chan.chno;
drivers/net/ethernet/pasemi/pasemi_mac.c
284
ring->size = CS_RING_SIZE;
drivers/net/ethernet/pasemi/pasemi_mac.c
285
ring->next_to_fill = 0;
drivers/net/ethernet/pasemi/pasemi_mac.c
288
if (pasemi_dma_alloc_ring(&ring->chan, CS_RING_SIZE))
drivers/net/ethernet/pasemi/pasemi_mac.c
292
PAS_DMA_TXCHAN_BASEL_BRBL(ring->chan.ring_dma));
drivers/net/ethernet/pasemi/pasemi_mac.c
293
val = PAS_DMA_TXCHAN_BASEU_BRBH(ring->chan.ring_dma >> 32);
drivers/net/ethernet/pasemi/pasemi_mac.c
298
ring->events[0] = pasemi_dma_alloc_flag();
drivers/net/ethernet/pasemi/pasemi_mac.c
299
ring->events[1] = pasemi_dma_alloc_flag();
drivers/net/ethernet/pasemi/pasemi_mac.c
300
if (ring->events[0] < 0 || ring->events[1] < 0)
drivers/net/ethernet/pasemi/pasemi_mac.c
303
pasemi_dma_clear_flag(ring->events[0]);
drivers/net/ethernet/pasemi/pasemi_mac.c
304
pasemi_dma_clear_flag(ring->events[1]);
drivers/net/ethernet/pasemi/pasemi_mac.c
306
ring->fun = pasemi_dma_alloc_fun();
drivers/net/ethernet/pasemi/pasemi_mac.c
307
if (ring->fun < 0)
drivers/net/ethernet/pasemi/pasemi_mac.c
311
PAS_DMA_TXCHAN_CFG_TATTR(ring->fun) |
drivers/net/ethernet/pasemi/pasemi_mac.c
320
pasemi_dma_start_chan(&ring->chan, PAS_DMA_TXCHAN_TCMDSTA_SZ |
drivers/net/ethernet/pasemi/pasemi_mac.c
325
return ring;
drivers/net/ethernet/pasemi/pasemi_mac.c
329
if (ring->events[0] >= 0)
drivers/net/ethernet/pasemi/pasemi_mac.c
330
pasemi_dma_free_flag(ring->events[0]);
drivers/net/ethernet/pasemi/pasemi_mac.c
331
if (ring->events[1] >= 0)
drivers/net/ethernet/pasemi/pasemi_mac.c
332
pasemi_dma_free_flag(ring->events[1]);
drivers/net/ethernet/pasemi/pasemi_mac.c
333
pasemi_dma_free_ring(&ring->chan);
drivers/net/ethernet/pasemi/pasemi_mac.c
335
pasemi_dma_free_chan(&ring->chan);
drivers/net/ethernet/pasemi/pasemi_mac.c
367
struct pasemi_mac_rxring *ring;
drivers/net/ethernet/pasemi/pasemi_mac.c
372
ring = pasemi_dma_alloc_chan(RXCHAN, sizeof(struct pasemi_mac_rxring),
drivers/net/ethernet/pasemi/pasemi_mac.c
375
if (!ring) {
drivers/net/ethernet/pasemi/pasemi_mac.c
379
chno = ring->chan.chno;
drivers/net/ethernet/pasemi/pasemi_mac.c
381
spin_lock_init(&ring->lock);
drivers/net/ethernet/pasemi/pasemi_mac.c
383
ring->size = RX_RING_SIZE;
drivers/net/ethernet/pasemi/pasemi_mac.c
384
ring->ring_info = kzalloc_objs(struct pasemi_mac_buffer, RX_RING_SIZE);
drivers/net/ethernet/pasemi/pasemi_mac.c
386
if (!ring->ring_info)
drivers/net/ethernet/pasemi/pasemi_mac.c
390
if (pasemi_dma_alloc_ring(&ring->chan, RX_RING_SIZE))
drivers/net/ethernet/pasemi/pasemi_mac.c
393
ring->buffers = dma_alloc_coherent(&mac->dma_pdev->dev,
drivers/net/ethernet/pasemi/pasemi_mac.c
395
&ring->buf_dma, GFP_KERNEL);
drivers/net/ethernet/pasemi/pasemi_mac.c
396
if (!ring->buffers)
drivers/net/ethernet/pasemi/pasemi_mac.c
400
PAS_DMA_RXCHAN_BASEL_BRBL(ring->chan.ring_dma));
drivers/net/ethernet/pasemi/pasemi_mac.c
403
PAS_DMA_RXCHAN_BASEU_BRBH(ring->chan.ring_dma >> 32) |
drivers/net/ethernet/pasemi/pasemi_mac.c
414
PAS_DMA_RXINT_BASEL_BRBL(ring->buf_dma));
drivers/net/ethernet/pasemi/pasemi_mac.c
417
PAS_DMA_RXINT_BASEU_BRBH(ring->buf_dma >> 32) |
drivers/net/ethernet/pasemi/pasemi_mac.c
429
ring->next_to_fill = 0;
drivers/net/ethernet/pasemi/pasemi_mac.c
430
ring->next_to_clean = 0;
drivers/net/ethernet/pasemi/pasemi_mac.c
431
ring->mac = mac;
drivers/net/ethernet/pasemi/pasemi_mac.c
432
mac->rx = ring;
drivers/net/ethernet/pasemi/pasemi_mac.c
437
kfree(ring->ring_info);
drivers/net/ethernet/pasemi/pasemi_mac.c
439
pasemi_dma_free_chan(&ring->chan);
drivers/net/ethernet/pasemi/pasemi_mac.c
449
struct pasemi_mac_txring *ring;
drivers/net/ethernet/pasemi/pasemi_mac.c
453
ring = pasemi_dma_alloc_chan(TXCHAN, sizeof(struct pasemi_mac_txring),
drivers/net/ethernet/pasemi/pasemi_mac.c
456
if (!ring) {
drivers/net/ethernet/pasemi/pasemi_mac.c
461
chno = ring->chan.chno;
drivers/net/ethernet/pasemi/pasemi_mac.c
463
spin_lock_init(&ring->lock);
drivers/net/ethernet/pasemi/pasemi_mac.c
465
ring->size = TX_RING_SIZE;
drivers/net/ethernet/pasemi/pasemi_mac.c
466
ring->ring_info = kzalloc_objs(struct pasemi_mac_buffer, TX_RING_SIZE);
drivers/net/ethernet/pasemi/pasemi_mac.c
467
if (!ring->ring_info)
drivers/net/ethernet/pasemi/pasemi_mac.c
471
if (pasemi_dma_alloc_ring(&ring->chan, TX_RING_SIZE))
drivers/net/ethernet/pasemi/pasemi_mac.c
475
PAS_DMA_TXCHAN_BASEL_BRBL(ring->chan.ring_dma));
drivers/net/ethernet/pasemi/pasemi_mac.c
476
val = PAS_DMA_TXCHAN_BASEU_BRBH(ring->chan.ring_dma >> 32);
drivers/net/ethernet/pasemi/pasemi_mac.c
491
ring->next_to_fill = 0;
drivers/net/ethernet/pasemi/pasemi_mac.c
492
ring->next_to_clean = 0;
drivers/net/ethernet/pasemi/pasemi_mac.c
493
ring->mac = mac;
drivers/net/ethernet/pasemi/pasemi_mac.c
495
return ring;
drivers/net/ethernet/pasemi/pasemi_mac.c
498
kfree(ring->ring_info);
drivers/net/ethernet/pasemi/pasemi_mac.c
500
pasemi_dma_free_chan(&ring->chan);
drivers/net/ethernet/pasemi/pasemi_mac.h
101
#define RING_USED(ring) (((ring)->next_to_fill - (ring)->next_to_clean) \
drivers/net/ethernet/pasemi/pasemi_mac.h
102
& ((ring)->size - 1))
drivers/net/ethernet/pasemi/pasemi_mac.h
103
#define RING_AVAIL(ring) ((ring->size) - RING_USED(ring))
drivers/net/ethernet/pensando/ionic/ionic_ethtool.c
652
struct ethtool_ringparam *ring,
drivers/net/ethernet/pensando/ionic/ionic_ethtool.c
658
ring->tx_max_pending = IONIC_MAX_TX_DESC;
drivers/net/ethernet/pensando/ionic/ionic_ethtool.c
659
ring->tx_pending = lif->ntxq_descs;
drivers/net/ethernet/pensando/ionic/ionic_ethtool.c
660
ring->rx_max_pending = IONIC_MAX_RX_DESC;
drivers/net/ethernet/pensando/ionic/ionic_ethtool.c
661
ring->rx_pending = lif->nrxq_descs;
drivers/net/ethernet/pensando/ionic/ionic_ethtool.c
667
struct ethtool_ringparam *ring,
drivers/net/ethernet/pensando/ionic/ionic_ethtool.c
680
if (ring->rx_mini_pending || ring->rx_jumbo_pending) {
drivers/net/ethernet/pensando/ionic/ionic_ethtool.c
685
if (!is_power_of_2(ring->tx_pending) ||
drivers/net/ethernet/pensando/ionic/ionic_ethtool.c
686
!is_power_of_2(ring->rx_pending)) {
drivers/net/ethernet/pensando/ionic/ionic_ethtool.c
692
if (ring->tx_pending == lif->ntxq_descs &&
drivers/net/ethernet/pensando/ionic/ionic_ethtool.c
693
ring->rx_pending == lif->nrxq_descs &&
drivers/net/ethernet/pensando/ionic/ionic_ethtool.c
698
qparam.ntxq_descs = ring->tx_pending;
drivers/net/ethernet/pensando/ionic/ionic_ethtool.c
699
qparam.nrxq_descs = ring->rx_pending;
drivers/net/ethernet/pensando/ionic/ionic_ethtool.c
715
if (ring->tx_pending != lif->ntxq_descs)
drivers/net/ethernet/pensando/ionic/ionic_ethtool.c
717
lif->ntxq_descs, ring->tx_pending);
drivers/net/ethernet/pensando/ionic/ionic_ethtool.c
719
if (ring->rx_pending != lif->nrxq_descs)
drivers/net/ethernet/pensando/ionic/ionic_ethtool.c
721
lif->nrxq_descs, ring->rx_pending);
drivers/net/ethernet/pensando/ionic/ionic_ethtool.c
725
lif->ntxq_descs = ring->tx_pending;
drivers/net/ethernet/pensando/ionic/ionic_ethtool.c
726
lif->nrxq_descs = ring->rx_pending;
drivers/net/ethernet/pensando/ionic/ionic_if.h
3367
u8 ring;
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
684
int ring;
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
697
for (ring = 0; ring < adapter->max_rds_rings; ring++) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
698
rds_ring = &recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
700
hwctx->rcv_rings[ring].addr =
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
702
hwctx->rcv_rings[ring].size =
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
706
for (ring = 0; ring < adapter->max_sds_rings; ring++) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
707
sds_ring = &recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
709
if (ring == 0) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
713
hwctx->sts_rings[ring].addr = cpu_to_le64(sds_ring->phys_addr);
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
714
hwctx->sts_rings[ring].size = cpu_to_le32(sds_ring->num_desc);
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
715
hwctx->sts_rings[ring].msi_index = cpu_to_le16(ring);
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
735
int ring;
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
777
for (ring = 0; ring < adapter->max_rds_rings; ring++) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
778
rds_ring = &recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
785
netdev->name, ring);
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
794
recv_crb_registers[port].crb_rcv_producer[ring]);
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
797
for (ring = 0; ring < adapter->max_sds_rings; ring++) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
798
sds_ring = &recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
806
netdev->name, ring);
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
815
recv_crb_registers[port].crb_sts_consumer[ring]);
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
819
recv_crb_registers[port].sw_int_mask[ring]);
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
853
int ring;
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
891
for (ring = 0; ring < adapter->max_rds_rings; ring++) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
892
rds_ring = &recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
903
for (ring = 0; ring < adapter->max_sds_rings; ring++) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_ctx.c
904
sds_ring = &recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/netxen/netxen_nic_ethtool.c
290
int ring, i = 0;
drivers/net/ethernet/qlogic/netxen/netxen_nic_ethtool.c
346
for (ring = 0; ring < adapter->max_sds_rings; ring++) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_ethtool.c
347
sds_ring = &(recv_ctx->sds_rings[ring]);
drivers/net/ethernet/qlogic/netxen/netxen_nic_ethtool.c
395
struct ethtool_ringparam *ring,
drivers/net/ethernet/qlogic/netxen/netxen_nic_ethtool.c
401
ring->rx_pending = adapter->num_rxd;
drivers/net/ethernet/qlogic/netxen/netxen_nic_ethtool.c
402
ring->rx_jumbo_pending = adapter->num_jumbo_rxd;
drivers/net/ethernet/qlogic/netxen/netxen_nic_ethtool.c
403
ring->rx_jumbo_pending += adapter->num_lro_rxd;
drivers/net/ethernet/qlogic/netxen/netxen_nic_ethtool.c
404
ring->tx_pending = adapter->num_txd;
drivers/net/ethernet/qlogic/netxen/netxen_nic_ethtool.c
407
ring->rx_max_pending = MAX_RCV_DESCRIPTORS_1G;
drivers/net/ethernet/qlogic/netxen/netxen_nic_ethtool.c
408
ring->rx_jumbo_max_pending = MAX_JUMBO_RCV_DESCRIPTORS_1G;
drivers/net/ethernet/qlogic/netxen/netxen_nic_ethtool.c
410
ring->rx_max_pending = MAX_RCV_DESCRIPTORS_10G;
drivers/net/ethernet/qlogic/netxen/netxen_nic_ethtool.c
411
ring->rx_jumbo_max_pending = MAX_JUMBO_RCV_DESCRIPTORS_10G;
drivers/net/ethernet/qlogic/netxen/netxen_nic_ethtool.c
414
ring->tx_max_pending = MAX_CMD_DESCRIPTORS;
drivers/net/ethernet/qlogic/netxen/netxen_nic_ethtool.c
435
struct ethtool_ringparam *ring,
drivers/net/ethernet/qlogic/netxen/netxen_nic_ethtool.c
447
if (ring->rx_mini_pending)
drivers/net/ethernet/qlogic/netxen/netxen_nic_ethtool.c
455
num_rxd = netxen_validate_ringparam(ring->rx_pending,
drivers/net/ethernet/qlogic/netxen/netxen_nic_ethtool.c
458
num_jumbo_rxd = netxen_validate_ringparam(ring->rx_jumbo_pending,
drivers/net/ethernet/qlogic/netxen/netxen_nic_ethtool.c
461
num_txd = netxen_validate_ringparam(ring->tx_pending,
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
100
rds_ring = &recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
1513
int ring, u64 sts_data0)
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
152
int ring;
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
1522
if (unlikely(ring >= adapter->max_rds_rings))
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
1525
rds_ring = &recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
1567
int ring, u64 sts_data0, u64 sts_data1)
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
1583
if (unlikely(ring >= adapter->max_rds_rings))
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
1586
rds_ring = &recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
159
for (ring = 0; ring < adapter->max_rds_rings; ring++) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
160
rds_ring = &recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
1656
int opcode, ring = 0, desc_cnt;
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
1673
ring = netxen_get_sts_type(sts_data0);
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
1675
ring, sts_data0);
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
1678
ring = netxen_get_lro_sts_type(sts_data0);
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
1681
ring, sts_data0, sts_data1);
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
1693
list_add_tail(&rxbuf->list, &sds_ring->free_list[ring]);
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
1705
for (ring = 0; ring < adapter->max_rds_rings; ring++) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
1707
&adapter->recv_ctx.rds_rings[ring];
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
1709
if (!list_empty(&sds_ring->free_list[ring])) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
1710
list_for_each(cur, &sds_ring->free_list[ring]) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
1716
netxen_merge_rx_buffers(&sds_ring->free_list[ring],
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
183
int ring, i;
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
211
for (ring = 0; ring < adapter->max_rds_rings; ring++) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
212
rds_ring = &recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
213
switch (ring) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
277
for (ring = 0; ring < adapter->max_sds_rings; ring++) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
278
sds_ring = &recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
279
sds_ring->irq = adapter->msix_entries[ring].vector;
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
96
int i, ring;
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
99
for (ring = 0; ring < adapter->max_rds_rings; ring++) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c
1034
int err, ring;
drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c
1050
for (ring = 0; ring < adapter->max_sds_rings; ring++) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c
1051
sds_ring = &recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c
1052
sprintf(sds_ring->name, "%s[%d]", netdev->name, ring);
drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c
1065
int ring;
drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c
1070
for (ring = 0; ring < adapter->max_sds_rings; ring++) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c
1071
sds_ring = &recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c
1195
int err, ring;
drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c
1246
for (ring = 0; ring < adapter->max_rds_rings; ring++) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c
1247
rds_ring = &adapter->recv_ctx.rds_rings[ring];
drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c
1248
netxen_post_rx_buffers(adapter, ring, rds_ring);
drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c
166
int ring;
drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c
173
for (ring = 0; ring < adapter->max_sds_rings; ring++) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c
174
sds_ring = &recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c
184
int ring;
drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c
188
for (ring = 0; ring < adapter->max_sds_rings; ring++) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c
189
sds_ring = &recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c
199
int ring;
drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c
203
for (ring = 0; ring < adapter->max_sds_rings; ring++) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c
204
sds_ring = &recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c
213
int ring;
drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c
217
for (ring = 0; ring < adapter->max_sds_rings; ring++) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c
218
sds_ring = &recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic.h
1944
int ring)
drivers/net/ethernet/qlogic/qlcnic/qlcnic.h
1946
return adapter->ahw->hw_ops->create_tx_ctx(adapter, ptr, ring);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_83xx_hw.c
1318
struct qlcnic_host_tx_ring *tx, int ring)
drivers/net/ethernet/qlogic/qlcnic/qlcnic_83xx_hw.c
1344
msix_vector = adapter->drv_sds_rings + ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_83xx_hw.c
1385
intr_mask = ahw->intr_tbl[adapter->drv_sds_rings + ring].src;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_83xx_hw.c
1403
u8 ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_83xx_hw.c
1434
for (ring = 0; ring < adapter->max_rds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_83xx_hw.c
1435
rds_ring = &adapter->recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_83xx_hw.c
1436
qlcnic_post_rx_buffers(adapter, rds_ring, ring);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_83xx_hw.c
1440
for (ring = 0; ring < adapter->drv_sds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_83xx_hw.c
1441
sds_ring = &adapter->recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_83xx_hw.c
1460
int ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_83xx_hw.c
1464
for (ring = 0; ring < adapter->drv_sds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_83xx_hw.c
1465
sds_ring = &adapter->recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_83xx_hw.h
552
u16 vlan, struct qlcnic_host_tx_ring *ring);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
414
int ring)
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
464
index = temp_nsds_rings + ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
500
index = adapter->drv_sds_rings + ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
559
int err, ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
570
for (ring = 0; ring < adapter->drv_tx_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
571
tx_ring = &adapter->tx_ring[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
593
for (ring = 0; ring < adapter->max_rds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
594
rds_ring = &recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
606
for (ring = 0; ring < adapter->drv_sds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
607
sds_ring = &recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
628
int i, err, ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
660
for (ring = 0; ring < dev->drv_tx_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
662
&dev->tx_ring[ring],
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
663
ring);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
666
if (ring == 0)
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
669
for (i = 0; i < ring; i++)
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
695
int ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
699
for (ring = 0; ring < adapter->drv_tx_rings; ring++)
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
701
&adapter->tx_ring[ring]);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
725
int ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
729
for (ring = 0; ring < adapter->drv_tx_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
730
tx_ring = &adapter->tx_ring[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
748
for (ring = 0; ring < adapter->max_rds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
749
rds_ring = &recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
760
for (ring = 0; ring < adapter->drv_sds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ctx.c
761
sds_ring = &recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
1308
int ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
1311
for (ring = 0; ring < adapter->drv_tx_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
1312
tx_ring = &adapter->tx_ring[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
1349
int index, ret, length, size, ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
1354
for (ring = 0; ring < adapter->drv_tx_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
1356
tx_ring = &adapter->tx_ring[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
537
int ring, i = 0;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
562
for (ring = 0; ring < adapter->drv_tx_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
563
tx_ring = &adapter->tx_ring[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
575
for (ring = 0; ring < adapter->max_rds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
576
rds_rings = &recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
582
for (ring = 0; ring < adapter->drv_sds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
583
sds_ring = &(recv_ctx->sds_rings[ring]);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
635
struct ethtool_ringparam *ring,
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
641
ring->rx_pending = adapter->num_rxd;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
642
ring->rx_jumbo_pending = adapter->num_jumbo_rxd;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
643
ring->tx_pending = adapter->num_txd;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
645
ring->rx_max_pending = adapter->max_rxd;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
646
ring->rx_jumbo_max_pending = adapter->max_jumbo_rxd;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
647
ring->tx_max_pending = MAX_CMD_DESCRIPTORS;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
668
struct ethtool_ringparam *ring,
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
675
if (ring->rx_mini_pending)
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
678
num_rxd = qlcnic_validate_ringparam(ring->rx_pending,
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
681
num_jumbo_rxd = qlcnic_validate_ringparam(ring->rx_jumbo_pending,
drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c
685
num_txd = qlcnic_validate_ringparam(ring->tx_pending,
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
110
int i, ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
113
for (ring = 0; ring < adapter->max_rds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
114
rds_ring = &recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
167
int ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
174
for (ring = 0; ring < adapter->max_rds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
175
rds_ring = &recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
188
int ring, i;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
199
for (ring = 0; ring < adapter->max_rds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
200
rds_ring = &recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
201
switch (ring) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
240
for (ring = 0; ring < adapter->drv_sds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
241
sds_ring = &recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
242
sds_ring->irq = adapter->msix_entries[ring].vector;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
248
sds_ring->tx_ring = &adapter->tx_ring[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
87
int i, ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
90
for (ring = 0; ring < adapter->max_rds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
91
rds_ring = &recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1138
struct qlcnic_host_rds_ring *ring,
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1144
buffer = &ring->rx_buf_arr[index];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1150
dma_unmap_single(&adapter->pdev->dev, buffer->dma, ring->dma_size,
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1194
struct qlcnic_host_sds_ring *sds_ring, int ring,
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1205
if (unlikely(ring >= adapter->max_rds_rings))
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1208
rds_ring = &recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1262
int ring, u64 sts_data0, u64 sts_data1)
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1277
if (unlikely(ring >= adapter->max_rds_rings))
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1280
rds_ring = &recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1365
u8 ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1381
ring = qlcnic_get_sts_type(sts_data0);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1382
rxbuf = qlcnic_process_rcv(adapter, sds_ring, ring,
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1386
ring = qlcnic_get_lro_sts_type(sts_data0);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1388
rxbuf = qlcnic_process_lro(adapter, ring, sts_data0,
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1400
list_add_tail(&rxbuf->list, &sds_ring->free_list[ring]);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1412
for (ring = 0; ring < adapter->max_rds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1413
rds_ring = &adapter->recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1414
if (!list_empty(&sds_ring->free_list[ring])) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1415
list_for_each(cur, &sds_ring->free_list[ring]) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1421
list_splice_tail_init(&sds_ring->free_list[ring],
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1426
qlcnic_post_rx_buffers_nodb(adapter, rds_ring, ring);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1491
static void qlcnic_process_rcv_diag(struct qlcnic_adapter *adapter, int ring,
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1499
if (unlikely(ring >= adapter->max_rds_rings))
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1502
rds_ring = &recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1541
int ring, opcode, desc_cnt;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1558
ring = qlcnic_get_sts_type(sts_data0);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1559
qlcnic_process_rcv_diag(adapter, ring, sts_data0);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1576
int ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1584
for (ring = 0; ring < adapter->drv_sds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1585
sds_ring = &recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1591
if (ring == (adapter->drv_sds_rings - 1))
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1606
for (ring = 0; ring < adapter->drv_tx_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1607
tx_ring = &adapter->tx_ring[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1618
int ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1623
for (ring = 0; ring < adapter->drv_sds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1624
sds_ring = &recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1631
for (ring = 0; ring < adapter->drv_tx_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1632
tx_ring = &adapter->tx_ring[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1642
int ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1650
for (ring = 0; ring < adapter->drv_sds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1651
sds_ring = &recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1659
for (ring = 0; ring < adapter->drv_tx_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1660
tx_ring = &adapter->tx_ring[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1669
int ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1677
for (ring = 0; ring < adapter->drv_sds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1678
sds_ring = &recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1687
for (ring = 0; ring < adapter->drv_tx_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1688
tx_ring = &adapter->tx_ring[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1717
u8 ring, u64 sts_data[])
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1728
if (unlikely(ring >= adapter->max_rds_rings))
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1731
rds_ring = &recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1783
u8 ring, u64 sts_data[])
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1800
if (unlikely(ring >= adapter->max_rds_rings))
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1803
rds_ring = &recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1886
u8 ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1898
ring = QLCNIC_FETCH_RING_ID(sts_data[0]);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1903
ring, sts_data);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1906
rxbuf = qlcnic_83xx_process_lro(adapter, ring,
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1916
list_add_tail(&rxbuf->list, &sds_ring->free_list[ring]);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1926
for (ring = 0; ring < adapter->max_rds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1927
rds_ring = &adapter->recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1928
if (!list_empty(&sds_ring->free_list[ring])) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1929
list_for_each(cur, &sds_ring->free_list[ring]) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1935
list_splice_tail_init(&sds_ring->free_list[ring],
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
1939
qlcnic_post_rx_buffers_nodb(adapter, rds_ring, ring);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2045
int ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2053
for (ring = 0; ring < adapter->drv_sds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2054
sds_ring = &recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2062
for (ring = 0; ring < adapter->drv_tx_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2063
tx_ring = &adapter->tx_ring[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2072
int ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2080
for (ring = 0; ring < adapter->drv_sds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2081
sds_ring = &recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2090
for (ring = 0; ring < adapter->drv_tx_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2091
tx_ring = &adapter->tx_ring[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2102
int ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2110
for (ring = 0; ring < adapter->drv_sds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2111
sds_ring = &recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2133
for (ring = 0; ring < adapter->drv_tx_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2134
tx_ring = &adapter->tx_ring[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2145
int ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2150
for (ring = 0; ring < adapter->drv_sds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2151
sds_ring = &recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2159
for (ring = 0; ring < adapter->drv_tx_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2160
tx_ring = &adapter->tx_ring[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2169
int ring, u64 sts_data[])
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2176
if (unlikely(ring >= adapter->max_rds_rings))
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2179
rds_ring = &recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2209
int ring, opcode;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2219
ring = QLCNIC_FETCH_RING_ID(sts_data[0]);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c
2220
qlcnic_83xx_process_rcv_diag(adapter, ring, sts_data);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
1694
int err, ring, num_sds_rings;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
1728
for (ring = 0; ring < num_sds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
1729
sds_ring = &recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
1732
(ring == (num_sds_rings - 1))) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
1742
netdev->name, ring);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
1747
netdev->name, ring);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
1761
for (ring = 0; ring < adapter->drv_tx_rings;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
1762
ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
1763
tx_ring = &adapter->tx_ring[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
1765
"%s-tx-%d", netdev->name, ring);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
1779
int ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
1789
for (ring = 0; ring < adapter->drv_sds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
1790
sds_ring = &recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
1798
for (ring = 0; ring < adapter->drv_tx_rings;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
1799
ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
1800
tx_ring = &adapter->tx_ring[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
1852
int ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
1869
for (ring = 0; ring < adapter->max_rds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
1870
rds_ring = &adapter->recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
1871
qlcnic_post_rx_buffers(adapter, rds_ring, ring);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
1911
int ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
1940
for (ring = 0; ring < adapter->drv_tx_rings; ring++)
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
1941
qlcnic_release_tx_buffers(adapter, &adapter->tx_ring[ring]);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2025
int ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2029
for (ring = 0; ring < adapter->drv_sds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2030
sds_ring = &adapter->recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2108
int ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2135
for (ring = 0; ring < adapter->max_rds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2136
rds_ring = &adapter->recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2137
qlcnic_post_rx_buffers(adapter, rds_ring, ring);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2141
for (ring = 0; ring < adapter->drv_sds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2142
sds_ring = &adapter->recv_ctx->sds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2333
int ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2336
for (ring = 0; ring < adapter->drv_tx_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2337
tx_ring = &adapter->tx_ring[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2349
int ring, vector, index;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2360
for (ring = 0; ring < adapter->drv_tx_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2361
tx_ring = &adapter->tx_ring[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2363
tx_ring->txq = netdev_get_tx_queue(netdev, ring);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2375
for (ring = 0; ring < adapter->drv_tx_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2376
tx_ring = &adapter->tx_ring[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2379
index = adapter->drv_sds_rings + ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2952
int ring;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2957
for (ring = 0; ring < adapter->max_rds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2958
rds_ring = &recv_ctx->rds_rings[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2963
ring, readl(rds_ring->crb_rcv_producer),
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2967
for (ring = 0; ring < adapter->drv_sds_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2968
sds_ring = &(recv_ctx->sds_rings[ring]);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2973
ring, readl(sds_ring->crb_sts_consumer),
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2978
for (ring = 0; ring < adapter->drv_tx_rings; ring++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2979
tx_ring = &adapter->tx_ring[ring];
drivers/net/ethernet/qlogic/qlcnic/qlcnic_main.c
2983
ring, tx_ring->ctx_id);
drivers/net/ethernet/qualcomm/emac/emac-ethtool.c
136
struct ethtool_ringparam *ring,
drivers/net/ethernet/qualcomm/emac/emac-ethtool.c
142
ring->rx_max_pending = EMAC_MAX_RX_DESCS;
drivers/net/ethernet/qualcomm/emac/emac-ethtool.c
143
ring->tx_max_pending = EMAC_MAX_TX_DESCS;
drivers/net/ethernet/qualcomm/emac/emac-ethtool.c
144
ring->rx_pending = adpt->rx_desc_cnt;
drivers/net/ethernet/qualcomm/emac/emac-ethtool.c
145
ring->tx_pending = adpt->tx_desc_cnt;
drivers/net/ethernet/qualcomm/emac/emac-ethtool.c
149
struct ethtool_ringparam *ring,
drivers/net/ethernet/qualcomm/emac/emac-ethtool.c
158
if (ring->rx_mini_pending || ring->rx_jumbo_pending)
drivers/net/ethernet/qualcomm/emac/emac-ethtool.c
162
clamp_val(ring->tx_pending, EMAC_MIN_TX_DESCS, EMAC_MAX_TX_DESCS);
drivers/net/ethernet/qualcomm/emac/emac-ethtool.c
165
clamp_val(ring->rx_pending, EMAC_MIN_RX_DESCS, EMAC_MAX_RX_DESCS);
drivers/net/ethernet/qualcomm/qca_debug.c
236
qcaspi_get_ringparam(struct net_device *dev, struct ethtool_ringparam *ring,
drivers/net/ethernet/qualcomm/qca_debug.c
242
ring->rx_max_pending = QCASPI_RX_MAX_FRAMES;
drivers/net/ethernet/qualcomm/qca_debug.c
243
ring->tx_max_pending = QCASPI_TX_RING_MAX_LEN;
drivers/net/ethernet/qualcomm/qca_debug.c
244
ring->rx_pending = QCASPI_RX_MAX_FRAMES;
drivers/net/ethernet/qualcomm/qca_debug.c
245
ring->tx_pending = qca->txr.count;
drivers/net/ethernet/qualcomm/qca_debug.c
249
qcaspi_set_ringparam(struct net_device *dev, struct ethtool_ringparam *ring,
drivers/net/ethernet/qualcomm/qca_debug.c
255
if (ring->rx_pending != QCASPI_RX_MAX_FRAMES ||
drivers/net/ethernet/qualcomm/qca_debug.c
256
(ring->rx_mini_pending) ||
drivers/net/ethernet/qualcomm/qca_debug.c
257
(ring->rx_jumbo_pending))
drivers/net/ethernet/qualcomm/qca_debug.c
263
qca->txr.count = max_t(u32, ring->tx_pending, QCASPI_TX_RING_MIN_LEN);
drivers/net/ethernet/realtek/8139cp.c
1391
struct ethtool_ringparam *ring,
drivers/net/ethernet/realtek/8139cp.c
1395
ring->rx_max_pending = CP_RX_RING_SIZE;
drivers/net/ethernet/realtek/8139cp.c
1396
ring->tx_max_pending = CP_TX_RING_SIZE;
drivers/net/ethernet/realtek/8139cp.c
1397
ring->rx_pending = CP_RX_RING_SIZE;
drivers/net/ethernet/realtek/8139cp.c
1398
ring->tx_pending = CP_TX_RING_SIZE;
drivers/net/ethernet/realtek/8139too.c
1902
static inline void wrap_copy(struct sk_buff *skb, const unsigned char *ring,
drivers/net/ethernet/realtek/8139too.c
1908
skb_copy_to_linear_data(skb, ring + offset, left);
drivers/net/ethernet/realtek/8139too.c
1909
skb_copy_to_linear_data_offset(skb, left, ring, size - left);
drivers/net/ethernet/realtek/8139too.c
1911
skb_copy_to_linear_data(skb, ring + offset, size);
drivers/net/ethernet/realtek/rtase/rtase.h
301
int (*ring_handler)(struct rtase_ring *ring, int budget);
drivers/net/ethernet/realtek/rtase/rtase_main.c
1072
struct rtase_ring *ring;
drivers/net/ethernet/realtek/rtase/rtase_main.c
1078
list_for_each_entry(ring, &ivec->ring_list, ring_entry)
drivers/net/ethernet/realtek/rtase/rtase_main.c
1079
total_workdone += ring->ring_handler(ring, budget);
drivers/net/ethernet/realtek/rtase/rtase_main.c
1164
struct rtase_ring *ring, *tmp;
drivers/net/ethernet/realtek/rtase/rtase_main.c
1170
list_for_each_entry_safe(ring, tmp, &ivec->ring_list,
drivers/net/ethernet/realtek/rtase/rtase_main.c
1172
netif_queue_set_napi(tp->dev, ring->index,
drivers/net/ethernet/realtek/rtase/rtase_main.c
1173
ring->type, NULL);
drivers/net/ethernet/realtek/rtase/rtase_main.c
1175
list_del(&ring->ring_entry);
drivers/net/ethernet/realtek/rtase/rtase_main.c
1250
static int rtase_xmit_frags(struct rtase_ring *ring, struct sk_buff *skb,
drivers/net/ethernet/realtek/rtase/rtase_main.c
1254
const struct rtase_private *tp = ring->ivec->tp;
drivers/net/ethernet/realtek/rtase/rtase_main.c
1259
entry = ring->cur_idx;
drivers/net/ethernet/realtek/rtase/rtase_main.c
1268
txd = ring->desc + sizeof(struct rtase_tx_desc) * entry;
drivers/net/ethernet/realtek/rtase/rtase_main.c
1288
ring->skbuff[entry] = skb;
drivers/net/ethernet/realtek/rtase/rtase_main.c
1292
ring->mis.len[entry] = len;
drivers/net/ethernet/realtek/rtase/rtase_main.c
1304
rtase_tx_clear_range(ring, ring->cur_idx + 1, cur_frag);
drivers/net/ethernet/realtek/rtase/rtase_main.c
1318
struct rtase_ring *ring;
drivers/net/ethernet/realtek/rtase/rtase_main.c
1324
ring = &tp->tx_ring[q_idx];
drivers/net/ethernet/realtek/rtase/rtase_main.c
1327
if (unlikely(!rtase_tx_avail(ring))) {
drivers/net/ethernet/realtek/rtase/rtase_main.c
1336
entry = ring->cur_idx % RTASE_NUM_DESC;
drivers/net/ethernet/realtek/rtase/rtase_main.c
1337
txd = ring->desc + sizeof(struct rtase_tx_desc) * entry;
drivers/net/ethernet/realtek/rtase/rtase_main.c
1363
frags = rtase_xmit_frags(ring, skb, opts1, opts2);
drivers/net/ethernet/realtek/rtase/rtase_main.c
1372
ring->skbuff[entry] = skb;
drivers/net/ethernet/realtek/rtase/rtase_main.c
1391
ring->mis.len[entry] = len;
drivers/net/ethernet/realtek/rtase/rtase_main.c
1409
WRITE_ONCE(ring->cur_idx, ring->cur_idx + frags + 1);
drivers/net/ethernet/realtek/rtase/rtase_main.c
1411
stop_queue = !netif_subqueue_maybe_stop(dev, ring->index,
drivers/net/ethernet/realtek/rtase/rtase_main.c
1412
rtase_tx_avail(ring),
drivers/net/ethernet/realtek/rtase/rtase_main.c
1417
rtase_w8(tp, RTASE_TPPOLL, BIT(ring->index));
drivers/net/ethernet/realtek/rtase/rtase_main.c
1422
ring->skbuff[entry] = NULL;
drivers/net/ethernet/realtek/rtase/rtase_main.c
1423
rtase_tx_clear_range(ring, ring->cur_idx + 1, frags);
drivers/net/ethernet/realtek/rtase/rtase_main.c
1515
struct rtase_ring *ring, *tmp;
drivers/net/ethernet/realtek/rtase/rtase_main.c
1531
list_for_each_entry_safe(ring, tmp, &ivec->ring_list,
drivers/net/ethernet/realtek/rtase/rtase_main.c
1533
netif_queue_set_napi(tp->dev, ring->index,
drivers/net/ethernet/realtek/rtase/rtase_main.c
1534
ring->type, NULL);
drivers/net/ethernet/realtek/rtase/rtase_main.c
1536
list_del(&ring->ring_entry);
drivers/net/ethernet/realtek/rtase/rtase_main.c
1578
const struct rtase_ring *ring;
drivers/net/ethernet/realtek/rtase/rtase_main.c
1582
ring = &tp->tx_ring[0];
drivers/net/ethernet/realtek/rtase/rtase_main.c
1584
netdev_err(dev, "Tx curIdx = 0x%x\n", ring->cur_idx);
drivers/net/ethernet/realtek/rtase/rtase_main.c
1585
netdev_err(dev, "Tx dirtyIdx = 0x%x\n", ring->dirty_idx);
drivers/net/ethernet/realtek/rtase/rtase_main.c
1586
netdev_err(dev, "Tx phyAddr = %pad\n", &ring->phy_addr);
drivers/net/ethernet/realtek/rtase/rtase_main.c
1588
ring = &tp->rx_ring[0];
drivers/net/ethernet/realtek/rtase/rtase_main.c
1590
netdev_err(dev, "Rx curIdx = 0x%x\n", ring->cur_idx);
drivers/net/ethernet/realtek/rtase/rtase_main.c
1591
netdev_err(dev, "Rx dirtyIdx = 0x%x\n", ring->dirty_idx);
drivers/net/ethernet/realtek/rtase/rtase_main.c
1592
netdev_err(dev, "Rx phyAddr = %pad\n", &ring->phy_addr);
drivers/net/ethernet/realtek/rtase/rtase_main.c
205
static void rtase_tx_clear_range(struct rtase_ring *ring, u32 start, u32 n)
drivers/net/ethernet/realtek/rtase/rtase_main.c
207
struct rtase_tx_desc *desc_base = ring->desc;
drivers/net/ethernet/realtek/rtase/rtase_main.c
208
struct rtase_private *tp = ring->ivec->tp;
drivers/net/ethernet/realtek/rtase/rtase_main.c
214
u32 len = ring->mis.len[entry];
drivers/net/ethernet/realtek/rtase/rtase_main.c
221
ring->mis.len[entry] = 0;
drivers/net/ethernet/realtek/rtase/rtase_main.c
222
skb = ring->skbuff[entry];
drivers/net/ethernet/realtek/rtase/rtase_main.c
228
ring->skbuff[entry] = NULL;
drivers/net/ethernet/realtek/rtase/rtase_main.c
234
struct rtase_ring *ring;
drivers/net/ethernet/realtek/rtase/rtase_main.c
238
ring = &tp->tx_ring[i];
drivers/net/ethernet/realtek/rtase/rtase_main.c
239
rtase_tx_clear_range(ring, ring->dirty_idx, RTASE_NUM_DESC);
drivers/net/ethernet/realtek/rtase/rtase_main.c
240
ring->cur_idx = 0;
drivers/net/ethernet/realtek/rtase/rtase_main.c
241
ring->dirty_idx = 0;
drivers/net/ethernet/realtek/rtase/rtase_main.c
256
static u32 rtase_tx_avail(struct rtase_ring *ring)
drivers/net/ethernet/realtek/rtase/rtase_main.c
258
return READ_ONCE(ring->dirty_idx) + RTASE_NUM_DESC -
drivers/net/ethernet/realtek/rtase/rtase_main.c
259
READ_ONCE(ring->cur_idx);
drivers/net/ethernet/realtek/rtase/rtase_main.c
262
static int tx_handler(struct rtase_ring *ring, int budget)
drivers/net/ethernet/realtek/rtase/rtase_main.c
264
const struct rtase_private *tp = ring->ivec->tp;
drivers/net/ethernet/realtek/rtase/rtase_main.c
271
dirty_tx = ring->dirty_idx;
drivers/net/ethernet/realtek/rtase/rtase_main.c
272
tx_left = READ_ONCE(ring->cur_idx) - dirty_tx;
drivers/net/ethernet/realtek/rtase/rtase_main.c
276
struct rtase_tx_desc *desc = ring->desc +
drivers/net/ethernet/realtek/rtase/rtase_main.c
285
rtase_unmap_tx_skb(tp->pdev, ring->mis.len[entry], desc);
drivers/net/ethernet/realtek/rtase/rtase_main.c
286
ring->mis.len[entry] = 0;
drivers/net/ethernet/realtek/rtase/rtase_main.c
287
if (ring->skbuff[entry]) {
drivers/net/ethernet/realtek/rtase/rtase_main.c
289
bytes_compl += ring->skbuff[entry]->len;
drivers/net/ethernet/realtek/rtase/rtase_main.c
290
napi_consume_skb(ring->skbuff[entry], budget);
drivers/net/ethernet/realtek/rtase/rtase_main.c
291
ring->skbuff[entry] = NULL;
drivers/net/ethernet/realtek/rtase/rtase_main.c
302
if (ring->dirty_idx != dirty_tx) {
drivers/net/ethernet/realtek/rtase/rtase_main.c
304
WRITE_ONCE(ring->dirty_idx, dirty_tx);
drivers/net/ethernet/realtek/rtase/rtase_main.c
306
netif_subqueue_completed_wake(dev, ring->index, pkts_compl,
drivers/net/ethernet/realtek/rtase/rtase_main.c
308
rtase_tx_avail(ring),
drivers/net/ethernet/realtek/rtase/rtase_main.c
311
if (ring->cur_idx != dirty_tx)
drivers/net/ethernet/realtek/rtase/rtase_main.c
312
rtase_w8(tp, RTASE_TPPOLL, BIT(ring->index));
drivers/net/ethernet/realtek/rtase/rtase_main.c
320
struct rtase_ring *ring = &tp->tx_ring[idx];
drivers/net/ethernet/realtek/rtase/rtase_main.c
324
memset(ring->desc, 0x0, RTASE_TX_RING_DESC_SIZE);
drivers/net/ethernet/realtek/rtase/rtase_main.c
325
memset(ring->skbuff, 0x0, sizeof(ring->skbuff));
drivers/net/ethernet/realtek/rtase/rtase_main.c
326
ring->cur_idx = 0;
drivers/net/ethernet/realtek/rtase/rtase_main.c
327
ring->dirty_idx = 0;
drivers/net/ethernet/realtek/rtase/rtase_main.c
328
ring->index = idx;
drivers/net/ethernet/realtek/rtase/rtase_main.c
329
ring->type = NETDEV_QUEUE_TYPE_TX;
drivers/net/ethernet/realtek/rtase/rtase_main.c
330
ring->alloc_fail = 0;
drivers/net/ethernet/realtek/rtase/rtase_main.c
333
ring->mis.len[i] = 0;
drivers/net/ethernet/realtek/rtase/rtase_main.c
335
desc = ring->desc + sizeof(struct rtase_tx_desc) * i;
drivers/net/ethernet/realtek/rtase/rtase_main.c
340
ring->ring_handler = tx_handler;
drivers/net/ethernet/realtek/rtase/rtase_main.c
342
ring->ivec = &tp->int_vector[idx];
drivers/net/ethernet/realtek/rtase/rtase_main.c
343
list_add_tail(&ring->ring_entry,
drivers/net/ethernet/realtek/rtase/rtase_main.c
346
ring->ivec = &tp->int_vector[0];
drivers/net/ethernet/realtek/rtase/rtase_main.c
347
list_add_tail(&ring->ring_entry, &tp->int_vector[0].ring_list);
drivers/net/ethernet/realtek/rtase/rtase_main.c
350
netif_queue_set_napi(tp->dev, ring->index,
drivers/net/ethernet/realtek/rtase/rtase_main.c
351
ring->type, &ring->ivec->napi);
drivers/net/ethernet/realtek/rtase/rtase_main.c
368
static int rtase_alloc_rx_data_buf(struct rtase_ring *ring,
drivers/net/ethernet/realtek/rtase/rtase_main.c
373
struct rtase_int_vector *ivec = ring->ivec;
drivers/net/ethernet/realtek/rtase/rtase_main.c
380
ring->alloc_fail++;
drivers/net/ethernet/realtek/rtase/rtase_main.c
397
static u32 rtase_rx_ring_fill(struct rtase_ring *ring, u32 ring_start,
drivers/net/ethernet/realtek/rtase/rtase_main.c
400
union rtase_rx_desc *desc_base = ring->desc;
drivers/net/ethernet/realtek/rtase/rtase_main.c
408
if (ring->data_buf[i])
drivers/net/ethernet/realtek/rtase/rtase_main.c
411
ret = rtase_alloc_rx_data_buf(ring, &ring->data_buf[i], desc,
drivers/net/ethernet/realtek/rtase/rtase_main.c
412
&ring->mis.data_phy_addr[i]);
drivers/net/ethernet/realtek/rtase/rtase_main.c
426
struct rtase_ring *ring)
drivers/net/ethernet/realtek/rtase/rtase_main.c
433
desc = ring->desc + sizeof(union rtase_rx_desc) * i;
drivers/net/ethernet/realtek/rtase/rtase_main.c
434
page = virt_to_head_page(ring->data_buf[i]);
drivers/net/ethernet/realtek/rtase/rtase_main.c
436
if (ring->data_buf[i])
drivers/net/ethernet/realtek/rtase/rtase_main.c
478
static void rtase_rx_skb(const struct rtase_ring *ring, struct sk_buff *skb)
drivers/net/ethernet/realtek/rtase/rtase_main.c
480
struct rtase_int_vector *ivec = ring->ivec;
drivers/net/ethernet/realtek/rtase/rtase_main.c
485
static int rx_handler(struct rtase_ring *ring, int budget)
drivers/net/ethernet/realtek/rtase/rtase_main.c
487
union rtase_rx_desc *desc_base = ring->desc;
drivers/net/ethernet/realtek/rtase/rtase_main.c
489
struct rtase_private *tp = ring->ivec->tp;
drivers/net/ethernet/realtek/rtase/rtase_main.c
495
cur_rx = ring->cur_idx;
drivers/net/ethernet/realtek/rtase/rtase_main.c
547
ring->mis.data_phy_addr[entry],
drivers/net/ethernet/realtek/rtase/rtase_main.c
550
skb = build_skb(ring->data_buf[entry], PAGE_SIZE);
drivers/net/ethernet/realtek/rtase/rtase_main.c
556
ring->data_buf[entry] = NULL;
drivers/net/ethernet/realtek/rtase/rtase_main.c
569
rtase_rx_skb(ring, skb);
drivers/net/ethernet/realtek/rtase/rtase_main.c
577
desc = ring->desc + sizeof(union rtase_rx_desc) * entry;
drivers/net/ethernet/realtek/rtase/rtase_main.c
580
ring->cur_idx = cur_rx;
drivers/net/ethernet/realtek/rtase/rtase_main.c
581
delta = rtase_rx_ring_fill(ring, ring->dirty_idx, ring->cur_idx);
drivers/net/ethernet/realtek/rtase/rtase_main.c
582
ring->dirty_idx += delta;
drivers/net/ethernet/realtek/rtase/rtase_main.c
589
struct rtase_ring *ring = &tp->rx_ring[idx];
drivers/net/ethernet/realtek/rtase/rtase_main.c
592
memset(ring->desc, 0x0, RTASE_RX_RING_DESC_SIZE);
drivers/net/ethernet/realtek/rtase/rtase_main.c
593
memset(ring->data_buf, 0x0, sizeof(ring->data_buf));
drivers/net/ethernet/realtek/rtase/rtase_main.c
594
ring->cur_idx = 0;
drivers/net/ethernet/realtek/rtase/rtase_main.c
595
ring->dirty_idx = 0;
drivers/net/ethernet/realtek/rtase/rtase_main.c
596
ring->index = idx;
drivers/net/ethernet/realtek/rtase/rtase_main.c
597
ring->type = NETDEV_QUEUE_TYPE_RX;
drivers/net/ethernet/realtek/rtase/rtase_main.c
598
ring->alloc_fail = 0;
drivers/net/ethernet/realtek/rtase/rtase_main.c
601
ring->mis.data_phy_addr[i] = 0;
drivers/net/ethernet/realtek/rtase/rtase_main.c
603
ring->ring_handler = rx_handler;
drivers/net/ethernet/realtek/rtase/rtase_main.c
604
ring->ivec = &tp->int_vector[idx];
drivers/net/ethernet/realtek/rtase/rtase_main.c
605
netif_queue_set_napi(tp->dev, ring->index,
drivers/net/ethernet/realtek/rtase/rtase_main.c
606
ring->type, &ring->ivec->napi);
drivers/net/ethernet/realtek/rtase/rtase_main.c
607
list_add_tail(&ring->ring_entry, &tp->int_vector[idx].ring_list);
drivers/net/ethernet/realtek/rtase/rtase_main.c
695
const struct rtase_ring *ring;
drivers/net/ethernet/realtek/rtase/rtase_main.c
700
ring = &tp->tx_ring[i];
drivers/net/ethernet/realtek/rtase/rtase_main.c
703
lower_32_bits(ring->phy_addr));
drivers/net/ethernet/realtek/rtase/rtase_main.c
705
upper_32_bits(ring->phy_addr));
drivers/net/ethernet/realtek/rtase/rtase_main.c
721
ring = &tp->rx_ring[i];
drivers/net/ethernet/realtek/rtase/rtase_main.c
725
lower_32_bits(ring->phy_addr));
drivers/net/ethernet/realtek/rtase/rtase_main.c
727
upper_32_bits(ring->phy_addr));
drivers/net/ethernet/realtek/rtase/rtase_main.c
730
lower_32_bits(ring->phy_addr));
drivers/net/ethernet/realtek/rtase/rtase_main.c
732
upper_32_bits(ring->phy_addr));
drivers/net/ethernet/renesas/ravb_main.c
1679
struct ethtool_ringparam *ring,
drivers/net/ethernet/renesas/ravb_main.c
1685
ring->rx_max_pending = BE_RX_RING_MAX;
drivers/net/ethernet/renesas/ravb_main.c
1686
ring->tx_max_pending = BE_TX_RING_MAX;
drivers/net/ethernet/renesas/ravb_main.c
1687
ring->rx_pending = priv->num_rx_ring[RAVB_BE];
drivers/net/ethernet/renesas/ravb_main.c
1688
ring->tx_pending = priv->num_tx_ring[RAVB_BE];
drivers/net/ethernet/renesas/ravb_main.c
1692
struct ethtool_ringparam *ring,
drivers/net/ethernet/renesas/ravb_main.c
1700
if (ring->tx_pending > BE_TX_RING_MAX ||
drivers/net/ethernet/renesas/ravb_main.c
1701
ring->rx_pending > BE_RX_RING_MAX ||
drivers/net/ethernet/renesas/ravb_main.c
1702
ring->tx_pending < BE_TX_RING_MIN ||
drivers/net/ethernet/renesas/ravb_main.c
1703
ring->rx_pending < BE_RX_RING_MIN)
drivers/net/ethernet/renesas/ravb_main.c
1705
if (ring->rx_mini_pending || ring->rx_jumbo_pending)
drivers/net/ethernet/renesas/ravb_main.c
1729
priv->num_rx_ring[RAVB_BE] = ring->rx_pending;
drivers/net/ethernet/renesas/ravb_main.c
1730
priv->num_tx_ring[RAVB_BE] = ring->tx_pending;
drivers/net/ethernet/renesas/sh_eth.c
2289
struct ethtool_ringparam *ring,
drivers/net/ethernet/renesas/sh_eth.c
2295
ring->rx_max_pending = RX_RING_MAX;
drivers/net/ethernet/renesas/sh_eth.c
2296
ring->tx_max_pending = TX_RING_MAX;
drivers/net/ethernet/renesas/sh_eth.c
2297
ring->rx_pending = mdp->num_rx_ring;
drivers/net/ethernet/renesas/sh_eth.c
2298
ring->tx_pending = mdp->num_tx_ring;
drivers/net/ethernet/renesas/sh_eth.c
2302
struct ethtool_ringparam *ring,
drivers/net/ethernet/renesas/sh_eth.c
2309
if (ring->tx_pending > TX_RING_MAX ||
drivers/net/ethernet/renesas/sh_eth.c
2310
ring->rx_pending > RX_RING_MAX ||
drivers/net/ethernet/renesas/sh_eth.c
2311
ring->tx_pending < TX_RING_MIN ||
drivers/net/ethernet/renesas/sh_eth.c
2312
ring->rx_pending < RX_RING_MIN)
drivers/net/ethernet/renesas/sh_eth.c
2314
if (ring->rx_mini_pending || ring->rx_jumbo_pending)
drivers/net/ethernet/renesas/sh_eth.c
2338
mdp->num_rx_ring = ring->rx_pending;
drivers/net/ethernet/renesas/sh_eth.c
2339
mdp->num_tx_ring = ring->tx_pending;
drivers/net/ethernet/sfc/ef100_ethtool.c
25
struct ethtool_ringparam *ring,
drivers/net/ethernet/sfc/ef100_ethtool.c
31
ring->rx_max_pending = EFX_EF100_MAX_DMAQ_SIZE;
drivers/net/ethernet/sfc/ef100_ethtool.c
32
ring->tx_max_pending = EFX_EF100_MAX_DMAQ_SIZE;
drivers/net/ethernet/sfc/ef100_ethtool.c
33
ring->rx_pending = efx->rxq_entries;
drivers/net/ethernet/sfc/ef100_ethtool.c
34
ring->tx_pending = efx->txq_entries;
drivers/net/ethernet/sfc/ef100_rep.c
171
struct ethtool_ringparam *ring,
drivers/net/ethernet/sfc/ef100_rep.c
177
ring->rx_max_pending = U32_MAX;
drivers/net/ethernet/sfc/ef100_rep.c
178
ring->rx_pending = efv->rx_pring_size;
drivers/net/ethernet/sfc/ef100_rep.c
182
struct ethtool_ringparam *ring,
drivers/net/ethernet/sfc/ef100_rep.c
188
if (ring->rx_mini_pending || ring->rx_jumbo_pending || ring->tx_pending)
drivers/net/ethernet/sfc/ef100_rep.c
191
efv->rx_pring_size = ring->rx_pending;
drivers/net/ethernet/sfc/ethtool.c
162
struct ethtool_ringparam *ring,
drivers/net/ethernet/sfc/ethtool.c
168
ring->rx_max_pending = EFX_MAX_DMAQ_SIZE;
drivers/net/ethernet/sfc/ethtool.c
169
ring->tx_max_pending = EFX_TXQ_MAX_ENT(efx);
drivers/net/ethernet/sfc/ethtool.c
170
ring->rx_pending = efx->rxq_entries;
drivers/net/ethernet/sfc/ethtool.c
171
ring->tx_pending = efx->txq_entries;
drivers/net/ethernet/sfc/ethtool.c
176
struct ethtool_ringparam *ring,
drivers/net/ethernet/sfc/ethtool.c
183
if (ring->rx_mini_pending || ring->rx_jumbo_pending ||
drivers/net/ethernet/sfc/ethtool.c
184
ring->rx_pending > EFX_MAX_DMAQ_SIZE ||
drivers/net/ethernet/sfc/ethtool.c
185
ring->tx_pending > EFX_TXQ_MAX_ENT(efx))
drivers/net/ethernet/sfc/ethtool.c
188
if (ring->rx_pending < EFX_RXQ_MIN_ENT) {
drivers/net/ethernet/sfc/ethtool.c
195
txq_entries = max(ring->tx_pending, EFX_TXQ_MIN_ENT(efx));
drivers/net/ethernet/sfc/ethtool.c
196
if (txq_entries != ring->tx_pending)
drivers/net/ethernet/sfc/ethtool.c
201
return efx_realloc_channels(efx, ring->rx_pending, txq_entries);
drivers/net/ethernet/sfc/falcon/ethtool.c
636
struct ethtool_ringparam *ring,
drivers/net/ethernet/sfc/falcon/ethtool.c
642
ring->rx_max_pending = EF4_MAX_DMAQ_SIZE;
drivers/net/ethernet/sfc/falcon/ethtool.c
643
ring->tx_max_pending = EF4_MAX_DMAQ_SIZE;
drivers/net/ethernet/sfc/falcon/ethtool.c
644
ring->rx_pending = efx->rxq_entries;
drivers/net/ethernet/sfc/falcon/ethtool.c
645
ring->tx_pending = efx->txq_entries;
drivers/net/ethernet/sfc/falcon/ethtool.c
650
struct ethtool_ringparam *ring,
drivers/net/ethernet/sfc/falcon/ethtool.c
657
if (ring->rx_mini_pending || ring->rx_jumbo_pending ||
drivers/net/ethernet/sfc/falcon/ethtool.c
658
ring->rx_pending > EF4_MAX_DMAQ_SIZE ||
drivers/net/ethernet/sfc/falcon/ethtool.c
659
ring->tx_pending > EF4_MAX_DMAQ_SIZE)
drivers/net/ethernet/sfc/falcon/ethtool.c
662
if (ring->rx_pending < EF4_RXQ_MIN_ENT) {
drivers/net/ethernet/sfc/falcon/ethtool.c
669
txq_entries = max(ring->tx_pending, EF4_TXQ_MIN_ENT(efx));
drivers/net/ethernet/sfc/falcon/ethtool.c
670
if (txq_entries != ring->tx_pending)
drivers/net/ethernet/sfc/falcon/ethtool.c
675
return ef4_realloc_channels(efx, ring->rx_pending, txq_entries);
drivers/net/ethernet/sfc/siena/ethtool.c
162
struct ethtool_ringparam *ring,
drivers/net/ethernet/sfc/siena/ethtool.c
168
ring->rx_max_pending = EFX_MAX_DMAQ_SIZE;
drivers/net/ethernet/sfc/siena/ethtool.c
169
ring->tx_max_pending = EFX_TXQ_MAX_ENT(efx);
drivers/net/ethernet/sfc/siena/ethtool.c
170
ring->rx_pending = efx->rxq_entries;
drivers/net/ethernet/sfc/siena/ethtool.c
171
ring->tx_pending = efx->txq_entries;
drivers/net/ethernet/sfc/siena/ethtool.c
176
struct ethtool_ringparam *ring,
drivers/net/ethernet/sfc/siena/ethtool.c
183
if (ring->rx_mini_pending || ring->rx_jumbo_pending ||
drivers/net/ethernet/sfc/siena/ethtool.c
184
ring->rx_pending > EFX_MAX_DMAQ_SIZE ||
drivers/net/ethernet/sfc/siena/ethtool.c
185
ring->tx_pending > EFX_TXQ_MAX_ENT(efx))
drivers/net/ethernet/sfc/siena/ethtool.c
188
if (ring->rx_pending < EFX_RXQ_MIN_ENT) {
drivers/net/ethernet/sfc/siena/ethtool.c
195
txq_entries = max(ring->tx_pending, EFX_TXQ_MIN_ENT(efx));
drivers/net/ethernet/sfc/siena/ethtool.c
196
if (txq_entries != ring->tx_pending)
drivers/net/ethernet/sfc/siena/ethtool.c
201
return efx_siena_realloc_channels(efx, ring->rx_pending, txq_entries);
drivers/net/ethernet/sgi/ioc3-eth.c
743
unsigned long ring;
drivers/net/ethernet/sgi/ioc3-eth.c
746
ring = ioc3_map(ip->rxr_dma, PCI64_ATTR_PREC);
drivers/net/ethernet/sgi/ioc3-eth.c
747
writel(ring >> 32, &regs->erbr_h);
drivers/net/ethernet/sgi/ioc3-eth.c
748
writel(ring & 0xffffffff, &regs->erbr_l);
drivers/net/ethernet/sgi/ioc3-eth.c
752
ring = ioc3_map(ip->txr_dma, PCI64_ATTR_PREC);
drivers/net/ethernet/sgi/ioc3-eth.c
757
writel(ring >> 32, &regs->etbr_h);
drivers/net/ethernet/sgi/ioc3-eth.c
758
writel(ring & 0xffffffff, &regs->etbr_l);
drivers/net/ethernet/stmicro/stmmac/stmmac_ethtool.c
392
struct ethtool_ringparam *ring,
drivers/net/ethernet/stmicro/stmmac/stmmac_ethtool.c
398
ring->rx_max_pending = DMA_MAX_RX_SIZE;
drivers/net/ethernet/stmicro/stmmac/stmmac_ethtool.c
399
ring->tx_max_pending = DMA_MAX_TX_SIZE;
drivers/net/ethernet/stmicro/stmmac/stmmac_ethtool.c
400
ring->rx_pending = priv->dma_conf.dma_rx_size;
drivers/net/ethernet/stmicro/stmmac/stmmac_ethtool.c
401
ring->tx_pending = priv->dma_conf.dma_tx_size;
drivers/net/ethernet/stmicro/stmmac/stmmac_ethtool.c
405
struct ethtool_ringparam *ring,
drivers/net/ethernet/stmicro/stmmac/stmmac_ethtool.c
409
if (ring->rx_mini_pending || ring->rx_jumbo_pending ||
drivers/net/ethernet/stmicro/stmmac/stmmac_ethtool.c
410
ring->rx_pending < DMA_MIN_RX_SIZE ||
drivers/net/ethernet/stmicro/stmmac/stmmac_ethtool.c
411
ring->rx_pending > DMA_MAX_RX_SIZE ||
drivers/net/ethernet/stmicro/stmmac/stmmac_ethtool.c
412
!is_power_of_2(ring->rx_pending) ||
drivers/net/ethernet/stmicro/stmmac/stmmac_ethtool.c
413
ring->tx_pending < DMA_MIN_TX_SIZE ||
drivers/net/ethernet/stmicro/stmmac/stmmac_ethtool.c
414
ring->tx_pending > DMA_MAX_TX_SIZE ||
drivers/net/ethernet/stmicro/stmmac/stmmac_ethtool.c
415
!is_power_of_2(ring->tx_pending))
drivers/net/ethernet/stmicro/stmmac/stmmac_ethtool.c
418
return stmmac_reinit_ringparam(netdev, ring->rx_pending,
drivers/net/ethernet/stmicro/stmmac/stmmac_ethtool.c
419
ring->tx_pending);
drivers/net/ethernet/sun/cassini.c
1352
static cas_page_t *cas_page_swap(struct cas *cp, const int ring,
drivers/net/ethernet/sun/cassini.c
1811
static inline void cas_tx_ringN(struct cas *cp, int ring, int limit)
drivers/net/ethernet/sun/cassini.c
1818
spin_lock(&cp->tx_lock[ring]);
drivers/net/ethernet/sun/cassini.c
1819
txds = cp->init_txds[ring];
drivers/net/ethernet/sun/cassini.c
1820
skbs = cp->tx_skbs[ring];
drivers/net/ethernet/sun/cassini.c
1821
entry = cp->tx_old[ring];
drivers/net/ethernet/sun/cassini.c
1823
count = TX_BUFF_COUNT(ring, entry, limit);
drivers/net/ethernet/sun/cassini.c
1832
entry = TX_DESC_NEXT(ring, entry);
drivers/net/ethernet/sun/cassini.c
1838
+ cp->tx_tiny_use[ring][entry].nbufs + 1;
drivers/net/ethernet/sun/cassini.c
1843
"tx[%d] done, slot %d\n", ring, entry);
drivers/net/ethernet/sun/cassini.c
1846
cp->tx_tiny_use[ring][entry].nbufs = 0;
drivers/net/ethernet/sun/cassini.c
1856
entry = TX_DESC_NEXT(ring, entry);
drivers/net/ethernet/sun/cassini.c
1859
if (cp->tx_tiny_use[ring][entry].used) {
drivers/net/ethernet/sun/cassini.c
1860
cp->tx_tiny_use[ring][entry].used = 0;
drivers/net/ethernet/sun/cassini.c
1861
entry = TX_DESC_NEXT(ring, entry);
drivers/net/ethernet/sun/cassini.c
1865
spin_lock(&cp->stat_lock[ring]);
drivers/net/ethernet/sun/cassini.c
1866
cp->net_stats[ring].tx_packets++;
drivers/net/ethernet/sun/cassini.c
1867
cp->net_stats[ring].tx_bytes += skb->len;
drivers/net/ethernet/sun/cassini.c
1868
spin_unlock(&cp->stat_lock[ring]);
drivers/net/ethernet/sun/cassini.c
1871
cp->tx_old[ring] = entry;
drivers/net/ethernet/sun/cassini.c
1878
(TX_BUFFS_AVAIL(cp, ring) > CAS_TABORT(cp)*(MAX_SKB_FRAGS + 1)))
drivers/net/ethernet/sun/cassini.c
1880
spin_unlock(&cp->tx_lock[ring]);
drivers/net/ethernet/sun/cassini.c
1886
int limit, ring;
drivers/net/ethernet/sun/cassini.c
1894
for (ring = 0; ring < N_TX_RINGS; ring++) {
drivers/net/ethernet/sun/cassini.c
1901
limit = readl(cp->regs + REG_TX_COMPN(ring));
drivers/net/ethernet/sun/cassini.c
1903
if (cp->tx_old[ring] != limit)
drivers/net/ethernet/sun/cassini.c
1904
cas_tx_ringN(cp, ring, limit);
drivers/net/ethernet/sun/cassini.c
2137
static void cas_post_page(struct cas *cp, const int ring, const int index)
drivers/net/ethernet/sun/cassini.c
2142
entry = cp->rx_old[ring];
drivers/net/ethernet/sun/cassini.c
2144
new = cas_page_swap(cp, ring, index);
drivers/net/ethernet/sun/cassini.c
2145
cp->init_rxds[ring][entry].buffer = cpu_to_le64(new->dma_addr);
drivers/net/ethernet/sun/cassini.c
2146
cp->init_rxds[ring][entry].index =
drivers/net/ethernet/sun/cassini.c
2148
CAS_BASE(RX_INDEX_RING, ring));
drivers/net/ethernet/sun/cassini.c
2150
entry = RX_DESC_ENTRY(ring, entry + 1);
drivers/net/ethernet/sun/cassini.c
2151
cp->rx_old[ring] = entry;
drivers/net/ethernet/sun/cassini.c
2156
if (ring == 0)
drivers/net/ethernet/sun/cassini.c
2165
static int cas_post_rxds_ringN(struct cas *cp, int ring, int num)
drivers/net/ethernet/sun/cassini.c
2169
cas_page_t **page = cp->rx_pages[ring];
drivers/net/ethernet/sun/cassini.c
2171
entry = cp->rx_old[ring];
drivers/net/ethernet/sun/cassini.c
2174
"rxd[%d] interrupt, done: %d\n", ring, entry);
drivers/net/ethernet/sun/cassini.c
2178
last = RX_DESC_ENTRY(ring, num ? entry + num - 4: entry - 4);
drivers/net/ethernet/sun/cassini.c
2188
cp->cas_flags |= CAS_FLAG_RXD_POST(ring);
drivers/net/ethernet/sun/cassini.c
2192
cp->rx_old[ring] = entry;
drivers/net/ethernet/sun/cassini.c
2193
cp->rx_last[ring] = num ? num - released : 0;
drivers/net/ethernet/sun/cassini.c
2199
cp->init_rxds[ring][entry].buffer =
drivers/net/ethernet/sun/cassini.c
2210
entry = RX_DESC_ENTRY(ring, entry + 1);
drivers/net/ethernet/sun/cassini.c
2212
cp->rx_old[ring] = entry;
drivers/net/ethernet/sun/cassini.c
2217
if (ring == 0)
drivers/net/ethernet/sun/cassini.c
2238
static int cas_rx_ringN(struct cas *cp, int ring, int budget)
drivers/net/ethernet/sun/cassini.c
2240
struct cas_rx_comp *rxcs = cp->init_rxcs[ring];
drivers/net/ethernet/sun/cassini.c
2246
ring,
drivers/net/ethernet/sun/cassini.c
2247
readl(cp->regs + REG_RX_COMP_HEAD), cp->rx_new[ring]);
drivers/net/ethernet/sun/cassini.c
2249
entry = cp->rx_new[ring];
drivers/net/ethernet/sun/cassini.c
2275
spin_lock(&cp->stat_lock[ring]);
drivers/net/ethernet/sun/cassini.c
2276
cp->net_stats[ring].rx_errors++;
drivers/net/ethernet/sun/cassini.c
2278
cp->net_stats[ring].rx_length_errors++;
drivers/net/ethernet/sun/cassini.c
2280
cp->net_stats[ring].rx_crc_errors++;
drivers/net/ethernet/sun/cassini.c
2281
spin_unlock(&cp->stat_lock[ring]);
drivers/net/ethernet/sun/cassini.c
2285
spin_lock(&cp->stat_lock[ring]);
drivers/net/ethernet/sun/cassini.c
2286
++cp->net_stats[ring].rx_dropped;
drivers/net/ethernet/sun/cassini.c
2287
spin_unlock(&cp->stat_lock[ring]);
drivers/net/ethernet/sun/cassini.c
2307
spin_lock(&cp->stat_lock[ring]);
drivers/net/ethernet/sun/cassini.c
2308
cp->net_stats[ring].rx_packets++;
drivers/net/ethernet/sun/cassini.c
2309
cp->net_stats[ring].rx_bytes += len;
drivers/net/ethernet/sun/cassini.c
2310
spin_unlock(&cp->stat_lock[ring]);
drivers/net/ethernet/sun/cassini.c
2338
entry = RX_COMP_ENTRY(ring, entry + 1 +
drivers/net/ethernet/sun/cassini.c
2345
cp->rx_new[ring] = entry;
drivers/net/ethernet/sun/cassini.c
2355
struct cas *cp, int ring)
drivers/net/ethernet/sun/cassini.c
2357
struct cas_rx_comp *rxc = cp->init_rxcs[ring];
drivers/net/ethernet/sun/cassini.c
2360
last = cp->rx_cur[ring];
drivers/net/ethernet/sun/cassini.c
2361
entry = cp->rx_new[ring];
drivers/net/ethernet/sun/cassini.c
2364
ring, readl(cp->regs + REG_RX_COMP_HEAD), entry);
drivers/net/ethernet/sun/cassini.c
2369
last = RX_COMP_ENTRY(ring, last + 1);
drivers/net/ethernet/sun/cassini.c
2371
cp->rx_cur[ring] = last;
drivers/net/ethernet/sun/cassini.c
2373
if (ring == 0)
drivers/net/ethernet/sun/cassini.c
2376
writel(last, cp->regs + REG_PLUS_RX_COMPN_TAIL(ring));
drivers/net/ethernet/sun/cassini.c
2387
const int ring)
drivers/net/ethernet/sun/cassini.c
2390
cas_post_rxcs_ringN(dev, cp, ring);
drivers/net/ethernet/sun/cassini.c
2398
int ring = (irq == cp->pci_irq_INTC) ? 2 : 3;
drivers/net/ethernet/sun/cassini.c
2399
u32 status = readl(cp->regs + REG_PLUS_INTRN_STATUS(ring));
drivers/net/ethernet/sun/cassini.c
2411
cas_rx_ringN(cp, ring, 0);
drivers/net/ethernet/sun/cassini.c
2417
cas_handle_irqN(dev, cp, status, ring);
drivers/net/ethernet/sun/cassini.c
2675
static inline int cas_intme(int ring, int entry)
drivers/net/ethernet/sun/cassini.c
2678
if (!(entry & ((TX_DESC_RINGN_SIZE(ring) >> 1) - 1)))
drivers/net/ethernet/sun/cassini.c
2684
static void cas_write_txd(struct cas *cp, int ring, int entry,
drivers/net/ethernet/sun/cassini.c
2687
struct cas_tx_desc *txd = cp->init_txds[ring] + entry;
drivers/net/ethernet/sun/cassini.c
2690
if (cas_intme(ring, entry))
drivers/net/ethernet/sun/cassini.c
2698
static inline void *tx_tiny_buf(struct cas *cp, const int ring,
drivers/net/ethernet/sun/cassini.c
270
static void cas_disable_irq(struct cas *cp, const int ring)
drivers/net/ethernet/sun/cassini.c
2701
return cp->tx_tiny_bufs[ring] + TX_TINY_BUF_LEN*entry;
drivers/net/ethernet/sun/cassini.c
2704
static inline dma_addr_t tx_tiny_map(struct cas *cp, const int ring,
drivers/net/ethernet/sun/cassini.c
2707
cp->tx_tiny_use[ring][tentry].nbufs++;
drivers/net/ethernet/sun/cassini.c
2708
cp->tx_tiny_use[ring][entry].used = 1;
drivers/net/ethernet/sun/cassini.c
2709
return cp->tx_tiny_dvma[ring] + TX_TINY_BUF_LEN*entry;
drivers/net/ethernet/sun/cassini.c
2712
static inline int cas_xmit_tx_ringN(struct cas *cp, int ring,
drivers/net/ethernet/sun/cassini.c
2722
spin_lock_irqsave(&cp->tx_lock[ring], flags);
drivers/net/ethernet/sun/cassini.c
2725
if (TX_BUFFS_AVAIL(cp, ring) <=
drivers/net/ethernet/sun/cassini.c
2728
spin_unlock_irqrestore(&cp->tx_lock[ring], flags);
drivers/net/ethernet/sun/cassini.c
273
if (ring == 0) {
drivers/net/ethernet/sun/cassini.c
2743
entry = cp->tx_new[ring];
drivers/net/ethernet/sun/cassini.c
2744
cp->tx_skbs[ring][entry] = skb;
drivers/net/ethernet/sun/cassini.c
2755
cas_write_txd(cp, ring, entry, mapping, len - tabort,
drivers/net/ethernet/sun/cassini.c
2757
entry = TX_DESC_NEXT(ring, entry);
drivers/net/ethernet/sun/cassini.c
2760
tx_tiny_buf(cp, ring, entry), tabort);
drivers/net/ethernet/sun/cassini.c
2761
mapping = tx_tiny_map(cp, ring, entry, tentry);
drivers/net/ethernet/sun/cassini.c
2762
cas_write_txd(cp, ring, entry, mapping, tabort, ctrl,
drivers/net/ethernet/sun/cassini.c
2765
cas_write_txd(cp, ring, entry, mapping, len, ctrl |
drivers/net/ethernet/sun/cassini.c
2768
entry = TX_DESC_NEXT(ring, entry);
drivers/net/ethernet/sun/cassini.c
2780
cas_write_txd(cp, ring, entry, mapping, len - tabort,
drivers/net/ethernet/sun/cassini.c
2782
entry = TX_DESC_NEXT(ring, entry);
drivers/net/ethernet/sun/cassini.c
2783
memcpy_from_page(tx_tiny_buf(cp, ring, entry),
drivers/net/ethernet/sun/cassini.c
2787
mapping = tx_tiny_map(cp, ring, entry, tentry);
drivers/net/ethernet/sun/cassini.c
2791
cas_write_txd(cp, ring, entry, mapping, len, ctrl,
drivers/net/ethernet/sun/cassini.c
2793
entry = TX_DESC_NEXT(ring, entry);
drivers/net/ethernet/sun/cassini.c
2796
cp->tx_new[ring] = entry;
drivers/net/ethernet/sun/cassini.c
2797
if (TX_BUFFS_AVAIL(cp, ring) <= CAS_TABORT(cp)*(MAX_SKB_FRAGS + 1))
drivers/net/ethernet/sun/cassini.c
280
switch (ring) {
drivers/net/ethernet/sun/cassini.c
2802
ring, entry, skb->len, TX_BUFFS_AVAIL(cp, ring));
drivers/net/ethernet/sun/cassini.c
2803
writel(entry, cp->regs + REG_TX_KICKN(ring));
drivers/net/ethernet/sun/cassini.c
2804
spin_unlock_irqrestore(&cp->tx_lock[ring], flags);
drivers/net/ethernet/sun/cassini.c
2815
static int ring;
drivers/net/ethernet/sun/cassini.c
2823
if (cas_xmit_tx_ringN(cp, ring++ & N_TX_RINGS_MASK, skb))
drivers/net/ethernet/sun/cassini.c
292
cp->regs + REG_PLUS_INTRN_MASK(ring));
drivers/net/ethernet/sun/cassini.c
297
REG_PLUS_INTRN_MASK(ring));
drivers/net/ethernet/sun/cassini.c
311
static void cas_enable_irq(struct cas *cp, const int ring)
drivers/net/ethernet/sun/cassini.c
313
if (ring == 0) { /* all but TX_DONE */
drivers/net/ethernet/sun/cassini.c
319
switch (ring) {
drivers/net/ethernet/sun/cassini.c
331
REG_PLUS_INTRN_MASK(ring));
drivers/net/ethernet/sun/cassini.c
3831
static void cas_clean_txd(struct cas *cp, int ring)
drivers/net/ethernet/sun/cassini.c
3833
struct cas_tx_desc *txd = cp->init_txds[ring];
drivers/net/ethernet/sun/cassini.c
3834
struct sk_buff *skb, **skbs = cp->tx_skbs[ring];
drivers/net/ethernet/sun/cassini.c
3838
size = TX_DESC_RINGN_SIZE(ring);
drivers/net/ethernet/sun/cassini.c
3867
if (cp->tx_tiny_use[ring][ent].used)
drivers/net/ethernet/sun/cassini.c
3875
memset(cp->tx_tiny_use[ring], 0, size*sizeof(*cp->tx_tiny_use[ring]));
drivers/net/ethernet/sun/cassini.c
3879
static inline void cas_free_rx_desc(struct cas *cp, int ring)
drivers/net/ethernet/sun/cassini.c
3881
cas_page_t **page = cp->rx_pages[ring];
drivers/net/ethernet/sun/cassini.c
3884
size = RX_DESC_RINGN_SIZE(ring);
drivers/net/ethernet/sun/cassini.c
3919
static inline int cas_alloc_rx_desc(struct cas *cp, int ring)
drivers/net/ethernet/sun/cassini.c
3921
cas_page_t **page = cp->rx_pages[ring];
drivers/net/ethernet/sun/cassini.c
3924
size = RX_DESC_RINGN_SIZE(ring);
drivers/net/ethernet/synopsys/dwc-xlgmac-common.c
283
struct xlgmac_ring *ring,
drivers/net/ethernet/synopsys/dwc-xlgmac-common.c
292
desc_data = XLGMAC_GET_DESC_DATA(ring, idx);
drivers/net/ethernet/synopsys/dwc-xlgmac-common.c
310
struct xlgmac_ring *ring,
drivers/net/ethernet/synopsys/dwc-xlgmac-common.c
316
desc_data = XLGMAC_GET_DESC_DATA(ring, idx);
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
100
ring->rx_hdr_pa.pages_offset = 0;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
101
ring->rx_hdr_pa.pages_dma = 0;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
104
if (ring->rx_buf_pa.pages) {
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
105
dma_unmap_page(pdata->dev, ring->rx_buf_pa.pages_dma,
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
106
ring->rx_buf_pa.pages_len, DMA_FROM_DEVICE);
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
107
put_page(ring->rx_buf_pa.pages);
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
109
ring->rx_buf_pa.pages = NULL;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
110
ring->rx_buf_pa.pages_len = 0;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
111
ring->rx_buf_pa.pages_offset = 0;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
112
ring->rx_buf_pa.pages_dma = 0;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
115
if (ring->dma_desc_head) {
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
118
ring->dma_desc_count),
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
119
ring->dma_desc_head,
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
120
ring->dma_desc_head_addr);
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
121
ring->dma_desc_head = NULL;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
126
struct xlgmac_ring *ring,
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
129
if (!ring)
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
133
ring->dma_desc_count = dma_desc_count;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
134
ring->dma_desc_head = dma_alloc_coherent(pdata->dev,
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
137
&ring->dma_desc_head_addr,
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
139
if (!ring->dma_desc_head)
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
143
ring->desc_data_head = kzalloc_objs(struct xlgmac_desc_data,
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
145
if (!ring->desc_data_head)
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
150
ring->dma_desc_head,
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
151
&ring->dma_desc_head_addr,
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
152
ring->desc_data_head);
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
386
struct xlgmac_ring *ring,
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
391
if (!ring->rx_hdr_pa.pages) {
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
392
ret = xlgmac_alloc_pages(pdata, &ring->rx_hdr_pa,
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
398
if (!ring->rx_buf_pa.pages) {
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
400
ret = xlgmac_alloc_pages(pdata, &ring->rx_buf_pa,
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
407
xlgmac_set_buffer_data(&desc_data->rx.hdr, &ring->rx_hdr_pa,
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
411
xlgmac_set_buffer_data(&desc_data->rx.buf, &ring->rx_buf_pa,
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
423
struct xlgmac_ring *ring;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
429
ring = channel->tx_ring;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
430
if (!ring)
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
433
dma_desc = ring->dma_desc_head;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
434
dma_desc_addr = ring->dma_desc_head_addr;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
436
for (j = 0; j < ring->dma_desc_count; j++) {
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
437
desc_data = XLGMAC_GET_DESC_DATA(ring, j);
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
446
ring->cur = 0;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
447
ring->dirty = 0;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
448
memset(&ring->tx, 0, sizeof(ring->tx));
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
460
struct xlgmac_ring *ring;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
466
ring = channel->rx_ring;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
467
if (!ring)
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
470
dma_desc = ring->dma_desc_head;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
471
dma_desc_addr = ring->dma_desc_head_addr;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
473
for (j = 0; j < ring->dma_desc_count; j++) {
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
474
desc_data = XLGMAC_GET_DESC_DATA(ring, j);
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
479
if (xlgmac_map_rx_buffer(pdata, ring, desc_data))
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
486
ring->cur = 0;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
487
ring->dirty = 0;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
497
struct xlgmac_ring *ring = channel->tx_ring;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
508
start_index = ring->cur;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
509
cur_index = ring->cur;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
511
pkt_info = &ring->pkt_info;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
523
if ((tso && (pkt_info->mss != ring->tx.cur_mss)) ||
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
524
(vlan && (pkt_info->vlan_ctag != ring->tx.cur_vlan_ctag)))
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
526
desc_data = XLGMAC_GET_DESC_DATA(ring, cur_index);
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
547
desc_data = XLGMAC_GET_DESC_DATA(ring, cur_index);
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
572
desc_data = XLGMAC_GET_DESC_DATA(ring, cur_index);
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
606
desc_data = XLGMAC_GET_DESC_DATA(ring, cur_index);
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
614
desc_data = XLGMAC_GET_DESC_DATA(ring, cur_index - 1);
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
624
desc_data = XLGMAC_GET_DESC_DATA(ring, start_index++);
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
75
struct xlgmac_ring *ring)
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
80
if (!ring)
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
83
if (ring->desc_data_head) {
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
84
for (i = 0; i < ring->dma_desc_count; i++) {
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
85
desc_data = XLGMAC_GET_DESC_DATA(ring, i);
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
89
kfree(ring->desc_data_head);
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
90
ring->desc_data_head = NULL;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
93
if (ring->rx_hdr_pa.pages) {
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
94
dma_unmap_page(pdata->dev, ring->rx_hdr_pa.pages_dma,
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
95
ring->rx_hdr_pa.pages_len, DMA_FROM_DEVICE);
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
96
put_page(ring->rx_hdr_pa.pages);
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
98
ring->rx_hdr_pa.pages = NULL;
drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c
99
ring->rx_hdr_pa.pages_len = 0;
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
1001
xlgmac_tx_start_xmit(channel, ring);
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
1003
ring->tx.xmit_more = 1;
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
1006
channel->name, start_index & (ring->dma_desc_count - 1),
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
1007
(ring->cur - 1) & (ring->dma_desc_count - 1));
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
1058
struct xlgmac_ring *ring = channel->tx_ring;
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
1060
int start_index = ring->cur;
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
1064
for (i = 0; i < ring->dma_desc_count; i++) {
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
1065
desc_data = XLGMAC_GET_DESC_DATA(ring, i);
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
1072
writel(ring->dma_desc_count - 1, XLGMAC_DMA_REG(channel, DMA_CH_TDRLR));
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
1075
desc_data = XLGMAC_GET_DESC_DATA(ring, start_index);
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
1142
struct xlgmac_ring *ring = channel->rx_ring;
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
1143
unsigned int start_index = ring->cur;
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
1148
for (i = 0; i < ring->dma_desc_count; i++) {
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
1149
desc_data = XLGMAC_GET_DESC_DATA(ring, i);
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
1156
writel(ring->dma_desc_count - 1, XLGMAC_DMA_REG(channel, DMA_CH_RDRLR));
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
1159
desc_data = XLGMAC_GET_DESC_DATA(ring, start_index);
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
1166
desc_data = XLGMAC_GET_DESC_DATA(ring, start_index +
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
1167
ring->dma_desc_count - 1);
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
2641
struct xlgmac_ring *ring = channel->rx_ring;
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
2648
desc_data = XLGMAC_GET_DESC_DATA(ring, ring->cur);
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
2650
pkt_info = &ring->pkt_info;
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
2662
xlgmac_dump_rx_desc(pdata, ring, ring->cur);
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
2811
ring->cur & (ring->dma_desc_count - 1), ring->cur);
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
677
struct xlgmac_ring *ring)
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
688
desc_data = XLGMAC_GET_DESC_DATA(ring, ring->cur);
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
699
ring->tx.xmit_more = 0;
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
705
struct xlgmac_ring *ring = channel->tx_ring;
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
711
int start_index = ring->cur;
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
712
int cur_index = ring->cur;
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
716
pkt_info = &ring->pkt_info;
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
727
if (tso && (pkt_info->mss != ring->tx.cur_mss))
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
732
if (vlan && (pkt_info->vlan_ctag != ring->tx.cur_vlan_ctag))
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
747
ring->coalesce_count += pkt_info->tx_packets;
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
752
else if ((ring->coalesce_count % pdata->tx_frames) <
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
758
desc_data = XLGMAC_GET_DESC_DATA(ring, cur_index);
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
789
ring->tx.cur_mss = pkt_info->mss;
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
818
ring->tx.cur_vlan_ctag = pkt_info->vlan_ctag;
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
822
desc_data = XLGMAC_GET_DESC_DATA(ring, cur_index);
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
922
desc_data = XLGMAC_GET_DESC_DATA(ring, cur_index);
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
983
desc_data = XLGMAC_GET_DESC_DATA(ring, start_index);
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
991
xlgmac_dump_tx_desc(pdata, ring, start_index,
drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c
997
ring->cur = cur_index + 1;
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
1037
struct xlgmac_ring *ring = channel->tx_ring;
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
1052
if (!ring)
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
1055
cur = ring->cur;
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
1063
(ring->dirty != cur)) {
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
1064
desc_data = XLGMAC_GET_DESC_DATA(ring, ring->dirty);
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
1076
xlgmac_dump_tx_desc(pdata, ring, ring->dirty, 1, 0);
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
1088
ring->dirty++;
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
1096
if ((ring->tx.queue_stopped == 1) &&
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
1097
(xlgmac_tx_avail_desc(ring) > XLGMAC_TX_DESC_MIN_FREE)) {
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
1098
ring->tx.queue_stopped = 0;
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
1110
struct xlgmac_ring *ring = channel->rx_ring;
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
1126
if (!ring)
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
1134
desc_data = XLGMAC_GET_DESC_DATA(ring, ring->cur);
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
1135
pkt_info = &ring->pkt_info;
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
115
struct xlgmac_ring *ring,
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
1150
desc_data = XLGMAC_GET_DESC_DATA(ring, ring->cur);
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
1152
if (xlgmac_rx_dirty_desc(ring) > XLGMAC_RX_DESC_MAX_DIRTY)
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
1159
ring->cur++;
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
1268
desc_data = XLGMAC_GET_DESC_DATA(ring, ring->cur);
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
134
if (skb_shinfo(skb)->gso_size != ring->tx.cur_mss) {
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
161
if (skb_vlan_tag_get(skb) != ring->tx.cur_vlan_ctag)
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
28
static inline unsigned int xlgmac_tx_avail_desc(struct xlgmac_ring *ring)
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
30
return (ring->dma_desc_count - (ring->cur - ring->dirty));
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
33
static inline unsigned int xlgmac_rx_dirty_desc(struct xlgmac_ring *ring)
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
35
return (ring->cur - ring->dirty);
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
40
struct xlgmac_ring *ring,
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
45
if (count > xlgmac_tx_avail_desc(ring)) {
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
49
ring->tx.queue_stopped = 1;
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
524
struct xlgmac_ring *ring;
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
529
ring = channel->tx_ring;
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
530
if (!ring)
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
533
for (j = 0; j < ring->dma_desc_count; j++) {
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
534
desc_data = XLGMAC_GET_DESC_DATA(ring, j);
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
54
if (ring->tx.xmit_more)
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
545
struct xlgmac_ring *ring;
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
55
pdata->hw_ops.tx_start_xmit(channel, ring);
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
550
ring = channel->rx_ring;
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
551
if (!ring)
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
554
for (j = 0; j < ring->dma_desc_count; j++) {
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
555
desc_data = XLGMAC_GET_DESC_DATA(ring, j);
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
708
struct xlgmac_ring *ring;
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
718
ring = channel->tx_ring;
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
719
tx_pkt_info = &ring->pkt_info;
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
730
xlgmac_prep_tx_pkt(pdata, ring, skb, tx_pkt_info);
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
733
ret = xlgmac_maybe_stop_tx_queue(channel, ring,
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
762
xlgmac_maybe_stop_tx_queue(channel, ring, XLGMAC_TX_MAX_DESC_NR);
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
954
struct xlgmac_ring *ring = channel->rx_ring;
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
962
while (ring->dirty != ring->cur) {
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
963
desc_data = XLGMAC_GET_DESC_DATA(ring, ring->dirty);
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
968
if (desc_ops->map_rx_buffer(pdata, ring, desc_data))
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
971
hw_ops->rx_desc_reset(pdata, desc_data, ring->dirty);
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
973
ring->dirty++;
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
982
desc_data = XLGMAC_GET_DESC_DATA(ring, ring->dirty - 1);
drivers/net/ethernet/synopsys/dwc-xlgmac.h
388
struct xlgmac_ring *ring,
drivers/net/ethernet/synopsys/dwc-xlgmac.h
435
struct xlgmac_ring *ring);
drivers/net/ethernet/synopsys/dwc-xlgmac.h
638
struct xlgmac_ring *ring,
drivers/net/ethernet/synopsys/dwc-xlgmac.h
643
struct xlgmac_ring *ring,
drivers/net/ethernet/synopsys/dwc-xlgmac.h
95
#define XLGMAC_GET_DESC_DATA(ring, idx) ({ \
drivers/net/ethernet/synopsys/dwc-xlgmac.h
96
typeof(ring) _ring = (ring); \
drivers/net/ethernet/tehuti/tehuti.c
2244
bdx_get_ringparam(struct net_device *netdev, struct ethtool_ringparam *ring,
drivers/net/ethernet/tehuti/tehuti.c
2251
ring->rx_max_pending = bdx_rx_fifo_size_to_packets(3);
drivers/net/ethernet/tehuti/tehuti.c
2252
ring->tx_max_pending = bdx_tx_fifo_size_to_packets(3);
drivers/net/ethernet/tehuti/tehuti.c
2253
ring->rx_pending = bdx_rx_fifo_size_to_packets(priv->rxf_size);
drivers/net/ethernet/tehuti/tehuti.c
2254
ring->tx_pending = bdx_tx_fifo_size_to_packets(priv->txd_size);
drivers/net/ethernet/tehuti/tehuti.c
2265
bdx_set_ringparam(struct net_device *netdev, struct ethtool_ringparam *ring,
drivers/net/ethernet/tehuti/tehuti.c
2274
if (bdx_rx_fifo_size_to_packets(rx_size) >= ring->rx_pending)
drivers/net/ethernet/tehuti/tehuti.c
2281
if (bdx_tx_fifo_size_to_packets(tx_size) >= ring->tx_pending)
drivers/net/ethernet/via/via-rhine.c
1142
void *ring;
drivers/net/ethernet/via/via-rhine.c
1145
ring = dma_alloc_coherent(hwdev,
drivers/net/ethernet/via/via-rhine.c
1150
if (!ring) {
drivers/net/ethernet/via/via-rhine.c
1163
ring, ring_dma);
drivers/net/ethernet/via/via-rhine.c
1168
rp->rx_ring = ring;
drivers/net/ethernet/via/via-rhine.c
1169
rp->tx_ring = ring + RX_RING_SIZE * sizeof(struct rx_desc);
drivers/net/ethernet/via/via-velocity.c
1464
vptr->rx.ring[dirty].rdesc0.len |= OWNED_BY_NIC;
drivers/net/ethernet/via/via-velocity.c
1501
vptr->rx.ring = pool;
drivers/net/ethernet/via/via-velocity.c
1534
struct rx_desc *rd = &(vptr->rx.ring[idx]);
drivers/net/ethernet/via/via-velocity.c
1567
struct rx_desc *rd = vptr->rx.ring + dirty;
drivers/net/ethernet/via/via-velocity.c
1605
struct rx_desc *rd = vptr->rx.ring + i;
drivers/net/ethernet/via/via-velocity.c
1691
dma_free_coherent(vptr->dev, size, vptr->rx.ring, vptr->rx.pool_dma);
drivers/net/ethernet/via/via-velocity.c
2050
struct rx_desc *rd = &(vptr->rx.ring[idx]);
drivers/net/ethernet/via/via-velocity.c
2113
struct rx_desc *rd = vptr->rx.ring + rd_curr;
drivers/net/ethernet/via/via-velocity.c
577
vptr->rx.ring[i].rdesc0.len |= OWNED_BY_NIC;
drivers/net/ethernet/via/via-velocity.h
1414
struct rx_desc *ring;
drivers/net/ethernet/wangxun/libwx/wx_ethtool.c
128
struct wx_ring *ring;
drivers/net/ethernet/wangxun/libwx/wx_ethtool.c
156
ring = wx->tx_ring[j];
drivers/net/ethernet/wangxun/libwx/wx_ethtool.c
157
if (!ring) {
drivers/net/ethernet/wangxun/libwx/wx_ethtool.c
164
start = u64_stats_fetch_begin(&ring->syncp);
drivers/net/ethernet/wangxun/libwx/wx_ethtool.c
165
data[i] = ring->stats.packets;
drivers/net/ethernet/wangxun/libwx/wx_ethtool.c
166
data[i + 1] = ring->stats.bytes;
drivers/net/ethernet/wangxun/libwx/wx_ethtool.c
167
} while (u64_stats_fetch_retry(&ring->syncp, start));
drivers/net/ethernet/wangxun/libwx/wx_ethtool.c
171
ring = wx->rx_ring[j];
drivers/net/ethernet/wangxun/libwx/wx_ethtool.c
172
if (!ring) {
drivers/net/ethernet/wangxun/libwx/wx_ethtool.c
179
start = u64_stats_fetch_begin(&ring->syncp);
drivers/net/ethernet/wangxun/libwx/wx_ethtool.c
180
data[i] = ring->stats.packets;
drivers/net/ethernet/wangxun/libwx/wx_ethtool.c
181
data[i + 1] = ring->stats.bytes;
drivers/net/ethernet/wangxun/libwx/wx_ethtool.c
182
} while (u64_stats_fetch_retry(&ring->syncp, start));
drivers/net/ethernet/wangxun/libwx/wx_ethtool.c
284
struct ethtool_ringparam *ring,
drivers/net/ethernet/wangxun/libwx/wx_ethtool.c
290
ring->rx_max_pending = WX_MAX_RXD;
drivers/net/ethernet/wangxun/libwx/wx_ethtool.c
291
ring->tx_max_pending = WX_MAX_TXD;
drivers/net/ethernet/wangxun/libwx/wx_ethtool.c
292
ring->rx_mini_max_pending = 0;
drivers/net/ethernet/wangxun/libwx/wx_ethtool.c
293
ring->rx_jumbo_max_pending = 0;
drivers/net/ethernet/wangxun/libwx/wx_ethtool.c
294
ring->rx_pending = wx->rx_ring_count;
drivers/net/ethernet/wangxun/libwx/wx_ethtool.c
295
ring->tx_pending = wx->tx_ring_count;
drivers/net/ethernet/wangxun/libwx/wx_ethtool.c
296
ring->rx_mini_pending = 0;
drivers/net/ethernet/wangxun/libwx/wx_ethtool.c
297
ring->rx_jumbo_pending = 0;
drivers/net/ethernet/wangxun/libwx/wx_ethtool.h
26
struct ethtool_ringparam *ring,
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1602
struct wx_ring *ring = wx->rx_ring[i];
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1604
j = ring->reg_idx;
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1828
void wx_disable_rx_queue(struct wx *wx, struct wx_ring *ring)
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1830
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1851
void wx_enable_rx_queue(struct wx *wx, struct wx_ring *ring)
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1853
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1889
struct wx_ring *ring)
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1891
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1905
struct wx_ring *ring)
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1908
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1909
u64 tdba = ring->dma;
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1922
ring->tail = wx->hw_addr + WX_PX_TR_WP(reg_idx);
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1924
if (ring->count < WX_MAX_TXD)
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1925
txdctl |= ring->count / 128 << WX_PX_TR_CFG_TR_SIZE_SHIFT;
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1928
ring->atr_count = 0;
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1931
ring->atr_sample_rate = wx->atr_sample_rate;
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1933
ring->atr_sample_rate = 0;
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1936
memset(ring->tx_buffer_info, 0,
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1937
sizeof(struct wx_tx_buffer) * ring->count);
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1939
if (ring->headwb_mem) {
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1941
ring->headwb_dma & DMA_BIT_MASK(32));
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1943
upper_32_bits(ring->headwb_dma));
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1959
struct wx_ring *ring)
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1961
u16 reg_idx = ring->reg_idx;
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1962
u64 rdba = ring->dma;
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1967
wx_disable_rx_queue(wx, ring);
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1972
if (ring->count == WX_MAX_RXD)
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1975
rxdctl |= (ring->count / 128) << WX_PX_RR_CFG_RR_SIZE_SHIFT;
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1987
ring->tail = wx->hw_addr + WX_PX_RR_WP(reg_idx);
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1989
wx_configure_srrctl(wx, ring);
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1990
wx_configure_rscctl(wx, ring);
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1993
memset(ring->rx_buffer_info, 0,
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1994
sizeof(struct wx_rx_buffer) * ring->count);
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1997
ring->next_to_clean = 0;
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1998
ring->next_to_use = 0;
drivers/net/ethernet/wangxun/libwx/wx_hw.c
2004
wx_enable_rx_queue(wx, ring);
drivers/net/ethernet/wangxun/libwx/wx_hw.c
2005
wx_alloc_rx_buffers(ring, wx_desc_unused(ring));
drivers/net/ethernet/wangxun/libwx/wx_hw.c
2738
static void wx_enable_rx_drop(struct wx *wx, struct wx_ring *ring)
drivers/net/ethernet/wangxun/libwx/wx_hw.c
2740
u16 reg_idx = ring->reg_idx;
drivers/net/ethernet/wangxun/libwx/wx_hw.c
2749
static void wx_disable_rx_drop(struct wx *wx, struct wx_ring *ring)
drivers/net/ethernet/wangxun/libwx/wx_hw.c
2751
u16 reg_idx = ring->reg_idx;
drivers/net/ethernet/wangxun/libwx/wx_hw.h
40
void wx_disable_rx_queue(struct wx *wx, struct wx_ring *ring);
drivers/net/ethernet/wangxun/libwx/wx_hw.h
41
void wx_enable_rx_queue(struct wx *wx, struct wx_ring *ring);
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2067
static void wx_add_ring(struct wx_ring *ring, struct wx_ring_container *head)
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2069
ring->next = head->ring;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2070
head->ring = ring;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2093
struct wx_ring *ring;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2098
q_vector = kzalloc_flex(*q_vector, ring, ring_count);
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2114
ring = q_vector->ring;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2139
ring->dev = &wx->pdev->dev;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2140
ring->netdev = wx->netdev;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2143
ring->q_vector = q_vector;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2146
wx_add_ring(ring, &q_vector->tx);
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2149
ring->count = wx->tx_ring_count;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2151
ring->queue_index = txr_idx;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2154
wx->tx_ring[txr_idx] = ring;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2161
ring++;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2166
ring->dev = &wx->pdev->dev;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2167
ring->netdev = wx->netdev;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2170
ring->q_vector = q_vector;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2173
wx_add_ring(ring, &q_vector->rx);
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2176
ring->count = wx->rx_ring_count;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2177
ring->queue_index = rxr_idx;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2180
wx->rx_ring[rxr_idx] = ring;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2187
ring++;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2205
struct wx_ring *ring;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2207
wx_for_each_ring(ring, q_vector->tx)
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2208
wx->tx_ring[ring->queue_index] = NULL;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2210
wx_for_each_ring(ring, q_vector->rx)
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2211
wx->rx_ring[ring->queue_index] = NULL;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2353
if (q_vector->rx.ring || q_vector->tx.ring) {
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2378
if (!q_vector->rx.ring && !q_vector->tx.ring)
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2547
struct wx_ring *ring;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2549
wx_for_each_ring(ring, q_vector->rx)
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2550
wx_set_ivar(wx, 0, ring->reg_idx, v_idx);
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2552
wx_for_each_ring(ring, q_vector->tx)
drivers/net/ethernet/wangxun/libwx/wx_lib.c
2553
wx_set_ivar(wx, 1, ring->reg_idx, v_idx);
drivers/net/ethernet/wangxun/libwx/wx_lib.c
3058
struct wx_ring *ring = READ_ONCE(wx->rx_ring[i]);
drivers/net/ethernet/wangxun/libwx/wx_lib.c
3062
if (ring) {
drivers/net/ethernet/wangxun/libwx/wx_lib.c
3064
start = u64_stats_fetch_begin(&ring->syncp);
drivers/net/ethernet/wangxun/libwx/wx_lib.c
3065
packets = ring->stats.packets;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
3066
bytes = ring->stats.bytes;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
3067
} while (u64_stats_fetch_retry(&ring->syncp, start));
drivers/net/ethernet/wangxun/libwx/wx_lib.c
3074
struct wx_ring *ring = READ_ONCE(wx->tx_ring[i]);
drivers/net/ethernet/wangxun/libwx/wx_lib.c
3078
if (ring) {
drivers/net/ethernet/wangxun/libwx/wx_lib.c
3080
start = u64_stats_fetch_begin(&ring->syncp);
drivers/net/ethernet/wangxun/libwx/wx_lib.c
3081
packets = ring->stats.packets;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
3082
bytes = ring->stats.bytes;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
3083
} while (u64_stats_fetch_retry(&ring->syncp,
drivers/net/ethernet/wangxun/libwx/wx_lib.c
376
u16 wx_desc_unused(struct wx_ring *ring)
drivers/net/ethernet/wangxun/libwx/wx_lib.c
378
u16 ntc = ring->next_to_clean;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
379
u16 ntu = ring->next_to_use;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
381
return ((ntc > ntu) ? 0 : ring->count) + ntc - ntu - 1;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
506
static void wx_rx_hash(struct wx_ring *ring,
drivers/net/ethernet/wangxun/libwx/wx_lib.c
512
if (!(ring->netdev->features & NETIF_F_RXHASH))
drivers/net/ethernet/wangxun/libwx/wx_lib.c
532
static void wx_rx_checksum(struct wx_ring *ring,
drivers/net/ethernet/wangxun/libwx/wx_lib.c
540
if (!(ring->netdev->features & NETIF_F_RXCSUM))
drivers/net/ethernet/wangxun/libwx/wx_lib.c
548
ring->rx_stats.csum_err++;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
563
ring->rx_stats.csum_err++;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
576
ring->rx_stats.csum_good_cnt++;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
579
static void wx_rx_vlan(struct wx_ring *ring, union wx_rx_desc *rx_desc,
drivers/net/ethernet/wangxun/libwx/wx_lib.c
585
if ((ring->netdev->features &
drivers/net/ethernet/wangxun/libwx/wx_lib.c
590
ethertype = ring->q_vector->wx->tpid[idx];
drivers/net/ethernet/wangxun/libwx/wx_lib.c
596
static void wx_set_rsc_gso_size(struct wx_ring *ring,
drivers/net/ethernet/wangxun/libwx/wx_lib.c
740
static struct netdev_queue *wx_txring_txq(const struct wx_ring *ring)
drivers/net/ethernet/wangxun/libwx/wx_lib.c
742
return netdev_get_tx_queue(ring->netdev, ring->queue_index);
drivers/net/ethernet/wangxun/libwx/wx_lib.c
932
struct wx_ring *ring;
drivers/net/ethernet/wangxun/libwx/wx_lib.c
934
wx_for_each_ring(ring, q_vector->tx) {
drivers/net/ethernet/wangxun/libwx/wx_lib.c
935
if (!wx_clean_tx_irq(q_vector, ring, budget))
drivers/net/ethernet/wangxun/libwx/wx_lib.c
951
wx_for_each_ring(ring, q_vector->rx) {
drivers/net/ethernet/wangxun/libwx/wx_lib.c
952
int cleaned = wx_clean_rx_irq(q_vector, ring, per_ring_budget);
drivers/net/ethernet/wangxun/libwx/wx_lib.h
12
u16 wx_desc_unused(struct wx_ring *ring);
drivers/net/ethernet/wangxun/libwx/wx_type.h
1052
for (posm = (headm).ring; posm; posm = posm->next)
drivers/net/ethernet/wangxun/libwx/wx_type.h
1055
struct wx_ring *ring; /* pointer to linked list of rings */
drivers/net/ethernet/wangxun/libwx/wx_type.h
1125
struct wx_ring ring[] ____cacheline_internodealigned_in_smp;
drivers/net/ethernet/wangxun/libwx/wx_type.h
1372
void (*atr)(struct wx_ring *ring, struct wx_tx_buffer *first, u8 ptype);
drivers/net/ethernet/wangxun/libwx/wx_type.h
1496
static inline unsigned int wx_rx_pg_order(struct wx_ring *ring)
drivers/net/ethernet/wangxun/libwx/wx_type.h
1499
if (ring->rx_buf_len == WX_RXBUFFER_3K)
drivers/net/ethernet/wangxun/libwx/wx_vf_common.c
85
if (q_vector->tx.ring && q_vector->rx.ring)
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
107
static void wx_configure_tx_ring_vf(struct wx *wx, struct wx_ring *ring)
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
109
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
110
u64 tdba = ring->dma;
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
126
ring->tail = wx->hw_addr + WX_VXTDT(reg_idx);
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
129
ring->next_to_clean = 0;
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
130
ring->next_to_use = 0;
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
132
txdctl |= WX_VXTXDCTL_BUFLEN(wx_buf_len(ring->count));
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
135
if (ring->headwb_mem) {
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
137
ring->headwb_dma & DMA_BIT_MASK(32));
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
139
upper_32_bits(ring->headwb_dma));
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
145
memset(ring->tx_buffer_info, 0,
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
146
sizeof(struct wx_tx_buffer) * ring->count);
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
171
static void wx_configure_srrctl_vf(struct wx *wx, struct wx_ring *ring,
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
236
void wx_configure_rx_ring_vf(struct wx *wx, struct wx_ring *ring)
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
238
u8 reg_idx = ring->reg_idx;
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
240
u64 rdba = ring->dma;
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
245
wx_disable_rx_queue(wx, ring);
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
257
ring->tail = wx->hw_addr + WX_VXRDT(reg_idx);
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
260
memset(ring->rx_buffer_info, 0,
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
261
sizeof(struct wx_rx_buffer) * ring->count);
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
264
rx_desc = WX_RX_DESC(ring, 0);
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
268
ring->next_to_clean = 0;
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
269
ring->next_to_use = 0;
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
270
ring->next_to_alloc = 0;
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
272
wx_configure_srrctl_vf(wx, ring, reg_idx);
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
276
rxdctl |= WX_VXRXDCTL_BUFLEN(wx_buf_len(ring->count));
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
290
wx_enable_rx_queue(wx, ring);
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
291
wx_alloc_rx_buffers(ring, wx_desc_unused(ring));
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
59
struct wx_ring *ring;
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
61
wx_for_each_ring(ring, q_vector->rx)
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
62
wx_set_ivar_vf(wx, 0, ring->reg_idx, v_idx);
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
64
wx_for_each_ring(ring, q_vector->tx)
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.c
65
wx_set_ivar_vf(wx, 1, ring->reg_idx, v_idx);
drivers/net/ethernet/wangxun/libwx/wx_vf_lib.h
13
void wx_configure_rx_ring_vf(struct wx *wx, struct wx_ring *ring);
drivers/net/ethernet/wangxun/ngbe/ngbe_ethtool.c
48
struct ethtool_ringparam *ring,
drivers/net/ethernet/wangxun/ngbe/ngbe_ethtool.c
57
new_tx_count = clamp_t(u32, ring->tx_pending, WX_MIN_TXD, WX_MAX_TXD);
drivers/net/ethernet/wangxun/ngbe/ngbe_ethtool.c
60
new_rx_count = clamp_t(u32, ring->rx_pending, WX_MIN_RXD, WX_MAX_RXD);
drivers/net/ethernet/wangxun/ngbe/ngbe_main.c
269
if (q_vector->tx.ring && q_vector->rx.ring)
drivers/net/ethernet/wangxun/txgbe/txgbe_ethtool.c
371
u32 ring = ethtool_get_flow_spec_ring(fsp->ring_cookie);
drivers/net/ethernet/wangxun/txgbe/txgbe_ethtool.c
374
if (!vf && ring >= wx->num_rx_queues)
drivers/net/ethernet/wangxun/txgbe/txgbe_ethtool.c
377
ring >= wx->num_rx_queues_per_pool))
drivers/net/ethernet/wangxun/txgbe/txgbe_ethtool.c
382
queue = wx->rx_ring[ring]->reg_idx;
drivers/net/ethernet/wangxun/txgbe/txgbe_ethtool.c
384
queue = ((vf - 1) * wx->num_rx_queues_per_pool) + ring;
drivers/net/ethernet/wangxun/txgbe/txgbe_ethtool.c
40
struct ethtool_ringparam *ring,
drivers/net/ethernet/wangxun/txgbe/txgbe_ethtool.c
49
new_tx_count = clamp_t(u32, ring->tx_pending, WX_MIN_TXD, WX_MAX_TXD);
drivers/net/ethernet/wangxun/txgbe/txgbe_ethtool.c
52
new_rx_count = clamp_t(u32, ring->rx_pending, WX_MIN_RXD, WX_MAX_RXD);
drivers/net/ethernet/wangxun/txgbe/txgbe_fdir.c
225
void txgbe_atr(struct wx_ring *ring, struct wx_tx_buffer *first, u8 ptype)
drivers/net/ethernet/wangxun/txgbe/txgbe_fdir.c
229
struct wx_q_vector *q_vector = ring->q_vector;
drivers/net/ethernet/wangxun/txgbe/txgbe_fdir.c
242
ring->atr_count++;
drivers/net/ethernet/wangxun/txgbe/txgbe_fdir.c
262
if (!th->syn && ring->atr_count < ring->atr_sample_rate)
drivers/net/ethernet/wangxun/txgbe/txgbe_fdir.c
266
ring->atr_count = 0;
drivers/net/ethernet/wangxun/txgbe/txgbe_fdir.c
304
ring->queue_index);
drivers/net/ethernet/wangxun/txgbe/txgbe_fdir.c
593
u32 ring = ethtool_get_flow_spec_ring(filter->action);
drivers/net/ethernet/wangxun/txgbe/txgbe_fdir.c
595
if (ring >= wx->num_rx_queues) {
drivers/net/ethernet/wangxun/txgbe/txgbe_fdir.c
597
ring);
drivers/net/ethernet/wangxun/txgbe/txgbe_fdir.c
602
queue = wx->rx_ring[ring]->reg_idx;
drivers/net/ethernet/wangxun/txgbe/txgbe_fdir.h
9
void txgbe_atr(struct wx_ring *ring, struct wx_tx_buffer *first, u8 ptype);
drivers/net/ethernet/wangxun/txgbe/txgbe_irq.c
57
if (q_vector->tx.ring && q_vector->rx.ring)
drivers/net/fddi/defza.c
1150
struct fza_ring_cmd __iomem *ring;
drivers/net/fddi/defza.c
1195
ring = fza_cmd_send(dev, FZA_RING_CMD_PARAM);
drivers/net/fddi/defza.c
1197
if (!ring)
drivers/net/fddi/defza.c
1206
stat = readl_u(&ring->stat);
drivers/net/fddi/defza.c
289
struct fza_ring_cmd __iomem *ring = fp->ring_cmd + fp->ring_cmd_index;
drivers/net/fddi/defza.c
301
buf = fp->mmio + readl_u(&ring->buffer);
drivers/net/fddi/defza.c
303
if ((readl_u(&ring->cmd_own) & FZA_RING_OWN_MASK) !=
drivers/net/fddi/defza.c
366
writel_u(FZA_RING_OWN_FZA | command, &ring->cmd_own);
drivers/net/fddi/defza.c
374
return ring;
drivers/net/fddi/defza.c
381
struct fza_ring_cmd __iomem *ring;
drivers/net/fddi/defza.c
388
ring = fza_cmd_send(dev, FZA_RING_CMD_INIT);
drivers/net/fddi/defza.c
390
if (!ring)
drivers/net/fddi/defza.c
403
stat = readl_u(&ring->stat);
drivers/net/fddi/defza.c
414
*init = fp->mmio + readl_u(&ring->buffer);
drivers/net/fjes/fjes_hw.c
125
epbh->ring = (u8 *)(mem + sizeof(union ep_buffer_info));
drivers/net/fjes/fjes_hw.c
137
epbh->ring = NULL;
drivers/net/fjes/fjes_hw.c
942
ring_frame = (struct esmem_frame *)&(epbh->ring[EP_RING_INDEX
drivers/net/fjes/fjes_hw.c
973
ring_frame = (struct esmem_frame *)&(epbh->ring[EP_RING_INDEX
drivers/net/fjes/fjes_hw.h
251
u8 *ring;
drivers/net/hyperv/netvsc_drv.c
1778
struct ethtool_ringparam *ring)
drivers/net/hyperv/netvsc_drv.c
1782
ring->rx_pending = nvdev->recv_section_cnt;
drivers/net/hyperv/netvsc_drv.c
1783
ring->tx_pending = nvdev->send_section_cnt;
drivers/net/hyperv/netvsc_drv.c
1790
ring->rx_max_pending = max_buf_size / nvdev->recv_section_size;
drivers/net/hyperv/netvsc_drv.c
1791
ring->tx_max_pending = NETVSC_SEND_BUFFER_SIZE
drivers/net/hyperv/netvsc_drv.c
1796
struct ethtool_ringparam *ring,
drivers/net/hyperv/netvsc_drv.c
1806
__netvsc_get_ringparam(nvdev, ring);
drivers/net/hyperv/netvsc_drv.c
1810
struct ethtool_ringparam *ring,
drivers/net/hyperv/netvsc_drv.c
1827
new_tx = clamp_t(u32, ring->tx_pending,
drivers/net/hyperv/netvsc_drv.c
1829
new_rx = clamp_t(u32, ring->rx_pending,
drivers/net/ipa/gsi.c
1487
struct gsi_ring *ring = &evt_ring->ring;
drivers/net/ipa/gsi.c
1500
old_index = ring->index;
drivers/net/ipa/gsi.c
1501
event = gsi_ring_virt(ring, old_index);
drivers/net/ipa/gsi.c
1506
event_avail = ring->count - old_index % ring->count;
drivers/net/ipa/gsi.c
1507
event_done = gsi_ring_virt(ring, index);
drivers/net/ipa/gsi.c
1526
event = gsi_ring_virt(ring, 0);
drivers/net/ipa/gsi.c
1534
static int gsi_ring_alloc(struct gsi *gsi, struct gsi_ring *ring, u32 count)
drivers/net/ipa/gsi.c
1544
ring->virt = dma_alloc_coherent(dev, size, &addr, GFP_KERNEL);
drivers/net/ipa/gsi.c
1545
if (!ring->virt)
drivers/net/ipa/gsi.c
1548
ring->addr = addr;
drivers/net/ipa/gsi.c
1549
ring->count = count;
drivers/net/ipa/gsi.c
1550
ring->index = 0;
drivers/net/ipa/gsi.c
1556
static void gsi_ring_free(struct gsi *gsi, struct gsi_ring *ring)
drivers/net/ipa/gsi.c
1558
size_t size = ring->count * GSI_RING_ELEMENT_SIZE;
drivers/net/ipa/gsi.c
1560
dma_free_coherent(gsi->dev, size, ring->virt, ring->addr);
drivers/net/ipa/gsi.c
1607
struct gsi_ring *ring;
drivers/net/ipa/gsi.c
1613
ring = &evt_ring->ring;
drivers/net/ipa/gsi.c
1620
index = gsi_ring_index(ring, ioread32(gsi->virt + offset));
drivers/net/ipa/gsi.c
1621
if (index == ring->index % ring->count)
drivers/net/ipa/gsi.c
1625
trans = gsi_event_trans(gsi, gsi_ring_virt(ring, index - 1));
drivers/net/ipa/gsi.c
2125
ret = gsi_ring_alloc(gsi, &evt_ring->ring, channel->event_count);
drivers/net/ipa/gsi.c
2145
gsi_ring_free(gsi, &evt_ring->ring);
drivers/net/ipa/gsi.c
362
void *gsi_ring_virt(struct gsi_ring *ring, u32 index)
drivers/net/ipa/gsi.c
365
return ring->virt + (index % ring->count) * GSI_RING_ELEMENT_SIZE;
drivers/net/ipa/gsi.c
369
static u32 gsi_ring_addr(struct gsi_ring *ring, u32 index)
drivers/net/ipa/gsi.c
371
return lower_32_bits(ring->addr) + index * GSI_RING_ELEMENT_SIZE;
drivers/net/ipa/gsi.c
375
static u32 gsi_ring_index(struct gsi_ring *ring, u32 offset)
drivers/net/ipa/gsi.c
377
return (offset - gsi_ring_addr(ring, 0)) / GSI_RING_ELEMENT_SIZE;
drivers/net/ipa/gsi.c
705
struct gsi_ring *ring = &gsi->evt_ring[evt_ring_id].ring;
drivers/net/ipa/gsi.c
708
ring->index = index; /* Next unused entry */
drivers/net/ipa/gsi.c
711
val = gsi_ring_addr(ring, (index - 1) % ring->count);
drivers/net/ipa/gsi.c
719
struct gsi_ring *ring = &evt_ring->ring;
drivers/net/ipa/gsi.c
732
val = reg_encode(reg, R_LENGTH, ring->count * GSI_RING_ELEMENT_SIZE);
drivers/net/ipa/gsi.c
740
val = lower_32_bits(ring->addr);
drivers/net/ipa/gsi.c
744
val = upper_32_bits(ring->addr);
drivers/net/ipa/gsi.c
772
gsi_evt_ring_doorbell(gsi, evt_ring_id, ring->index);
drivers/net/ipa/gsi.h
133
struct gsi_ring ring;
drivers/net/ipa/gsi_private.h
111
void *gsi_ring_virt(struct gsi_ring *ring, u32 index);
drivers/net/netdevsim/ethtool.c
229
ns->ethtool.ring.rx_pending = 512;
drivers/net/netdevsim/ethtool.c
230
ns->ethtool.ring.rx_max_pending = 4096;
drivers/net/netdevsim/ethtool.c
231
ns->ethtool.ring.rx_jumbo_max_pending = 4096;
drivers/net/netdevsim/ethtool.c
232
ns->ethtool.ring.rx_mini_max_pending = 4096;
drivers/net/netdevsim/ethtool.c
233
ns->ethtool.ring.tx_pending = 512;
drivers/net/netdevsim/ethtool.c
234
ns->ethtool.ring.tx_max_pending = 4096;
drivers/net/netdevsim/ethtool.c
266
&ns->ethtool.ring.rx_max_pending);
drivers/net/netdevsim/ethtool.c
268
&ns->ethtool.ring.rx_jumbo_max_pending);
drivers/net/netdevsim/ethtool.c
270
&ns->ethtool.ring.rx_mini_max_pending);
drivers/net/netdevsim/ethtool.c
272
&ns->ethtool.ring.tx_max_pending);
drivers/net/netdevsim/ethtool.c
68
struct ethtool_ringparam *ring,
drivers/net/netdevsim/ethtool.c
74
memcpy(ring, &ns->ethtool.ring, sizeof(ns->ethtool.ring));
drivers/net/netdevsim/ethtool.c
82
struct ethtool_ringparam *ring,
drivers/net/netdevsim/ethtool.c
88
ns->ethtool.ring.rx_pending = ring->rx_pending;
drivers/net/netdevsim/ethtool.c
89
ns->ethtool.ring.rx_jumbo_pending = ring->rx_jumbo_pending;
drivers/net/netdevsim/ethtool.c
90
ns->ethtool.ring.rx_mini_pending = ring->rx_mini_pending;
drivers/net/netdevsim/ethtool.c
91
ns->ethtool.ring.tx_pending = ring->tx_pending;
drivers/net/netdevsim/netdevsim.h
91
struct ethtool_ringparam ring;
drivers/net/tap.c
1153
return PTR_RING_PEEK_CALL(&q->ring, __skb_array_len_with_tag);
drivers/net/tap.c
1188
return &q->ring;
drivers/net/tap.c
1205
rings[i++] = &q->ring;
drivers/net/tap.c
290
if (ptr_ring_produce(&q->ring, skb)) {
drivers/net/tap.c
300
if (ptr_ring_produce(&q->ring, skb)) {
drivers/net/tap.c
319
if (ptr_ring_produce(&q->ring, skb)) {
drivers/net/tap.c
443
ptr_ring_cleanup(&q->ring, __skb_array_destroy_skb);
drivers/net/tap.c
463
if (ptr_ring_init(&q->ring, tap->dev->tx_queue_len, GFP_KERNEL)) {
drivers/net/tap.c
531
if (!ptr_ring_empty(&q->ring))
drivers/net/tap.c
777
skb = ptr_ring_consume(&q->ring);
drivers/net/thunderbolt/main.c
1219
tb_ring_tx(net->tx_ring.ring, &frames[i]->frame);
drivers/net/thunderbolt/main.c
146
struct tb_ring *ring;
drivers/net/thunderbolt/main.c
335
static void tbnet_free_buffers(struct tbnet_ring *ring)
drivers/net/thunderbolt/main.c
340
struct device *dma_dev = tb_ring_dma_device(ring->ring);
drivers/net/thunderbolt/main.c
341
struct tbnet_frame *tf = &ring->frames[i];
drivers/net/thunderbolt/main.c
349
if (ring->ring->is_tx) {
drivers/net/thunderbolt/main.c
369
ring->cons = 0;
drivers/net/thunderbolt/main.c
370
ring->prod = 0;
drivers/net/thunderbolt/main.c
393
tb_ring_stop(net->rx_ring.ring);
drivers/net/thunderbolt/main.c
394
tb_ring_stop(net->tx_ring.ring);
drivers/net/thunderbolt/main.c
400
net->tx_ring.ring->hop,
drivers/net/thunderbolt/main.c
402
net->rx_ring.ring->hop);
drivers/net/thunderbolt/main.c
495
static unsigned int tbnet_available_buffers(const struct tbnet_ring *ring)
drivers/net/thunderbolt/main.c
497
return ring->prod - ring->cons;
drivers/net/thunderbolt/main.c
502
struct tbnet_ring *ring = &net->rx_ring;
drivers/net/thunderbolt/main.c
506
struct device *dma_dev = tb_ring_dma_device(ring->ring);
drivers/net/thunderbolt/main.c
507
unsigned int index = ring->prod & (TBNET_RING_SIZE - 1);
drivers/net/thunderbolt/main.c
508
struct tbnet_frame *tf = &ring->frames[index];
drivers/net/thunderbolt/main.c
537
tb_ring_rx(ring->ring, &tf->frame);
drivers/net/thunderbolt/main.c
539
ring->prod++;
drivers/net/thunderbolt/main.c
545
tbnet_free_buffers(ring);
drivers/net/thunderbolt/main.c
551
struct tbnet_ring *ring = &net->tx_ring;
drivers/net/thunderbolt/main.c
552
struct device *dma_dev = tb_ring_dma_device(ring->ring);
drivers/net/thunderbolt/main.c
556
if (!tbnet_available_buffers(ring))
drivers/net/thunderbolt/main.c
559
index = ring->cons++ & (TBNET_RING_SIZE - 1);
drivers/net/thunderbolt/main.c
561
tf = &ring->frames[index];
drivers/net/thunderbolt/main.c
570
static void tbnet_tx_callback(struct tb_ring *ring, struct ring_frame *frame,
drivers/net/thunderbolt/main.c
585
struct tbnet_ring *ring = &net->tx_ring;
drivers/net/thunderbolt/main.c
586
struct device *dma_dev = tb_ring_dma_device(ring->ring);
drivers/net/thunderbolt/main.c
590
struct tbnet_frame *tf = &ring->frames[i];
drivers/net/thunderbolt/main.c
595
tbnet_free_buffers(ring);
drivers/net/thunderbolt/main.c
604
tbnet_free_buffers(ring);
drivers/net/thunderbolt/main.c
617
ring->cons = 0;
drivers/net/thunderbolt/main.c
618
ring->prod = TBNET_RING_SIZE - 1;
drivers/net/thunderbolt/main.c
654
tb_ring_start(net->tx_ring.ring);
drivers/net/thunderbolt/main.c
655
tb_ring_start(net->rx_ring.ring);
drivers/net/thunderbolt/main.c
666
net->tx_ring.ring->hop,
drivers/net/thunderbolt/main.c
668
net->rx_ring.ring->hop);
drivers/net/thunderbolt/main.c
685
tb_ring_stop(net->rx_ring.ring);
drivers/net/thunderbolt/main.c
686
tb_ring_stop(net->tx_ring.ring);
drivers/net/thunderbolt/main.c
807
struct device *dma_dev = tb_ring_dma_device(net->rx_ring.ring);
drivers/net/thunderbolt/main.c
829
frame = tb_ring_poll(net->rx_ring.ring);
drivers/net/thunderbolt/main.c
905
tb_ring_poll_complete(net->rx_ring.ring);
drivers/net/thunderbolt/main.c
922
struct tb_ring *ring;
drivers/net/thunderbolt/main.c
933
ring = tb_ring_alloc_tx(xd->tb->nhi, -1, TBNET_RING_SIZE, flags);
drivers/net/thunderbolt/main.c
934
if (!ring) {
drivers/net/thunderbolt/main.c
938
net->tx_ring.ring = ring;
drivers/net/thunderbolt/main.c
943
tb_ring_free(net->tx_ring.ring);
drivers/net/thunderbolt/main.c
944
net->tx_ring.ring = NULL;
drivers/net/thunderbolt/main.c
952
ring = tb_ring_alloc_rx(xd->tb->nhi, -1, TBNET_RING_SIZE, flags,
drivers/net/thunderbolt/main.c
953
net->tx_ring.ring->hop, sof_mask,
drivers/net/thunderbolt/main.c
955
if (!ring) {
drivers/net/thunderbolt/main.c
958
tb_ring_free(net->tx_ring.ring);
drivers/net/thunderbolt/main.c
959
net->tx_ring.ring = NULL;
drivers/net/thunderbolt/main.c
962
net->rx_ring.ring = ring;
drivers/net/thunderbolt/main.c
979
tb_ring_free(net->rx_ring.ring);
drivers/net/thunderbolt/main.c
980
net->rx_ring.ring = NULL;
drivers/net/thunderbolt/main.c
983
tb_ring_free(net->tx_ring.ring);
drivers/net/thunderbolt/main.c
984
net->tx_ring.ring = NULL;
drivers/net/thunderbolt/main.c
993
struct device *dma_dev = tb_ring_dma_device(net->tx_ring.ring);
drivers/net/usb/r8152.c
9178
struct ethtool_ringparam *ring,
drivers/net/usb/r8152.c
9184
ring->rx_max_pending = RTL8152_RX_MAX_PENDING;
drivers/net/usb/r8152.c
9185
ring->rx_pending = tp->rx_pending;
drivers/net/usb/r8152.c
9189
struct ethtool_ringparam *ring,
drivers/net/usb/r8152.c
9195
if (ring->rx_pending < (RTL8152_MAX_RX * 2))
drivers/net/usb/r8152.c
9198
if (tp->rx_pending != ring->rx_pending) {
drivers/net/usb/r8152.c
9202
tp->rx_pending = ring->rx_pending;
drivers/net/usb/r8152.c
9206
tp->rx_pending = ring->rx_pending;
drivers/net/virtio_net.c
4092
struct ethtool_ringparam *ring,
drivers/net/virtio_net.c
4098
ring->rx_max_pending = vi->rq[0].vq->num_max;
drivers/net/virtio_net.c
4099
ring->tx_max_pending = vi->sq[0].vq->num_max;
drivers/net/virtio_net.c
4100
ring->rx_pending = virtqueue_get_vring_size(vi->rq[0].vq);
drivers/net/virtio_net.c
4101
ring->tx_pending = virtqueue_get_vring_size(vi->sq[0].vq);
drivers/net/virtio_net.c
4105
struct ethtool_ringparam *ring,
drivers/net/virtio_net.c
4115
if (ring->rx_mini_pending || ring->rx_jumbo_pending)
drivers/net/virtio_net.c
4121
if (ring->rx_pending == rx_pending &&
drivers/net/virtio_net.c
4122
ring->tx_pending == tx_pending)
drivers/net/virtio_net.c
4125
if (ring->rx_pending > vi->rq[0].vq->num_max)
drivers/net/virtio_net.c
4128
if (ring->tx_pending > vi->sq[0].vq->num_max)
drivers/net/virtio_net.c
4135
if (ring->tx_pending != tx_pending) {
drivers/net/virtio_net.c
4136
err = virtnet_tx_resize(vi, sq, ring->tx_pending);
drivers/net/virtio_net.c
4156
if (ring->rx_pending != rx_pending) {
drivers/net/virtio_net.c
4157
err = virtnet_rx_resize(vi, rq, ring->rx_pending);
drivers/net/vmxnet3/vmxnet3_drv.c
1629
struct vmxnet3_cmd_ring *ring = NULL;
drivers/net/vmxnet3/vmxnet3_drv.c
1646
ring = rq->rx_ring + ring_idx;
drivers/net/vmxnet3/vmxnet3_drv.c
1953
ring = rq->rx_ring + ring_idx;
drivers/net/vmxnet3/vmxnet3_drv.c
1956
comp_offset = vmxnet3_cmd_ring_desc_avail(ring);
drivers/net/vmxnet3/vmxnet3_drv.c
1957
fill_offset = (idx > ring->next2fill ? 0 : ring->size) +
drivers/net/vmxnet3/vmxnet3_drv.c
1958
idx - ring->next2fill - 1;
drivers/net/vmxnet3/vmxnet3_drv.c
1959
if (!ring->isOutOfOrder || fill_offset >= comp_offset)
drivers/net/vmxnet3/vmxnet3_drv.c
1960
ring->next2comp = idx;
drivers/net/vmxnet3/vmxnet3_drv.c
1961
num_to_alloc = vmxnet3_cmd_ring_desc_avail(ring);
drivers/net/vmxnet3/vmxnet3_drv.c
1969
rbi = rq->buf_info[ring_idx] + ring->next2fill;
drivers/net/vmxnet3/vmxnet3_drv.c
1979
vmxnet3_getRxDesc(rxd, &ring->base[ring->next2fill].rxd,
drivers/net/vmxnet3/vmxnet3_drv.c
1984
rxd->gen = ring->gen;
drivers/net/vmxnet3/vmxnet3_drv.c
1985
vmxnet3_cmd_ring_adv_next2fill(ring);
drivers/net/vmxnet3/vmxnet3_drv.c
1990
ring->isOutOfOrder = 1;
drivers/net/vmxnet3/vmxnet3_drv.c
1996
ring->isOutOfOrder = 0;
drivers/net/vmxnet3/vmxnet3_drv.c
2000
if (unlikely(rq->shared->updateRxProd) && (ring->next2fill & 0xf) == 0) {
drivers/net/vmxnet3/vmxnet3_drv.c
2003
ring->next2fill);
drivers/net/vmxnet3/vmxnet3_drv.c
678
struct vmxnet3_cmd_ring *ring = &rq->rx_ring[ring_idx];
drivers/net/vmxnet3/vmxnet3_drv.c
685
rbi = rbi_base + ring->next2fill;
drivers/net/vmxnet3/vmxnet3_drv.c
686
gd = ring->base + ring->next2fill;
drivers/net/vmxnet3/vmxnet3_drv.c
752
gd->dword[2] = cpu_to_le32((!ring->gen << VMXNET3_RXD_GEN_SHIFT)
drivers/net/vmxnet3/vmxnet3_drv.c
762
gd->dword[2] |= cpu_to_le32(ring->gen << VMXNET3_RXD_GEN_SHIFT);
drivers/net/vmxnet3/vmxnet3_drv.c
764
vmxnet3_cmd_ring_adv_next2fill(ring);
drivers/net/vmxnet3/vmxnet3_drv.c
769
num_allocated, ring->next2fill, ring->next2comp);
drivers/net/vmxnet3/vmxnet3_drv.c
772
BUG_ON(num_allocated != 0 && ring->next2fill == ring->next2comp);
drivers/net/vmxnet3/vmxnet3_int.h
149
vmxnet3_cmd_ring_adv_next2fill(struct vmxnet3_cmd_ring *ring)
drivers/net/vmxnet3/vmxnet3_int.h
151
ring->next2fill++;
drivers/net/vmxnet3/vmxnet3_int.h
152
if (unlikely(ring->next2fill == ring->size)) {
drivers/net/vmxnet3/vmxnet3_int.h
153
ring->next2fill = 0;
drivers/net/vmxnet3/vmxnet3_int.h
154
VMXNET3_FLIP_RING_GEN(ring->gen);
drivers/net/vmxnet3/vmxnet3_int.h
159
vmxnet3_cmd_ring_adv_next2comp(struct vmxnet3_cmd_ring *ring)
drivers/net/vmxnet3/vmxnet3_int.h
161
VMXNET3_INC_RING_IDX_ONLY(ring->next2comp, ring->size);
drivers/net/vmxnet3/vmxnet3_int.h
165
vmxnet3_cmd_ring_desc_avail(struct vmxnet3_cmd_ring *ring)
drivers/net/vmxnet3/vmxnet3_int.h
167
return (ring->next2comp > ring->next2fill ? 0 : ring->size) +
drivers/net/vmxnet3/vmxnet3_int.h
168
ring->next2comp - ring->next2fill - 1;
drivers/net/vmxnet3/vmxnet3_int.h
181
vmxnet3_comp_ring_adv_next2proc(struct vmxnet3_comp_ring *ring)
drivers/net/vmxnet3/vmxnet3_int.h
183
ring->next2proc++;
drivers/net/vmxnet3/vmxnet3_int.h
184
if (unlikely(ring->next2proc == ring->size)) {
drivers/net/vmxnet3/vmxnet3_int.h
185
ring->next2proc = 0;
drivers/net/vmxnet3/vmxnet3_int.h
186
VMXNET3_FLIP_RING_GEN(ring->gen);
drivers/net/wireguard/device.c
130
while ((skb = ptr_ring_consume(&wg->handshake_queue.ring)) != NULL)
drivers/net/wireguard/device.h
29
struct ptr_ring ring;
drivers/net/wireguard/queueing.c
32
ret = ptr_ring_init(&queue->ring, len, GFP_KERNEL);
drivers/net/wireguard/queueing.c
37
ptr_ring_cleanup(&queue->ring, NULL);
drivers/net/wireguard/queueing.c
46
WARN_ON(!purge && !__ptr_ring_empty(&queue->ring));
drivers/net/wireguard/queueing.c
47
ptr_ring_cleanup(&queue->ring, purge ? __skb_array_destroy_skb : NULL);
drivers/net/wireguard/queueing.h
169
if (unlikely(ptr_ring_produce_bh(&device_queue->ring, skb)))
drivers/net/wireguard/receive.c
212
while ((skb = ptr_ring_consume_bh(&queue->ring)) != NULL) {
drivers/net/wireguard/receive.c
499
while ((skb = ptr_ring_consume_bh(&queue->ring)) != NULL) {
drivers/net/wireguard/receive.c
555
if (spin_trylock_bh(&wg->handshake_queue.ring.producer_lock)) {
drivers/net/wireguard/receive.c
556
ret = __ptr_ring_produce(&wg->handshake_queue.ring, skb);
drivers/net/wireguard/receive.c
557
spin_unlock_bh(&wg->handshake_queue.ring.producer_lock);
drivers/net/wireguard/receive.c
560
ret = ptr_ring_produce_bh(&wg->handshake_queue.ring, skb);
drivers/net/wireguard/send.c
293
while ((first = ptr_ring_consume_bh(&queue->ring)) != NULL) {
drivers/net/wireless/ath/ath10k/htt_tx.c
819
struct htt_rx_ring_setup_ring32 *ring;
drivers/net/wireless/ath/ath10k/htt_tx.c
834
+ (sizeof(*ring) * num_rx_ring);
drivers/net/wireless/ath/ath10k/htt_tx.c
842
ring = &cmd->rx_setup_32.rings[0];
drivers/net/wireless/ath/ath10k/htt_tx.c
868
ring->fw_idx_shadow_reg_paddr =
drivers/net/wireless/ath/ath10k/htt_tx.c
870
ring->rx_ring_base_paddr = __cpu_to_le32(htt->rx_ring.base_paddr);
drivers/net/wireless/ath/ath10k/htt_tx.c
871
ring->rx_ring_len = __cpu_to_le16(htt->rx_ring.size);
drivers/net/wireless/ath/ath10k/htt_tx.c
872
ring->rx_ring_bufsize = __cpu_to_le16(HTT_RX_BUF_SIZE);
drivers/net/wireless/ath/ath10k/htt_tx.c
873
ring->flags = __cpu_to_le16(flags);
drivers/net/wireless/ath/ath10k/htt_tx.c
874
ring->fw_idx_init_val = __cpu_to_le16(fw_idx);
drivers/net/wireless/ath/ath10k/htt_tx.c
876
ath10k_htt_fill_rx_desc_offset_32(hw, ring);
drivers/net/wireless/ath/ath10k/htt_tx.c
892
struct htt_rx_ring_setup_ring64 *ring;
drivers/net/wireless/ath/ath10k/htt_tx.c
906
+ (sizeof(*ring) * num_rx_ring);
drivers/net/wireless/ath/ath10k/htt_tx.c
914
ring = &cmd->rx_setup_64.rings[0];
drivers/net/wireless/ath/ath10k/htt_tx.c
939
ring->fw_idx_shadow_reg_paddr = __cpu_to_le64(htt->rx_ring.alloc_idx.paddr);
drivers/net/wireless/ath/ath10k/htt_tx.c
940
ring->rx_ring_base_paddr = __cpu_to_le64(htt->rx_ring.base_paddr);
drivers/net/wireless/ath/ath10k/htt_tx.c
941
ring->rx_ring_len = __cpu_to_le16(htt->rx_ring.size);
drivers/net/wireless/ath/ath10k/htt_tx.c
942
ring->rx_ring_bufsize = __cpu_to_le16(HTT_RX_BUF_SIZE);
drivers/net/wireless/ath/ath10k/htt_tx.c
943
ring->flags = __cpu_to_le16(flags);
drivers/net/wireless/ath/ath10k/htt_tx.c
944
ring->fw_idx_init_val = __cpu_to_le16(fw_idx);
drivers/net/wireless/ath/ath10k/htt_tx.c
946
ath10k_htt_fill_rx_desc_offset_64(hw, ring);
drivers/net/wireless/ath/ath10k/htt_tx.c
961
struct htt_rx_ring_setup_ring32 *ring;
drivers/net/wireless/ath/ath10k/htt_tx.c
975
+ (sizeof(*ring) * num_rx_ring);
drivers/net/wireless/ath/ath10k/htt_tx.c
983
ring = &cmd->rx_setup_32.rings[0];
drivers/net/wireless/ath/ath10k/htt_tx.c
993
memset(ring, 0, sizeof(*ring));
drivers/net/wireless/ath/ath10k/htt_tx.c
994
ring->rx_ring_len = __cpu_to_le16(HTT_RX_RING_SIZE_MIN);
drivers/net/wireless/ath/ath10k/htt_tx.c
995
ring->rx_ring_bufsize = __cpu_to_le16(HTT_RX_BUF_SIZE);
drivers/net/wireless/ath/ath10k/htt_tx.c
996
ring->flags = __cpu_to_le16(flags);
drivers/net/wireless/ath/ath11k/ce.c
277
struct ath11k_ce_ring *ring = pipe->dest_ring;
drivers/net/wireless/ath/ath11k/ce.c
280
unsigned int nentries_mask = ring->nentries_mask;
drivers/net/wireless/ath/ath11k/ce.c
286
write_index = ring->write_index;
drivers/net/wireless/ath/ath11k/ce.c
288
srng = &ab->hal.srng_list[ring->hal_ring_id];
drivers/net/wireless/ath/ath11k/ce.c
307
ring->skb[write_index] = skb;
drivers/net/wireless/ath/ath11k/ce.c
309
ring->write_index = write_index;
drivers/net/wireless/ath/ath11k/ce.c
653
struct ath11k_ce_ring *ring;
drivers/net/wireless/ath/ath11k/ce.c
663
ring = ath11k_ce_alloc_ring(ab, nentries, desc_sz);
drivers/net/wireless/ath/ath11k/ce.c
664
if (IS_ERR(ring))
drivers/net/wireless/ath/ath11k/ce.c
665
return PTR_ERR(ring);
drivers/net/wireless/ath/ath11k/ce.c
666
pipe->src_ring = ring;
drivers/net/wireless/ath/ath11k/ce.c
673
ring = ath11k_ce_alloc_ring(ab, nentries, desc_sz);
drivers/net/wireless/ath/ath11k/ce.c
674
if (IS_ERR(ring))
drivers/net/wireless/ath/ath11k/ce.c
675
return PTR_ERR(ring);
drivers/net/wireless/ath/ath11k/ce.c
676
pipe->dest_ring = ring;
drivers/net/wireless/ath/ath11k/ce.c
679
ring = ath11k_ce_alloc_ring(ab, nentries, desc_sz);
drivers/net/wireless/ath/ath11k/ce.c
680
if (IS_ERR(ring))
drivers/net/wireless/ath/ath11k/ce.c
681
return PTR_ERR(ring);
drivers/net/wireless/ath/ath11k/ce.c
682
pipe->status_ring = ring;
drivers/net/wireless/ath/ath11k/ce.c
803
struct ath11k_ce_ring *ring = pipe->dest_ring;
drivers/net/wireless/ath/ath11k/ce.c
807
if (!(ring && pipe->buf_sz))
drivers/net/wireless/ath/ath11k/ce.c
810
for (i = 0; i < ring->nentries; i++) {
drivers/net/wireless/ath/ath11k/ce.c
811
skb = ring->skb[i];
drivers/net/wireless/ath/ath11k/ce.c
815
ring->skb[i] = NULL;
drivers/net/wireless/ath/ath11k/dbring.c
100
spin_unlock_bh(&ring->idr_lock);
drivers/net/wireless/ath/ath11k/dbring.c
102
dma_unmap_single(ab->dev, paddr, ring->buf_sz,
drivers/net/wireless/ath/ath11k/dbring.c
110
struct ath11k_dbring *ring,
drivers/net/wireless/ath/ath11k/dbring.c
119
srng = &ar->ab->hal.srng_list[ring->refill_srng.ring_id];
drivers/net/wireless/ath/ath11k/dbring.c
124
req_entries = min(num_free, ring->bufs_max);
drivers/net/wireless/ath/ath11k/dbring.c
126
align = ring->buf_align;
drivers/net/wireless/ath/ath11k/dbring.c
127
size = ring->buf_sz + align - 1;
drivers/net/wireless/ath/ath11k/dbring.c
139
ret = ath11k_dbring_bufs_replenish(ar, ring, buff, id);
drivers/net/wireless/ath/ath11k/dbring.c
156
struct ath11k_dbring *ring,
drivers/net/wireless/ath/ath11k/dbring.c
166
param.base_paddr_lo = lower_32_bits(ring->refill_srng.paddr);
drivers/net/wireless/ath/ath11k/dbring.c
167
param.base_paddr_hi = upper_32_bits(ring->refill_srng.paddr);
drivers/net/wireless/ath/ath11k/dbring.c
168
param.head_idx_paddr_lo = lower_32_bits(ring->hp_addr);
drivers/net/wireless/ath/ath11k/dbring.c
169
param.head_idx_paddr_hi = upper_32_bits(ring->hp_addr);
drivers/net/wireless/ath/ath11k/dbring.c
170
param.tail_idx_paddr_lo = lower_32_bits(ring->tp_addr);
drivers/net/wireless/ath/ath11k/dbring.c
171
param.tail_idx_paddr_hi = upper_32_bits(ring->tp_addr);
drivers/net/wireless/ath/ath11k/dbring.c
172
param.num_elems = ring->bufs_max;
drivers/net/wireless/ath/ath11k/dbring.c
173
param.buf_size = ring->buf_sz;
drivers/net/wireless/ath/ath11k/dbring.c
174
param.num_resp_per_event = ring->num_resp_per_event;
drivers/net/wireless/ath/ath11k/dbring.c
175
param.event_timeout_ms = ring->event_timeout_ms;
drivers/net/wireless/ath/ath11k/dbring.c
188
param.pdev_id = DP_SW2HW_MACID(ring->pdev_id);
drivers/net/wireless/ath/ath11k/dbring.c
199
int ath11k_dbring_set_cfg(struct ath11k *ar, struct ath11k_dbring *ring,
drivers/net/wireless/ath/ath11k/dbring.c
204
if (WARN_ON(!ring))
drivers/net/wireless/ath/ath11k/dbring.c
207
ring->num_resp_per_event = num_resp_per_event;
drivers/net/wireless/ath/ath11k/dbring.c
208
ring->event_timeout_ms = event_timeout_ms;
drivers/net/wireless/ath/ath11k/dbring.c
209
ring->handler = handler;
drivers/net/wireless/ath/ath11k/dbring.c
215
struct ath11k_dbring *ring,
drivers/net/wireless/ath/ath11k/dbring.c
222
srng = &ab->hal.srng_list[ring->refill_srng.ring_id];
drivers/net/wireless/ath/ath11k/dbring.c
223
ring->bufs_max = ring->refill_srng.size /
drivers/net/wireless/ath/ath11k/dbring.c
226
ring->buf_sz = db_cap->min_buf_sz;
drivers/net/wireless/ath/ath11k/dbring.c
227
ring->buf_align = db_cap->min_buf_align;
drivers/net/wireless/ath/ath11k/dbring.c
228
ring->pdev_id = db_cap->pdev_id;
drivers/net/wireless/ath/ath11k/dbring.c
229
ring->hp_addr = ath11k_hal_srng_get_hp_addr(ar->ab, srng);
drivers/net/wireless/ath/ath11k/dbring.c
230
ring->tp_addr = ath11k_hal_srng_get_tp_addr(ar->ab, srng);
drivers/net/wireless/ath/ath11k/dbring.c
232
ret = ath11k_dbring_fill_bufs(ar, ring, db_cap->id);
drivers/net/wireless/ath/ath11k/dbring.c
237
int ath11k_dbring_srng_setup(struct ath11k *ar, struct ath11k_dbring *ring,
drivers/net/wireless/ath/ath11k/dbring.c
242
ret = ath11k_dp_srng_setup(ar->ab, &ring->refill_srng, HAL_RXDMA_DIR_BUF,
drivers/net/wireless/ath/ath11k/dbring.c
252
ath11k_dp_srng_cleanup(ar->ab, &ring->refill_srng);
drivers/net/wireless/ath/ath11k/dbring.c
284
struct ath11k_dbring *ring;
drivers/net/wireless/ath/ath11k/dbring.c
330
ring = ath11k_spectral_get_dbring(ar);
drivers/net/wireless/ath/ath11k/dbring.c
333
ring = ath11k_cfr_get_dbring(ar);
drivers/net/wireless/ath/ath11k/dbring.c
336
ring = NULL;
drivers/net/wireless/ath/ath11k/dbring.c
342
if (!ring) {
drivers/net/wireless/ath/ath11k/dbring.c
347
srng = &ab->hal.srng_list[ring->refill_srng.ring_id];
drivers/net/wireless/ath/ath11k/dbring.c
349
size = ring->buf_sz + ring->buf_align - 1;
drivers/net/wireless/ath/ath11k/dbring.c
365
spin_lock_bh(&ring->idr_lock);
drivers/net/wireless/ath/ath11k/dbring.c
366
buff = idr_find(&ring->bufs_idr, buf_id);
drivers/net/wireless/ath/ath11k/dbring.c
368
spin_unlock_bh(&ring->idr_lock);
drivers/net/wireless/ath/ath11k/dbring.c
371
idr_remove(&ring->bufs_idr, buf_id);
drivers/net/wireless/ath/ath11k/dbring.c
372
spin_unlock_bh(&ring->idr_lock);
drivers/net/wireless/ath/ath11k/dbring.c
374
dma_unmap_single(ab->dev, buff->paddr, ring->buf_sz,
drivers/net/wireless/ath/ath11k/dbring.c
380
if (ring->handler) {
drivers/net/wireless/ath/ath11k/dbring.c
383
ring->buf_align);
drivers/net/wireless/ath/ath11k/dbring.c
384
handler_data.data_sz = ring->buf_sz;
drivers/net/wireless/ath/ath11k/dbring.c
388
status = ring->handler(ar, &handler_data);
drivers/net/wireless/ath/ath11k/dbring.c
395
ath11k_dbring_bufs_replenish(ar, ring, buff, module_id);
drivers/net/wireless/ath/ath11k/dbring.c
40
struct ath11k_dbring *ring,
drivers/net/wireless/ath/ath11k/dbring.c
406
void ath11k_dbring_srng_cleanup(struct ath11k *ar, struct ath11k_dbring *ring)
drivers/net/wireless/ath/ath11k/dbring.c
408
ath11k_dp_srng_cleanup(ar->ab, &ring->refill_srng);
drivers/net/wireless/ath/ath11k/dbring.c
411
void ath11k_dbring_buf_cleanup(struct ath11k *ar, struct ath11k_dbring *ring)
drivers/net/wireless/ath/ath11k/dbring.c
416
spin_lock_bh(&ring->idr_lock);
drivers/net/wireless/ath/ath11k/dbring.c
417
idr_for_each_entry(&ring->bufs_idr, buff, buf_id) {
drivers/net/wireless/ath/ath11k/dbring.c
418
idr_remove(&ring->bufs_idr, buf_id);
drivers/net/wireless/ath/ath11k/dbring.c
420
ring->buf_sz, DMA_FROM_DEVICE);
drivers/net/wireless/ath/ath11k/dbring.c
425
idr_destroy(&ring->bufs_idr);
drivers/net/wireless/ath/ath11k/dbring.c
426
spin_unlock_bh(&ring->idr_lock);
drivers/net/wireless/ath/ath11k/dbring.c
52
srng = &ab->hal.srng_list[ring->refill_srng.ring_id];
drivers/net/wireless/ath/ath11k/dbring.c
59
ptr_aligned = PTR_ALIGN(ptr_unaligned, ring->buf_align);
drivers/net/wireless/ath/ath11k/dbring.c
60
ath11k_dbring_fill_magic_value(ar, ptr_aligned, ring->buf_sz);
drivers/net/wireless/ath/ath11k/dbring.c
61
paddr = dma_map_single(ab->dev, ptr_aligned, ring->buf_sz,
drivers/net/wireless/ath/ath11k/dbring.c
68
spin_lock_bh(&ring->idr_lock);
drivers/net/wireless/ath/ath11k/dbring.c
69
buf_id = idr_alloc(&ring->bufs_idr, buff, 0, ring->bufs_max, GFP_ATOMIC);
drivers/net/wireless/ath/ath11k/dbring.c
70
spin_unlock_bh(&ring->idr_lock);
drivers/net/wireless/ath/ath11k/dbring.c
98
spin_lock_bh(&ring->idr_lock);
drivers/net/wireless/ath/ath11k/dbring.c
99
idr_remove(&ring->bufs_idr, buf_id);
drivers/net/wireless/ath/ath11k/dbring.h
61
struct ath11k_dbring *ring,
drivers/net/wireless/ath/ath11k/dbring.h
67
struct ath11k_dbring *ring,
drivers/net/wireless/ath/ath11k/dbring.h
71
struct ath11k_dbring *ring,
drivers/net/wireless/ath/ath11k/dbring.h
74
struct ath11k_dbring *ring,
drivers/net/wireless/ath/ath11k/dbring.h
76
int ath11k_dbring_srng_setup(struct ath11k *ar, struct ath11k_dbring *ring,
drivers/net/wireless/ath/ath11k/dbring.h
84
void ath11k_dbring_srng_cleanup(struct ath11k *ar, struct ath11k_dbring *ring);
drivers/net/wireless/ath/ath11k/dbring.h
85
void ath11k_dbring_buf_cleanup(struct ath11k *ar, struct ath11k_dbring *ring);
drivers/net/wireless/ath/ath11k/dp.c
104
void ath11k_dp_srng_cleanup(struct ath11k_base *ab, struct dp_srng *ring)
drivers/net/wireless/ath/ath11k/dp.c
106
if (!ring->vaddr_unaligned)
drivers/net/wireless/ath/ath11k/dp.c
109
if (ring->cached)
drivers/net/wireless/ath/ath11k/dp.c
110
dma_free_noncoherent(ab->dev, ring->size, ring->vaddr_unaligned,
drivers/net/wireless/ath/ath11k/dp.c
111
ring->paddr_unaligned, DMA_FROM_DEVICE);
drivers/net/wireless/ath/ath11k/dp.c
113
dma_free_coherent(ab->dev, ring->size, ring->vaddr_unaligned,
drivers/net/wireless/ath/ath11k/dp.c
114
ring->paddr_unaligned);
drivers/net/wireless/ath/ath11k/dp.c
116
ring->vaddr_unaligned = NULL;
drivers/net/wireless/ath/ath11k/dp.c
224
int ath11k_dp_srng_setup(struct ath11k_base *ab, struct dp_srng *ring,
drivers/net/wireless/ath/ath11k/dp.c
240
ring->size = (num_entries * entry_sz) + HAL_RING_BASE_ALIGN - 1;
drivers/net/wireless/ath/ath11k/dp.c
255
ring->vaddr_unaligned = dma_alloc_noncoherent(ab->dev, ring->size,
drivers/net/wireless/ath/ath11k/dp.c
256
&ring->paddr_unaligned,
drivers/net/wireless/ath/ath11k/dp.c
260
ring->vaddr_unaligned = dma_alloc_coherent(ab->dev, ring->size,
drivers/net/wireless/ath/ath11k/dp.c
261
&ring->paddr_unaligned,
drivers/net/wireless/ath/ath11k/dp.c
264
if (!ring->vaddr_unaligned)
drivers/net/wireless/ath/ath11k/dp.c
267
ring->vaddr = PTR_ALIGN(ring->vaddr_unaligned, HAL_RING_BASE_ALIGN);
drivers/net/wireless/ath/ath11k/dp.c
268
ring->paddr = ring->paddr_unaligned + ((unsigned long)ring->vaddr -
drivers/net/wireless/ath/ath11k/dp.c
269
(unsigned long)ring->vaddr_unaligned);
drivers/net/wireless/ath/ath11k/dp.c
271
params.ring_base_vaddr = ring->vaddr;
drivers/net/wireless/ath/ath11k/dp.c
272
params.ring_base_paddr = ring->paddr;
drivers/net/wireless/ath/ath11k/dp.c
325
ring->cached = 1;
drivers/net/wireless/ath/ath11k/dp.c
335
ring->ring_id = ret;
drivers/net/wireless/ath/ath11k/dp.c
639
u32 ring_type, struct dp_srng *ring)
drivers/net/wireless/ath/ath11k/dp.c
644
ath11k_dp_srng_cleanup(ab, ring);
drivers/net/wireless/ath/ath11k/dp.h
1672
void ath11k_dp_srng_cleanup(struct ath11k_base *ab, struct dp_srng *ring);
drivers/net/wireless/ath/ath11k/dp.h
1673
int ath11k_dp_srng_setup(struct ath11k_base *ab, struct dp_srng *ring,
drivers/net/wireless/ath/ath11k/dp.h
1678
u32 ring_type, struct dp_srng *ring);
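
The ath11k_dp_srng_setup() lines above over-allocate the descriptor ring by HAL_RING_BASE_ALIGN - 1 bytes, align the CPU virtual address up to that boundary, and shift the DMA address by the same delta so both views point at the aligned base. A minimal user-space sketch of that alignment arithmetic, with plain malloc standing in for dma_alloc_coherent and a hypothetical RING_BASE_ALIGN constant (not the driver's value):

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define RING_BASE_ALIGN 8	/* stand-in for HAL_RING_BASE_ALIGN */

int main(void)
{
	size_t num_entries = 256, entry_sz = 32;
	/* over-allocate so an aligned base always fits */
	size_t size = num_entries * entry_sz + RING_BASE_ALIGN - 1;
	void *vaddr_unaligned = malloc(size);
	uint64_t paddr_unaligned = 0x1000ULL;	/* pretend bus address of the block */

	if (!vaddr_unaligned)
		return 1;

	/* round the virtual base up to the alignment boundary ... */
	uintptr_t va = (uintptr_t)vaddr_unaligned;
	uintptr_t vaddr = (va + RING_BASE_ALIGN - 1) & ~(uintptr_t)(RING_BASE_ALIGN - 1);
	/* ... and shift the device-visible address by the same offset */
	uint64_t paddr = paddr_unaligned + (vaddr - va);

	printf("offset applied to both addresses: %lu (paddr 0x%llx)\n",
	       (unsigned long)(vaddr - va), (unsigned long long)paddr);
	free(vaddr_unaligned);
	return 0;
}
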
drivers/net/wireless/ath/ath12k/ce.c
15
struct ath12k_ce_ring *ring = pipe->dest_ring;
drivers/net/wireless/ath/ath12k/ce.c
18
unsigned int nentries_mask = ring->nentries_mask;
drivers/net/wireless/ath/ath12k/ce.c
24
write_index = ring->write_index;
drivers/net/wireless/ath/ath12k/ce.c
26
srng = &ab->hal.srng_list[ring->hal_ring_id];
drivers/net/wireless/ath/ath12k/ce.c
371
struct ath12k_ce_ring *ring;
drivers/net/wireless/ath/ath12k/ce.c
381
ring = ath12k_ce_alloc_ring(ab, nentries, desc_sz);
drivers/net/wireless/ath/ath12k/ce.c
382
if (IS_ERR(ring))
drivers/net/wireless/ath/ath12k/ce.c
383
return PTR_ERR(ring);
drivers/net/wireless/ath/ath12k/ce.c
384
pipe->src_ring = ring;
drivers/net/wireless/ath/ath12k/ce.c
391
ring = ath12k_ce_alloc_ring(ab, nentries, desc_sz);
drivers/net/wireless/ath/ath12k/ce.c
392
if (IS_ERR(ring))
drivers/net/wireless/ath/ath12k/ce.c
393
return PTR_ERR(ring);
drivers/net/wireless/ath/ath12k/ce.c
394
pipe->dest_ring = ring;
drivers/net/wireless/ath/ath12k/ce.c
397
ring = ath12k_ce_alloc_ring(ab, nentries, desc_sz);
drivers/net/wireless/ath/ath12k/ce.c
398
if (IS_ERR(ring))
drivers/net/wireless/ath/ath12k/ce.c
399
return PTR_ERR(ring);
drivers/net/wireless/ath/ath12k/ce.c
400
pipe->status_ring = ring;
drivers/net/wireless/ath/ath12k/ce.c
45
ring->skb[write_index] = skb;
drivers/net/wireless/ath/ath12k/ce.c
47
ring->write_index = write_index;
drivers/net/wireless/ath/ath12k/ce.c
509
struct ath12k_ce_ring *ring = pipe->dest_ring;
drivers/net/wireless/ath/ath12k/ce.c
513
if (!(ring && pipe->buf_sz))
drivers/net/wireless/ath/ath12k/ce.c
516
for (i = 0; i < ring->nentries; i++) {
drivers/net/wireless/ath/ath12k/ce.c
517
skb = ring->skb[i];
drivers/net/wireless/ath/ath12k/ce.c
521
ring->skb[i] = NULL;
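
The ath12k CE lines above keep a per-ring write_index together with a nentries_mask; for a power-of-two ring the index wraps with a bitwise AND instead of a modulo. A small self-contained sketch of that increment — the mask relation (nentries - 1) and the helper name are illustrative assumptions, not quoted from the driver:

#include <assert.h>
#include <stdio.h>

/* for a power-of-two ring, mask = nentries - 1 */
static unsigned int ring_idx_incr(unsigned int nentries_mask, unsigned int idx)
{
	return (idx + 1) & nentries_mask;
}

int main(void)
{
	unsigned int nentries = 8, mask = nentries - 1;
	unsigned int write_index = 6;

	assert((nentries & mask) == 0);		/* nentries must be a power of two */
	write_index = ring_idx_incr(mask, write_index);	/* 7 */
	write_index = ring_idx_incr(mask, write_index);	/* wraps to 0 */
	printf("write_index = %u\n", write_index);
	return 0;
}
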
drivers/net/wireless/ath/ath12k/dbring.c
102
ret = ath12k_dbring_bufs_replenish(ar, ring, buff, gfp);
drivers/net/wireless/ath/ath12k/dbring.c
118
struct ath12k_dbring *ring,
drivers/net/wireless/ath/ath12k/dbring.c
12
struct ath12k_dbring *ring,
drivers/net/wireless/ath/ath12k/dbring.c
127
arg.pdev_id = DP_SW2HW_MACID(ring->pdev_id);
drivers/net/wireless/ath/ath12k/dbring.c
129
arg.base_paddr_lo = lower_32_bits(ring->refill_srng.paddr);
drivers/net/wireless/ath/ath12k/dbring.c
130
arg.base_paddr_hi = upper_32_bits(ring->refill_srng.paddr);
drivers/net/wireless/ath/ath12k/dbring.c
131
arg.head_idx_paddr_lo = lower_32_bits(ring->hp_addr);
drivers/net/wireless/ath/ath12k/dbring.c
132
arg.head_idx_paddr_hi = upper_32_bits(ring->hp_addr);
drivers/net/wireless/ath/ath12k/dbring.c
133
arg.tail_idx_paddr_lo = lower_32_bits(ring->tp_addr);
drivers/net/wireless/ath/ath12k/dbring.c
134
arg.tail_idx_paddr_hi = upper_32_bits(ring->tp_addr);
drivers/net/wireless/ath/ath12k/dbring.c
135
arg.num_elems = ring->bufs_max;
drivers/net/wireless/ath/ath12k/dbring.c
136
arg.buf_size = ring->buf_sz;
drivers/net/wireless/ath/ath12k/dbring.c
137
arg.num_resp_per_event = ring->num_resp_per_event;
drivers/net/wireless/ath/ath12k/dbring.c
138
arg.event_timeout_ms = ring->event_timeout_ms;
drivers/net/wireless/ath/ath12k/dbring.c
149
int ath12k_dbring_set_cfg(struct ath12k *ar, struct ath12k_dbring *ring,
drivers/net/wireless/ath/ath12k/dbring.c
154
if (WARN_ON(!ring))
drivers/net/wireless/ath/ath12k/dbring.c
157
ring->num_resp_per_event = num_resp_per_event;
drivers/net/wireless/ath/ath12k/dbring.c
158
ring->event_timeout_ms = event_timeout_ms;
drivers/net/wireless/ath/ath12k/dbring.c
159
ring->handler = handler;
drivers/net/wireless/ath/ath12k/dbring.c
165
struct ath12k_dbring *ring,
drivers/net/wireless/ath/ath12k/dbring.c
172
srng = &ab->hal.srng_list[ring->refill_srng.ring_id];
drivers/net/wireless/ath/ath12k/dbring.c
173
ring->bufs_max = ring->refill_srng.size /
drivers/net/wireless/ath/ath12k/dbring.c
176
ring->buf_sz = db_cap->min_buf_sz;
drivers/net/wireless/ath/ath12k/dbring.c
177
ring->buf_align = db_cap->min_buf_align;
drivers/net/wireless/ath/ath12k/dbring.c
178
ring->pdev_id = db_cap->pdev_id;
drivers/net/wireless/ath/ath12k/dbring.c
179
ring->hp_addr = ath12k_hal_srng_get_hp_addr(ab, srng);
drivers/net/wireless/ath/ath12k/dbring.c
180
ring->tp_addr = ath12k_hal_srng_get_tp_addr(ab, srng);
drivers/net/wireless/ath/ath12k/dbring.c
182
ret = ath12k_dbring_fill_bufs(ar, ring, GFP_KERNEL);
drivers/net/wireless/ath/ath12k/dbring.c
187
int ath12k_dbring_srng_setup(struct ath12k *ar, struct ath12k_dbring *ring,
drivers/net/wireless/ath/ath12k/dbring.c
192
ret = ath12k_dp_srng_setup(ar->ab, &ring->refill_srng, HAL_RXDMA_DIR_BUF,
drivers/net/wireless/ath/ath12k/dbring.c
202
ath12k_dp_srng_cleanup(ar->ab, &ring->refill_srng);
drivers/net/wireless/ath/ath12k/dbring.c
234
struct ath12k_dbring *ring = NULL;
drivers/net/wireless/ath/ath12k/dbring.c
24
srng = &ab->hal.srng_list[ring->refill_srng.ring_id];
drivers/net/wireless/ath/ath12k/dbring.c
276
ring = NULL;
drivers/net/wireless/ath/ath12k/dbring.c
282
if (!ring) {
drivers/net/wireless/ath/ath12k/dbring.c
287
srng = &ab->hal.srng_list[ring->refill_srng.ring_id];
drivers/net/wireless/ath/ath12k/dbring.c
289
size = sizeof(*buff) + ring->buf_sz + ring->buf_align - 1;
drivers/net/wireless/ath/ath12k/dbring.c
305
spin_lock_bh(&ring->idr_lock);
drivers/net/wireless/ath/ath12k/dbring.c
306
buff = idr_find(&ring->bufs_idr, buf_id);
drivers/net/wireless/ath/ath12k/dbring.c
308
spin_unlock_bh(&ring->idr_lock);
drivers/net/wireless/ath/ath12k/dbring.c
31
ptr_aligned = PTR_ALIGN(ptr_unaligned, ring->buf_align);
drivers/net/wireless/ath/ath12k/dbring.c
311
idr_remove(&ring->bufs_idr, buf_id);
drivers/net/wireless/ath/ath12k/dbring.c
312
spin_unlock_bh(&ring->idr_lock);
drivers/net/wireless/ath/ath12k/dbring.c
314
dma_unmap_single(ab->dev, buff->paddr, ring->buf_sz,
drivers/net/wireless/ath/ath12k/dbring.c
317
if (ring->handler) {
drivers/net/wireless/ath/ath12k/dbring.c
32
paddr = dma_map_single(ab->dev, ptr_aligned, ring->buf_sz,
drivers/net/wireless/ath/ath12k/dbring.c
320
ring->buf_align);
drivers/net/wireless/ath/ath12k/dbring.c
321
handler_data.data_sz = ring->buf_sz;
drivers/net/wireless/ath/ath12k/dbring.c
323
ring->handler(ar, &handler_data);
drivers/net/wireless/ath/ath12k/dbring.c
327
ath12k_dbring_bufs_replenish(ar, ring, buff, GFP_ATOMIC);
drivers/net/wireless/ath/ath12k/dbring.c
338
void ath12k_dbring_srng_cleanup(struct ath12k *ar, struct ath12k_dbring *ring)
drivers/net/wireless/ath/ath12k/dbring.c
340
ath12k_dp_srng_cleanup(ar->ab, &ring->refill_srng);
drivers/net/wireless/ath/ath12k/dbring.c
343
void ath12k_dbring_buf_cleanup(struct ath12k *ar, struct ath12k_dbring *ring)
drivers/net/wireless/ath/ath12k/dbring.c
348
spin_lock_bh(&ring->idr_lock);
drivers/net/wireless/ath/ath12k/dbring.c
349
idr_for_each_entry(&ring->bufs_idr, buff, buf_id) {
drivers/net/wireless/ath/ath12k/dbring.c
350
idr_remove(&ring->bufs_idr, buf_id);
drivers/net/wireless/ath/ath12k/dbring.c
352
ring->buf_sz, DMA_FROM_DEVICE);
drivers/net/wireless/ath/ath12k/dbring.c
356
idr_destroy(&ring->bufs_idr);
drivers/net/wireless/ath/ath12k/dbring.c
357
spin_unlock_bh(&ring->idr_lock);
drivers/net/wireless/ath/ath12k/dbring.c
39
spin_lock_bh(&ring->idr_lock);
drivers/net/wireless/ath/ath12k/dbring.c
40
buf_id = idr_alloc(&ring->bufs_idr, buff, 0, ring->bufs_max, gfp);
drivers/net/wireless/ath/ath12k/dbring.c
41
spin_unlock_bh(&ring->idr_lock);
drivers/net/wireless/ath/ath12k/dbring.c
65
spin_lock_bh(&ring->idr_lock);
drivers/net/wireless/ath/ath12k/dbring.c
66
idr_remove(&ring->bufs_idr, buf_id);
drivers/net/wireless/ath/ath12k/dbring.c
67
spin_unlock_bh(&ring->idr_lock);
drivers/net/wireless/ath/ath12k/dbring.c
69
dma_unmap_single(ab->dev, paddr, ring->buf_sz,
drivers/net/wireless/ath/ath12k/dbring.c
77
struct ath12k_dbring *ring,
drivers/net/wireless/ath/ath12k/dbring.c
87
srng = &ab->hal.srng_list[ring->refill_srng.ring_id];
drivers/net/wireless/ath/ath12k/dbring.c
92
req_entries = min(num_free, ring->bufs_max);
drivers/net/wireless/ath/ath12k/dbring.c
94
align = ring->buf_align;
drivers/net/wireless/ath/ath12k/dbring.c
95
size = sizeof(*buff) + ring->buf_sz + align - 1;
drivers/net/wireless/ath/ath12k/dbring.h
59
struct ath12k_dbring *ring,
drivers/net/wireless/ath/ath12k/dbring.h
65
struct ath12k_dbring *ring,
drivers/net/wireless/ath/ath12k/dbring.h
68
struct ath12k_dbring *ring,
drivers/net/wireless/ath/ath12k/dbring.h
70
int ath12k_dbring_srng_setup(struct ath12k *ar, struct ath12k_dbring *ring,
drivers/net/wireless/ath/ath12k/dbring.h
78
void ath12k_dbring_srng_cleanup(struct ath12k *ar, struct ath12k_dbring *ring);
drivers/net/wireless/ath/ath12k/dbring.h
79
void ath12k_dbring_buf_cleanup(struct ath12k *ar, struct ath12k_dbring *ring);
drivers/net/wireless/ath/ath12k/dp.c
107
void ath12k_dp_srng_cleanup(struct ath12k_base *ab, struct dp_srng *ring)
drivers/net/wireless/ath/ath12k/dp.c
109
if (!ring->vaddr_unaligned)
drivers/net/wireless/ath/ath12k/dp.c
112
dma_free_coherent(ab->dev, ring->size, ring->vaddr_unaligned,
drivers/net/wireless/ath/ath12k/dp.c
113
ring->paddr_unaligned);
drivers/net/wireless/ath/ath12k/dp.c
115
ring->vaddr_unaligned = NULL;
drivers/net/wireless/ath/ath12k/dp.c
235
int ath12k_dp_srng_setup(struct ath12k_base *ab, struct dp_srng *ring,
drivers/net/wireless/ath/ath12k/dp.c
250
ring->size = (num_entries * entry_sz) + HAL_RING_BASE_ALIGN - 1;
drivers/net/wireless/ath/ath12k/dp.c
251
ring->vaddr_unaligned = dma_alloc_coherent(ab->dev, ring->size,
drivers/net/wireless/ath/ath12k/dp.c
252
&ring->paddr_unaligned,
drivers/net/wireless/ath/ath12k/dp.c
254
if (!ring->vaddr_unaligned)
drivers/net/wireless/ath/ath12k/dp.c
257
ring->vaddr = PTR_ALIGN(ring->vaddr_unaligned, HAL_RING_BASE_ALIGN);
drivers/net/wireless/ath/ath12k/dp.c
258
ring->paddr = ring->paddr_unaligned + ((unsigned long)ring->vaddr -
drivers/net/wireless/ath/ath12k/dp.c
259
(unsigned long)ring->vaddr_unaligned);
drivers/net/wireless/ath/ath12k/dp.c
261
params.ring_base_vaddr = ring->vaddr;
drivers/net/wireless/ath/ath12k/dp.c
262
params.ring_base_paddr = ring->paddr;
drivers/net/wireless/ath/ath12k/dp.c
331
ring->ring_id = ret;
drivers/net/wireless/ath/ath12k/dp.c
700
u32 ring_type, struct dp_srng *ring)
drivers/net/wireless/ath/ath12k/dp.c
705
ath12k_dp_srng_cleanup(ab, ring);
drivers/net/wireless/ath/ath12k/dp.h
689
void ath12k_dp_srng_cleanup(struct ath12k_base *ab, struct dp_srng *ring);
drivers/net/wireless/ath/ath12k/dp.h
690
int ath12k_dp_srng_setup(struct ath12k_base *ab, struct dp_srng *ring,
drivers/net/wireless/ath/ath12k/dp.h
695
u32 ring_type, struct dp_srng *ring);
drivers/net/wireless/ath/ath12k/wifi7/dp_tx.c
649
int ring)
drivers/net/wireless/ath/ath12k/wifi7/dp_tx.c
671
dp->device_stats.tx_completed[ring]++;
drivers/net/wireless/ath/carl9170/debug.c
581
ar->debug.ring[ar->debug.ring_tail].reg = reg + (i << 2);
drivers/net/wireless/ath/carl9170/debug.c
582
ar->debug.ring[ar->debug.ring_tail].value = tmp;
drivers/net/wireless/ath/carl9170/debug.c
598
ar->debug.ring[ar->debug.ring_head].reg,
drivers/net/wireless/ath/carl9170/debug.c
599
ar->debug.ring[ar->debug.ring_head].value);
drivers/net/wireless/ath/carl9170/debug.h
124
struct carl9170_debug_mem_rbe ring[CARL9170_DEBUG_RING_SIZE];
drivers/net/wireless/ath/wil6210/debugfs.c
103
seq_printf(s, " hwtail = [0x%08x] -> ", ring->hwtail);
drivers/net/wireless/ath/wil6210/debugfs.c
104
x = wmi_addr(wil, ring->hwtail);
drivers/net/wireless/ath/wil6210/debugfs.c
1065
struct wil_ring *ring;
drivers/net/wireless/ath/wil6210/debugfs.c
1089
ring = tx ? &wil->ring_tx[ring_idx] : &wil->ring_rx;
drivers/net/wireless/ath/wil6210/debugfs.c
1091
if (!ring->va) {
drivers/net/wireless/ath/wil6210/debugfs.c
1099
if (txdesc_idx >= ring->size) {
drivers/net/wireless/ath/wil6210/debugfs.c
1102
ring_idx, txdesc_idx, ring->size);
drivers/net/wireless/ath/wil6210/debugfs.c
1105
txdesc_idx, ring->size);
drivers/net/wireless/ath/wil6210/debugfs.c
1112
d = &ring->va[txdesc_idx].tx.legacy;
drivers/net/wireless/ath/wil6210/debugfs.c
1118
skb = ring->ctx ? ring->ctx[txdesc_idx].skb : NULL;
drivers/net/wireless/ath/wil6210/debugfs.c
112
if (ring->va && (ring->size <= (1 << WIL_RING_SIZE_ORDER_MAX))) {
drivers/net/wireless/ath/wil6210/debugfs.c
1122
&ring->va[txdesc_idx].rx.enhanced;
drivers/net/wireless/ath/wil6210/debugfs.c
1132
skb = ring->ctx[txdesc_idx].skb;
drivers/net/wireless/ath/wil6210/debugfs.c
115
for (i = 0; i < ring->size; i++) {
drivers/net/wireless/ath/wil6210/debugfs.c
119
wil_print_desc_edma(s, wil, ring, _s, _h, i);
drivers/net/wireless/ath/wil6210/debugfs.c
122
&ring->va[i].tx.legacy;
drivers/net/wireless/ath/wil6210/debugfs.c
124
_s : (ring->ctx[i].skb ? _h : 'h'));
drivers/net/wireless/ath/wil6210/debugfs.c
140
struct wil_ring *ring = &wil->ring_tx[i];
drivers/net/wireless/ath/wil6210/debugfs.c
143
if (ring->va) {
drivers/net/wireless/ath/wil6210/debugfs.c
146
u32 swhead = ring->swhead;
drivers/net/wireless/ath/wil6210/debugfs.c
147
u32 swtail = ring->swtail;
drivers/net/wireless/ath/wil6210/debugfs.c
148
int used = (ring->size + swhead - swtail)
drivers/net/wireless/ath/wil6210/debugfs.c
149
% ring->size;
drivers/net/wireless/ath/wil6210/debugfs.c
150
int avail = ring->size - used - 1;
drivers/net/wireless/ath/wil6210/debugfs.c
185
wil_print_ring(s, wil, name, ring, '_', 'H');
drivers/net/wireless/ath/wil6210/debugfs.c
191
DEFINE_SHOW_ATTRIBUTE(ring);
drivers/net/wireless/ath/wil6210/debugfs.c
43
struct wil_ring *ring,
drivers/net/wireless/ath/wil6210/debugfs.c
49
if (ring->is_rx) {
drivers/net/wireless/ath/wil6210/debugfs.c
52
&ring->va[idx].rx.enhanced;
drivers/net/wireless/ath/wil6210/debugfs.c
62
&ring->va[idx].tx.enhanced;
drivers/net/wireless/ath/wil6210/debugfs.c
65
has_skb = ring->ctx && ring->ctx[idx].skb;
drivers/net/wireless/ath/wil6210/debugfs.c
75
const char *name, struct wil_ring *ring,
drivers/net/wireless/ath/wil6210/debugfs.c
82
seq_printf(s, " pa = %pad\n", &ring->pa);
drivers/net/wireless/ath/wil6210/debugfs.c
83
seq_printf(s, " va = 0x%p\n", ring->va);
drivers/net/wireless/ath/wil6210/debugfs.c
84
seq_printf(s, " size = %d\n", ring->size);
drivers/net/wireless/ath/wil6210/debugfs.c
85
if (wil->use_enhanced_dma_hw && ring->is_rx)
drivers/net/wireless/ath/wil6210/debugfs.c
86
seq_printf(s, " swtail = %u\n", *ring->edma_rx_swtail.va);
drivers/net/wireless/ath/wil6210/debugfs.c
88
seq_printf(s, " swtail = %d\n", ring->swtail);
drivers/net/wireless/ath/wil6210/debugfs.c
89
seq_printf(s, " swhead = %d\n", ring->swhead);
drivers/net/wireless/ath/wil6210/debugfs.c
91
int ring_id = ring->is_rx ?
drivers/net/wireless/ath/wil6210/debugfs.c
92
WIL_RX_DESC_RING_ID : ring - wil->ring_tx;
drivers/net/wireless/ath/wil6210/main.c
200
struct wil_ring *ring = &wil->ring_tx[id];
drivers/net/wireless/ath/wil6210/main.c
205
if (!ring->va)
drivers/net/wireless/ath/wil6210/main.c
226
wil->txrx_ops.ring_fini_tx(wil, ring);
drivers/net/wireless/ath/wil6210/netdev.c
142
struct wil_ring *ring = &wil->ring_tx[i];
drivers/net/wireless/ath/wil6210/netdev.c
146
if (!ring->va || !txdata->enabled ||
drivers/net/wireless/ath/wil6210/txrx.c
1439
struct wil_ring *ring, struct sk_buff *skb);
drivers/net/wireless/ath/wil6210/txrx.c
1445
struct wil_ring *ring;
drivers/net/wireless/ath/wil6210/txrx.c
1456
ring = &wil->ring_tx[i];
drivers/net/wireless/ath/wil6210/txrx.c
1458
if (!ring->va || !txdata->enabled || txdata->mid != vif->mid)
drivers/net/wireless/ath/wil6210/txrx.c
1471
return ring;
drivers/net/wireless/ath/wil6210/txrx.c
2027
struct wil_ring *ring, struct sk_buff *skb)
drivers/net/wireless/ath/wil6210/txrx.c
2032
u32 swhead = ring->swhead;
drivers/net/wireless/ath/wil6210/txrx.c
2033
int avail = wil_ring_avail_tx(ring);
drivers/net/wireless/ath/wil6210/txrx.c
2036
int ring_index = ring - wil->ring_tx;
drivers/net/wireless/ath/wil6210/txrx.c
2056
_d = &ring->va[i].tx.legacy;
drivers/net/wireless/ath/wil6210/txrx.c
2067
ring->ctx[i].mapped_as = wil_mapped_as_single;
drivers/net/wireless/ath/wil6210/txrx.c
2083
ring->ctx[i].nr_frags = nr_frags;
drivers/net/wireless/ath/wil6210/txrx.c
2095
i = (swhead + f + 1) % ring->size;
drivers/net/wireless/ath/wil6210/txrx.c
2096
_d = &ring->va[i].tx.legacy;
drivers/net/wireless/ath/wil6210/txrx.c
2104
ring->ctx[i].mapped_as = wil_mapped_as_page;
drivers/net/wireless/ath/wil6210/txrx.c
2126
ring->ctx[i].skb = skb_get(skb);
drivers/net/wireless/ath/wil6210/txrx.c
2129
used = wil_ring_used_tx(ring);
drivers/net/wireless/ath/wil6210/txrx.c
2145
wil_ring_advance_head(ring, nr_frags + 1);
drivers/net/wireless/ath/wil6210/txrx.c
2147
ring->swhead);
drivers/net/wireless/ath/wil6210/txrx.c
2160
wil_w(wil, ring->hwtail, ring->swhead);
drivers/net/wireless/ath/wil6210/txrx.c
2169
i = (swhead + f) % ring->size;
drivers/net/wireless/ath/wil6210/txrx.c
2170
ctx = &ring->ctx[i];
drivers/net/wireless/ath/wil6210/txrx.c
2171
_d = &ring->va[i].tx.legacy;
drivers/net/wireless/ath/wil6210/txrx.c
2185
struct wil_ring *ring, struct sk_buff *skb)
drivers/net/wireless/ath/wil6210/txrx.c
2187
int ring_index = ring - wil->ring_tx;
drivers/net/wireless/ath/wil6210/txrx.c
2203
(wil, vif, ring, skb);
drivers/net/wireless/ath/wil6210/txrx.c
2228
struct wil_ring *ring,
drivers/net/wireless/ath/wil6210/txrx.c
2237
if (ring)
drivers/net/wireless/ath/wil6210/txrx.c
2239
(int)(ring - wil->ring_tx), vif->mid, check_stop,
drivers/net/wireless/ath/wil6210/txrx.c
2245
if (ring && drop_if_ring_full)
drivers/net/wireless/ath/wil6210/txrx.c
2254
if (!ring || unlikely(wil_ring_avail_low(ring))) {
drivers/net/wireless/ath/wil6210/txrx.c
2274
!txdata->enabled || cur_ring == ring)
drivers/net/wireless/ath/wil6210/txrx.c
2284
if (!ring || wil_ring_avail_high(ring)) {
drivers/net/wireless/ath/wil6210/txrx.c
2293
struct wil_ring *ring, bool check_stop)
drivers/net/wireless/ath/wil6210/txrx.c
2296
__wil_update_net_queues(wil, vif, ring, check_stop);
drivers/net/wireless/ath/wil6210/txrx.c
2301
struct wil_ring *ring, bool check_stop)
drivers/net/wireless/ath/wil6210/txrx.c
2304
__wil_update_net_queues(wil, vif, ring, check_stop);
drivers/net/wireless/ath/wil6210/txrx.c
2314
struct wil_ring *ring;
drivers/net/wireless/ath/wil6210/txrx.c
2340
ring = wil_find_tx_ring_sta(wil, vif, skb);
drivers/net/wireless/ath/wil6210/txrx.c
2346
ring = wil_find_tx_bcast_2(wil, vif, skb);
drivers/net/wireless/ath/wil6210/txrx.c
2349
ring = wil_find_tx_bcast_1(wil, vif, skb);
drivers/net/wireless/ath/wil6210/txrx.c
2354
ring = wil_find_tx_bcast_2(wil, vif, skb);
drivers/net/wireless/ath/wil6210/txrx.c
2357
ring = wil_find_tx_ucast(wil, vif, skb);
drivers/net/wireless/ath/wil6210/txrx.c
2359
if (unlikely(!ring)) {
drivers/net/wireless/ath/wil6210/txrx.c
2364
rc = wil_tx_ring(wil, vif, ring, skb);
drivers/net/wireless/ath/wil6210/txrx.c
2369
wil_update_net_queues_bh(wil, vif, ring, true);
drivers/net/wireless/ath/wil6210/txrx.c
354
struct wil_ring *ring = &wil->ring_rx;
drivers/net/wireless/ath/wil6210/txrx.c
356
_d = (struct vring_rx_desc *)&ring->va[ring->swhead].rx.legacy;
drivers/net/wireless/ath/wil6210/txrx.c
40
static inline int wil_ring_wmark_low(struct wil_ring *ring)
drivers/net/wireless/ath/wil6210/txrx.c
42
return ring->size / 8;
drivers/net/wireless/ath/wil6210/txrx.c
46
static inline int wil_ring_wmark_high(struct wil_ring *ring)
drivers/net/wireless/ath/wil6210/txrx.c
48
return ring->size / 4;
drivers/net/wireless/ath/wil6210/txrx.c
52
static inline int wil_ring_avail_low(struct wil_ring *ring)
drivers/net/wireless/ath/wil6210/txrx.c
54
return wil_ring_avail_tx(ring) < wil_ring_wmark_low(ring);
drivers/net/wireless/ath/wil6210/txrx.c
58
static inline int wil_ring_avail_high(struct wil_ring *ring)
drivers/net/wireless/ath/wil6210/txrx.c
60
return wil_ring_avail_tx(ring) > wil_ring_wmark_high(ring);
drivers/net/wireless/ath/wil6210/txrx.h
582
static inline int wil_ring_is_empty(struct wil_ring *ring)
drivers/net/wireless/ath/wil6210/txrx.h
584
return ring->swhead == ring->swtail;
drivers/net/wireless/ath/wil6210/txrx.h
587
static inline u32 wil_ring_next_tail(struct wil_ring *ring)
drivers/net/wireless/ath/wil6210/txrx.h
589
return (ring->swtail + 1) % ring->size;
drivers/net/wireless/ath/wil6210/txrx.h
592
static inline void wil_ring_advance_head(struct wil_ring *ring, int n)
drivers/net/wireless/ath/wil6210/txrx.h
594
ring->swhead = (ring->swhead + n) % ring->size;
drivers/net/wireless/ath/wil6210/txrx.h
597
static inline int wil_ring_is_full(struct wil_ring *ring)
drivers/net/wireless/ath/wil6210/txrx.h
599
return wil_ring_next_tail(ring) == ring->swhead;
drivers/net/wireless/ath/wil6210/txrx.h
632
static inline int wil_ring_used_tx(struct wil_ring *ring)
drivers/net/wireless/ath/wil6210/txrx.h
634
u32 swhead = ring->swhead;
drivers/net/wireless/ath/wil6210/txrx.h
635
u32 swtail = ring->swtail;
drivers/net/wireless/ath/wil6210/txrx.h
637
return (ring->size + swhead - swtail) % ring->size;
drivers/net/wireless/ath/wil6210/txrx.h
641
static inline int wil_ring_avail_tx(struct wil_ring *ring)
drivers/net/wireless/ath/wil6210/txrx.h
643
return ring->size - wil_ring_used_tx(ring) - 1;
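
The wil6210 helpers above derive ring occupancy from two software indices: used = (size + head - tail) % size and avail = size - used - 1 (one slot is always left empty so "full" and "empty" stay distinguishable), with stop/wake watermarks at size/8 and size/4 for net-queue hysteresis. A self-contained sketch of that arithmetic, with names only mirroring the listed helpers:

#include <stdio.h>

struct ring {
	unsigned int size;	/* number of slots */
	unsigned int swhead;	/* producer index */
	unsigned int swtail;	/* consumer index */
};

static unsigned int ring_used(const struct ring *r)
{
	return (r->size + r->swhead - r->swtail) % r->size;
}

static unsigned int ring_avail(const struct ring *r)
{
	/* one slot stays unused so head == tail means "empty", not "full" */
	return r->size - ring_used(r) - 1;
}

int main(void)
{
	struct ring r = { .size = 128, .swhead = 5, .swtail = 120 };
	unsigned int avail = ring_avail(&r);

	/* stop the queue when space runs low, wake it only once it recovers */
	int stop = avail < r.size / 8;
	int wake = avail > r.size / 4;

	printf("used=%u avail=%u stop=%d wake=%d\n", ring_used(&r), avail, stop, wake);
	return 0;
}
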
drivers/net/wireless/ath/wil6210/txrx_edma.c
1067
struct wil_ring *ring = &wil->ring_rx;
drivers/net/wireless/ath/wil6210/txrx_edma.c
1072
if (unlikely(!ring->va)) {
drivers/net/wireless/ath/wil6210/txrx_edma.c
1161
struct wil_ring *ring = NULL;
drivers/net/wireless/ath/wil6210/txrx_edma.c
1194
ring = &wil->ring_tx[ring_id];
drivers/net/wireless/ath/wil6210/txrx_edma.c
1195
if (unlikely(!ring->va)) {
drivers/net/wireless/ath/wil6210/txrx_edma.c
1222
used_before_complete = wil_ring_used_tx(ring);
drivers/net/wireless/ath/wil6210/txrx_edma.c
1225
struct wil_ctx *ctx = &ring->ctx[ring->swtail];
drivers/net/wireless/ath/wil6210/txrx_edma.c
1231
&ring->va[ring->swtail].tx.enhanced;
drivers/net/wireless/ath/wil6210/txrx_edma.c
1235
trace_wil6210_tx_status(&msg, ring->swtail, dmalen);
drivers/net/wireless/ath/wil6210/txrx_edma.c
1238
ring_id, ring->swtail, dmalen,
drivers/net/wireless/ath/wil6210/txrx_edma.c
1278
ring->swtail = wil_ring_next_tail(ring);
drivers/net/wireless/ath/wil6210/txrx_edma.c
1284
used_new = wil_ring_used_tx(ring);
drivers/net/wireless/ath/wil6210/txrx_edma.c
1351
skb_frag_t *frag, struct wil_ring *ring,
drivers/net/wireless/ath/wil6210/txrx_edma.c
1358
&ring->va[i].tx.enhanced;
drivers/net/wireless/ath/wil6210/txrx_edma.c
1360
int ring_index = ring - wil->ring_tx;
drivers/net/wireless/ath/wil6210/txrx_edma.c
1368
ring->ctx[i].mapped_as = wil_mapped_as_single;
drivers/net/wireless/ath/wil6210/txrx_edma.c
1371
ring->ctx[i].mapped_as = wil_mapped_as_page;
drivers/net/wireless/ath/wil6210/txrx_edma.c
1389
ring->ctx[i].skb = skb_get(skb);
drivers/net/wireless/ath/wil6210/txrx_edma.c
1402
struct wil_ring *ring,
drivers/net/wireless/ath/wil6210/txrx_edma.c
1405
int ring_index = ring - wil->ring_tx;
drivers/net/wireless/ath/wil6210/txrx_edma.c
1409
int used, avail = wil_ring_avail_tx(ring);
drivers/net/wireless/ath/wil6210/txrx_edma.c
1413
u32 swhead = ring->swhead;
drivers/net/wireless/ath/wil6210/txrx_edma.c
1461
wil_tso_type_hdr, NULL, ring, skb,
drivers/net/wireless/ath/wil6210/txrx_edma.c
1471
(swhead + descs_used) % ring->size,
drivers/net/wireless/ath/wil6210/txrx_edma.c
1473
wil_tso_type_lst, NULL, ring, skb,
drivers/net/wireless/ath/wil6210/txrx_edma.c
1488
(swhead + descs_used) % ring->size,
drivers/net/wireless/ath/wil6210/txrx_edma.c
1491
frag, ring, skb, is_ipv4,
drivers/net/wireless/ath/wil6210/txrx_edma.c
1499
used = wil_ring_used_tx(ring);
drivers/net/wireless/ath/wil6210/txrx_edma.c
1508
wil_ring_advance_head(ring, descs_used);
drivers/net/wireless/ath/wil6210/txrx_edma.c
1509
wil_dbg_txrx(wil, "TSO: Tx swhead %d -> %d\n", swhead, ring->swhead);
drivers/net/wireless/ath/wil6210/txrx_edma.c
152
struct wil_ring *ring, u32 i)
drivers/net/wireless/ath/wil6210/txrx_edma.c
1521
wil_w(wil, ring->hwtail, ring->swhead);
drivers/net/wireless/ath/wil6210/txrx_edma.c
1529
int i = (swhead + descs_used - 1) % ring->size;
drivers/net/wireless/ath/wil6210/txrx_edma.c
1533
&ring->va[i].tx.enhanced;
drivers/net/wireless/ath/wil6210/txrx_edma.c
1536
ctx = &ring->ctx[i];
drivers/net/wireless/ath/wil6210/txrx_edma.c
1548
struct wil_ring *ring = &wil->ring_tx[ring_id];
drivers/net/wireless/ath/wil6210/txrx_edma.c
1558
ring->size = size;
drivers/net/wireless/ath/wil6210/txrx_edma.c
1559
ring->is_rx = false;
drivers/net/wireless/ath/wil6210/txrx_edma.c
1560
rc = wil_ring_alloc_desc_ring(wil, ring);
drivers/net/wireless/ath/wil6210/txrx_edma.c
1580
wil_ring_free_edma(wil, ring);
drivers/net/wireless/ath/wil6210/txrx_edma.c
1606
struct wil_ring *ring = &wil->ring_rx;
drivers/net/wireless/ath/wil6210/txrx_edma.c
1611
wil_ring_free_edma(wil, ring);
drivers/net/wireless/ath/wil6210/txrx_edma.c
165
&ring->va[i].rx.enhanced;
drivers/net/wireless/ath/wil6210/txrx_edma.c
233
struct wil_ring *ring = &wil->ring_rx;
drivers/net/wireless/ath/wil6210/txrx_edma.c
236
ring->swtail = *ring->edma_rx_swtail.va;
drivers/net/wireless/ath/wil6210/txrx_edma.c
238
for (; next_head = wil_ring_next_head(ring),
drivers/net/wireless/ath/wil6210/txrx_edma.c
239
(next_head != ring->swtail);
drivers/net/wireless/ath/wil6210/txrx_edma.c
240
ring->swhead = next_head) {
drivers/net/wireless/ath/wil6210/txrx_edma.c
241
rc = wil_ring_alloc_skb_edma(wil, ring, ring->swhead);
drivers/net/wireless/ath/wil6210/txrx_edma.c
248
rc, ring->swhead);
drivers/net/wireless/ath/wil6210/txrx_edma.c
258
wil_w(wil, ring->hwtail, ring->swhead);
drivers/net/wireless/ath/wil6210/txrx_edma.c
264
struct wil_ring *ring)
drivers/net/wireless/ath/wil6210/txrx_edma.c
295
struct wil_ring *ring = &wil->ring_rx;
drivers/net/wireless/ath/wil6210/txrx_edma.c
303
wil_move_all_rx_buff_to_free_list(wil, ring);
drivers/net/wireless/ath/wil6210/txrx_edma.c
372
struct wil_ring *ring)
drivers/net/wireless/ath/wil6210/txrx_edma.c
375
size_t sz = ring->size * sizeof(ring->va[0]);
drivers/net/wireless/ath/wil6210/txrx_edma.c
379
BUILD_BUG_ON(sizeof(ring->va[0]) != 32);
drivers/net/wireless/ath/wil6210/txrx_edma.c
381
ring->swhead = 0;
drivers/net/wireless/ath/wil6210/txrx_edma.c
382
ring->swtail = 0;
drivers/net/wireless/ath/wil6210/txrx_edma.c
383
ring->ctx = kzalloc_objs(ring->ctx[0], ring->size);
drivers/net/wireless/ath/wil6210/txrx_edma.c
384
if (!ring->ctx)
drivers/net/wireless/ath/wil6210/txrx_edma.c
387
ring->va = dma_alloc_coherent(dev, sz, &ring->pa, GFP_KERNEL);
drivers/net/wireless/ath/wil6210/txrx_edma.c
388
if (!ring->va)
drivers/net/wireless/ath/wil6210/txrx_edma.c
391
if (ring->is_rx) {
drivers/net/wireless/ath/wil6210/txrx_edma.c
392
sz = sizeof(*ring->edma_rx_swtail.va);
drivers/net/wireless/ath/wil6210/txrx_edma.c
393
ring->edma_rx_swtail.va =
drivers/net/wireless/ath/wil6210/txrx_edma.c
394
dma_alloc_coherent(dev, sz, &ring->edma_rx_swtail.pa,
drivers/net/wireless/ath/wil6210/txrx_edma.c
396
if (!ring->edma_rx_swtail.va)
drivers/net/wireless/ath/wil6210/txrx_edma.c
401
ring->is_rx ? "RX" : "TX",
drivers/net/wireless/ath/wil6210/txrx_edma.c
402
ring->size, ring->va, &ring->pa, ring->ctx);
drivers/net/wireless/ath/wil6210/txrx_edma.c
406
dma_free_coherent(dev, ring->size * sizeof(ring->va[0]),
drivers/net/wireless/ath/wil6210/txrx_edma.c
407
(void *)ring->va, ring->pa);
drivers/net/wireless/ath/wil6210/txrx_edma.c
408
ring->va = NULL;
drivers/net/wireless/ath/wil6210/txrx_edma.c
410
kfree(ring->ctx);
drivers/net/wireless/ath/wil6210/txrx_edma.c
411
ring->ctx = NULL;
drivers/net/wireless/ath/wil6210/txrx_edma.c
416
static void wil_ring_free_edma(struct wil6210_priv *wil, struct wil_ring *ring)
drivers/net/wireless/ath/wil6210/txrx_edma.c
422
if (!ring->va)
drivers/net/wireless/ath/wil6210/txrx_edma.c
425
sz = ring->size * sizeof(ring->va[0]);
drivers/net/wireless/ath/wil6210/txrx_edma.c
428
if (ring->is_rx) {
drivers/net/wireless/ath/wil6210/txrx_edma.c
430
ring->size, ring->va,
drivers/net/wireless/ath/wil6210/txrx_edma.c
431
&ring->pa, ring->ctx);
drivers/net/wireless/ath/wil6210/txrx_edma.c
433
wil_move_all_rx_buff_to_free_list(wil, ring);
drivers/net/wireless/ath/wil6210/txrx_edma.c
434
dma_free_coherent(dev, sizeof(*ring->edma_rx_swtail.va),
drivers/net/wireless/ath/wil6210/txrx_edma.c
435
ring->edma_rx_swtail.va,
drivers/net/wireless/ath/wil6210/txrx_edma.c
436
ring->edma_rx_swtail.pa);
drivers/net/wireless/ath/wil6210/txrx_edma.c
441
ring_index = ring - wil->ring_tx;
drivers/net/wireless/ath/wil6210/txrx_edma.c
444
ring_index, ring->size, ring->va,
drivers/net/wireless/ath/wil6210/txrx_edma.c
445
&ring->pa, ring->ctx);
drivers/net/wireless/ath/wil6210/txrx_edma.c
447
while (!wil_ring_is_empty(ring)) {
drivers/net/wireless/ath/wil6210/txrx_edma.c
453
&ring->va[ring->swtail].tx.enhanced;
drivers/net/wireless/ath/wil6210/txrx_edma.c
455
ctx = &ring->ctx[ring->swtail];
drivers/net/wireless/ath/wil6210/txrx_edma.c
459
ring->swtail);
drivers/net/wireless/ath/wil6210/txrx_edma.c
460
ring->swtail = wil_ring_next_tail(ring);
drivers/net/wireless/ath/wil6210/txrx_edma.c
467
ring->swtail = wil_ring_next_tail(ring);
drivers/net/wireless/ath/wil6210/txrx_edma.c
471
dma_free_coherent(dev, sz, (void *)ring->va, ring->pa);
drivers/net/wireless/ath/wil6210/txrx_edma.c
472
kfree(ring->ctx);
drivers/net/wireless/ath/wil6210/txrx_edma.c
473
ring->pa = 0;
drivers/net/wireless/ath/wil6210/txrx_edma.c
474
ring->va = NULL;
drivers/net/wireless/ath/wil6210/txrx_edma.c
475
ring->ctx = NULL;
drivers/net/wireless/ath/wil6210/txrx_edma.c
481
struct wil_ring *ring = &wil->ring_rx;
drivers/net/wireless/ath/wil6210/txrx_edma.c
486
ring->size = desc_ring_size;
drivers/net/wireless/ath/wil6210/txrx_edma.c
487
ring->is_rx = true;
drivers/net/wireless/ath/wil6210/txrx_edma.c
488
rc = wil_ring_alloc_desc_ring(wil, ring);
drivers/net/wireless/ath/wil6210/txrx_edma.c
498
wil_ring_free_edma(wil, ring);
drivers/net/wireless/ath/wil6210/txrx_edma.c
602
struct wil_ring *ring = &wil->ring_rx;
drivers/net/wireless/ath/wil6210/txrx_edma.c
686
wil_ring_free_edma(wil, ring);
drivers/net/wireless/ath/wil6210/txrx_edma.c
699
struct wil_ring *ring = &wil->ring_tx[ring_id];
drivers/net/wireless/ath/wil6210/txrx_edma.c
709
ring->size = size;
drivers/net/wireless/ath/wil6210/txrx_edma.c
710
rc = wil_ring_alloc_desc_ring(wil, ring);
drivers/net/wireless/ath/wil6210/txrx_edma.c
734
wil_ring_free_edma(wil, ring);
drivers/net/wireless/ath/wil6210/txrx_edma.h
567
static inline u32 wil_ring_next_head(struct wil_ring *ring)
drivers/net/wireless/ath/wil6210/txrx_edma.h
569
return (ring->swhead + 1) % ring->size;
drivers/net/wireless/ath/wil6210/wil6210.h
1370
struct wil_ring *ring, bool should_stop);
drivers/net/wireless/ath/wil6210/wil6210.h
1372
struct wil_ring *ring, bool check_stop);
drivers/net/wireless/ath/wil6210/wil6210.h
597
void (*ring_fini_tx)(struct wil6210_priv *wil, struct wil_ring *ring);
drivers/net/wireless/ath/wil6210/wil6210.h
607
struct wil_ring *ring, struct sk_buff *skb);
drivers/net/wireless/ath/wil6210/wmi.c
1583
struct wil_ring *ring;
drivers/net/wireless/ath/wil6210/wmi.c
1600
ring = &wil->ring_tx[i];
drivers/net/wireless/ath/wil6210/wmi.c
1602
if (!ring->va || !txdata->enabled || txdata->mid != vif->mid)
drivers/net/wireless/ath/wil6210/wmi.c
3839
struct wil_ring *ring = &wil->ring_rx;
drivers/net/wireless/ath/wil6210/wmi.c
3843
.ring_size = cpu_to_le16(ring->size),
drivers/net/wireless/ath/wil6210/wmi.c
3856
cmd.ring_cfg.ring_mem_base = cpu_to_le64(ring->pa);
drivers/net/wireless/ath/wil6210/wmi.c
3857
cmd.sw_tail_host_addr = cpu_to_le64(ring->edma_rx_swtail.pa);
drivers/net/wireless/ath/wil6210/wmi.c
3872
ring->hwtail = le32_to_cpu(reply.evt.ring_tail_ptr);
drivers/net/wireless/ath/wil6210/wmi.c
3883
struct wil_ring *ring = &wil->ring_tx[ring_id];
drivers/net/wireless/ath/wil6210/wmi.c
3887
.ring_size = cpu_to_le16(ring->size),
drivers/net/wireless/ath/wil6210/wmi.c
3907
cmd.ring_cfg.ring_mem_base = cpu_to_le64(ring->pa);
drivers/net/wireless/ath/wil6210/wmi.c
3923
ring->hwtail = le32_to_cpu(reply.evt.ring_tail_ptr);
drivers/net/wireless/ath/wil6210/wmi.c
3934
struct wil_ring *ring = &wil->ring_tx[ring_id];
drivers/net/wireless/ath/wil6210/wmi.c
3938
.ring_size = cpu_to_le16(ring->size),
drivers/net/wireless/ath/wil6210/wmi.c
3953
cmd.ring_cfg.ring_mem_base = cpu_to_le64(ring->pa);
drivers/net/wireless/ath/wil6210/wmi.c
3969
ring->hwtail = le32_to_cpu(reply.evt.ring_tail_ptr);
drivers/net/wireless/broadcom/b43/dma.c
100
addrext = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_EXT);
drivers/net/wireless/broadcom/b43/dma.c
1002
dmacontroller_cleanup(ring);
drivers/net/wireless/broadcom/b43/dma.c
1003
free_all_descbuffers(ring);
drivers/net/wireless/broadcom/b43/dma.c
1004
free_ringmemory(ring);
drivers/net/wireless/broadcom/b43/dma.c
1006
kfree(ring->txhdr_cache);
drivers/net/wireless/broadcom/b43/dma.c
1007
kfree(ring->meta);
drivers/net/wireless/broadcom/b43/dma.c
1008
kfree(ring);
drivers/net/wireless/broadcom/b43/dma.c
1011
#define destroy_ring(dma, ring) do { \
drivers/net/wireless/broadcom/b43/dma.c
1012
b43_destroy_dmaring((dma)->ring, __stringify(ring)); \
drivers/net/wireless/broadcom/b43/dma.c
1013
(dma)->ring = NULL; \
drivers/net/wireless/broadcom/b43/dma.c
103
if (slot == ring->nr_slots - 1)
drivers/net/wireless/broadcom/b43/dma.c
1135
static u16 generate_cookie(struct b43_dmaring *ring, int slot)
drivers/net/wireless/broadcom/b43/dma.c
1147
cookie = (((u16)ring->index + 1) << 12);
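
The generate_cookie() line above packs the DMA ring identity and the slot into one 16-bit TX status cookie: ring->index + 1 goes in the top four bits, and the matching parse path presumably recovers the slot from the remaining low twelve. A hedged stand-alone sketch of that pack/unpack, with the exact bit layout assumed from the listed line rather than quoted from the driver:

#include <stdint.h>
#include <stdio.h>

/* assumed layout: bits 15..12 = ring index + 1, bits 11..0 = slot */
static uint16_t pack_cookie(unsigned int ring_index, unsigned int slot)
{
	return (uint16_t)(((ring_index + 1) << 12) | (slot & 0x0FFF));
}

static void unpack_cookie(uint16_t cookie, unsigned int *ring_index, unsigned int *slot)
{
	*ring_index = (cookie >> 12) - 1;	/* 0 in the top nibble would mean "no ring" */
	*slot = cookie & 0x0FFF;
}

int main(void)
{
	unsigned int ring_index, slot;
	uint16_t cookie = pack_cookie(3, 42);

	unpack_cookie(cookie, &ring_index, &slot);
	printf("cookie=0x%04x ring=%u slot=%u\n", cookie, ring_index, slot);
	return 0;
}
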
drivers/net/wireless/broadcom/b43/dma.c
1159
struct b43_dmaring *ring = NULL;
drivers/net/wireless/broadcom/b43/dma.c
1163
ring = dma->tx_ring_AC_BK;
drivers/net/wireless/broadcom/b43/dma.c
1166
ring = dma->tx_ring_AC_BE;
drivers/net/wireless/broadcom/b43/dma.c
1169
ring = dma->tx_ring_AC_VI;
drivers/net/wireless/broadcom/b43/dma.c
1172
ring = dma->tx_ring_AC_VO;
drivers/net/wireless/broadcom/b43/dma.c
1175
ring = dma->tx_ring_mcast;
drivers/net/wireless/broadcom/b43/dma.c
1179
if (unlikely(!ring || *slot < 0 || *slot >= ring->nr_slots)) {
drivers/net/wireless/broadcom/b43/dma.c
118
static void op32_poke_tx(struct b43_dmaring *ring, int slot)
drivers/net/wireless/broadcom/b43/dma.c
1185
return ring;
drivers/net/wireless/broadcom/b43/dma.c
1188
static int dma_tx_fragment(struct b43_dmaring *ring,
drivers/net/wireless/broadcom/b43/dma.c
1191
const struct b43_dma_ops *ops = ring->ops;
drivers/net/wireless/broadcom/b43/dma.c
120
b43_dma_write(ring, B43_DMA32_TXINDEX,
drivers/net/wireless/broadcom/b43/dma.c
1201
size_t hdrsize = b43_txhdr_size(ring->dev);
drivers/net/wireless/broadcom/b43/dma.c
1208
old_top_slot = ring->current_slot;
drivers/net/wireless/broadcom/b43/dma.c
1209
old_used_slots = ring->used_slots;
drivers/net/wireless/broadcom/b43/dma.c
1212
slot = request_slot(ring);
drivers/net/wireless/broadcom/b43/dma.c
1213
desc = ops->idx2desc(ring, slot, &meta_hdr);
drivers/net/wireless/broadcom/b43/dma.c
1216
header = &(ring->txhdr_cache[(slot / TX_SLOTS_PER_FRAME) * hdrsize]);
drivers/net/wireless/broadcom/b43/dma.c
1217
cookie = generate_cookie(ring, slot);
drivers/net/wireless/broadcom/b43/dma.c
1218
err = b43_generate_txhdr(ring->dev, header,
drivers/net/wireless/broadcom/b43/dma.c
1221
ring->current_slot = old_top_slot;
drivers/net/wireless/broadcom/b43/dma.c
1222
ring->used_slots = old_used_slots;
drivers/net/wireless/broadcom/b43/dma.c
1226
meta_hdr->dmaaddr = map_descbuffer(ring, (unsigned char *)header,
drivers/net/wireless/broadcom/b43/dma.c
1228
if (b43_dma_mapping_error(ring, meta_hdr->dmaaddr, hdrsize, 1)) {
drivers/net/wireless/broadcom/b43/dma.c
1229
ring->current_slot = old_top_slot;
drivers/net/wireless/broadcom/b43/dma.c
1230
ring->used_slots = old_used_slots;
drivers/net/wireless/broadcom/b43/dma.c
1233
ops->fill_descriptor(ring, desc, meta_hdr->dmaaddr,
drivers/net/wireless/broadcom/b43/dma.c
1237
slot = request_slot(ring);
drivers/net/wireless/broadcom/b43/dma.c
1238
desc = ops->idx2desc(ring, slot, &meta);
drivers/net/wireless/broadcom/b43/dma.c
124
static void op32_tx_suspend(struct b43_dmaring *ring)
drivers/net/wireless/broadcom/b43/dma.c
1245
meta->dmaaddr = map_descbuffer(ring, skb->data, skb->len, 1);
drivers/net/wireless/broadcom/b43/dma.c
1247
if (b43_dma_mapping_error(ring, meta->dmaaddr, skb->len, 1)) {
drivers/net/wireless/broadcom/b43/dma.c
1251
ring->current_slot = old_top_slot;
drivers/net/wireless/broadcom/b43/dma.c
1252
ring->used_slots = old_used_slots;
drivers/net/wireless/broadcom/b43/dma.c
1257
meta->dmaaddr = map_descbuffer(ring, priv_info->bouncebuffer, skb->len, 1);
drivers/net/wireless/broadcom/b43/dma.c
1258
if (b43_dma_mapping_error(ring, meta->dmaaddr, skb->len, 1)) {
drivers/net/wireless/broadcom/b43/dma.c
126
b43_dma_write(ring, B43_DMA32_TXCTL, b43_dma_read(ring, B43_DMA32_TXCTL)
drivers/net/wireless/broadcom/b43/dma.c
1261
ring->current_slot = old_top_slot;
drivers/net/wireless/broadcom/b43/dma.c
1262
ring->used_slots = old_used_slots;
drivers/net/wireless/broadcom/b43/dma.c
1268
ops->fill_descriptor(ring, desc, meta->dmaaddr, skb->len, 0, 1, 1);
drivers/net/wireless/broadcom/b43/dma.c
1273
b43_shm_write16(ring->dev, B43_SHM_SHARED,
drivers/net/wireless/broadcom/b43/dma.c
1278
ops->poke_tx(ring, next_slot(ring, slot));
drivers/net/wireless/broadcom/b43/dma.c
1282
unmap_descbuffer(ring, meta_hdr->dmaaddr,
drivers/net/wireless/broadcom/b43/dma.c
1287
static inline int should_inject_overflow(struct b43_dmaring *ring)
drivers/net/wireless/broadcom/b43/dma.c
1290
if (unlikely(b43_debug(ring->dev, B43_DBG_DMAOVERFLOW))) {
drivers/net/wireless/broadcom/b43/dma.c
1295
next_overflow = ring->last_injected_overflow + HZ;
drivers/net/wireless/broadcom/b43/dma.c
1297
ring->last_injected_overflow = jiffies;
drivers/net/wireless/broadcom/b43/dma.c
1298
b43dbg(ring->dev->wl,
drivers/net/wireless/broadcom/b43/dma.c
130
static void op32_tx_resume(struct b43_dmaring *ring)
drivers/net/wireless/broadcom/b43/dma.c
1300
"DMA controller %d\n", ring->index);
drivers/net/wireless/broadcom/b43/dma.c
1312
struct b43_dmaring *ring;
drivers/net/wireless/broadcom/b43/dma.c
132
b43_dma_write(ring, B43_DMA32_TXCTL, b43_dma_read(ring, B43_DMA32_TXCTL)
drivers/net/wireless/broadcom/b43/dma.c
1321
ring = dev->dma.tx_ring_AC_VO;
drivers/net/wireless/broadcom/b43/dma.c
1324
ring = dev->dma.tx_ring_AC_VI;
drivers/net/wireless/broadcom/b43/dma.c
1327
ring = dev->dma.tx_ring_AC_BE;
drivers/net/wireless/broadcom/b43/dma.c
1330
ring = dev->dma.tx_ring_AC_BK;
drivers/net/wireless/broadcom/b43/dma.c
1334
ring = dev->dma.tx_ring_AC_BE;
drivers/net/wireless/broadcom/b43/dma.c
1336
return ring;
drivers/net/wireless/broadcom/b43/dma.c
1341
struct b43_dmaring *ring;
drivers/net/wireless/broadcom/b43/dma.c
1349
ring = dev->dma.tx_ring_mcast;
drivers/net/wireless/broadcom/b43/dma.c
1355
ring = select_ring_by_priority(
drivers/net/wireless/broadcom/b43/dma.c
1359
B43_WARN_ON(!ring->tx);
drivers/net/wireless/broadcom/b43/dma.c
136
static int op32_get_current_rxslot(struct b43_dmaring *ring)
drivers/net/wireless/broadcom/b43/dma.c
1361
if (unlikely(ring->stopped)) {
drivers/net/wireless/broadcom/b43/dma.c
1372
if (WARN_ON(free_slots(ring) < TX_SLOTS_PER_FRAME)) {
drivers/net/wireless/broadcom/b43/dma.c
1383
ring->queue_prio = skb_get_queue_mapping(skb);
drivers/net/wireless/broadcom/b43/dma.c
1385
err = dma_tx_fragment(ring, skb);
drivers/net/wireless/broadcom/b43/dma.c
1397
if ((free_slots(ring) < TX_SLOTS_PER_FRAME) ||
drivers/net/wireless/broadcom/b43/dma.c
1398
should_inject_overflow(ring)) {
drivers/net/wireless/broadcom/b43/dma.c
140
val = b43_dma_read(ring, B43_DMA32_RXSTATUS);
drivers/net/wireless/broadcom/b43/dma.c
1403
ring->stopped = true;
drivers/net/wireless/broadcom/b43/dma.c
1405
b43dbg(dev->wl, "Stopped TX ring %d\n", ring->index);
drivers/net/wireless/broadcom/b43/dma.c
1417
struct b43_dmaring *ring;
drivers/net/wireless/broadcom/b43/dma.c
1426
ring = parse_cookie(dev, status->cookie, &slot);
drivers/net/wireless/broadcom/b43/dma.c
1427
if (unlikely(!ring))
drivers/net/wireless/broadcom/b43/dma.c
1429
B43_WARN_ON(!ring->tx);
drivers/net/wireless/broadcom/b43/dma.c
1434
firstused = ring->current_slot - ring->used_slots + 1;
drivers/net/wireless/broadcom/b43/dma.c
1436
firstused = ring->nr_slots + firstused;
drivers/net/wireless/broadcom/b43/dma.c
1443
if (slot == next_slot(ring, next_slot(ring, firstused))) {
drivers/net/wireless/broadcom/b43/dma.c
1453
ring->index, slot);
drivers/net/wireless/broadcom/b43/dma.c
146
static void op32_set_current_rxslot(struct b43_dmaring *ring, int slot)
drivers/net/wireless/broadcom/b43/dma.c
1464
ring->index, firstused, slot);
drivers/net/wireless/broadcom/b43/dma.c
1471
ops = ring->ops;
drivers/net/wireless/broadcom/b43/dma.c
1473
B43_WARN_ON(slot < 0 || slot >= ring->nr_slots);
drivers/net/wireless/broadcom/b43/dma.c
1475
ops->idx2desc(ring, slot, &meta);
drivers/net/wireless/broadcom/b43/dma.c
148
b43_dma_write(ring, B43_DMA32_RXINDEX,
drivers/net/wireless/broadcom/b43/dma.c
1480
slot, firstused, ring->index);
drivers/net/wireless/broadcom/b43/dma.c
1488
unmap_descbuffer(ring, meta->dmaaddr,
drivers/net/wireless/broadcom/b43/dma.c
1493
unmap_descbuffer(ring, meta->dmaaddr,
drivers/net/wireless/broadcom/b43/dma.c
1506
slot, firstused, ring->index);
drivers/net/wireless/broadcom/b43/dma.c
1528
ring->nr_succeed_tx_packets++;
drivers/net/wireless/broadcom/b43/dma.c
1530
ring->nr_failed_tx_packets++;
drivers/net/wireless/broadcom/b43/dma.c
1531
ring->nr_total_packet_tries += status->frame_count;
drivers/net/wireless/broadcom/b43/dma.c
1545
slot, firstused, ring->index);
drivers/net/wireless/broadcom/b43/dma.c
1551
ring->used_slots--;
drivers/net/wireless/broadcom/b43/dma.c
1558
slot = next_slot(ring, slot);
drivers/net/wireless/broadcom/b43/dma.c
1562
if (ring->stopped) {
drivers/net/wireless/broadcom/b43/dma.c
1563
B43_WARN_ON(free_slots(ring) < TX_SLOTS_PER_FRAME);
drivers/net/wireless/broadcom/b43/dma.c
1564
ring->stopped = false;
drivers/net/wireless/broadcom/b43/dma.c
1567
if (dev->wl->tx_queue_stopped[ring->queue_prio]) {
drivers/net/wireless/broadcom/b43/dma.c
1568
dev->wl->tx_queue_stopped[ring->queue_prio] = false;
drivers/net/wireless/broadcom/b43/dma.c
1572
b43_wake_queue(dev, ring->queue_prio);
drivers/net/wireless/broadcom/b43/dma.c
1574
b43dbg(dev->wl, "Woke up TX ring %d\n", ring->index);
drivers/net/wireless/broadcom/b43/dma.c
1581
static void dma_rx(struct b43_dmaring *ring, int *slot)
drivers/net/wireless/broadcom/b43/dma.c
1583
const struct b43_dma_ops *ops = ring->ops;
drivers/net/wireless/broadcom/b43/dma.c
1592
desc = ops->idx2desc(ring, *slot, &meta);
drivers/net/wireless/broadcom/b43/dma.c
1594
sync_descbuffer_for_cpu(ring, meta->dmaaddr, ring->rx_buffersize);
drivers/net/wireless/broadcom/b43/dma.c
1612
if (unlikely(b43_rx_buffer_is_poisoned(ring, skb))) {
drivers/net/wireless/broadcom/b43/dma.c
1615
b43dbg(ring->dev->wl, "DMA RX: Dropping poisoned buffer.\n");
drivers/net/wireless/broadcom/b43/dma.c
1619
if (unlikely(len + ring->frameoffset > ring->rx_buffersize)) {
drivers/net/wireless/broadcom/b43/dma.c
1629
desc = ops->idx2desc(ring, *slot, &meta);
drivers/net/wireless/broadcom/b43/dma.c
1631
b43_poison_rx_buffer(ring, meta->skb);
drivers/net/wireless/broadcom/b43/dma.c
1632
sync_descbuffer_for_device(ring, meta->dmaaddr,
drivers/net/wireless/broadcom/b43/dma.c
1633
ring->rx_buffersize);
drivers/net/wireless/broadcom/b43/dma.c
1634
*slot = next_slot(ring, *slot);
drivers/net/wireless/broadcom/b43/dma.c
1636
tmp -= ring->rx_buffersize;
drivers/net/wireless/broadcom/b43/dma.c
164
struct b43_dmadesc_generic *op64_idx2desc(struct b43_dmaring *ring,
drivers/net/wireless/broadcom/b43/dma.c
1640
b43err(ring->dev->wl, "DMA RX buffer too small "
drivers/net/wireless/broadcom/b43/dma.c
1642
len, ring->rx_buffersize, cnt);
drivers/net/wireless/broadcom/b43/dma.c
1647
err = setup_rx_descbuffer(ring, desc, meta, GFP_ATOMIC);
drivers/net/wireless/broadcom/b43/dma.c
1649
b43dbg(ring->dev->wl, "DMA RX: setup_rx_descbuffer() failed\n");
drivers/net/wireless/broadcom/b43/dma.c
1653
unmap_descbuffer(ring, dmaaddr, ring->rx_buffersize, 0);
drivers/net/wireless/broadcom/b43/dma.c
1654
skb_put(skb, len + ring->frameoffset);
drivers/net/wireless/broadcom/b43/dma.c
1655
skb_pull(skb, ring->frameoffset);
drivers/net/wireless/broadcom/b43/dma.c
1657
b43_rx(ring->dev, skb, rxhdr);
drivers/net/wireless/broadcom/b43/dma.c
1663
b43_poison_rx_buffer(ring, skb);
drivers/net/wireless/broadcom/b43/dma.c
1664
sync_descbuffer_for_device(ring, dmaaddr, ring->rx_buffersize);
drivers/net/wireless/broadcom/b43/dma.c
1667
void b43_dma_handle_rx_overflow(struct b43_dmaring *ring)
drivers/net/wireless/broadcom/b43/dma.c
1671
B43_WARN_ON(ring->tx);
drivers/net/wireless/broadcom/b43/dma.c
1681
current_slot = ring->ops->get_current_rxslot(ring);
drivers/net/wireless/broadcom/b43/dma.c
1682
previous_slot = prev_slot(ring, current_slot);
drivers/net/wireless/broadcom/b43/dma.c
1683
ring->ops->set_current_rxslot(ring, previous_slot);
drivers/net/wireless/broadcom/b43/dma.c
1686
void b43_dma_rx(struct b43_dmaring *ring)
drivers/net/wireless/broadcom/b43/dma.c
1688
const struct b43_dma_ops *ops = ring->ops;
drivers/net/wireless/broadcom/b43/dma.c
1692
B43_WARN_ON(ring->tx);
drivers/net/wireless/broadcom/b43/dma.c
1693
current_slot = ops->get_current_rxslot(ring);
drivers/net/wireless/broadcom/b43/dma.c
1694
B43_WARN_ON(!(current_slot >= 0 && current_slot < ring->nr_slots));
drivers/net/wireless/broadcom/b43/dma.c
1696
slot = ring->current_slot;
drivers/net/wireless/broadcom/b43/dma.c
1697
for (; slot != current_slot; slot = next_slot(ring, slot)) {
drivers/net/wireless/broadcom/b43/dma.c
1698
dma_rx(ring, &slot);
drivers/net/wireless/broadcom/b43/dma.c
1699
update_max_used_slots(ring, ++used_slots);
drivers/net/wireless/broadcom/b43/dma.c
170
*meta = &(ring->meta[slot]);
drivers/net/wireless/broadcom/b43/dma.c
1702
ops->set_current_rxslot(ring, slot);
drivers/net/wireless/broadcom/b43/dma.c
1703
ring->current_slot = slot;
drivers/net/wireless/broadcom/b43/dma.c
1706
static void b43_dma_tx_suspend_ring(struct b43_dmaring *ring)
drivers/net/wireless/broadcom/b43/dma.c
1708
B43_WARN_ON(!ring->tx);
drivers/net/wireless/broadcom/b43/dma.c
1709
ring->ops->tx_suspend(ring);
drivers/net/wireless/broadcom/b43/dma.c
171
desc = ring->descbase;
drivers/net/wireless/broadcom/b43/dma.c
1712
static void b43_dma_tx_resume_ring(struct b43_dmaring *ring)
drivers/net/wireless/broadcom/b43/dma.c
1714
B43_WARN_ON(!ring->tx);
drivers/net/wireless/broadcom/b43/dma.c
1715
ring->ops->tx_resume(ring);
drivers/net/wireless/broadcom/b43/dma.c
177
static void op64_fill_descriptor(struct b43_dmaring *ring,
drivers/net/wireless/broadcom/b43/dma.c
182
struct b43_dmadesc64 *descbase = ring->descbase;
drivers/net/wireless/broadcom/b43/dma.c
189
B43_WARN_ON(!(slot >= 0 && slot < ring->nr_slots));
drivers/net/wireless/broadcom/b43/dma.c
191
addrlo = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_LOW);
drivers/net/wireless/broadcom/b43/dma.c
192
addrhi = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_HIGH);
drivers/net/wireless/broadcom/b43/dma.c
193
addrext = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_EXT);
drivers/net/wireless/broadcom/b43/dma.c
195
if (slot == ring->nr_slots - 1)
drivers/net/wireless/broadcom/b43/dma.c
213
static void op64_poke_tx(struct b43_dmaring *ring, int slot)
drivers/net/wireless/broadcom/b43/dma.c
215
b43_dma_write(ring, B43_DMA64_TXINDEX,
drivers/net/wireless/broadcom/b43/dma.c
219
static void op64_tx_suspend(struct b43_dmaring *ring)
drivers/net/wireless/broadcom/b43/dma.c
221
b43_dma_write(ring, B43_DMA64_TXCTL, b43_dma_read(ring, B43_DMA64_TXCTL)
drivers/net/wireless/broadcom/b43/dma.c
225
static void op64_tx_resume(struct b43_dmaring *ring)
drivers/net/wireless/broadcom/b43/dma.c
227
b43_dma_write(ring, B43_DMA64_TXCTL, b43_dma_read(ring, B43_DMA64_TXCTL)
drivers/net/wireless/broadcom/b43/dma.c
231
static int op64_get_current_rxslot(struct b43_dmaring *ring)
drivers/net/wireless/broadcom/b43/dma.c
235
val = b43_dma_read(ring, B43_DMA64_RXSTATUS);
drivers/net/wireless/broadcom/b43/dma.c
241
static void op64_set_current_rxslot(struct b43_dmaring *ring, int slot)
drivers/net/wireless/broadcom/b43/dma.c
243
b43_dma_write(ring, B43_DMA64_RXINDEX,
drivers/net/wireless/broadcom/b43/dma.c
257
static inline int free_slots(struct b43_dmaring *ring)
drivers/net/wireless/broadcom/b43/dma.c
259
return (ring->nr_slots - ring->used_slots);
drivers/net/wireless/broadcom/b43/dma.c
262
static inline int next_slot(struct b43_dmaring *ring, int slot)
drivers/net/wireless/broadcom/b43/dma.c
264
B43_WARN_ON(!(slot >= -1 && slot <= ring->nr_slots - 1));
drivers/net/wireless/broadcom/b43/dma.c
265
if (slot == ring->nr_slots - 1)
drivers/net/wireless/broadcom/b43/dma.c
270
static inline int prev_slot(struct b43_dmaring *ring, int slot)
drivers/net/wireless/broadcom/b43/dma.c
272
B43_WARN_ON(!(slot >= 0 && slot <= ring->nr_slots - 1));
drivers/net/wireless/broadcom/b43/dma.c
274
return ring->nr_slots - 1;
drivers/net/wireless/broadcom/b43/dma.c
279
static void update_max_used_slots(struct b43_dmaring *ring,
drivers/net/wireless/broadcom/b43/dma.c
282
if (current_used_slots <= ring->max_used_slots)
drivers/net/wireless/broadcom/b43/dma.c
284
ring->max_used_slots = current_used_slots;
drivers/net/wireless/broadcom/b43/dma.c
285
if (b43_debug(ring->dev, B43_DBG_DMAVERBOSE)) {
drivers/net/wireless/broadcom/b43/dma.c
286
b43dbg(ring->dev->wl,
drivers/net/wireless/broadcom/b43/dma.c
288
ring->max_used_slots,
drivers/net/wireless/broadcom/b43/dma.c
289
ring->tx ? "TX" : "RX", ring->index);
drivers/net/wireless/broadcom/b43/dma.c
294
void update_max_used_slots(struct b43_dmaring *ring, int current_used_slots)
drivers/net/wireless/broadcom/b43/dma.c
300
static inline int request_slot(struct b43_dmaring *ring)
drivers/net/wireless/broadcom/b43/dma.c
304
B43_WARN_ON(!ring->tx);
drivers/net/wireless/broadcom/b43/dma.c
305
B43_WARN_ON(ring->stopped);
drivers/net/wireless/broadcom/b43/dma.c
306
B43_WARN_ON(free_slots(ring) == 0);
drivers/net/wireless/broadcom/b43/dma.c
308
slot = next_slot(ring, ring->current_slot);
drivers/net/wireless/broadcom/b43/dma.c
309
ring->current_slot = slot;
drivers/net/wireless/broadcom/b43/dma.c
310
ring->used_slots++;
drivers/net/wireless/broadcom/b43/dma.c
312
update_max_used_slots(ring, ring->used_slots);
drivers/net/wireless/broadcom/b43/dma.c
347
dma_addr_t map_descbuffer(struct b43_dmaring *ring,
drivers/net/wireless/broadcom/b43/dma.c
353
dmaaddr = dma_map_single(ring->dev->dev->dma_dev,
drivers/net/wireless/broadcom/b43/dma.c
356
dmaaddr = dma_map_single(ring->dev->dev->dma_dev,
drivers/net/wireless/broadcom/b43/dma.c
364
void unmap_descbuffer(struct b43_dmaring *ring,
drivers/net/wireless/broadcom/b43/dma.c
368
dma_unmap_single(ring->dev->dev->dma_dev,
drivers/net/wireless/broadcom/b43/dma.c
371
dma_unmap_single(ring->dev->dev->dma_dev,
drivers/net/wireless/broadcom/b43/dma.c
377
void sync_descbuffer_for_cpu(struct b43_dmaring *ring,
drivers/net/wireless/broadcom/b43/dma.c
380
B43_WARN_ON(ring->tx);
drivers/net/wireless/broadcom/b43/dma.c
381
dma_sync_single_for_cpu(ring->dev->dev->dma_dev,
drivers/net/wireless/broadcom/b43/dma.c
386
void sync_descbuffer_for_device(struct b43_dmaring *ring,
drivers/net/wireless/broadcom/b43/dma.c
389
B43_WARN_ON(ring->tx);
drivers/net/wireless/broadcom/b43/dma.c
390
dma_sync_single_for_device(ring->dev->dev->dma_dev,
drivers/net/wireless/broadcom/b43/dma.c
395
void free_descriptor_buffer(struct b43_dmaring *ring,
drivers/net/wireless/broadcom/b43/dma.c
399
if (ring->tx)
drivers/net/wireless/broadcom/b43/dma.c
400
ieee80211_free_txskb(ring->dev->wl->hw, meta->skb);
drivers/net/wireless/broadcom/b43/dma.c
407
static int alloc_ringmemory(struct b43_dmaring *ring)
drivers/net/wireless/broadcom/b43/dma.c
418
u16 ring_mem_size = (ring->type == B43_DMA_64BIT) ?
drivers/net/wireless/broadcom/b43/dma.c
421
ring->descbase = dma_alloc_coherent(ring->dev->dev->dma_dev,
drivers/net/wireless/broadcom/b43/dma.c
422
ring_mem_size, &(ring->dmabase),
drivers/net/wireless/broadcom/b43/dma.c
424
if (!ring->descbase)
drivers/net/wireless/broadcom/b43/dma.c
430
static void free_ringmemory(struct b43_dmaring *ring)
drivers/net/wireless/broadcom/b43/dma.c
432
u16 ring_mem_size = (ring->type == B43_DMA_64BIT) ?
drivers/net/wireless/broadcom/b43/dma.c
434
dma_free_coherent(ring->dev->dev->dma_dev, ring_mem_size,
drivers/net/wireless/broadcom/b43/dma.c
435
ring->descbase, ring->dmabase);
drivers/net/wireless/broadcom/b43/dma.c
538
static bool b43_dma_mapping_error(struct b43_dmaring *ring,
drivers/net/wireless/broadcom/b43/dma.c
542
if (unlikely(dma_mapping_error(ring->dev->dev->dma_dev, addr)))
drivers/net/wireless/broadcom/b43/dma.c
545
switch (ring->type) {
drivers/net/wireless/broadcom/b43/dma.c
565
unmap_descbuffer(ring, addr, buffersize, dma_to_device);
drivers/net/wireless/broadcom/b43/dma.c
570
static bool b43_rx_buffer_is_poisoned(struct b43_dmaring *ring, struct sk_buff *skb)
drivers/net/wireless/broadcom/b43/dma.c
572
unsigned char *f = skb->data + ring->frameoffset;
drivers/net/wireless/broadcom/b43/dma.c
577
static void b43_poison_rx_buffer(struct b43_dmaring *ring, struct sk_buff *skb)
drivers/net/wireless/broadcom/b43/dma.c
587
B43_WARN_ON(ring->rx_buffersize < ring->frameoffset + sizeof(struct b43_plcp_hdr6) + 2);
drivers/net/wireless/broadcom/b43/dma.c
588
frame = skb->data + ring->frameoffset;
drivers/net/wireless/broadcom/b43/dma.c
592
static int setup_rx_descbuffer(struct b43_dmaring *ring,
drivers/net/wireless/broadcom/b43/dma.c
599
B43_WARN_ON(ring->tx);
drivers/net/wireless/broadcom/b43/dma.c
601
skb = __dev_alloc_skb(ring->rx_buffersize, gfp_flags);
drivers/net/wireless/broadcom/b43/dma.c
604
b43_poison_rx_buffer(ring, skb);
drivers/net/wireless/broadcom/b43/dma.c
605
dmaaddr = map_descbuffer(ring, skb->data, ring->rx_buffersize, 0);
drivers/net/wireless/broadcom/b43/dma.c
606
if (b43_dma_mapping_error(ring, dmaaddr, ring->rx_buffersize, 0)) {
drivers/net/wireless/broadcom/b43/dma.c
612
skb = __dev_alloc_skb(ring->rx_buffersize, gfp_flags);
drivers/net/wireless/broadcom/b43/dma.c
615
b43_poison_rx_buffer(ring, skb);
drivers/net/wireless/broadcom/b43/dma.c
616
dmaaddr = map_descbuffer(ring, skb->data,
drivers/net/wireless/broadcom/b43/dma.c
617
ring->rx_buffersize, 0);
drivers/net/wireless/broadcom/b43/dma.c
618
if (b43_dma_mapping_error(ring, dmaaddr, ring->rx_buffersize, 0)) {
drivers/net/wireless/broadcom/b43/dma.c
619
b43err(ring->dev->wl, "RX DMA buffer allocation failed\n");
drivers/net/wireless/broadcom/b43/dma.c
627
ring->ops->fill_descriptor(ring, desc, dmaaddr,
drivers/net/wireless/broadcom/b43/dma.c
628
ring->rx_buffersize, 0, 0, 0);
drivers/net/wireless/broadcom/b43/dma.c
636
static int alloc_initial_descbuffers(struct b43_dmaring *ring)
drivers/net/wireless/broadcom/b43/dma.c
642
for (i = 0; i < ring->nr_slots; i++) {
drivers/net/wireless/broadcom/b43/dma.c
643
desc = ring->ops->idx2desc(ring, i, &meta);
drivers/net/wireless/broadcom/b43/dma.c
645
err = setup_rx_descbuffer(ring, desc, meta, GFP_KERNEL);
drivers/net/wireless/broadcom/b43/dma.c
647
b43err(ring->dev->wl,
drivers/net/wireless/broadcom/b43/dma.c
653
ring->used_slots = ring->nr_slots;
drivers/net/wireless/broadcom/b43/dma.c
660
desc = ring->ops->idx2desc(ring, i, &meta);
drivers/net/wireless/broadcom/b43/dma.c
662
unmap_descbuffer(ring, meta->dmaaddr, ring->rx_buffersize, 0);
drivers/net/wireless/broadcom/b43/dma.c
672
static int dmacontroller_setup(struct b43_dmaring *ring)
drivers/net/wireless/broadcom/b43/dma.c
677
bool parity = ring->dev->dma.parity;
drivers/net/wireless/broadcom/b43/dma.c
681
if (ring->tx) {
drivers/net/wireless/broadcom/b43/dma.c
682
if (ring->type == B43_DMA_64BIT) {
drivers/net/wireless/broadcom/b43/dma.c
683
u64 ringbase = (u64) (ring->dmabase);
drivers/net/wireless/broadcom/b43/dma.c
684
addrext = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_EXT);
drivers/net/wireless/broadcom/b43/dma.c
685
addrlo = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_LOW);
drivers/net/wireless/broadcom/b43/dma.c
686
addrhi = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_HIGH);
drivers/net/wireless/broadcom/b43/dma.c
693
b43_dma_write(ring, B43_DMA64_TXCTL, value);
drivers/net/wireless/broadcom/b43/dma.c
694
b43_dma_write(ring, B43_DMA64_TXRINGLO, addrlo);
drivers/net/wireless/broadcom/b43/dma.c
695
b43_dma_write(ring, B43_DMA64_TXRINGHI, addrhi);
drivers/net/wireless/broadcom/b43/dma.c
697
u32 ringbase = (u32) (ring->dmabase);
drivers/net/wireless/broadcom/b43/dma.c
698
addrext = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_EXT);
drivers/net/wireless/broadcom/b43/dma.c
699
addrlo = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_LOW);
drivers/net/wireless/broadcom/b43/dma.c
706
b43_dma_write(ring, B43_DMA32_TXCTL, value);
drivers/net/wireless/broadcom/b43/dma.c
707
b43_dma_write(ring, B43_DMA32_TXRING, addrlo);
drivers/net/wireless/broadcom/b43/dma.c
710
err = alloc_initial_descbuffers(ring);
drivers/net/wireless/broadcom/b43/dma.c
713
if (ring->type == B43_DMA_64BIT) {
drivers/net/wireless/broadcom/b43/dma.c
714
u64 ringbase = (u64) (ring->dmabase);
drivers/net/wireless/broadcom/b43/dma.c
715
addrext = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_EXT);
drivers/net/wireless/broadcom/b43/dma.c
716
addrlo = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_LOW);
drivers/net/wireless/broadcom/b43/dma.c
717
addrhi = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_HIGH);
drivers/net/wireless/broadcom/b43/dma.c
719
value = (ring->frameoffset << B43_DMA64_RXFROFF_SHIFT);
drivers/net/wireless/broadcom/b43/dma.c
72
struct b43_dmadesc_generic *op32_idx2desc(struct b43_dmaring *ring,
drivers/net/wireless/broadcom/b43/dma.c
725
b43_dma_write(ring, B43_DMA64_RXCTL, value);
drivers/net/wireless/broadcom/b43/dma.c
726
b43_dma_write(ring, B43_DMA64_RXRINGLO, addrlo);
drivers/net/wireless/broadcom/b43/dma.c
727
b43_dma_write(ring, B43_DMA64_RXRINGHI, addrhi);
drivers/net/wireless/broadcom/b43/dma.c
728
b43_dma_write(ring, B43_DMA64_RXINDEX, ring->nr_slots *
drivers/net/wireless/broadcom/b43/dma.c
731
u32 ringbase = (u32) (ring->dmabase);
drivers/net/wireless/broadcom/b43/dma.c
732
addrext = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_EXT);
drivers/net/wireless/broadcom/b43/dma.c
733
addrlo = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_LOW);
drivers/net/wireless/broadcom/b43/dma.c
735
value = (ring->frameoffset << B43_DMA32_RXFROFF_SHIFT);
drivers/net/wireless/broadcom/b43/dma.c
741
b43_dma_write(ring, B43_DMA32_RXCTL, value);
drivers/net/wireless/broadcom/b43/dma.c
742
b43_dma_write(ring, B43_DMA32_RXRING, addrlo);
drivers/net/wireless/broadcom/b43/dma.c
743
b43_dma_write(ring, B43_DMA32_RXINDEX, ring->nr_slots *
drivers/net/wireless/broadcom/b43/dma.c
753
static void dmacontroller_cleanup(struct b43_dmaring *ring)
drivers/net/wireless/broadcom/b43/dma.c
755
if (ring->tx) {
drivers/net/wireless/broadcom/b43/dma.c
756
b43_dmacontroller_tx_reset(ring->dev, ring->mmio_base,
drivers/net/wireless/broadcom/b43/dma.c
757
ring->type);
drivers/net/wireless/broadcom/b43/dma.c
758
if (ring->type == B43_DMA_64BIT) {
drivers/net/wireless/broadcom/b43/dma.c
759
b43_dma_write(ring, B43_DMA64_TXRINGLO, 0);
drivers/net/wireless/broadcom/b43/dma.c
760
b43_dma_write(ring, B43_DMA64_TXRINGHI, 0);
drivers/net/wireless/broadcom/b43/dma.c
762
b43_dma_write(ring, B43_DMA32_TXRING, 0);
drivers/net/wireless/broadcom/b43/dma.c
764
b43_dmacontroller_rx_reset(ring->dev, ring->mmio_base,
drivers/net/wireless/broadcom/b43/dma.c
765
ring->type);
drivers/net/wireless/broadcom/b43/dma.c
766
if (ring->type == B43_DMA_64BIT) {
drivers/net/wireless/broadcom/b43/dma.c
767
b43_dma_write(ring, B43_DMA64_RXRINGLO, 0);
drivers/net/wireless/broadcom/b43/dma.c
768
b43_dma_write(ring, B43_DMA64_RXRINGHI, 0);
drivers/net/wireless/broadcom/b43/dma.c
770
b43_dma_write(ring, B43_DMA32_RXRING, 0);
drivers/net/wireless/broadcom/b43/dma.c
774
static void free_all_descbuffers(struct b43_dmaring *ring)
drivers/net/wireless/broadcom/b43/dma.c
779
if (!ring->used_slots)
drivers/net/wireless/broadcom/b43/dma.c
78
*meta = &(ring->meta[slot]);
drivers/net/wireless/broadcom/b43/dma.c
781
for (i = 0; i < ring->nr_slots; i++) {
drivers/net/wireless/broadcom/b43/dma.c
783
ring->ops->idx2desc(ring, i, &meta);
drivers/net/wireless/broadcom/b43/dma.c
786
B43_WARN_ON(!ring->tx);
drivers/net/wireless/broadcom/b43/dma.c
789
if (ring->tx) {
drivers/net/wireless/broadcom/b43/dma.c
79
desc = ring->descbase;
drivers/net/wireless/broadcom/b43/dma.c
790
unmap_descbuffer(ring, meta->dmaaddr,
drivers/net/wireless/broadcom/b43/dma.c
793
unmap_descbuffer(ring, meta->dmaaddr,
drivers/net/wireless/broadcom/b43/dma.c
794
ring->rx_buffersize, 0);
drivers/net/wireless/broadcom/b43/dma.c
796
free_descriptor_buffer(ring, meta);
drivers/net/wireless/broadcom/b43/dma.c
837
struct b43_dmaring *ring;
drivers/net/wireless/broadcom/b43/dma.c
841
ring = kzalloc_obj(*ring);
drivers/net/wireless/broadcom/b43/dma.c
842
if (!ring)
drivers/net/wireless/broadcom/b43/dma.c
845
ring->nr_slots = B43_RXRING_SLOTS;
drivers/net/wireless/broadcom/b43/dma.c
847
ring->nr_slots = B43_TXRING_SLOTS;
drivers/net/wireless/broadcom/b43/dma.c
849
ring->meta = kzalloc_objs(struct b43_dmadesc_meta, ring->nr_slots);
drivers/net/wireless/broadcom/b43/dma.c
85
static void op32_fill_descriptor(struct b43_dmaring *ring,
drivers/net/wireless/broadcom/b43/dma.c
850
if (!ring->meta)
drivers/net/wireless/broadcom/b43/dma.c
852
for (i = 0; i < ring->nr_slots; i++)
drivers/net/wireless/broadcom/b43/dma.c
853
ring->meta->skb = B43_DMA_PTR_POISON;
drivers/net/wireless/broadcom/b43/dma.c
855
ring->type = type;
drivers/net/wireless/broadcom/b43/dma.c
856
ring->dev = dev;
drivers/net/wireless/broadcom/b43/dma.c
857
ring->mmio_base = b43_dmacontroller_base(type, controller_index);
drivers/net/wireless/broadcom/b43/dma.c
858
ring->index = controller_index;
drivers/net/wireless/broadcom/b43/dma.c
860
ring->ops = &dma64_ops;
drivers/net/wireless/broadcom/b43/dma.c
862
ring->ops = &dma32_ops;
drivers/net/wireless/broadcom/b43/dma.c
864
ring->tx = true;
drivers/net/wireless/broadcom/b43/dma.c
865
ring->current_slot = -1;
drivers/net/wireless/broadcom/b43/dma.c
867
if (ring->index == 0) {
drivers/net/wireless/broadcom/b43/dma.c
870
ring->rx_buffersize = B43_DMA0_RX_FW598_BUFSIZE;
drivers/net/wireless/broadcom/b43/dma.c
871
ring->frameoffset = B43_DMA0_RX_FW598_FO;
drivers/net/wireless/broadcom/b43/dma.c
875
ring->rx_buffersize = B43_DMA0_RX_FW351_BUFSIZE;
drivers/net/wireless/broadcom/b43/dma.c
876
ring->frameoffset = B43_DMA0_RX_FW351_FO;
drivers/net/wireless/broadcom/b43/dma.c
883
ring->last_injected_overflow = jiffies;
drivers/net/wireless/broadcom/b43/dma.c
890
ring->txhdr_cache = kcalloc(ring->nr_slots / TX_SLOTS_PER_FRAME,
drivers/net/wireless/broadcom/b43/dma.c
893
if (!ring->txhdr_cache)
drivers/net/wireless/broadcom/b43/dma.c
898
ring->txhdr_cache,
drivers/net/wireless/broadcom/b43/dma.c
90
struct b43_dmadesc32 *descbase = ring->descbase;
drivers/net/wireless/broadcom/b43/dma.c
902
if (b43_dma_mapping_error(ring, dma_test,
drivers/net/wireless/broadcom/b43/dma.c
905
kfree(ring->txhdr_cache);
drivers/net/wireless/broadcom/b43/dma.c
906
ring->txhdr_cache = kcalloc(ring->nr_slots / TX_SLOTS_PER_FRAME,
drivers/net/wireless/broadcom/b43/dma.c
909
if (!ring->txhdr_cache)
drivers/net/wireless/broadcom/b43/dma.c
913
ring->txhdr_cache,
drivers/net/wireless/broadcom/b43/dma.c
917
if (b43_dma_mapping_error(ring, dma_test,
drivers/net/wireless/broadcom/b43/dma.c
931
err = alloc_ringmemory(ring);
drivers/net/wireless/broadcom/b43/dma.c
934
err = dmacontroller_setup(ring);
drivers/net/wireless/broadcom/b43/dma.c
939
return ring;
drivers/net/wireless/broadcom/b43/dma.c
942
free_ringmemory(ring);
drivers/net/wireless/broadcom/b43/dma.c
944
kfree(ring->txhdr_cache);
drivers/net/wireless/broadcom/b43/dma.c
946
kfree(ring->meta);
drivers/net/wireless/broadcom/b43/dma.c
948
kfree(ring);
drivers/net/wireless/broadcom/b43/dma.c
949
ring = NULL;
drivers/net/wireless/broadcom/b43/dma.c
965
static void b43_destroy_dmaring(struct b43_dmaring *ring,
drivers/net/wireless/broadcom/b43/dma.c
968
if (!ring)
drivers/net/wireless/broadcom/b43/dma.c
97
B43_WARN_ON(!(slot >= 0 && slot < ring->nr_slots));
drivers/net/wireless/broadcom/b43/dma.c
974
u64 failed_packets = ring->nr_failed_tx_packets;
drivers/net/wireless/broadcom/b43/dma.c
975
u64 succeed_packets = ring->nr_succeed_tx_packets;
drivers/net/wireless/broadcom/b43/dma.c
982
average_tries = divide(ring->nr_total_packet_tries * 100, nr_packets);
drivers/net/wireless/broadcom/b43/dma.c
984
b43dbg(ring->dev->wl, "DMA-%u %s: "
drivers/net/wireless/broadcom/b43/dma.c
987
(unsigned int)(ring->type), ringname,
drivers/net/wireless/broadcom/b43/dma.c
988
ring->max_used_slots,
drivers/net/wireless/broadcom/b43/dma.c
989
ring->nr_slots,
drivers/net/wireless/broadcom/b43/dma.c
99
addr = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_LOW);
drivers/net/wireless/broadcom/b43/dma.h
199
struct b43_dmadesc_generic *(*idx2desc) (struct b43_dmaring * ring,
drivers/net/wireless/broadcom/b43/dma.h
203
void (*fill_descriptor) (struct b43_dmaring * ring,
drivers/net/wireless/broadcom/b43/dma.h
207
void (*poke_tx) (struct b43_dmaring * ring, int slot);
drivers/net/wireless/broadcom/b43/dma.h
208
void (*tx_suspend) (struct b43_dmaring * ring);
drivers/net/wireless/broadcom/b43/dma.h
209
void (*tx_resume) (struct b43_dmaring * ring);
drivers/net/wireless/broadcom/b43/dma.h
210
int (*get_current_rxslot) (struct b43_dmaring * ring);
drivers/net/wireless/broadcom/b43/dma.h
211
void (*set_current_rxslot) (struct b43_dmaring * ring, int slot);
drivers/net/wireless/broadcom/b43/dma.h
278
static inline u32 b43_dma_read(struct b43_dmaring *ring, u16 offset)
drivers/net/wireless/broadcom/b43/dma.h
280
return b43_read32(ring->dev, ring->mmio_base + offset);
drivers/net/wireless/broadcom/b43/dma.h
283
static inline void b43_dma_write(struct b43_dmaring *ring, u16 offset, u32 value)
drivers/net/wireless/broadcom/b43/dma.h
285
b43_write32(ring->dev, ring->mmio_base + offset, value);
drivers/net/wireless/broadcom/b43/dma.h
299
void b43_dma_handle_rx_overflow(struct b43_dmaring *ring);
drivers/net/wireless/broadcom/b43/dma.h
301
void b43_dma_rx(struct b43_dmaring *ring);
drivers/net/wireless/broadcom/b43legacy/dma.c
100
static int op32_get_current_rxslot(struct b43legacy_dmaring *ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
1000
if (b43legacy_dma_mapping_error(ring, meta->dmaaddr, skb->len, 1)) {
drivers/net/wireless/broadcom/b43legacy/dma.c
1001
ring->current_slot = old_top_slot;
drivers/net/wireless/broadcom/b43legacy/dma.c
1002
ring->used_slots = old_used_slots;
drivers/net/wireless/broadcom/b43legacy/dma.c
1008
op32_fill_descriptor(ring, desc, meta->dmaaddr,
drivers/net/wireless/broadcom/b43legacy/dma.c
1013
op32_poke_tx(ring, next_slot(ring, slot));
drivers/net/wireless/broadcom/b43legacy/dma.c
1019
unmap_descbuffer(ring, meta_hdr->dmaaddr,
drivers/net/wireless/broadcom/b43legacy/dma.c
1025
int should_inject_overflow(struct b43legacy_dmaring *ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
1028
if (unlikely(b43legacy_debug(ring->dev,
drivers/net/wireless/broadcom/b43legacy/dma.c
1034
next_overflow = ring->last_injected_overflow + HZ;
drivers/net/wireless/broadcom/b43legacy/dma.c
1036
ring->last_injected_overflow = jiffies;
drivers/net/wireless/broadcom/b43legacy/dma.c
1037
b43legacydbg(ring->dev->wl,
drivers/net/wireless/broadcom/b43legacy/dma.c
1039
"DMA controller %d\n", ring->index);
drivers/net/wireless/broadcom/b43legacy/dma.c
104
val = b43legacy_dma_read(ring, B43legacy_DMA32_RXSTATUS);
drivers/net/wireless/broadcom/b43legacy/dma.c
1050
struct b43legacy_dmaring *ring;
drivers/net/wireless/broadcom/b43legacy/dma.c
1053
ring = priority_to_txring(dev, skb_get_queue_mapping(skb));
drivers/net/wireless/broadcom/b43legacy/dma.c
1054
B43legacy_WARN_ON(!ring->tx);
drivers/net/wireless/broadcom/b43legacy/dma.c
1056
if (unlikely(ring->stopped)) {
drivers/net/wireless/broadcom/b43legacy/dma.c
1066
if (WARN_ON(free_slots(ring) < SLOTS_PER_PACKET)) {
drivers/net/wireless/broadcom/b43legacy/dma.c
1075
err = dma_tx_fragment(ring, &skb);
drivers/net/wireless/broadcom/b43legacy/dma.c
1086
if ((free_slots(ring) < SLOTS_PER_PACKET) ||
drivers/net/wireless/broadcom/b43legacy/dma.c
1087
should_inject_overflow(ring)) {
drivers/net/wireless/broadcom/b43legacy/dma.c
1092
ring->stopped = true;
drivers/net/wireless/broadcom/b43legacy/dma.c
1095
ring->index);
drivers/net/wireless/broadcom/b43legacy/dma.c
110
static void op32_set_current_rxslot(struct b43legacy_dmaring *ring,
drivers/net/wireless/broadcom/b43legacy/dma.c
1103
struct b43legacy_dmaring *ring;
drivers/net/wireless/broadcom/b43legacy/dma.c
1109
ring = parse_cookie(dev, status->cookie, &slot);
drivers/net/wireless/broadcom/b43legacy/dma.c
1110
if (unlikely(!ring))
drivers/net/wireless/broadcom/b43legacy/dma.c
1112
B43legacy_WARN_ON(!ring->tx);
drivers/net/wireless/broadcom/b43legacy/dma.c
1117
firstused = ring->current_slot - ring->used_slots + 1;
drivers/net/wireless/broadcom/b43legacy/dma.c
1119
firstused = ring->nr_slots + firstused;
drivers/net/wireless/broadcom/b43legacy/dma.c
1126
ring->index, firstused, slot);
drivers/net/wireless/broadcom/b43legacy/dma.c
113
b43legacy_dma_write(ring, B43legacy_DMA32_RXINDEX,
drivers/net/wireless/broadcom/b43legacy/dma.c
1131
B43legacy_WARN_ON(!(slot >= 0 && slot < ring->nr_slots));
drivers/net/wireless/broadcom/b43legacy/dma.c
1132
op32_idx2desc(ring, slot, &meta);
drivers/net/wireless/broadcom/b43legacy/dma.c
1135
unmap_descbuffer(ring, meta->dmaaddr,
drivers/net/wireless/broadcom/b43legacy/dma.c
1138
unmap_descbuffer(ring, meta->dmaaddr,
drivers/net/wireless/broadcom/b43legacy/dma.c
117
static inline int free_slots(struct b43legacy_dmaring *ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
119
return (ring->nr_slots - ring->used_slots);
drivers/net/wireless/broadcom/b43legacy/dma.c
1193
ring->used_slots--;
drivers/net/wireless/broadcom/b43legacy/dma.c
1197
slot = next_slot(ring, slot);
drivers/net/wireless/broadcom/b43legacy/dma.c
1200
if (ring->stopped) {
drivers/net/wireless/broadcom/b43legacy/dma.c
1201
B43legacy_WARN_ON(free_slots(ring) < SLOTS_PER_PACKET);
drivers/net/wireless/broadcom/b43legacy/dma.c
1202
ring->stopped = false;
drivers/net/wireless/broadcom/b43legacy/dma.c
1205
if (dev->wl->tx_queue_stopped[ring->queue_prio]) {
drivers/net/wireless/broadcom/b43legacy/dma.c
1206
dev->wl->tx_queue_stopped[ring->queue_prio] = 0;
drivers/net/wireless/broadcom/b43legacy/dma.c
1210
ieee80211_wake_queue(dev->wl->hw, ring->queue_prio);
drivers/net/wireless/broadcom/b43legacy/dma.c
1213
ring->index);
drivers/net/wireless/broadcom/b43legacy/dma.c
1219
static void dma_rx(struct b43legacy_dmaring *ring,
drivers/net/wireless/broadcom/b43legacy/dma.c
122
static inline int next_slot(struct b43legacy_dmaring *ring, int slot)
drivers/net/wireless/broadcom/b43legacy/dma.c
1230
desc = op32_idx2desc(ring, *slot, &meta);
drivers/net/wireless/broadcom/b43legacy/dma.c
1232
sync_descbuffer_for_cpu(ring, meta->dmaaddr, ring->rx_buffersize);
drivers/net/wireless/broadcom/b43legacy/dma.c
1235
if (ring->index == 3) {
drivers/net/wireless/broadcom/b43legacy/dma.c
124
B43legacy_WARN_ON(!(slot >= -1 && slot <= ring->nr_slots - 1));
drivers/net/wireless/broadcom/b43legacy/dma.c
1248
b43legacy_handle_hwtxstatus(ring->dev, hw);
drivers/net/wireless/broadcom/b43legacy/dma.c
125
if (slot == ring->nr_slots - 1)
drivers/net/wireless/broadcom/b43legacy/dma.c
1250
sync_descbuffer_for_device(ring, meta->dmaaddr,
drivers/net/wireless/broadcom/b43legacy/dma.c
1251
ring->rx_buffersize);
drivers/net/wireless/broadcom/b43legacy/dma.c
1267
sync_descbuffer_for_device(ring, meta->dmaaddr,
drivers/net/wireless/broadcom/b43legacy/dma.c
1268
ring->rx_buffersize);
drivers/net/wireless/broadcom/b43legacy/dma.c
1272
if (unlikely(len > ring->rx_buffersize)) {
drivers/net/wireless/broadcom/b43legacy/dma.c
1282
desc = op32_idx2desc(ring, *slot, &meta);
drivers/net/wireless/broadcom/b43legacy/dma.c
1284
sync_descbuffer_for_device(ring, meta->dmaaddr,
drivers/net/wireless/broadcom/b43legacy/dma.c
1285
ring->rx_buffersize);
drivers/net/wireless/broadcom/b43legacy/dma.c
1286
*slot = next_slot(ring, *slot);
drivers/net/wireless/broadcom/b43legacy/dma.c
1288
tmp -= ring->rx_buffersize;
drivers/net/wireless/broadcom/b43legacy/dma.c
1292
b43legacyerr(ring->dev->wl, "DMA RX buffer too small "
drivers/net/wireless/broadcom/b43legacy/dma.c
1294
len, ring->rx_buffersize, cnt);
drivers/net/wireless/broadcom/b43legacy/dma.c
1299
err = setup_rx_descbuffer(ring, desc, meta, GFP_ATOMIC);
drivers/net/wireless/broadcom/b43legacy/dma.c
1301
b43legacydbg(ring->dev->wl, "DMA RX: setup_rx_descbuffer()"
drivers/net/wireless/broadcom/b43legacy/dma.c
1303
sync_descbuffer_for_device(ring, dmaaddr,
drivers/net/wireless/broadcom/b43legacy/dma.c
1304
ring->rx_buffersize);
drivers/net/wireless/broadcom/b43legacy/dma.c
1308
unmap_descbuffer(ring, dmaaddr, ring->rx_buffersize, 0);
drivers/net/wireless/broadcom/b43legacy/dma.c
1309
skb_put(skb, len + ring->frameoffset);
drivers/net/wireless/broadcom/b43legacy/dma.c
131
static void update_max_used_slots(struct b43legacy_dmaring *ring,
drivers/net/wireless/broadcom/b43legacy/dma.c
1310
skb_pull(skb, ring->frameoffset);
drivers/net/wireless/broadcom/b43legacy/dma.c
1312
b43legacy_rx(ring->dev, skb, rxhdr);
drivers/net/wireless/broadcom/b43legacy/dma.c
1317
void b43legacy_dma_rx(struct b43legacy_dmaring *ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
1323
B43legacy_WARN_ON(ring->tx);
drivers/net/wireless/broadcom/b43legacy/dma.c
1324
current_slot = op32_get_current_rxslot(ring);
drivers/net/wireless/broadcom/b43legacy/dma.c
1326
ring->nr_slots));
drivers/net/wireless/broadcom/b43legacy/dma.c
1328
slot = ring->current_slot;
drivers/net/wireless/broadcom/b43legacy/dma.c
1329
for (; slot != current_slot; slot = next_slot(ring, slot)) {
drivers/net/wireless/broadcom/b43legacy/dma.c
1330
dma_rx(ring, &slot);
drivers/net/wireless/broadcom/b43legacy/dma.c
1331
update_max_used_slots(ring, ++used_slots);
drivers/net/wireless/broadcom/b43legacy/dma.c
1333
op32_set_current_rxslot(ring, slot);
drivers/net/wireless/broadcom/b43legacy/dma.c
1334
ring->current_slot = slot;
drivers/net/wireless/broadcom/b43legacy/dma.c
1337
static void b43legacy_dma_tx_suspend_ring(struct b43legacy_dmaring *ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
1339
B43legacy_WARN_ON(!ring->tx);
drivers/net/wireless/broadcom/b43legacy/dma.c
134
if (current_used_slots <= ring->max_used_slots)
drivers/net/wireless/broadcom/b43legacy/dma.c
1340
op32_tx_suspend(ring);
drivers/net/wireless/broadcom/b43legacy/dma.c
1343
static void b43legacy_dma_tx_resume_ring(struct b43legacy_dmaring *ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
1345
B43legacy_WARN_ON(!ring->tx);
drivers/net/wireless/broadcom/b43legacy/dma.c
1346
op32_tx_resume(ring);
drivers/net/wireless/broadcom/b43legacy/dma.c
136
ring->max_used_slots = current_used_slots;
drivers/net/wireless/broadcom/b43legacy/dma.c
137
if (b43legacy_debug(ring->dev, B43legacy_DBG_DMAVERBOSE))
drivers/net/wireless/broadcom/b43legacy/dma.c
138
b43legacydbg(ring->dev->wl,
drivers/net/wireless/broadcom/b43legacy/dma.c
140
ring->max_used_slots,
drivers/net/wireless/broadcom/b43legacy/dma.c
141
ring->tx ? "TX" : "RX",
drivers/net/wireless/broadcom/b43legacy/dma.c
142
ring->index);
drivers/net/wireless/broadcom/b43legacy/dma.c
146
void update_max_used_slots(struct b43legacy_dmaring *ring,
drivers/net/wireless/broadcom/b43legacy/dma.c
153
int request_slot(struct b43legacy_dmaring *ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
157
B43legacy_WARN_ON(!ring->tx);
drivers/net/wireless/broadcom/b43legacy/dma.c
158
B43legacy_WARN_ON(ring->stopped);
drivers/net/wireless/broadcom/b43legacy/dma.c
159
B43legacy_WARN_ON(free_slots(ring) == 0);
drivers/net/wireless/broadcom/b43legacy/dma.c
161
slot = next_slot(ring, ring->current_slot);
drivers/net/wireless/broadcom/b43legacy/dma.c
162
ring->current_slot = slot;
drivers/net/wireless/broadcom/b43legacy/dma.c
163
ring->used_slots++;
drivers/net/wireless/broadcom/b43legacy/dma.c
165
update_max_used_slots(ring, ring->used_slots);
drivers/net/wireless/broadcom/b43legacy/dma.c
175
struct b43legacy_dmaring *ring;
drivers/net/wireless/broadcom/b43legacy/dma.c
186
ring = dev->dma.tx_ring3;
drivers/net/wireless/broadcom/b43legacy/dma.c
189
ring = dev->dma.tx_ring2;
drivers/net/wireless/broadcom/b43legacy/dma.c
192
ring = dev->dma.tx_ring1;
drivers/net/wireless/broadcom/b43legacy/dma.c
195
ring = dev->dma.tx_ring0;
drivers/net/wireless/broadcom/b43legacy/dma.c
198
ring = dev->dma.tx_ring4;
drivers/net/wireless/broadcom/b43legacy/dma.c
201
ring = dev->dma.tx_ring5;
drivers/net/wireless/broadcom/b43legacy/dma.c
205
return ring;
drivers/net/wireless/broadcom/b43legacy/dma.c
226
dma_addr_t map_descbuffer(struct b43legacy_dmaring *ring,
drivers/net/wireless/broadcom/b43legacy/dma.c
234
dmaaddr = dma_map_single(ring->dev->dev->dma_dev,
drivers/net/wireless/broadcom/b43legacy/dma.c
238
dmaaddr = dma_map_single(ring->dev->dev->dma_dev,
drivers/net/wireless/broadcom/b43legacy/dma.c
246
void unmap_descbuffer(struct b43legacy_dmaring *ring,
drivers/net/wireless/broadcom/b43legacy/dma.c
252
dma_unmap_single(ring->dev->dev->dma_dev,
drivers/net/wireless/broadcom/b43legacy/dma.c
256
dma_unmap_single(ring->dev->dev->dma_dev,
drivers/net/wireless/broadcom/b43legacy/dma.c
262
void sync_descbuffer_for_cpu(struct b43legacy_dmaring *ring,
drivers/net/wireless/broadcom/b43legacy/dma.c
266
B43legacy_WARN_ON(ring->tx);
drivers/net/wireless/broadcom/b43legacy/dma.c
268
dma_sync_single_for_cpu(ring->dev->dev->dma_dev,
drivers/net/wireless/broadcom/b43legacy/dma.c
273
void sync_descbuffer_for_device(struct b43legacy_dmaring *ring,
drivers/net/wireless/broadcom/b43legacy/dma.c
277
B43legacy_WARN_ON(ring->tx);
drivers/net/wireless/broadcom/b43legacy/dma.c
279
dma_sync_single_for_device(ring->dev->dev->dma_dev,
drivers/net/wireless/broadcom/b43legacy/dma.c
284
void free_descriptor_buffer(struct b43legacy_dmaring *ring,
drivers/net/wireless/broadcom/b43legacy/dma.c
297
static int alloc_ringmemory(struct b43legacy_dmaring *ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
300
ring->descbase = dma_alloc_coherent(ring->dev->dev->dma_dev,
drivers/net/wireless/broadcom/b43legacy/dma.c
302
&(ring->dmabase), GFP_KERNEL);
drivers/net/wireless/broadcom/b43legacy/dma.c
303
if (!ring->descbase)
drivers/net/wireless/broadcom/b43legacy/dma.c
309
static void free_ringmemory(struct b43legacy_dmaring *ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
311
dma_free_coherent(ring->dev->dev->dma_dev, B43legacy_DMA_RINGMEMSIZE,
drivers/net/wireless/broadcom/b43legacy/dma.c
312
ring->descbase, ring->dmabase);
drivers/net/wireless/broadcom/b43legacy/dma.c
32
struct b43legacy_dmadesc32 *op32_idx2desc(struct b43legacy_dmaring *ring,
drivers/net/wireless/broadcom/b43legacy/dma.c
38
*meta = &(ring->meta[slot]);
drivers/net/wireless/broadcom/b43legacy/dma.c
39
desc = ring->descbase;
drivers/net/wireless/broadcom/b43legacy/dma.c
390
static bool b43legacy_dma_mapping_error(struct b43legacy_dmaring *ring,
drivers/net/wireless/broadcom/b43legacy/dma.c
395
if (unlikely(dma_mapping_error(ring->dev->dev->dma_dev, addr)))
drivers/net/wireless/broadcom/b43legacy/dma.c
398
switch (ring->type) {
drivers/net/wireless/broadcom/b43legacy/dma.c
414
unmap_descbuffer(ring, addr, buffersize, dma_to_device);
drivers/net/wireless/broadcom/b43legacy/dma.c
419
static int setup_rx_descbuffer(struct b43legacy_dmaring *ring,
drivers/net/wireless/broadcom/b43legacy/dma.c
429
B43legacy_WARN_ON(ring->tx);
drivers/net/wireless/broadcom/b43legacy/dma.c
431
skb = __dev_alloc_skb(ring->rx_buffersize, gfp_flags);
drivers/net/wireless/broadcom/b43legacy/dma.c
434
dmaaddr = map_descbuffer(ring, skb->data,
drivers/net/wireless/broadcom/b43legacy/dma.c
435
ring->rx_buffersize, 0);
drivers/net/wireless/broadcom/b43legacy/dma.c
436
if (b43legacy_dma_mapping_error(ring, dmaaddr, ring->rx_buffersize, 0)) {
drivers/net/wireless/broadcom/b43legacy/dma.c
442
skb = __dev_alloc_skb(ring->rx_buffersize, gfp_flags);
drivers/net/wireless/broadcom/b43legacy/dma.c
445
dmaaddr = map_descbuffer(ring, skb->data,
drivers/net/wireless/broadcom/b43legacy/dma.c
446
ring->rx_buffersize, 0);
drivers/net/wireless/broadcom/b43legacy/dma.c
449
if (b43legacy_dma_mapping_error(ring, dmaaddr, ring->rx_buffersize, 0)) {
drivers/net/wireless/broadcom/b43legacy/dma.c
45
static void op32_fill_descriptor(struct b43legacy_dmaring *ring,
drivers/net/wireless/broadcom/b43legacy/dma.c
456
op32_fill_descriptor(ring, desc, dmaaddr, ring->rx_buffersize, 0, 0, 0);
drivers/net/wireless/broadcom/b43legacy/dma.c
469
static int alloc_initial_descbuffers(struct b43legacy_dmaring *ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
476
for (i = 0; i < ring->nr_slots; i++) {
drivers/net/wireless/broadcom/b43legacy/dma.c
477
desc = op32_idx2desc(ring, i, &meta);
drivers/net/wireless/broadcom/b43legacy/dma.c
479
err = setup_rx_descbuffer(ring, desc, meta, GFP_KERNEL);
drivers/net/wireless/broadcom/b43legacy/dma.c
481
b43legacyerr(ring->dev->wl,
drivers/net/wireless/broadcom/b43legacy/dma.c
487
ring->used_slots = ring->nr_slots;
drivers/net/wireless/broadcom/b43legacy/dma.c
494
desc = op32_idx2desc(ring, i, &meta);
drivers/net/wireless/broadcom/b43legacy/dma.c
496
unmap_descbuffer(ring, meta->dmaaddr, ring->rx_buffersize, 0);
drivers/net/wireless/broadcom/b43legacy/dma.c
50
struct b43legacy_dmadesc32 *descbase = ring->descbase;
drivers/net/wireless/broadcom/b43legacy/dma.c
506
static int dmacontroller_setup(struct b43legacy_dmaring *ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
511
u32 trans = ring->dev->dma.translation;
drivers/net/wireless/broadcom/b43legacy/dma.c
512
u32 ringbase = (u32)(ring->dmabase);
drivers/net/wireless/broadcom/b43legacy/dma.c
514
if (ring->tx) {
drivers/net/wireless/broadcom/b43legacy/dma.c
520
b43legacy_dma_write(ring, B43legacy_DMA32_TXCTL, value);
drivers/net/wireless/broadcom/b43legacy/dma.c
521
b43legacy_dma_write(ring, B43legacy_DMA32_TXRING,
drivers/net/wireless/broadcom/b43legacy/dma.c
525
err = alloc_initial_descbuffers(ring);
drivers/net/wireless/broadcom/b43legacy/dma.c
531
value = (ring->frameoffset <<
drivers/net/wireless/broadcom/b43legacy/dma.c
536
b43legacy_dma_write(ring, B43legacy_DMA32_RXCTL, value);
drivers/net/wireless/broadcom/b43legacy/dma.c
537
b43legacy_dma_write(ring, B43legacy_DMA32_RXRING,
drivers/net/wireless/broadcom/b43legacy/dma.c
540
b43legacy_dma_write(ring, B43legacy_DMA32_RXINDEX, 200);
drivers/net/wireless/broadcom/b43legacy/dma.c
548
static void dmacontroller_cleanup(struct b43legacy_dmaring *ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
550
if (ring->tx) {
drivers/net/wireless/broadcom/b43legacy/dma.c
551
b43legacy_dmacontroller_tx_reset(ring->dev, ring->mmio_base,
drivers/net/wireless/broadcom/b43legacy/dma.c
552
ring->type);
drivers/net/wireless/broadcom/b43legacy/dma.c
553
b43legacy_dma_write(ring, B43legacy_DMA32_TXRING, 0);
drivers/net/wireless/broadcom/b43legacy/dma.c
555
b43legacy_dmacontroller_rx_reset(ring->dev, ring->mmio_base,
drivers/net/wireless/broadcom/b43legacy/dma.c
556
ring->type);
drivers/net/wireless/broadcom/b43legacy/dma.c
557
b43legacy_dma_write(ring, B43legacy_DMA32_RXRING, 0);
drivers/net/wireless/broadcom/b43legacy/dma.c
561
static void free_all_descbuffers(struct b43legacy_dmaring *ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
566
if (!ring->used_slots)
drivers/net/wireless/broadcom/b43legacy/dma.c
568
for (i = 0; i < ring->nr_slots; i++) {
drivers/net/wireless/broadcom/b43legacy/dma.c
569
op32_idx2desc(ring, i, &meta);
drivers/net/wireless/broadcom/b43legacy/dma.c
57
B43legacy_WARN_ON(!(slot >= 0 && slot < ring->nr_slots));
drivers/net/wireless/broadcom/b43legacy/dma.c
572
B43legacy_WARN_ON(!ring->tx);
drivers/net/wireless/broadcom/b43legacy/dma.c
575
if (ring->tx)
drivers/net/wireless/broadcom/b43legacy/dma.c
576
unmap_descbuffer(ring, meta->dmaaddr,
drivers/net/wireless/broadcom/b43legacy/dma.c
579
unmap_descbuffer(ring, meta->dmaaddr,
drivers/net/wireless/broadcom/b43legacy/dma.c
580
ring->rx_buffersize, 0);
drivers/net/wireless/broadcom/b43legacy/dma.c
581
free_descriptor_buffer(ring, meta, 0);
drivers/net/wireless/broadcom/b43legacy/dma.c
608
struct b43legacy_dmaring *ring;
drivers/net/wireless/broadcom/b43legacy/dma.c
613
ring = kzalloc_obj(*ring);
drivers/net/wireless/broadcom/b43legacy/dma.c
614
if (!ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
616
ring->type = type;
drivers/net/wireless/broadcom/b43legacy/dma.c
617
ring->dev = dev;
drivers/net/wireless/broadcom/b43legacy/dma.c
62
addr |= ring->dev->dma.translation;
drivers/net/wireless/broadcom/b43legacy/dma.c
623
ring->meta = kzalloc_objs(struct b43legacy_dmadesc_meta, nr_slots);
drivers/net/wireless/broadcom/b43legacy/dma.c
624
if (!ring->meta)
drivers/net/wireless/broadcom/b43legacy/dma.c
627
ring->txhdr_cache = kcalloc(nr_slots,
drivers/net/wireless/broadcom/b43legacy/dma.c
63
ctl = (bufsize - ring->frameoffset)
drivers/net/wireless/broadcom/b43legacy/dma.c
630
if (!ring->txhdr_cache)
drivers/net/wireless/broadcom/b43legacy/dma.c
634
dma_test = dma_map_single(dev->dev->dma_dev, ring->txhdr_cache,
drivers/net/wireless/broadcom/b43legacy/dma.c
638
if (b43legacy_dma_mapping_error(ring, dma_test,
drivers/net/wireless/broadcom/b43legacy/dma.c
641
kfree(ring->txhdr_cache);
drivers/net/wireless/broadcom/b43legacy/dma.c
642
ring->txhdr_cache = kcalloc(nr_slots,
drivers/net/wireless/broadcom/b43legacy/dma.c
645
if (!ring->txhdr_cache)
drivers/net/wireless/broadcom/b43legacy/dma.c
649
ring->txhdr_cache,
drivers/net/wireless/broadcom/b43legacy/dma.c
65
if (slot == ring->nr_slots - 1)
drivers/net/wireless/broadcom/b43legacy/dma.c
653
if (b43legacy_dma_mapping_error(ring, dma_test,
drivers/net/wireless/broadcom/b43legacy/dma.c
663
ring->nr_slots = nr_slots;
drivers/net/wireless/broadcom/b43legacy/dma.c
664
ring->mmio_base = b43legacy_dmacontroller_base(type, controller_index);
drivers/net/wireless/broadcom/b43legacy/dma.c
665
ring->index = controller_index;
drivers/net/wireless/broadcom/b43legacy/dma.c
667
ring->tx = true;
drivers/net/wireless/broadcom/b43legacy/dma.c
668
ring->current_slot = -1;
drivers/net/wireless/broadcom/b43legacy/dma.c
670
if (ring->index == 0) {
drivers/net/wireless/broadcom/b43legacy/dma.c
671
ring->rx_buffersize = B43legacy_DMA0_RX_BUFFERSIZE;
drivers/net/wireless/broadcom/b43legacy/dma.c
672
ring->frameoffset = B43legacy_DMA0_RX_FRAMEOFFSET;
drivers/net/wireless/broadcom/b43legacy/dma.c
673
} else if (ring->index == 3) {
drivers/net/wireless/broadcom/b43legacy/dma.c
674
ring->rx_buffersize = B43legacy_DMA3_RX_BUFFERSIZE;
drivers/net/wireless/broadcom/b43legacy/dma.c
675
ring->frameoffset = B43legacy_DMA3_RX_FRAMEOFFSET;
drivers/net/wireless/broadcom/b43legacy/dma.c
680
ring->last_injected_overflow = jiffies;
drivers/net/wireless/broadcom/b43legacy/dma.c
683
err = alloc_ringmemory(ring);
drivers/net/wireless/broadcom/b43legacy/dma.c
686
err = dmacontroller_setup(ring);
drivers/net/wireless/broadcom/b43legacy/dma.c
691
return ring;
drivers/net/wireless/broadcom/b43legacy/dma.c
694
free_ringmemory(ring);
drivers/net/wireless/broadcom/b43legacy/dma.c
696
kfree(ring->txhdr_cache);
drivers/net/wireless/broadcom/b43legacy/dma.c
698
kfree(ring->meta);
drivers/net/wireless/broadcom/b43legacy/dma.c
700
kfree(ring);
drivers/net/wireless/broadcom/b43legacy/dma.c
701
ring = NULL;
drivers/net/wireless/broadcom/b43legacy/dma.c
706
static void b43legacy_destroy_dmaring(struct b43legacy_dmaring *ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
708
if (!ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
711
b43legacydbg(ring->dev->wl, "DMA-%u 0x%04X (%s) max used slots:"
drivers/net/wireless/broadcom/b43legacy/dma.c
712
" %d/%d\n", (unsigned int)(ring->type), ring->mmio_base,
drivers/net/wireless/broadcom/b43legacy/dma.c
713
(ring->tx) ? "TX" : "RX", ring->max_used_slots,
drivers/net/wireless/broadcom/b43legacy/dma.c
714
ring->nr_slots);
drivers/net/wireless/broadcom/b43legacy/dma.c
718
dmacontroller_cleanup(ring);
drivers/net/wireless/broadcom/b43legacy/dma.c
719
free_all_descbuffers(ring);
drivers/net/wireless/broadcom/b43legacy/dma.c
720
free_ringmemory(ring);
drivers/net/wireless/broadcom/b43legacy/dma.c
722
kfree(ring->txhdr_cache);
drivers/net/wireless/broadcom/b43legacy/dma.c
723
kfree(ring->meta);
drivers/net/wireless/broadcom/b43legacy/dma.c
724
kfree(ring);
drivers/net/wireless/broadcom/b43legacy/dma.c
757
struct b43legacy_dmaring *ring;
drivers/net/wireless/broadcom/b43legacy/dma.c
778
ring = b43legacy_setup_dmaring(dev, 0, 1, type);
drivers/net/wireless/broadcom/b43legacy/dma.c
779
if (!ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
781
dma->tx_ring0 = ring;
drivers/net/wireless/broadcom/b43legacy/dma.c
783
ring = b43legacy_setup_dmaring(dev, 1, 1, type);
drivers/net/wireless/broadcom/b43legacy/dma.c
784
if (!ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
786
dma->tx_ring1 = ring;
drivers/net/wireless/broadcom/b43legacy/dma.c
788
ring = b43legacy_setup_dmaring(dev, 2, 1, type);
drivers/net/wireless/broadcom/b43legacy/dma.c
789
if (!ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
791
dma->tx_ring2 = ring;
drivers/net/wireless/broadcom/b43legacy/dma.c
793
ring = b43legacy_setup_dmaring(dev, 3, 1, type);
drivers/net/wireless/broadcom/b43legacy/dma.c
794
if (!ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
796
dma->tx_ring3 = ring;
drivers/net/wireless/broadcom/b43legacy/dma.c
798
ring = b43legacy_setup_dmaring(dev, 4, 1, type);
drivers/net/wireless/broadcom/b43legacy/dma.c
799
if (!ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
80
static void op32_poke_tx(struct b43legacy_dmaring *ring, int slot)
drivers/net/wireless/broadcom/b43legacy/dma.c
801
dma->tx_ring4 = ring;
drivers/net/wireless/broadcom/b43legacy/dma.c
803
ring = b43legacy_setup_dmaring(dev, 5, 1, type);
drivers/net/wireless/broadcom/b43legacy/dma.c
804
if (!ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
806
dma->tx_ring5 = ring;
drivers/net/wireless/broadcom/b43legacy/dma.c
809
ring = b43legacy_setup_dmaring(dev, 0, 0, type);
drivers/net/wireless/broadcom/b43legacy/dma.c
810
if (!ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
812
dma->rx_ring0 = ring;
drivers/net/wireless/broadcom/b43legacy/dma.c
815
ring = b43legacy_setup_dmaring(dev, 3, 0, type);
drivers/net/wireless/broadcom/b43legacy/dma.c
816
if (!ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
818
dma->rx_ring3 = ring;
drivers/net/wireless/broadcom/b43legacy/dma.c
82
b43legacy_dma_write(ring, B43legacy_DMA32_TXINDEX,
drivers/net/wireless/broadcom/b43legacy/dma.c
851
static u16 generate_cookie(struct b43legacy_dmaring *ring,
drivers/net/wireless/broadcom/b43legacy/dma.c
86
static void op32_tx_suspend(struct b43legacy_dmaring *ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
862
switch (ring->index) {
drivers/net/wireless/broadcom/b43legacy/dma.c
88
b43legacy_dma_write(ring, B43legacy_DMA32_TXCTL,
drivers/net/wireless/broadcom/b43legacy/dma.c
89
b43legacy_dma_read(ring, B43legacy_DMA32_TXCTL)
drivers/net/wireless/broadcom/b43legacy/dma.c
894
struct b43legacy_dmaring *ring = NULL;
drivers/net/wireless/broadcom/b43legacy/dma.c
898
ring = dma->tx_ring0;
drivers/net/wireless/broadcom/b43legacy/dma.c
901
ring = dma->tx_ring1;
drivers/net/wireless/broadcom/b43legacy/dma.c
904
ring = dma->tx_ring2;
drivers/net/wireless/broadcom/b43legacy/dma.c
907
ring = dma->tx_ring3;
drivers/net/wireless/broadcom/b43legacy/dma.c
910
ring = dma->tx_ring4;
drivers/net/wireless/broadcom/b43legacy/dma.c
913
ring = dma->tx_ring5;
drivers/net/wireless/broadcom/b43legacy/dma.c
919
B43legacy_WARN_ON(!(ring && *slot >= 0 && *slot < ring->nr_slots));
drivers/net/wireless/broadcom/b43legacy/dma.c
921
return ring;
drivers/net/wireless/broadcom/b43legacy/dma.c
924
static int dma_tx_fragment(struct b43legacy_dmaring *ring,
drivers/net/wireless/broadcom/b43legacy/dma.c
93
static void op32_tx_resume(struct b43legacy_dmaring *ring)
drivers/net/wireless/broadcom/b43legacy/dma.c
940
old_top_slot = ring->current_slot;
drivers/net/wireless/broadcom/b43legacy/dma.c
941
old_used_slots = ring->used_slots;
drivers/net/wireless/broadcom/b43legacy/dma.c
944
slot = request_slot(ring);
drivers/net/wireless/broadcom/b43legacy/dma.c
945
desc = op32_idx2desc(ring, slot, &meta_hdr);
drivers/net/wireless/broadcom/b43legacy/dma.c
948
header = &(ring->txhdr_cache[slot * sizeof(
drivers/net/wireless/broadcom/b43legacy/dma.c
95
b43legacy_dma_write(ring, B43legacy_DMA32_TXCTL,
drivers/net/wireless/broadcom/b43legacy/dma.c
950
err = b43legacy_generate_txhdr(ring->dev, header,
drivers/net/wireless/broadcom/b43legacy/dma.c
952
generate_cookie(ring, slot));
drivers/net/wireless/broadcom/b43legacy/dma.c
954
ring->current_slot = old_top_slot;
drivers/net/wireless/broadcom/b43legacy/dma.c
955
ring->used_slots = old_used_slots;
drivers/net/wireless/broadcom/b43legacy/dma.c
959
meta_hdr->dmaaddr = map_descbuffer(ring, (unsigned char *)header,
drivers/net/wireless/broadcom/b43legacy/dma.c
96
b43legacy_dma_read(ring, B43legacy_DMA32_TXCTL)
drivers/net/wireless/broadcom/b43legacy/dma.c
961
if (b43legacy_dma_mapping_error(ring, meta_hdr->dmaaddr,
drivers/net/wireless/broadcom/b43legacy/dma.c
963
ring->current_slot = old_top_slot;
drivers/net/wireless/broadcom/b43legacy/dma.c
964
ring->used_slots = old_used_slots;
drivers/net/wireless/broadcom/b43legacy/dma.c
967
op32_fill_descriptor(ring, desc, meta_hdr->dmaaddr,
drivers/net/wireless/broadcom/b43legacy/dma.c
971
slot = request_slot(ring);
drivers/net/wireless/broadcom/b43legacy/dma.c
972
desc = op32_idx2desc(ring, slot, &meta);
drivers/net/wireless/broadcom/b43legacy/dma.c
978
meta->dmaaddr = map_descbuffer(ring, skb->data, skb->len, 1);
drivers/net/wireless/broadcom/b43legacy/dma.c
980
if (b43legacy_dma_mapping_error(ring, meta->dmaaddr, skb->len, 1)) {
drivers/net/wireless/broadcom/b43legacy/dma.c
983
ring->current_slot = old_top_slot;
drivers/net/wireless/broadcom/b43legacy/dma.c
984
ring->used_slots = old_used_slots;
drivers/net/wireless/broadcom/b43legacy/dma.c
999
meta->dmaaddr = map_descbuffer(ring, skb->data, skb->len, 1);
drivers/net/wireless/broadcom/b43legacy/dma.h
168
u32 b43legacy_dma_read(struct b43legacy_dmaring *ring,
drivers/net/wireless/broadcom/b43legacy/dma.h
171
return b43legacy_read32(ring->dev, ring->mmio_base + offset);
drivers/net/wireless/broadcom/b43legacy/dma.h
175
void b43legacy_dma_write(struct b43legacy_dmaring *ring,
drivers/net/wireless/broadcom/b43legacy/dma.h
178
b43legacy_write32(ring->dev, ring->mmio_base + offset, value);
drivers/net/wireless/broadcom/b43legacy/dma.h
193
void b43legacy_dma_rx(struct b43legacy_dmaring *ring);
drivers/net/wireless/broadcom/b43legacy/dma.h
219
void b43legacy_dma_rx(struct b43legacy_dmaring *ring)
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
106
struct brcmf_flowring_ring *ring;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
148
ring = kzalloc_obj(*ring, GFP_ATOMIC);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
149
if (!ring)
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
157
ring->hash_id = hash_idx;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
158
ring->status = RING_CLOSED;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
159
skb_queue_head_init(&ring->skblist);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
160
flow->rings[i] = ring;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
170
struct brcmf_flowring_ring *ring;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
172
ring = flow->rings[flowid];
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
174
return flow->hash[ring->hash_id].fifo;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
181
struct brcmf_flowring_ring *ring;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
192
ring = flow->rings[flowid];
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
193
if (ring->blocked == blocked) {
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
202
ring = flow->rings[i];
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
203
if ((ring->status == RING_OPEN) &&
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
205
if (ring->blocked) {
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
230
struct brcmf_flowring_ring *ring;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
236
ring = flow->rings[flowid];
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
237
if (!ring)
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
244
hash_idx = ring->hash_id;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
249
skb = skb_dequeue(&ring->skblist);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
252
skb = skb_dequeue(&ring->skblist);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
255
kfree(ring);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
262
struct brcmf_flowring_ring *ring;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
264
ring = flow->rings[flowid];
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
266
skb_queue_tail(&ring->skblist, skb);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
268
if (!ring->blocked &&
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
269
(skb_queue_len(&ring->skblist) > BRCMF_FLOWRING_HIGH)) {
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
278
if (skb_queue_len(&ring->skblist) < BRCMF_FLOWRING_LOW)
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
281
return skb_queue_len(&ring->skblist);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
287
struct brcmf_flowring_ring *ring;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
290
ring = flow->rings[flowid];
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
291
if (ring->status != RING_OPEN)
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
294
skb = skb_dequeue(&ring->skblist);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
296
if (ring->blocked &&
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
297
(skb_queue_len(&ring->skblist) < BRCMF_FLOWRING_LOW)) {
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
309
struct brcmf_flowring_ring *ring;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
311
ring = flow->rings[flowid];
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
313
skb_queue_head(&ring->skblist, skb);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
319
struct brcmf_flowring_ring *ring;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
321
ring = flow->rings[flowid];
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
322
if (!ring)
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
325
if (ring->status != RING_OPEN)
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
328
return skb_queue_len(&ring->skblist);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
334
struct brcmf_flowring_ring *ring;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
336
ring = flow->rings[flowid];
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
337
if (!ring) {
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
342
ring->status = RING_OPEN;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
348
struct brcmf_flowring_ring *ring;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
351
ring = flow->rings[flowid];
drivers/net/wireless/broadcom/brcm80211/brcmfmac/flowring.c
352
hash_idx = ring->hash_id;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/msgbuf.c
1225
int ring = le16_to_cpu(ring_status->compl_hdr.flow_ring_id);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/msgbuf.c
1227
bphy_err(drvr, "Firmware reported ring %d error: %d\n", ring,
drivers/net/wireless/broadcom/brcm80211/brcmfmac/msgbuf.c
1476
struct brcmf_flowring_ring *ring;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/msgbuf.c
1501
ring = msgbuf->flow->rings[i];
drivers/net/wireless/broadcom/brcm80211/brcmfmac/msgbuf.c
1502
if (ring->status != RING_OPEN)
drivers/net/wireless/broadcom/brcm80211/brcmfmac/msgbuf.c
1505
hash = &msgbuf->flow->hash[ring->hash_id];
drivers/net/wireless/broadcom/brcm80211/brcmfmac/msgbuf.c
1509
skb_queue_len(&ring->skblist), ring->blocked,
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1022
struct brcmf_pcie_ringbuf *ring = (struct brcmf_pcie_ringbuf *)ctx;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1023
struct brcmf_pciedev_info *devinfo = ring->devinfo;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1024
struct brcmf_commonring *commonring = &ring->commonring;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1030
commonring->w_ptr, ring->id);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1032
devinfo->write_ptr(devinfo, ring->r_idx_addr, commonring->r_ptr);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1040
struct brcmf_pcie_ringbuf *ring = (struct brcmf_pcie_ringbuf *)ctx;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1041
struct brcmf_pciedev_info *devinfo = ring->devinfo;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1042
struct brcmf_commonring *commonring = &ring->commonring;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1048
commonring->r_ptr, ring->id);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1050
devinfo->write_ptr(devinfo, ring->w_idx_addr, commonring->w_ptr);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1058
struct brcmf_pcie_ringbuf *ring = (struct brcmf_pcie_ringbuf *)ctx;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1059
struct brcmf_pciedev_info *devinfo = ring->devinfo;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1074
struct brcmf_pcie_ringbuf *ring = (struct brcmf_pcie_ringbuf *)ctx;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1075
struct brcmf_pciedev_info *devinfo = ring->devinfo;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1076
struct brcmf_commonring *commonring = &ring->commonring;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1081
commonring->r_ptr = devinfo->read_ptr(devinfo, ring->r_idx_addr);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1084
commonring->w_ptr, ring->id);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1092
struct brcmf_pcie_ringbuf *ring = (struct brcmf_pcie_ringbuf *)ctx;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1093
struct brcmf_pciedev_info *devinfo = ring->devinfo;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1094
struct brcmf_commonring *commonring = &ring->commonring;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1099
commonring->w_ptr = devinfo->read_ptr(devinfo, ring->w_idx_addr);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1102
commonring->r_ptr, ring->id);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1113
void *ring;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1116
ring = dma_alloc_coherent(&devinfo->pdev->dev, size, dma_handle,
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1118
if (!ring)
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1126
return (ring);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1136
struct brcmf_pcie_ringbuf *ring;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1158
ring = kzalloc_obj(*ring);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1159
if (!ring) {
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1164
brcmf_commonring_config(&ring->commonring, brcmf_ring_max_item[ring_id],
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1166
ring->dma_handle = dma_handle;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1167
ring->devinfo = devinfo;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1168
brcmf_commonring_register_cb(&ring->commonring,
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1173
brcmf_pcie_ring_mb_write_wptr, ring);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1175
return (ring);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1180
struct brcmf_pcie_ringbuf *ring)
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1185
if (!ring)
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1188
dma_buf = ring->commonring.buf_addr;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1190
size = ring->commonring.depth * ring->commonring.item_len;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1191
dma_free_coherent(dev, size, dma_buf, ring->dma_handle);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1193
kfree(ring);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1221
struct brcmf_pcie_ringbuf *ring;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1319
ring = brcmf_pcie_alloc_dma_and_ring(devinfo, i, ring_mem_ptr);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1320
if (!ring)
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1322
ring->w_idx_addr = h2d_w_idx_ptr;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1323
ring->r_idx_addr = h2d_r_idx_ptr;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1324
ring->id = i;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1325
devinfo->shared.commonrings[i] = ring;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1334
ring = brcmf_pcie_alloc_dma_and_ring(devinfo, i, ring_mem_ptr);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1335
if (!ring)
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1337
ring->w_idx_addr = d2h_w_idx_ptr;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1338
ring->r_idx_addr = d2h_r_idx_ptr;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1339
ring->id = i;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1340
devinfo->shared.commonrings[i] = ring;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1350
rings = kzalloc_objs(*ring, max_flowrings);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1357
ring = &rings[i];
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1358
ring->devinfo = devinfo;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1359
ring->id = i + BRCMF_H2D_MSGRING_FLOWRING_IDSTART;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1360
brcmf_commonring_register_cb(&ring->commonring,
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1366
ring);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1367
ring->w_idx_addr = h2d_w_idx_ptr;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c
1368
ring->r_idx_addr = h2d_r_idx_ptr;
drivers/net/wireless/intersil/p54/p54pci.c
133
int ring_index, struct p54p_desc *ring, u32 ring_limit,
drivers/net/wireless/intersil/p54/p54pci.c
147
struct p54p_desc *desc = &ring[i];
drivers/net/wireless/intersil/p54/p54pci.c
185
int ring_index, struct p54p_desc *ring, u32 ring_limit,
drivers/net/wireless/intersil/p54/p54pci.c
200
desc = &ring[i];
drivers/net/wireless/intersil/p54/p54pci.c
241
p54p_refill_rx_ring(dev, ring_index, ring, ring_limit, rx_buf, *index);
drivers/net/wireless/intersil/p54/p54pci.c
245
int ring_index, struct p54p_desc *ring, u32 ring_limit,
drivers/net/wireless/intersil/p54/p54pci.c
259
desc = &ring[i];
drivers/net/wireless/mediatek/mt76/mt7996/dma.c
523
&dev->wed_rro.emi_rings_cpu.ptr->ring[0].idx;
drivers/net/wireless/mediatek/mt76/mt7996/mt7996.h
344
} ring[MT7996_MAX_RRO_RRS_RING];
drivers/net/wireless/mediatek/mt76/wed.c
109
ring = FIELD_GET(MT_QFLAG_WED_RING, q->flags);
drivers/net/wireless/mediatek/mt76/wed.c
113
ret = mtk_wed_device_tx_ring_setup(q->wed, ring, q->regs,
drivers/net/wireless/mediatek/mt76/wed.c
116
q->wed_regs = q->wed->tx_ring[ring].reg_base;
drivers/net/wireless/mediatek/mt76/wed.c
129
ret = mtk_wed_device_rx_ring_setup(q->wed, ring, q->regs,
drivers/net/wireless/mediatek/mt76/wed.c
132
q->wed_regs = q->wed->rx_ring[ring].reg_base;
drivers/net/wireless/mediatek/mt76/wed.c
137
mtk_wed_device_rro_rx_ring_setup(q->wed, ring, q->regs);
drivers/net/wireless/mediatek/mt76/wed.c
144
mtk_wed_device_msdu_pg_rx_ring_setup(q->wed, ring, q->regs);
drivers/net/wireless/mediatek/mt76/wed.c
95
int ret = 0, type, ring;
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
1074
struct rtl8180_tx_desc *ring;
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
1078
ring = dma_alloc_coherent(&priv->pdev->dev, sizeof(*ring) * entries,
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
1080
if (!ring || (unsigned long)ring & 0xFF) {
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
1086
priv->tx_ring[prio].desc = ring;
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
1093
ring[i].next_tx_desc =
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
1094
cpu_to_le32((u32)dma + ((i + 1) % entries) * sizeof(*ring));
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
1102
struct rtl8180_tx_ring *ring = &priv->tx_ring[prio];
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
1104
while (skb_queue_len(&ring->queue)) {
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
1105
struct rtl8180_tx_desc *entry = &ring->desc[ring->idx];
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
1106
struct sk_buff *skb = __skb_dequeue(&ring->queue);
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
1111
ring->idx = (ring->idx + 1) % ring->entries;
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
1115
sizeof(*ring->desc) * ring->entries, ring->desc,
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
1116
ring->dma);
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
1117
ring->desc = NULL;
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
345
struct rtl8180_tx_ring *ring = &priv->tx_ring[prio];
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
347
while (skb_queue_len(&ring->queue)) {
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
348
struct rtl8180_tx_desc *entry = &ring->desc[ring->idx];
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
356
ring->idx = (ring->idx + 1) % ring->entries;
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
357
skb = __skb_dequeue(&ring->queue);
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
371
if (ring->entries - skb_queue_len(&ring->queue) == 2)
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
461
struct rtl8180_tx_ring *ring;
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
478
ring = &priv->tx_ring[prio];
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
548
idx = (ring->idx + skb_queue_len(&ring->queue)) % ring->entries;
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
549
entry = &ring->desc[idx];
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
577
__skb_queue_tail(&ring->queue, skb);
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
578
if (ring->entries - skb_queue_len(&ring->queue) < 2)
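The rtl8180/dev.c lines above show the classic software TX ring bookkeeping: the next free slot is (ring->idx + skb_queue_len(&ring->queue)) % ring->entries, completion reclaims one descriptor by advancing ring->idx modulo ring->entries, and the queue is stopped while fewer than two slots remain. A stand-alone sketch of that arithmetic follows; the constants and variable names are illustrative, not the driver's.

#include <stdio.h>

#define ENTRIES 8

int main(void)
{
	unsigned int idx = 0;      /* next descriptor to reclaim */
	unsigned int queued = 0;   /* packets currently sitting on the ring */
	unsigned int step;

	for (step = 0; step < 12; step++) {
		/* transmit: the free slot sits 'queued' entries past the reclaim index */
		if (ENTRIES - queued >= 2) {
			unsigned int slot = (idx + queued) % ENTRIES;

			queued++;
			printf("tx  -> slot %u (queued %u)\n", slot, queued);
		}
		/* completion: reclaim one descriptor and advance modulo the ring size */
		if (queued) {
			idx = (idx + 1) % ENTRIES;
			queued--;
		}
	}
	return 0;
}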
drivers/net/wireless/realtek/rtlwifi/core.c
1843
struct rtl8192_tx_ring *ring;
drivers/net/wireless/realtek/rtlwifi/core.c
1848
ring = &rtlpci->tx_ring[BEACON_QUEUE];
drivers/net/wireless/realtek/rtlwifi/core.c
1851
pskb = __skb_dequeue(&ring->queue);
drivers/net/wireless/realtek/rtlwifi/core.c
1856
pdesc = &ring->desc[0];
drivers/net/wireless/realtek/rtlwifi/core.c
1860
__skb_queue_tail(&ring->queue, skb);
drivers/net/wireless/realtek/rtlwifi/pci.c
1010
struct rtl8192_tx_ring *ring = NULL;
drivers/net/wireless/realtek/rtlwifi/pci.c
1022
ring = &rtlpci->tx_ring[BEACON_QUEUE];
drivers/net/wireless/realtek/rtlwifi/pci.c
1023
pskb = __skb_dequeue(&ring->queue);
drivers/net/wireless/realtek/rtlwifi/pci.c
1025
entry = (u8 *)(&ring->buffer_desc[ring->idx]);
drivers/net/wireless/realtek/rtlwifi/pci.c
1027
entry = (u8 *)(&ring->desc[ring->idx]);
drivers/net/wireless/realtek/rtlwifi/pci.c
1042
pdesc = &ring->desc[0];
drivers/net/wireless/realtek/rtlwifi/pci.c
1044
pbuffer_desc = &ring->buffer_desc[0];
drivers/net/wireless/realtek/rtlwifi/pci.c
1050
__skb_queue_tail(&ring->queue, pskb);
drivers/net/wireless/realtek/rtlwifi/pci.c
1268
struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[prio];
drivers/net/wireless/realtek/rtlwifi/pci.c
1271
while (skb_queue_len(&ring->queue)) {
drivers/net/wireless/realtek/rtlwifi/pci.c
1273
struct sk_buff *skb = __skb_dequeue(&ring->queue);
drivers/net/wireless/realtek/rtlwifi/pci.c
1276
entry = (u8 *)(&ring->buffer_desc[ring->idx]);
drivers/net/wireless/realtek/rtlwifi/pci.c
1278
entry = (u8 *)(&ring->desc[ring->idx]);
drivers/net/wireless/realtek/rtlwifi/pci.c
1285
ring->idx = (ring->idx + 1) % ring->entries;
drivers/net/wireless/realtek/rtlwifi/pci.c
1290
sizeof(*ring->desc) * ring->entries, ring->desc,
drivers/net/wireless/realtek/rtlwifi/pci.c
1291
ring->dma);
drivers/net/wireless/realtek/rtlwifi/pci.c
1292
ring->desc = NULL;
drivers/net/wireless/realtek/rtlwifi/pci.c
1295
sizeof(*ring->buffer_desc) * ring->entries,
drivers/net/wireless/realtek/rtlwifi/pci.c
1296
ring->buffer_desc, ring->buffer_desc_dma);
drivers/net/wireless/realtek/rtlwifi/pci.c
1297
ring->buffer_desc = NULL;
drivers/net/wireless/realtek/rtlwifi/pci.c
1447
struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[i];
drivers/net/wireless/realtek/rtlwifi/pci.c
1449
while (skb_queue_len(&ring->queue)) {
drivers/net/wireless/realtek/rtlwifi/pci.c
1452
__skb_dequeue(&ring->queue);
drivers/net/wireless/realtek/rtlwifi/pci.c
1454
entry = (u8 *)(&ring->buffer_desc
drivers/net/wireless/realtek/rtlwifi/pci.c
1455
[ring->idx]);
drivers/net/wireless/realtek/rtlwifi/pci.c
1457
entry = (u8 *)(&ring->desc[ring->idx]);
drivers/net/wireless/realtek/rtlwifi/pci.c
1464
ring->idx = (ring->idx + 1) % ring->entries;
drivers/net/wireless/realtek/rtlwifi/pci.c
1472
ring->idx = 0;
drivers/net/wireless/realtek/rtlwifi/pci.c
1473
ring->entries = rtlpci->txringcount[i];
drivers/net/wireless/realtek/rtlwifi/pci.c
1527
struct rtl8192_tx_ring *ring;
drivers/net/wireless/realtek/rtlwifi/pci.c
1559
ring = &rtlpci->tx_ring[hw_queue];
drivers/net/wireless/realtek/rtlwifi/pci.c
1562
idx = ring->cur_tx_wp;
drivers/net/wireless/realtek/rtlwifi/pci.c
1564
idx = (ring->idx + skb_queue_len(&ring->queue)) %
drivers/net/wireless/realtek/rtlwifi/pci.c
1565
ring->entries;
drivers/net/wireless/realtek/rtlwifi/pci.c
1570
pdesc = &ring->desc[idx];
drivers/net/wireless/realtek/rtlwifi/pci.c
1572
ptx_bd_desc = &ring->buffer_desc[idx];
drivers/net/wireless/realtek/rtlwifi/pci.c
1580
hw_queue, ring->idx, idx,
drivers/net/wireless/realtek/rtlwifi/pci.c
1581
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/pci.c
1603
__skb_queue_tail(&ring->queue, skb);
drivers/net/wireless/realtek/rtlwifi/pci.c
1613
if ((ring->entries - skb_queue_len(&ring->queue)) < 2 &&
drivers/net/wireless/realtek/rtlwifi/pci.c
1617
hw_queue, ring->idx, idx,
drivers/net/wireless/realtek/rtlwifi/pci.c
1618
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/pci.c
1638
struct rtl8192_tx_ring *ring;
drivers/net/wireless/realtek/rtlwifi/pci.c
1650
ring = &pcipriv->dev.tx_ring[queue_id];
drivers/net/wireless/realtek/rtlwifi/pci.c
1651
queue_len = skb_queue_len(&ring->queue);
drivers/net/wireless/realtek/rtlwifi/pci.c
418
struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[hw_queue];
drivers/net/wireless/realtek/rtlwifi/pci.c
428
(ring->entries - skb_queue_len(&ring->queue) >
drivers/net/wireless/realtek/rtlwifi/pci.c
455
struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[prio];
drivers/net/wireless/realtek/rtlwifi/pci.c
457
while (skb_queue_len(&ring->queue)) {
drivers/net/wireless/realtek/rtlwifi/pci.c
465
entry = (u8 *)(&ring->buffer_desc[ring->idx]);
drivers/net/wireless/realtek/rtlwifi/pci.c
467
entry = (u8 *)(&ring->desc[ring->idx]);
drivers/net/wireless/realtek/rtlwifi/pci.c
469
if (!rtlpriv->cfg->ops->is_tx_desc_closed(hw, prio, ring->idx))
drivers/net/wireless/realtek/rtlwifi/pci.c
471
ring->idx = (ring->idx + 1) % ring->entries;
drivers/net/wireless/realtek/rtlwifi/pci.c
473
skb = __skb_dequeue(&ring->queue);
drivers/net/wireless/realtek/rtlwifi/pci.c
485
ring->idx,
drivers/net/wireless/realtek/rtlwifi/pci.c
486
skb_queue_len(&ring->queue),
drivers/net/wireless/realtek/rtlwifi/pci.c
533
if ((ring->entries - skb_queue_len(&ring->queue)) <= 4) {
drivers/net/wireless/realtek/rtlwifi/pci.c
536
prio, ring->idx,
drivers/net/wireless/realtek/rtlwifi/pci.c
537
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8188ee/hw.c
173
struct rtl8192_tx_ring *ring;
drivers/net/wireless/realtek/rtlwifi/rtl8188ee/hw.c
189
ring = &rtlpci->tx_ring[queue];
drivers/net/wireless/realtek/rtlwifi/rtl8188ee/hw.c
190
if (skb_queue_len(&ring->queue)) {
drivers/net/wireless/realtek/rtlwifi/rtl8188ee/hw.c
70
struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[BEACON_QUEUE];
drivers/net/wireless/realtek/rtlwifi/rtl8188ee/hw.c
76
while (skb_queue_len(&ring->queue)) {
drivers/net/wireless/realtek/rtlwifi/rtl8188ee/hw.c
77
struct rtl_tx_desc *entry = &ring->desc[ring->idx];
drivers/net/wireless/realtek/rtlwifi/rtl8188ee/hw.c
78
struct sk_buff *skb = __skb_dequeue(&ring->queue);
drivers/net/wireless/realtek/rtlwifi/rtl8188ee/hw.c
85
ring->idx = (ring->idx + 1) % ring->entries;
drivers/net/wireless/realtek/rtlwifi/rtl8188ee/phy.c
2160
struct rtl8192_tx_ring *ring = NULL;
drivers/net/wireless/realtek/rtlwifi/rtl8188ee/phy.c
2197
ring = &pcipriv->dev.tx_ring[queue_id];
drivers/net/wireless/realtek/rtlwifi/rtl8188ee/phy.c
2199
skb_queue_len(&ring->queue) == 0) {
drivers/net/wireless/realtek/rtlwifi/rtl8188ee/phy.c
2206
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8188ee/phy.c
2216
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8188ee/phy.c
2241
ring = &pcipriv->dev.tx_ring[queue_id];
drivers/net/wireless/realtek/rtlwifi/rtl8188ee/phy.c
2242
if (skb_queue_len(&ring->queue) == 0) {
drivers/net/wireless/realtek/rtlwifi/rtl8188ee/phy.c
2249
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8188ee/phy.c
2259
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8188ee/trx.c
810
struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[hw_queue];
drivers/net/wireless/realtek/rtlwifi/rtl8188ee/trx.c
811
u8 *entry = (u8 *)(&ring->desc[ring->idx]);
drivers/net/wireless/realtek/rtlwifi/rtl8192ce/phy.c
408
struct rtl8192_tx_ring *ring = NULL;
drivers/net/wireless/realtek/rtlwifi/rtl8192ce/phy.c
464
ring = &pcipriv->dev.tx_ring[queue_id];
drivers/net/wireless/realtek/rtlwifi/rtl8192ce/phy.c
466
skb_queue_len(&ring->queue) == 0) {
drivers/net/wireless/realtek/rtlwifi/rtl8192ce/phy.c
473
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8192ce/phy.c
483
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8192ce/trx.c
662
struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[hw_queue];
drivers/net/wireless/realtek/rtlwifi/rtl8192ce/trx.c
663
u8 *entry = (u8 *)(&ring->desc[ring->idx]);
drivers/net/wireless/realtek/rtlwifi/rtl8192cu/phy.c
382
struct rtl8192_tx_ring *ring = NULL;
drivers/net/wireless/realtek/rtlwifi/rtl8192cu/phy.c
418
ring = &pcipriv->dev.tx_ring[queue_id];
drivers/net/wireless/realtek/rtlwifi/rtl8192cu/phy.c
419
if (skb_queue_len(&ring->queue) == 0 ||
drivers/net/wireless/realtek/rtlwifi/rtl8192cu/phy.c
428
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8192cu/phy.c
437
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8192cu/phy.c
461
ring = &pcipriv->dev.tx_ring[queue_id];
drivers/net/wireless/realtek/rtlwifi/rtl8192cu/phy.c
462
if (skb_queue_len(&ring->queue) == 0) {
drivers/net/wireless/realtek/rtlwifi/rtl8192cu/phy.c
469
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8192cu/phy.c
478
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8192de/fw.c
117
struct rtl8192_tx_ring *ring;
drivers/net/wireless/realtek/rtlwifi/rtl8192de/fw.c
123
ring = &rtlpci->tx_ring[BEACON_QUEUE];
drivers/net/wireless/realtek/rtlwifi/rtl8192de/fw.c
124
pskb = __skb_dequeue(&ring->queue);
drivers/net/wireless/realtek/rtlwifi/rtl8192de/fw.c
127
pdesc = &ring->desc[idx];
drivers/net/wireless/realtek/rtlwifi/rtl8192de/fw.c
131
__skb_queue_tail(&ring->queue, skb);
drivers/net/wireless/realtek/rtlwifi/rtl8192de/phy.c
2348
struct rtl8192_tx_ring *ring = NULL;
drivers/net/wireless/realtek/rtlwifi/rtl8192de/phy.c
2405
ring = &pcipriv->dev.tx_ring[queue_id];
drivers/net/wireless/realtek/rtlwifi/rtl8192de/phy.c
2406
if (skb_queue_len(&ring->queue) == 0 ||
drivers/net/wireless/realtek/rtlwifi/rtl8192de/phy.c
2419
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8192de/phy.c
2428
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8192de/trx.c
293
struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[hw_queue];
drivers/net/wireless/realtek/rtlwifi/rtl8192de/trx.c
294
u8 *entry = (u8 *)(&ring->desc[ring->idx]);
drivers/net/wireless/realtek/rtlwifi/rtl8192ee/hw.c
147
struct rtl8192_tx_ring *ring;
drivers/net/wireless/realtek/rtlwifi/rtl8192ee/hw.c
164
ring = &rtlpci->tx_ring[queue];
drivers/net/wireless/realtek/rtlwifi/rtl8192ee/hw.c
165
if (skb_queue_len(&ring->queue)) {
drivers/net/wireless/realtek/rtlwifi/rtl8192ee/phy.c
3038
struct rtl8192_tx_ring *ring = NULL;
drivers/net/wireless/realtek/rtlwifi/rtl8192ee/phy.c
3070
ring = &pcipriv->dev.tx_ring[queue_id];
drivers/net/wireless/realtek/rtlwifi/rtl8192ee/phy.c
3072
skb_queue_len(&ring->queue) == 0) {
drivers/net/wireless/realtek/rtlwifi/rtl8192ee/phy.c
3079
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8192ee/phy.c
3089
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8192ee/phy.c
3114
ring = &pcipriv->dev.tx_ring[queue_id];
drivers/net/wireless/realtek/rtlwifi/rtl8192ee/phy.c
3115
if (skb_queue_len(&ring->queue) == 0) {
drivers/net/wireless/realtek/rtlwifi/rtl8192ee/phy.c
3122
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8192ee/phy.c
3131
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8192ee/trx.c
1000
struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[hw_queue];
drivers/net/wireless/realtek/rtlwifi/rtl8192ee/trx.c
1012
ring->cur_tx_rp = cur_tx_rp;
drivers/net/wireless/realtek/rtlwifi/rtl8192ee/trx.c
1015
read_point = ring->cur_tx_rp;
drivers/net/wireless/realtek/rtlwifi/rtl8192ee/trx.c
1016
write_point = ring->cur_tx_wp;
drivers/net/wireless/realtek/rtlwifi/rtl8192ee/trx.c
900
struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[q_idx];
drivers/net/wireless/realtek/rtlwifi/rtl8192ee/trx.c
901
u16 max_tx_desc = ring->entries;
drivers/net/wireless/realtek/rtlwifi/rtl8192ee/trx.c
904
ring->cur_tx_wp = 0;
drivers/net/wireless/realtek/rtlwifi/rtl8192ee/trx.c
905
ring->cur_tx_rp = 0;
drivers/net/wireless/realtek/rtlwifi/rtl8192ee/trx.c
911
ring->cur_tx_wp = ((ring->cur_tx_wp + 1) % max_tx_desc);
drivers/net/wireless/realtek/rtlwifi/rtl8192ee/trx.c
915
ring->cur_tx_wp);
drivers/net/wireless/realtek/rtlwifi/rtl8192se/fw.c
114
struct rtl8192_tx_ring *ring;
drivers/net/wireless/realtek/rtlwifi/rtl8192se/fw.c
119
ring = &rtlpci->tx_ring[TXCMD_QUEUE];
drivers/net/wireless/realtek/rtlwifi/rtl8192se/fw.c
123
idx = (ring->idx + skb_queue_len(&ring->queue)) % ring->entries;
drivers/net/wireless/realtek/rtlwifi/rtl8192se/fw.c
124
pdesc = &ring->desc[idx];
drivers/net/wireless/realtek/rtlwifi/rtl8192se/fw.c
126
__skb_queue_tail(&ring->queue, skb);
drivers/net/wireless/realtek/rtlwifi/rtl8192se/phy.c
504
struct rtl8192_tx_ring *ring = NULL;
drivers/net/wireless/realtek/rtlwifi/rtl8192se/phy.c
567
ring = &pcipriv->dev.tx_ring[queue_id];
drivers/net/wireless/realtek/rtlwifi/rtl8192se/phy.c
568
if (skb_queue_len(&ring->queue) == 0 ||
drivers/net/wireless/realtek/rtlwifi/rtl8192se/phy.c
576
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8192se/phy.c
587
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8192se/sw.c
215
struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[hw_queue];
drivers/net/wireless/realtek/rtlwifi/rtl8192se/sw.c
216
u8 *entry = (u8 *)(&ring->desc[ring->idx]);
drivers/net/wireless/realtek/rtlwifi/rtl8723ae/phy.c
1559
struct rtl8192_tx_ring *ring = NULL;
drivers/net/wireless/realtek/rtlwifi/rtl8723ae/phy.c
1613
ring = &pcipriv->dev.tx_ring[queue_id];
drivers/net/wireless/realtek/rtlwifi/rtl8723ae/phy.c
1615
skb_queue_len(&ring->queue) == 0) {
drivers/net/wireless/realtek/rtlwifi/rtl8723ae/phy.c
1622
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8723ae/phy.c
1632
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8723ae/trx.c
664
struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[hw_queue];
drivers/net/wireless/realtek/rtlwifi/rtl8723ae/trx.c
665
u8 *entry = (u8 *)(&ring->desc[ring->idx]);
drivers/net/wireless/realtek/rtlwifi/rtl8723be/hw.c
176
struct rtl8192_tx_ring *ring;
drivers/net/wireless/realtek/rtlwifi/rtl8723be/hw.c
192
ring = &rtlpci->tx_ring[queue];
drivers/net/wireless/realtek/rtlwifi/rtl8723be/hw.c
193
if (skb_queue_len(&ring->queue)) {
drivers/net/wireless/realtek/rtlwifi/rtl8723be/hw.c
32
struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[BEACON_QUEUE];
drivers/net/wireless/realtek/rtlwifi/rtl8723be/hw.c
38
while (skb_queue_len(&ring->queue)) {
drivers/net/wireless/realtek/rtlwifi/rtl8723be/hw.c
39
struct rtl_tx_desc *entry = &ring->desc[ring->idx];
drivers/net/wireless/realtek/rtlwifi/rtl8723be/hw.c
40
struct sk_buff *skb = __skb_dequeue(&ring->queue);
drivers/net/wireless/realtek/rtlwifi/rtl8723be/hw.c
47
ring->idx = (ring->idx + 1) % ring->entries;
drivers/net/wireless/realtek/rtlwifi/rtl8723be/phy.c
2522
struct rtl8192_tx_ring *ring = NULL;
drivers/net/wireless/realtek/rtlwifi/rtl8723be/phy.c
2555
ring = &pcipriv->dev.tx_ring[queue_id];
drivers/net/wireless/realtek/rtlwifi/rtl8723be/phy.c
2561
skb_queue_len(&ring->queue) == 0) {
drivers/net/wireless/realtek/rtlwifi/rtl8723be/phy.c
2568
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8723be/phy.c
2578
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8723be/phy.c
2604
ring = &pcipriv->dev.tx_ring[queue_id];
drivers/net/wireless/realtek/rtlwifi/rtl8723be/phy.c
2605
if (skb_queue_len(&ring->queue) == 0) {
drivers/net/wireless/realtek/rtlwifi/rtl8723be/phy.c
2612
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8723be/phy.c
2622
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8723be/trx.c
721
struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[hw_queue];
drivers/net/wireless/realtek/rtlwifi/rtl8723be/trx.c
722
u8 *entry = (u8 *)(&ring->desc[ring->idx]);
drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c
174
struct rtl8192_tx_ring *ring;
drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c
190
ring = &rtlpci->tx_ring[queue];
drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c
191
if (skb_queue_len(&ring->queue)) {
drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c
28
struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[BEACON_QUEUE];
drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c
34
while (skb_queue_len(&ring->queue)) {
drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c
35
struct rtl_tx_desc *entry = &ring->desc[ring->idx];
drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c
36
struct sk_buff *skb = __skb_dequeue(&ring->queue);
drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c
43
ring->idx = (ring->idx + 1) % ring->entries;
drivers/net/wireless/realtek/rtlwifi/rtl8821ae/phy.c
4690
struct rtl8192_tx_ring *ring = NULL;
drivers/net/wireless/realtek/rtlwifi/rtl8821ae/phy.c
4726
ring = &pcipriv->dev.tx_ring[queue_id];
drivers/net/wireless/realtek/rtlwifi/rtl8821ae/phy.c
4728
skb_queue_len(&ring->queue) == 0) {
drivers/net/wireless/realtek/rtlwifi/rtl8821ae/phy.c
4735
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8821ae/phy.c
4745
skb_queue_len(&ring->queue));
drivers/net/wireless/realtek/rtlwifi/rtl8821ae/trx.c
966
struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[hw_queue];
drivers/net/wireless/realtek/rtlwifi/rtl8821ae/trx.c
967
u8 *entry = (u8 *)(&ring->desc[ring->idx]);
drivers/net/wireless/realtek/rtw88/pci.c
1012
ring->r.rp = cur_rp;
drivers/net/wireless/realtek/rtw88/pci.c
1026
struct rtw_pci_rx_ring *ring;
drivers/net/wireless/realtek/rtw88/pci.c
1030
ring = &rtwpci->rx_rings[RTW_RX_QUEUE_MPDU];
drivers/net/wireless/realtek/rtw88/pci.c
1033
if (cur_wp >= ring->r.wp)
drivers/net/wireless/realtek/rtw88/pci.c
1034
count = cur_wp - ring->r.wp;
drivers/net/wireless/realtek/rtw88/pci.c
1036
count = ring->r.len - (ring->r.wp - cur_wp);
drivers/net/wireless/realtek/rtw88/pci.c
1046
struct rtw_pci_rx_ring *ring = &rtwpci->rx_rings[RTW_RX_QUEUE_MPDU];
drivers/net/wireless/realtek/rtw88/pci.c
1050
u32 cur_rp = ring->r.rp;
drivers/net/wireless/realtek/rtw88/pci.c
1063
rtw_pci_dma_check(rtwdev, ring, cur_rp);
drivers/net/wireless/realtek/rtw88/pci.c
1064
skb = ring->buf[cur_rp];
drivers/net/wireless/realtek/rtw88/pci.c
1101
rtw_pci_sync_rx_desc_device(rtwdev, dma, ring, cur_rp,
drivers/net/wireless/realtek/rtw88/pci.c
1105
if (++cur_rp >= ring->r.len)
drivers/net/wireless/realtek/rtw88/pci.c
1109
ring->r.rp = cur_rp;
drivers/net/wireless/realtek/rtw88/pci.c
1113
ring->r.wp = cur_rp;
drivers/net/wireless/realtek/rtw88/pci.c
1114
rtw_write16(rtwdev, RTK_PCI_RXBD_IDX_MPDUQ, ring->r.rp);
drivers/net/wireless/realtek/rtw88/pci.c
668
struct rtw_pci_tx_ring *ring)
drivers/net/wireless/realtek/rtw88/pci.c
670
struct sk_buff *prev = skb_dequeue(&ring->queue);
drivers/net/wireless/realtek/rtw88/pci.c
715
struct rtw_pci_tx_ring *ring = &rtwpci->tx_rings[pci_q];
drivers/net/wireless/realtek/rtw88/pci.c
726
if (cur_rp == ring->r.wp)
drivers/net/wireless/realtek/rtw88/pci.c
776
struct rtw_pci_tx_ring *ring;
drivers/net/wireless/realtek/rtw88/pci.c
779
ring = &rtwpci->tx_rings[queue];
drivers/net/wireless/realtek/rtw88/pci.c
785
rtw_write16(rtwdev, bd_idx, ring->r.wp & TRX_BD_IDX_MASK);
drivers/net/wireless/realtek/rtw88/pci.c
806
struct rtw_pci_tx_ring *ring;
drivers/net/wireless/realtek/rtw88/pci.c
816
ring = &rtwpci->tx_rings[queue];
drivers/net/wireless/realtek/rtw88/pci.c
821
rtw_pci_release_rsvd_page(rtwpci, ring);
drivers/net/wireless/realtek/rtw88/pci.c
822
else if (!avail_desc(ring->r.wp, ring->r.rp, ring->r.len))
drivers/net/wireless/realtek/rtw88/pci.c
835
buf_desc = get_tx_buffer_desc(ring, tx_buf_desc_sz);
drivers/net/wireless/realtek/rtw88/pci.c
853
skb_queue_tail(&ring->queue, skb);
drivers/net/wireless/realtek/rtw88/pci.c
860
if (++ring->r.wp >= ring->r.len)
drivers/net/wireless/realtek/rtw88/pci.c
861
ring->r.wp = 0;
drivers/net/wireless/realtek/rtw88/pci.c
922
struct rtw_pci_tx_ring *ring;
drivers/net/wireless/realtek/rtw88/pci.c
929
ring = &rtwpci->tx_rings[queue];
drivers/net/wireless/realtek/rtw88/pci.c
931
if (avail_desc(ring->r.wp, ring->r.rp, ring->r.len) < 2) {
drivers/net/wireless/realtek/rtw88/pci.c
933
ring->queue_stopped = true;
drivers/net/wireless/realtek/rtw88/pci.c
945
struct rtw_pci_tx_ring *ring;
drivers/net/wireless/realtek/rtw88/pci.c
953
ring = &rtwpci->tx_rings[hw_queue];
drivers/net/wireless/realtek/rtw88/pci.c
959
rp_idx = ring->r.rp;
drivers/net/wireless/realtek/rtw88/pci.c
960
if (cur_rp >= ring->r.rp)
drivers/net/wireless/realtek/rtw88/pci.c
961
count = cur_rp - ring->r.rp;
drivers/net/wireless/realtek/rtw88/pci.c
963
count = ring->r.len - (ring->r.rp - cur_rp);
drivers/net/wireless/realtek/rtw88/pci.c
966
skb = skb_dequeue(&ring->queue);
drivers/net/wireless/realtek/rtw88/pci.c
969
count, hw_queue, bd_idx, ring->r.rp, cur_rp);
drivers/net/wireless/realtek/rtw88/pci.c
982
if (ring->queue_stopped &&
drivers/net/wireless/realtek/rtw88/pci.c
983
avail_desc(ring->r.wp, rp_idx, ring->r.len) > 4) {
drivers/net/wireless/realtek/rtw88/pci.c
986
ring->queue_stopped = false;
drivers/net/wireless/realtek/rtw88/pci.c
989
if (++rp_idx >= ring->r.len)
drivers/net/wireless/realtek/rtw88/pci.h
271
struct rtw_pci_tx_buffer_desc *get_tx_buffer_desc(struct rtw_pci_tx_ring *ring,
drivers/net/wireless/realtek/rtw88/pci.h
276
buf_desc = ring->r.head + ring->r.wp * size;
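The rtw88/pci.c and pci.h lines above compute two quantities on every completion pass: how many descriptors finished between the stored read pointer and the hardware's current one (with wrap handling), and how many slots stay free between write and read pointers via avail_desc(). A stand-alone sketch of both calculations is below; it assumes the common one-slot-reserved convention so that wp == rp means "empty", and the function names are illustrative rather than the driver's.

#include <stdio.h>

/* completed(): entries finished between the stored read pointer and the
 * hardware's current one. avail(): free slots between write and read
 * pointers, keeping one slot unused. Both assume a ring of 'len' entries. */
static unsigned int completed(unsigned int cur_rp, unsigned int old_rp,
			      unsigned int len)
{
	return cur_rp >= old_rp ? cur_rp - old_rp : len - (old_rp - cur_rp);
}

static unsigned int avail(unsigned int wp, unsigned int rp, unsigned int len)
{
	return wp >= rp ? len - (wp - rp) - 1 : rp - wp - 1;
}

int main(void)
{
	/* wrapped case: rp was 14, hardware is now at 3 on a 16-entry ring */
	printf("completed: %u\n", completed(3, 14, 16));  /* 16 - (14 - 3) = 5 */
	printf("avail:     %u\n", avail(14, 3, 16));      /* 16 - 11 - 1 = 4  */
	return 0;
}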
drivers/net/wireless/realtek/rtw89/pci.c
1222
static u32 rtw89_pci_get_avail_txbd_num(struct rtw89_pci_tx_ring *ring)
drivers/net/wireless/realtek/rtw89/pci.c
1224
struct rtw89_pci_dma_ring *bd_ring = &ring->bd_ring;
drivers/net/wwan/t7xx/t7xx_hif_cldma.c
368
struct cldma_ring *ring, enum dma_data_direction tx_rx)
drivers/net/wwan/t7xx/t7xx_hif_cldma.c
372
list_for_each_entry_safe(req_cur, req_next, &ring->gpd_ring, entry) {
drivers/net/wwan/t7xx/t7xx_hif_cldma.c
375
ring->pkt_size, tx_rx);
drivers/net/wwan/t7xx/t7xx_hif_cldma.c
417
static int t7xx_cldma_rx_ring_init(struct cldma_ctrl *md_ctrl, struct cldma_ring *ring)
drivers/net/wwan/t7xx/t7xx_hif_cldma.c
423
INIT_LIST_HEAD(&ring->gpd_ring);
drivers/net/wwan/t7xx/t7xx_hif_cldma.c
424
ring->length = MAX_RX_BUDGET;
drivers/net/wwan/t7xx/t7xx_hif_cldma.c
426
for (i = 0; i < ring->length; i++) {
drivers/net/wwan/t7xx/t7xx_hif_cldma.c
427
req = t7xx_alloc_rx_request(md_ctrl, ring->pkt_size);
drivers/net/wwan/t7xx/t7xx_hif_cldma.c
429
t7xx_cldma_ring_free(md_ctrl, ring, DMA_FROM_DEVICE);
drivers/net/wwan/t7xx/t7xx_hif_cldma.c
435
gpd->rx_data_allow_len = cpu_to_le16(ring->pkt_size);
drivers/net/wwan/t7xx/t7xx_hif_cldma.c
438
list_add_tail(&req->entry, &ring->gpd_ring);
drivers/net/wwan/t7xx/t7xx_hif_cldma.c
442
list_for_each_entry(req, &ring->gpd_ring, entry) {
drivers/net/wwan/t7xx/t7xx_hif_cldma.c
467
static int t7xx_cldma_tx_ring_init(struct cldma_ctrl *md_ctrl, struct cldma_ring *ring)
drivers/net/wwan/t7xx/t7xx_hif_cldma.c
473
INIT_LIST_HEAD(&ring->gpd_ring);
drivers/net/wwan/t7xx/t7xx_hif_cldma.c
474
ring->length = MAX_TX_BUDGET;
drivers/net/wwan/t7xx/t7xx_hif_cldma.c
476
for (i = 0; i < ring->length; i++) {
drivers/net/wwan/t7xx/t7xx_hif_cldma.c
479
t7xx_cldma_ring_free(md_ctrl, ring, DMA_TO_DEVICE);
drivers/net/wwan/t7xx/t7xx_hif_cldma.c
486
list_add_tail(&req->entry, &ring->gpd_ring);
drivers/net/wwan/t7xx/t7xx_hif_cldma.c
490
list_for_each_entry(req, &ring->gpd_ring, entry) {
drivers/nvme/host/rdma.c
201
struct nvme_rdma_qe *ring, size_t ib_queue_size,
drivers/nvme/host/rdma.c
207
nvme_rdma_free_qe(ibdev, &ring[i], capsule_size, dir);
drivers/nvme/host/rdma.c
208
kfree(ring);
drivers/nvme/host/rdma.c
215
struct nvme_rdma_qe *ring;
drivers/nvme/host/rdma.c
218
ring = kzalloc_objs(struct nvme_rdma_qe, ib_queue_size);
drivers/nvme/host/rdma.c
219
if (!ring)
drivers/nvme/host/rdma.c
228
if (nvme_rdma_alloc_qe(ibdev, &ring[i], capsule_size, dir))
drivers/nvme/host/rdma.c
232
return ring;
drivers/nvme/host/rdma.c
235
nvme_rdma_free_ring(ibdev, ring, i, capsule_size, dir);
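The nvme/host/rdma.c lines above allocate an array of queue entries and, if setting up element i fails, release only the i entries that were already initialized before freeing the array itself. A stand-alone sketch of that allocate-then-unwind pattern follows, using hypothetical demo_* names and plain malloc in place of the driver's per-entry setup.

#include <stdio.h>
#include <stdlib.h>

struct demo_qe {
	void *buf;
};

static struct demo_qe *demo_alloc_ring(size_t n, size_t capsule_size)
{
	struct demo_qe *ring = calloc(n, sizeof(*ring));
	size_t i;

	if (!ring)
		return NULL;
	for (i = 0; i < n; i++) {
		ring[i].buf = malloc(capsule_size);
		if (!ring[i].buf)
			goto out_free;
	}
	return ring;

out_free:
	while (i--)
		free(ring[i].buf);   /* free only the entries that succeeded */
	free(ring);
	return NULL;
}

int main(void)
{
	struct demo_qe *ring = demo_alloc_ring(8, 64);
	size_t i;

	if (!ring)
		return 1;
	for (i = 0; i < 8; i++)
		free(ring[i].buf);
	free(ring);
	return 0;
}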
drivers/platform/chrome/cros_ec_sensorhub_ring.c
1061
sensorhub->ring = devm_kcalloc(sensorhub->dev, sensorhub->fifo_size,
drivers/platform/chrome/cros_ec_sensorhub_ring.c
1062
sizeof(*sensorhub->ring), GFP_KERNEL);
drivers/platform/chrome/cros_ec_sensorhub_ring.c
1063
if (!sensorhub->ring)
drivers/platform/chrome/cros_ec_sensorhub_ring.c
588
for (batch_start = sensorhub->ring; batch_start < last_out;
drivers/platform/chrome/cros_ec_sensorhub_ring.c
762
for (out = sensorhub->ring; out < last_out; out++) {
drivers/platform/chrome/cros_ec_sensorhub_ring.c
784
for (out = sensorhub->ring; out < last_out; out++) {
drivers/platform/chrome/cros_ec_sensorhub_ring.c
793
for (out = sensorhub->ring; out < last_out; out++)
drivers/platform/chrome/cros_ec_sensorhub_ring.c
855
out = sensorhub->ring;
drivers/platform/chrome/cros_ec_sensorhub_ring.c
883
sensorhub->ring + fifo_info->count) {
drivers/platform/chrome/cros_ec_sensorhub_ring.c
886
i, out - sensorhub->ring, i + number_data,
drivers/platform/chrome/cros_ec_sensorhub_ring.c
906
if (out == sensorhub->ring)
drivers/platform/mellanox/mlxbf-tmfifo.c
341
head = virtio16_to_cpu(vdev, vr->avail->ring[idx]);
drivers/platform/mellanox/mlxbf-tmfifo.c
360
vr->used->ring[idx].id = cpu_to_virtio32(vdev, desc - vr->desc);
drivers/platform/mellanox/mlxbf-tmfifo.c
361
vr->used->ring[idx].len = cpu_to_virtio32(vdev, len);
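The mlxbf-tmfifo.c lines above touch both halves of a virtio split ring: the available ring supplies the head descriptor index offered by the driver, and the used ring reports back which chain completed and how many bytes were written. Below is a simplified stand-alone model of that exchange; it skips the endianness conversions shown above and uses invented types purely for illustration.

#include <stdio.h>
#include <stdint.h>

#define QSZ 4

struct demo_used_elem { uint32_t id; uint32_t len; };

struct demo_vring {
	uint16_t avail_ring[QSZ];             /* head indices offered by the driver */
	struct demo_used_elem used_ring[QSZ]; /* completions published by the device */
	uint16_t avail_idx;
	uint16_t used_idx;
};

int main(void)
{
	struct demo_vring vr = { 0 };
	uint16_t head, slot;

	/* driver side: offer descriptor chain starting at index 2 */
	vr.avail_ring[vr.avail_idx % QSZ] = 2;
	vr.avail_idx++;

	/* device side: consume the offer and publish a used entry */
	head = vr.avail_ring[(uint16_t)(vr.avail_idx - 1) % QSZ];
	slot = vr.used_idx % QSZ;
	vr.used_ring[slot].id = head;     /* which descriptor chain completed */
	vr.used_ring[slot].len = 64;      /* bytes written into it */
	vr.used_idx++;

	printf("used[%u] = { id %u, len %u }\n", slot,
	       vr.used_ring[slot].id, vr.used_ring[slot].len);
	return 0;
}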
drivers/scsi/fnic/fnic_scsi.c
200
wq->ring.desc_avail += (fnic->fw_ack_index[hwq]
drivers/scsi/fnic/fnic_scsi.c
203
wq->ring.desc_avail += (wq->ring.desc_count
drivers/scsi/fnic/fnic_scsi.c
213
(fnic->fw_ack_index[hwq] + 1) % wq->ring.desc_count;
drivers/scsi/fnic/vnic_cq.c
14
vnic_dev_free_desc_ring(cq->vdev, &cq->ring);
drivers/scsi/fnic/vnic_cq.c
33
err = vnic_dev_alloc_desc_ring(vdev, &cq->ring, desc_count, desc_size);
drivers/scsi/fnic/vnic_cq.c
48
paddr = (u64)cq->ring.base_addr | VNIC_PADDR_TARGET;
drivers/scsi/fnic/vnic_cq.c
50
iowrite32(cq->ring.desc_count, &cq->ctrl->ring_size);
drivers/scsi/fnic/vnic_cq.c
72
vnic_dev_clear_desc_ring(&cq->ring);
drivers/scsi/fnic/vnic_cq.h
53
struct vnic_dev_ring ring;
drivers/scsi/fnic/vnic_cq.h
69
cq_desc = (struct cq_desc *)((u8 *)cq->ring.descs +
drivers/scsi/fnic/vnic_cq.h
70
cq->ring.desc_size * cq->to_clean);
drivers/scsi/fnic/vnic_cq.h
81
if (cq->to_clean == cq->ring.desc_count) {
drivers/scsi/fnic/vnic_cq.h
86
cq_desc = (struct cq_desc *)((u8 *)cq->ring.descs +
drivers/scsi/fnic/vnic_cq.h
87
cq->ring.desc_size * cq->to_clean);
drivers/scsi/fnic/vnic_cq_copy.h
23
desc = (struct fcpio_fw_req *)((u8 *)cq->ring.descs +
drivers/scsi/fnic/vnic_cq_copy.h
24
cq->ring.desc_size * cq->to_clean);
drivers/scsi/fnic/vnic_cq_copy.h
33
if (cq->to_clean == cq->ring.desc_count) {
drivers/scsi/fnic/vnic_cq_copy.h
38
desc = (struct fcpio_fw_req *)((u8 *)cq->ring.descs +
drivers/scsi/fnic/vnic_cq_copy.h
39
cq->ring.desc_size * cq->to_clean);
drivers/scsi/fnic/vnic_dev.c
177
unsigned int vnic_dev_desc_ring_size(struct vnic_dev_ring *ring,
drivers/scsi/fnic/vnic_dev.c
190
ring->base_align = 512;
drivers/scsi/fnic/vnic_dev.c
195
ring->desc_count = ALIGN(desc_count, count_align);
drivers/scsi/fnic/vnic_dev.c
197
ring->desc_size = ALIGN(desc_size, desc_align);
drivers/scsi/fnic/vnic_dev.c
199
ring->size = ring->desc_count * ring->desc_size;
drivers/scsi/fnic/vnic_dev.c
200
ring->size_unaligned = ring->size + ring->base_align;
drivers/scsi/fnic/vnic_dev.c
202
return ring->size_unaligned;
drivers/scsi/fnic/vnic_dev.c
205
void vnic_dev_clear_desc_ring(struct vnic_dev_ring *ring)
drivers/scsi/fnic/vnic_dev.c
207
memset(ring->descs, 0, ring->size);
drivers/scsi/fnic/vnic_dev.c
210
int vnic_dev_alloc_desc_ring(struct vnic_dev *vdev, struct vnic_dev_ring *ring,
drivers/scsi/fnic/vnic_dev.c
213
vnic_dev_desc_ring_size(ring, desc_count, desc_size);
drivers/scsi/fnic/vnic_dev.c
215
ring->descs_unaligned = dma_alloc_coherent(&vdev->pdev->dev,
drivers/scsi/fnic/vnic_dev.c
216
ring->size_unaligned,
drivers/scsi/fnic/vnic_dev.c
217
&ring->base_addr_unaligned, GFP_KERNEL);
drivers/scsi/fnic/vnic_dev.c
219
if (!ring->descs_unaligned) {
drivers/scsi/fnic/vnic_dev.c
222
(int)ring->size);
drivers/scsi/fnic/vnic_dev.c
226
ring->base_addr = ALIGN(ring->base_addr_unaligned,
drivers/scsi/fnic/vnic_dev.c
227
ring->base_align);
drivers/scsi/fnic/vnic_dev.c
228
ring->descs = (u8 *)ring->descs_unaligned +
drivers/scsi/fnic/vnic_dev.c
229
(ring->base_addr - ring->base_addr_unaligned);
drivers/scsi/fnic/vnic_dev.c
231
vnic_dev_clear_desc_ring(ring);
drivers/scsi/fnic/vnic_dev.c
233
ring->desc_avail = ring->desc_count - 1;
drivers/scsi/fnic/vnic_dev.c
238
void vnic_dev_free_desc_ring(struct vnic_dev *vdev, struct vnic_dev_ring *ring)
drivers/scsi/fnic/vnic_dev.c
240
if (ring->descs) {
drivers/scsi/fnic/vnic_dev.c
242
ring->size_unaligned,
drivers/scsi/fnic/vnic_dev.c
243
ring->descs_unaligned,
drivers/scsi/fnic/vnic_dev.c
244
ring->base_addr_unaligned);
drivers/scsi/fnic/vnic_dev.c
245
ring->descs = NULL;
drivers/scsi/fnic/vnic_dev.c
461
(struct vnic_devcmd2 *) vdev->devcmd2->wq.ring.descs;
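The fnic/vnic_dev.c lines above size and place a descriptor ring: the element count and element size are rounded up to their alignments, the coherent buffer is over-allocated by base_align, the descriptor area starts at the first base_align-aligned address inside it, and desc_count - 1 slots end up usable. The sketch below mirrors that bookkeeping in a self-contained way, using plain malloc instead of dma_alloc_coherent and aligning the CPU pointer rather than the DMA address, which is an acknowledged simplification.

#include <stdio.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

#define DEMO_ALIGN(x, a) (((x) + (a) - 1) & ~((uintptr_t)(a) - 1))

int main(void)
{
	unsigned int desc_count = DEMO_ALIGN(100, 32);   /* e.g. count_align = 32 */
	unsigned int desc_size  = DEMO_ALIGN(24, 16);    /* e.g. desc_align = 16  */
	unsigned int base_align = 512;
	size_t size = (size_t)desc_count * desc_size;
	size_t size_unaligned = size + base_align;       /* over-allocate for alignment */
	void *descs_unaligned = malloc(size_unaligned);
	uintptr_t base_unaligned, base;
	void *descs;

	if (!descs_unaligned)
		return 1;
	base_unaligned = (uintptr_t)descs_unaligned;
	base = DEMO_ALIGN(base_unaligned, base_align);   /* first aligned address inside */
	descs = (uint8_t *)descs_unaligned + (base - base_unaligned);
	memset(descs, 0, size);                          /* clear the descriptor area */
	printf("count %u size/desc %u total %zu descs offset %zu\n",
	       desc_count, desc_size, size, (size_t)(base - base_unaligned));
	free(descs_unaligned);
	return 0;
}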
drivers/scsi/fnic/vnic_dev.h
105
unsigned int vnic_dev_desc_ring_size(struct vnic_dev_ring *ring,
drivers/scsi/fnic/vnic_dev.h
108
void vnic_dev_clear_desc_ring(struct vnic_dev_ring *ring);
drivers/scsi/fnic/vnic_dev.h
109
int vnic_dev_alloc_desc_ring(struct vnic_dev *vdev, struct vnic_dev_ring *ring,
drivers/scsi/fnic/vnic_dev.h
112
struct vnic_dev_ring *ring);
drivers/scsi/fnic/vnic_rq.c
106
paddr = (u64)rq->ring.base_addr | VNIC_PADDR_TARGET;
drivers/scsi/fnic/vnic_rq.c
108
iowrite32(rq->ring.desc_count, &rq->ctrl->ring_size);
drivers/scsi/fnic/vnic_rq.c
168
rq->ring.desc_avail++;
drivers/scsi/fnic/vnic_rq.c
18
unsigned int i, j, count = rq->ring.desc_count;
drivers/scsi/fnic/vnic_rq.c
180
vnic_dev_clear_desc_ring(&rq->ring);
drivers/scsi/fnic/vnic_rq.c
33
buf->desc = (u8 *)rq->ring.descs +
drivers/scsi/fnic/vnic_rq.c
34
rq->ring.desc_size * buf->index;
drivers/scsi/fnic/vnic_rq.c
60
vnic_dev_free_desc_ring(vdev, &rq->ring);
drivers/scsi/fnic/vnic_rq.c
86
err = vnic_dev_alloc_desc_ring(vdev, &rq->ring, desc_count, desc_size);
drivers/scsi/fnic/vnic_rq.h
102
return rq->ring.desc_count - rq->ring.desc_avail - 1;
drivers/scsi/fnic/vnic_rq.h
133
rq->ring.desc_avail--;
drivers/scsi/fnic/vnic_rq.h
160
rq->ring.desc_avail += count;
drivers/scsi/fnic/vnic_rq.h
185
rq->ring.desc_avail++;
drivers/scsi/fnic/vnic_rq.h
84
struct vnic_dev_ring ring;
drivers/scsi/fnic/vnic_rq.h
96
return rq->ring.desc_avail;
drivers/scsi/fnic/vnic_wq.c
106
err = vnic_dev_alloc_desc_ring(vdev, &wq->ring, desc_count, desc_size);
drivers/scsi/fnic/vnic_wq.c
147
unsigned int count = wq->ring.desc_count;
drivers/scsi/fnic/vnic_wq.c
149
paddr = (u64)wq->ring.base_addr | VNIC_PADDR_TARGET;
drivers/scsi/fnic/vnic_wq.c
171
paddr = (u64)wq->ring.base_addr | VNIC_PADDR_TARGET;
drivers/scsi/fnic/vnic_wq.c
173
iowrite32(wq->ring.desc_count, &wq->ctrl->ring_size);
drivers/scsi/fnic/vnic_wq.c
224
wq->ring.desc_avail++;
drivers/scsi/fnic/vnic_wq.c
233
vnic_dev_clear_desc_ring(&wq->ring);
drivers/scsi/fnic/vnic_wq.c
31
return vnic_dev_alloc_desc_ring(vdev, &wq->ring, desc_count, desc_size);
drivers/scsi/fnic/vnic_wq.c
38
unsigned int i, j, count = wq->ring.desc_count;
drivers/scsi/fnic/vnic_wq.c
53
buf->desc = (u8 *)wq->ring.descs +
drivers/scsi/fnic/vnic_wq.c
54
wq->ring.desc_size * buf->index;
drivers/scsi/fnic/vnic_wq.c
79
vnic_dev_free_desc_ring(vdev, &wq->ring);
drivers/scsi/fnic/vnic_wq.h
126
wq->ring.desc_avail--;
drivers/scsi/fnic/vnic_wq.h
142
wq->ring.desc_avail++;
drivers/scsi/fnic/vnic_wq.h
79
struct vnic_dev_ring ring;
drivers/scsi/fnic/vnic_wq.h
89
return wq->ring.desc_avail;
drivers/scsi/fnic/vnic_wq.h
95
return wq->ring.desc_count - wq->ring.desc_avail - 1;
drivers/scsi/fnic/vnic_wq_copy.c
54
vnic_dev_clear_desc_ring(&wq->ring);
drivers/scsi/fnic/vnic_wq_copy.c
62
vnic_dev_free_desc_ring(vdev, &wq->ring);
drivers/scsi/fnic/vnic_wq_copy.c
81
return vnic_dev_alloc_desc_ring(vdev, &wq->ring, desc_count, desc_size);
drivers/scsi/fnic/vnic_wq_copy.c
90
paddr = (u64)wq->ring.base_addr | VNIC_PADDR_TARGET;
drivers/scsi/fnic/vnic_wq_copy.c
92
iowrite32(wq->ring.desc_count, &wq->ctrl->ring_size);
drivers/scsi/fnic/vnic_wq_copy.h
19
struct vnic_dev_ring ring;
drivers/scsi/fnic/vnic_wq_copy.h
26
return wq->ring.desc_avail;
drivers/scsi/fnic/vnic_wq_copy.h
31
return wq->ring.desc_count - 1 - wq->ring.desc_avail;
drivers/scsi/fnic/vnic_wq_copy.h
36
struct fcpio_host_req *desc = wq->ring.descs;
drivers/scsi/fnic/vnic_wq_copy.h
43
((wq->to_use_index + 1) == wq->ring.desc_count) ?
drivers/scsi/fnic/vnic_wq_copy.h
45
wq->ring.desc_avail--;
drivers/scsi/fnic/vnic_wq_copy.h
64
cnt = wq->ring.desc_count - wq->to_clean_index + index + 1;
drivers/scsi/fnic/vnic_wq_copy.h
66
wq->to_clean_index = ((index + 1) % wq->ring.desc_count);
drivers/scsi/fnic/vnic_wq_copy.h
67
wq->ring.desc_avail += cnt;
drivers/scsi/fnic/vnic_wq_copy.h
76
struct fcpio_host_req *wq_desc = wq->ring.descs;
drivers/scsi/fnic/vnic_wq_copy.h
84
wq->ring.desc_avail++;
drivers/scsi/fnic/vnic_wq_copy.h
91
((wq->to_clean_index + 1) == wq->ring.desc_count) ?
drivers/scsi/ibmvscsi_tgt/libsrp.c
101
kfree(ring);
drivers/scsi/ibmvscsi_tgt/libsrp.c
25
struct srp_buf **ring)
drivers/scsi/ibmvscsi_tgt/libsrp.c
42
iue->sbuf = ring[i];
drivers/scsi/ibmvscsi_tgt/libsrp.c
61
struct srp_buf **ring;
drivers/scsi/ibmvscsi_tgt/libsrp.c
64
ring = kzalloc_objs(struct srp_buf *, max);
drivers/scsi/ibmvscsi_tgt/libsrp.c
65
if (!ring)
drivers/scsi/ibmvscsi_tgt/libsrp.c
69
ring[i] = kzalloc_obj(*ring[i]);
drivers/scsi/ibmvscsi_tgt/libsrp.c
70
if (!ring[i])
drivers/scsi/ibmvscsi_tgt/libsrp.c
72
ring[i]->buf = dma_alloc_coherent(dev, size, &ring[i]->dma,
drivers/scsi/ibmvscsi_tgt/libsrp.c
74
if (!ring[i]->buf)
drivers/scsi/ibmvscsi_tgt/libsrp.c
77
return ring;
drivers/scsi/ibmvscsi_tgt/libsrp.c
80
for (i = 0; i < max && ring[i]; i++) {
drivers/scsi/ibmvscsi_tgt/libsrp.c
81
if (ring[i]->buf) {
drivers/scsi/ibmvscsi_tgt/libsrp.c
82
dma_free_coherent(dev, size, ring[i]->buf,
drivers/scsi/ibmvscsi_tgt/libsrp.c
83
ring[i]->dma);
drivers/scsi/ibmvscsi_tgt/libsrp.c
85
kfree(ring[i]);
drivers/scsi/ibmvscsi_tgt/libsrp.c
87
kfree(ring);
drivers/scsi/ibmvscsi_tgt/libsrp.c
92
static void srp_ring_free(struct device *dev, struct srp_buf **ring,
drivers/scsi/ibmvscsi_tgt/libsrp.c
98
dma_free_coherent(dev, size, ring[i]->buf, ring[i]->dma);
drivers/scsi/ibmvscsi_tgt/libsrp.c
99
kfree(ring[i]);
drivers/scsi/lpfc/lpfc.h
1572
struct rx_info_entry *ring; /* info organized in a circular buffer */
drivers/scsi/lpfc/lpfc_hw.h
2576
uint32_t ring:4;
drivers/scsi/lpfc/lpfc_hw.h
2578
uint32_t ring:4;
drivers/scsi/lpfc/lpfc_hw.h
2684
uint32_t ring:4;
drivers/scsi/lpfc/lpfc_hw.h
2686
uint32_t ring:4;
drivers/scsi/lpfc/lpfc_hw.h
3688
uint32_t ring:2; /* Ring for ASYNC_EVENT iocb Bits 0-1*/
drivers/scsi/lpfc/lpfc_hw.h
3690
uint32_t ring:2; /* Ring for ASYNC_EVENT iocb Bits 0-1*/
drivers/scsi/lpfc/lpfc_mbox.c
1267
lpfc_config_ring(struct lpfc_hba * phba, int ring, LPFC_MBOXQ_t * pmb)
drivers/scsi/lpfc/lpfc_mbox.c
1276
mb->un.varCfgRing.ring = ring;
drivers/scsi/lpfc/lpfc_mbox.c
1282
pring = &psli->sli3_ring[ring];
drivers/scsi/lpfc/lpfc_mbox.c
292
uint32_t ring)
drivers/scsi/lpfc/lpfc_mbox.c
299
mb->un.varCfgAsyncEvent.ring = ring;
drivers/scsi/lpfc/lpfc_sli.c
8049
rx_monitor->ring = kmalloc_objs(struct rx_info_entry, entries);
drivers/scsi/lpfc/lpfc_sli.c
8050
if (!rx_monitor->ring)
drivers/scsi/lpfc/lpfc_sli.c
8069
kfree(rx_monitor->ring);
drivers/scsi/lpfc/lpfc_sli.c
8070
rx_monitor->ring = NULL;
drivers/scsi/lpfc/lpfc_sli.c
8091
struct rx_info_entry *ring = rx_monitor->ring;
drivers/scsi/lpfc/lpfc_sli.c
8098
memcpy(&ring[*tail_idx], entry, sizeof(*entry));
drivers/scsi/lpfc/lpfc_sli.c
8128
struct rx_info_entry *ring = rx_monitor->ring;
drivers/scsi/lpfc/lpfc_sli.c
8155
entry = &ring[*head_idx];
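The lpfc_sli.c lines above operate the rx_info circular buffer declared in lpfc.h: a record is copied in at the tail index and later read back from the head index. Below is a small stand-alone model of that head/tail scheme; the drop-oldest-on-full policy and the field names are assumptions of this sketch, not something the listed lines establish.

#include <stdio.h>
#include <string.h>

#define ENTRIES 4

struct demo_rx_info {
	unsigned long timestamp;
	unsigned long bytes;
};

int main(void)
{
	struct demo_rx_info ring[ENTRIES];
	unsigned int head = 0, tail = 0;
	unsigned long t;

	for (t = 0; t < 6; t++) {
		struct demo_rx_info entry = { .timestamp = t, .bytes = t * 100 };

		memcpy(&ring[tail], &entry, sizeof(entry));  /* write at the tail */
		tail = (tail + 1) % ENTRIES;
		if (tail == head)                            /* full: drop the oldest */
			head = (head + 1) % ENTRIES;
	}
	while (head != tail) {                               /* drain from the head */
		printf("ts %lu bytes %lu\n", ring[head].timestamp, ring[head].bytes);
		head = (head + 1) % ENTRIES;
	}
	return 0;
}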
drivers/scsi/qla2xxx/qla_dbg.c
563
void *ring;
drivers/scsi/qla2xxx/qla_dbg.c
572
aqp->ring = ha->tgt.atio_ring;
drivers/scsi/qla2xxx/qla_dbg.c
593
memcpy(ptr, aqp->ring, aqp->length * sizeof(request_t));
drivers/scsi/qla2xxx/qla_dbg.c
637
memcpy(ptr, req->ring, req->length * sizeof(request_t));
drivers/scsi/qla2xxx/qla_dbg.c
665
memcpy(ptr, rsp->ring, rsp->length * sizeof(response_t));
drivers/scsi/qla2xxx/qla_dbg.c
93
memcpy(ptr, req->ring, req->length *
drivers/scsi/qla2xxx/qla_dbg.c
98
memcpy(ptr, rsp->ring, rsp->length *
drivers/scsi/qla2xxx/qla_def.h
3738
response_t *ring;
drivers/scsi/qla2xxx/qla_def.h
3765
request_t *ring;
drivers/scsi/qla2xxx/qla_edif.c
3200
req->ring_ptr = req->ring;
drivers/scsi/qla2xxx/qla_init.c
2802
ha->isp_ops->get_flash_version(vha, req->ring);
drivers/scsi/qla2xxx/qla_init.c
4483
rsp->ring_ptr = rsp->ring;
drivers/scsi/qla2xxx/qla_init.c
4810
req->out_ptr = (uint16_t *)(req->ring + req->length);
drivers/scsi/qla2xxx/qla_init.c
4818
req->ring_ptr = req->ring;
drivers/scsi/qla2xxx/qla_init.c
4827
rsp->in_ptr = (uint16_t *)(rsp->ring + rsp->length);
drivers/scsi/qla2xxx/qla_init.c
7599
ha->isp_ops->get_flash_version(vha, req->ring);
drivers/scsi/qla2xxx/qla_init.c
8473
dcode = (uint32_t *)req->ring;
drivers/scsi/qla2xxx/qla_init.c
8499
dcode = (uint32_t *)req->ring;
drivers/scsi/qla2xxx/qla_init.c
8534
dcode = (uint32_t *)req->ring;
drivers/scsi/qla2xxx/qla_init.c
8623
dcode = (uint32_t *)req->ring;
drivers/scsi/qla2xxx/qla_init.c
8635
dcode = (uint32_t *)req->ring;
drivers/scsi/qla2xxx/qla_init.c
8696
dcode = (uint32_t *)req->ring;
drivers/scsi/qla2xxx/qla_init.c
8789
wcode = (uint16_t *)req->ring;
drivers/scsi/qla2xxx/qla_init.c
8904
dcode = (uint32_t *)req->ring;
drivers/scsi/qla2xxx/qla_inline.h
365
req->ring_ptr = req->ring;
drivers/scsi/qla2xxx/qla_iocb.c
102
req->ring_ptr = req->ring;
drivers/scsi/qla2xxx/qla_iocb.c
131
req->ring_ptr = req->ring;
drivers/scsi/qla2xxx/qla_iocb.c
1660
req->ring_ptr = req->ring;
drivers/scsi/qla2xxx/qla_iocb.c
1866
req->ring_ptr = req->ring;
drivers/scsi/qla2xxx/qla_iocb.c
2027
req->ring_ptr = req->ring;
drivers/scsi/qla2xxx/qla_iocb.c
2247
req->ring_ptr = req->ring;
drivers/scsi/qla2xxx/qla_iocb.c
3643
req->ring_ptr = req->ring;
drivers/scsi/qla2xxx/qla_iocb.c
426
req->ring_ptr = req->ring;
drivers/scsi/qla2xxx/qla_iocb.c
4452
req->ring_ptr = req->ring;
drivers/scsi/qla2xxx/qla_iocb.c
471
req->ring_ptr = req->ring;
drivers/scsi/qla2xxx/qla_isr.c
1218
rsp_q->ring_ptr = rsp_q->ring;
drivers/scsi/qla2xxx/qla_isr.c
215
rsp_q->ring_ptr = rsp_q->ring;
drivers/scsi/qla2xxx/qla_isr.c
293
rsp_q->ring_ptr = rsp_q->ring;
drivers/scsi/qla2xxx/qla_isr.c
2979
rsp->ring_ptr = rsp->ring;
drivers/scsi/qla2xxx/qla_isr.c
3938
__func__, rsp->ring, pkt, pkt->entry_count, iocb_cnt, rsp_q_in, rc);
drivers/scsi/qla2xxx/qla_isr.c
4009
rsp->ring_ptr = rsp->ring;
drivers/scsi/qla2xxx/qla_isr.c
934
rsp_q->ring_ptr = rsp_q->ring;
drivers/scsi/qla2xxx/qla_mid.c
579
sizeof(request_t), req->ring, req->dma);
drivers/scsi/qla2xxx/qla_mid.c
580
req->ring = NULL;
drivers/scsi/qla2xxx/qla_mid.c
605
sizeof(response_t), rsp->ring, rsp->dma);
drivers/scsi/qla2xxx/qla_mid.c
606
rsp->ring = NULL;
drivers/scsi/qla2xxx/qla_mid.c
718
req->ring = dma_alloc_coherent(&ha->pdev->dev,
drivers/scsi/qla2xxx/qla_mid.c
721
if (req->ring == NULL) {
drivers/scsi/qla2xxx/qla_mid.c
771
req->ring_ptr = req->ring;
drivers/scsi/qla2xxx/qla_mid.c
779
req->out_ptr = (uint16_t *)(req->ring + req->length);
drivers/scsi/qla2xxx/qla_mid.c
845
rsp->ring = dma_alloc_coherent(&ha->pdev->dev,
drivers/scsi/qla2xxx/qla_mid.c
848
if (rsp->ring == NULL) {
drivers/scsi/qla2xxx/qla_mid.c
891
rsp->in_ptr = (uint16_t *)(rsp->ring + rsp->length);
drivers/scsi/qla2xxx/qla_mr.c
1401
rsp->ring_ptr = rsp->ring;
drivers/scsi/qla2xxx/qla_mr.c
2710
rsp->ring_ptr = rsp->ring;
drivers/scsi/qla2xxx/qla_mr.c
2951
req->ring_ptr = req->ring;
drivers/scsi/qla2xxx/qla_mr.c
3146
req->ring_ptr = req->ring;
drivers/scsi/qla2xxx/qla_mr.c
816
req->ring_fx00 = req->ring;
drivers/scsi/qla2xxx/qla_mr.c
820
rsp->ring_fx00 = rsp->ring;
drivers/scsi/qla2xxx/qla_mr.c
843
req->ring = (void __force *)ha->iobase + ha->req_que_off;
drivers/scsi/qla2xxx/qla_mr.c
845
if ((!req->ring) || (req->length == 0)) {
drivers/scsi/qla2xxx/qla_mr.c
854
req, req->ring, req->length,
drivers/scsi/qla2xxx/qla_mr.c
858
rsp->ring = (void __force *)ha->iobase + ha->rsp_que_off;
drivers/scsi/qla2xxx/qla_mr.c
860
if ((!rsp->ring) || (rsp->length == 0)) {
drivers/scsi/qla2xxx/qla_mr.c
869
rsp, rsp->ring, rsp->length,
drivers/scsi/qla2xxx/qla_nvme.c
713
req->ring_ptr = req->ring;
drivers/scsi/qla2xxx/qla_nvme.c
737
req->ring_ptr = req->ring;
drivers/scsi/qla2xxx/qla_nx.c
2738
req->ring_ptr = req->ring;
drivers/scsi/qla2xxx/qla_nx.c
2961
ha->isp_ops->get_flash_version(vha, req->ring);
drivers/scsi/qla2xxx/qla_nx2.c
1657
ha->isp_ops->get_flash_version(vha, vha->req->ring);
drivers/scsi/qla2xxx/qla_os.c
4344
(*req)->ring = dma_alloc_coherent(&ha->pdev->dev,
drivers/scsi/qla2xxx/qla_os.c
4347
if (!(*req)->ring) {
drivers/scsi/qla2xxx/qla_os.c
4361
(*rsp)->ring = dma_alloc_coherent(&ha->pdev->dev,
drivers/scsi/qla2xxx/qla_os.c
4364
if (!(*rsp)->ring) {
drivers/scsi/qla2xxx/qla_os.c
4374
*req, (*req)->length, (*req)->ring, *rsp, (*rsp)->length,
drivers/scsi/qla2xxx/qla_os.c
4375
(*rsp)->ring);
drivers/scsi/qla2xxx/qla_os.c
4508
sizeof(response_t), (*rsp)->ring, (*rsp)->dma);
drivers/scsi/qla2xxx/qla_os.c
4509
(*rsp)->ring = NULL;
drivers/scsi/qla2xxx/qla_os.c
4516
sizeof(request_t), (*req)->ring, (*req)->dma);
drivers/scsi/qla2xxx/qla_os.c
4517
(*req)->ring = NULL;
drivers/scsi/qla2xxx/qla_os.c
509
} else if (req && req->ring)
drivers/scsi/qla2xxx/qla_os.c
512
req->ring, req->dma);
drivers/scsi/qla2xxx/qla_os.c
527
} else if (rsp && rsp->ring) {
drivers/scsi/qla2xxx/qla_os.c
530
rsp->ring, rsp->dma);
drivers/scsi/qla2xxx/qla_sup.c
1069
wptr = (__force __le32 *)req->ring;
drivers/scsi/qla2xxx/qla_sup.c
1070
ha->isp_ops->read_optrom(vha, req->ring, QLA82XX_IDC_PARAM_ADDR, 8);
drivers/scsi/qla2xxx/qla_sup.c
1091
uint32_t *dcode = (uint32_t *)req->ring;
drivers/scsi/qla2xxx/qla_sup.c
555
struct qla_flt_location *fltl = (void *)req->ring;
drivers/scsi/qla2xxx/qla_sup.c
556
uint32_t *dcode = (uint32_t *)req->ring;
drivers/scsi/qla2xxx/qla_sup.c
557
uint8_t *buf = (void *)req->ring, *bcode, last_image;
drivers/scsi/qla2xxx/qla_sup.c
629
wptr = (__force __le16 *)req->ring;
drivers/scsi/qla2xxx/qla_sup.c
969
__le16 *wptr = (__force __le16 *)req->ring;
drivers/scsi/qla2xxx/qla_sup.c
970
struct qla_fdt_layout *fdt = (struct qla_fdt_layout *)req->ring;
drivers/scsi/qla2xxx/qla_target.c
2499
req->ring_ptr = req->ring;
drivers/scsi/qla2xxx/qla_tmpl.c
345
qla27xx_insertbuf(req ? req->ring : NULL,
drivers/scsi/qla2xxx/qla_tmpl.c
346
length * sizeof(*req->ring), buf, len);
drivers/scsi/qla2xxx/qla_tmpl.c
359
qla27xx_insertbuf(rsp ? rsp->ring : NULL,
drivers/scsi/qla2xxx/qla_tmpl.c
360
length * sizeof(*rsp->ring), buf, len);
drivers/scsi/snic/vnic_cq.c
12
svnic_dev_free_desc_ring(cq->vdev, &cq->ring);
drivers/scsi/snic/vnic_cq.c
30
return svnic_dev_alloc_desc_ring(vdev, &cq->ring, desc_count, desc_size);
drivers/scsi/snic/vnic_cq.c
41
paddr = (u64)cq->ring.base_addr | VNIC_PADDR_TARGET;
drivers/scsi/snic/vnic_cq.c
43
iowrite32(cq->ring.desc_count, &cq->ctrl->ring_size);
drivers/scsi/snic/vnic_cq.c
65
svnic_dev_clear_desc_ring(&cq->ring);
drivers/scsi/snic/vnic_cq.h
41
struct vnic_dev_ring ring;
drivers/scsi/snic/vnic_cq.h
57
cq_desc = (struct cq_desc *)((u8 *)cq->ring.descs +
drivers/scsi/snic/vnic_cq.h
58
cq->ring.desc_size * cq->to_clean);
drivers/scsi/snic/vnic_cq.h
69
if (cq->to_clean == cq->ring.desc_count) {
drivers/scsi/snic/vnic_cq.h
74
cq_desc = (struct cq_desc *)((u8 *)cq->ring.descs +
drivers/scsi/snic/vnic_cq.h
75
cq->ring.desc_size * cq->to_clean);
drivers/scsi/snic/vnic_cq_fw.h
21
desc = (struct snic_fw_req *)((u8 *)cq->ring.descs +
drivers/scsi/snic/vnic_cq_fw.h
22
cq->ring.desc_size * cq->to_clean);
drivers/scsi/snic/vnic_cq_fw.h
31
if (cq->to_clean == cq->ring.desc_count) {
drivers/scsi/snic/vnic_cq_fw.h
36
desc = (struct snic_fw_req *)((u8 *)cq->ring.descs +
drivers/scsi/snic/vnic_cq_fw.h
37
cq->ring.desc_size * cq->to_clean);
drivers/scsi/snic/vnic_dev.c
174
unsigned int svnic_dev_desc_ring_size(struct vnic_dev_ring *ring,
drivers/scsi/snic/vnic_dev.c
187
ring->base_align = 512;
drivers/scsi/snic/vnic_dev.c
192
ring->desc_count = ALIGN(desc_count, count_align);
drivers/scsi/snic/vnic_dev.c
194
ring->desc_size = ALIGN(desc_size, desc_align);
drivers/scsi/snic/vnic_dev.c
196
ring->size = ring->desc_count * ring->desc_size;
drivers/scsi/snic/vnic_dev.c
197
ring->size_unaligned = ring->size + ring->base_align;
drivers/scsi/snic/vnic_dev.c
199
return ring->size_unaligned;
drivers/scsi/snic/vnic_dev.c
202
void svnic_dev_clear_desc_ring(struct vnic_dev_ring *ring)
drivers/scsi/snic/vnic_dev.c
204
memset(ring->descs, 0, ring->size);
drivers/scsi/snic/vnic_dev.c
207
int svnic_dev_alloc_desc_ring(struct vnic_dev *vdev, struct vnic_dev_ring *ring,
drivers/scsi/snic/vnic_dev.c
210
svnic_dev_desc_ring_size(ring, desc_count, desc_size);
drivers/scsi/snic/vnic_dev.c
212
ring->descs_unaligned = dma_alloc_coherent(&vdev->pdev->dev,
drivers/scsi/snic/vnic_dev.c
213
ring->size_unaligned, &ring->base_addr_unaligned,
drivers/scsi/snic/vnic_dev.c
215
if (!ring->descs_unaligned) {
drivers/scsi/snic/vnic_dev.c
217
(int)ring->size);
drivers/scsi/snic/vnic_dev.c
222
ring->base_addr = ALIGN(ring->base_addr_unaligned,
drivers/scsi/snic/vnic_dev.c
223
ring->base_align);
drivers/scsi/snic/vnic_dev.c
224
ring->descs = (u8 *)ring->descs_unaligned +
drivers/scsi/snic/vnic_dev.c
225
(ring->base_addr - ring->base_addr_unaligned);
drivers/scsi/snic/vnic_dev.c
227
svnic_dev_clear_desc_ring(ring);
drivers/scsi/snic/vnic_dev.c
229
ring->desc_avail = ring->desc_count - 1;
drivers/scsi/snic/vnic_dev.c
234
void svnic_dev_free_desc_ring(struct vnic_dev *vdev, struct vnic_dev_ring *ring)
drivers/scsi/snic/vnic_dev.c
236
if (ring->descs) {
drivers/scsi/snic/vnic_dev.c
238
ring->size_unaligned,
drivers/scsi/snic/vnic_dev.c
239
ring->descs_unaligned,
drivers/scsi/snic/vnic_dev.c
240
ring->base_addr_unaligned);
drivers/scsi/snic/vnic_dev.c
241
ring->descs = NULL;
drivers/scsi/snic/vnic_dev.c
390
dc2c->cmd_ring = (struct vnic_devcmd2 *) dc2c->wq.ring.descs;
drivers/scsi/snic/vnic_dev.h
61
unsigned int svnic_dev_desc_ring_size(struct vnic_dev_ring *ring,
drivers/scsi/snic/vnic_dev.h
64
void svnic_dev_clear_desc_ring(struct vnic_dev_ring *ring);
drivers/scsi/snic/vnic_dev.h
65
int svnic_dev_alloc_desc_ring(struct vnic_dev *vdev, struct vnic_dev_ring *ring,
drivers/scsi/snic/vnic_dev.h
68
struct vnic_dev_ring *ring);
drivers/scsi/snic/vnic_wq.c
146
unsigned int count = wq->ring.desc_count;
drivers/scsi/snic/vnic_wq.c
148
paddr = (u64)wq->ring.base_addr | VNIC_PADDR_TARGET;
drivers/scsi/snic/vnic_wq.c
213
wq->ring.desc_avail++;
drivers/scsi/snic/vnic_wq.c
222
svnic_dev_clear_desc_ring(&wq->ring);
drivers/scsi/snic/vnic_wq.c
25
return svnic_dev_alloc_desc_ring(vdev, &wq->ring, desc_count,
drivers/scsi/snic/vnic_wq.c
32
unsigned int i, j, count = wq->ring.desc_count;
drivers/scsi/snic/vnic_wq.c
48
buf->desc = (u8 *)wq->ring.descs +
drivers/scsi/snic/vnic_wq.c
49
wq->ring.desc_size * buf->index;
drivers/scsi/snic/vnic_wq.c
74
svnic_dev_free_desc_ring(vdev, &wq->ring);
drivers/scsi/snic/vnic_wq.h
111
wq->ring.desc_avail--;
drivers/scsi/snic/vnic_wq.h
127
wq->ring.desc_avail++;
drivers/scsi/snic/vnic_wq.h
64
struct vnic_dev_ring ring;
drivers/scsi/snic/vnic_wq.h
74
return wq->ring.desc_avail;
drivers/scsi/snic/vnic_wq.h
80
return wq->ring.desc_count - wq->ring.desc_avail - 1;
drivers/scsi/vmw_pvscsi.c
1101
struct PVSCSIRingMsgDesc *ring = adapter->msg_ring;
drivers/scsi/vmw_pvscsi.c
1105
struct PVSCSIRingMsgDesc *e = ring + (s->msgConsIdx &
drivers/scsi/vmw_pvscsi.c
662
struct PVSCSIRingCmpDesc *ring = adapter->cmp_ring;
drivers/scsi/vmw_pvscsi.c
666
struct PVSCSIRingCmpDesc *e = ring + (s->cmpConsIdx &
drivers/scsi/xen-scsifront.c
116
struct vscsiif_front_ring ring;
drivers/scsi/xen-scsifront.c
184
struct vscsiif_front_ring *ring = &(info->ring);
drivers/scsi/xen-scsifront.c
190
if (RING_FULL(&info->ring))
drivers/scsi/xen-scsifront.c
200
ring_req = RING_GET_REQUEST(&(info->ring), ring->req_prod_pvt);
drivers/scsi/xen-scsifront.c
201
ring->req_prod_pvt++;
drivers/scsi/xen-scsifront.c
225
RING_PUSH_REQUESTS_AND_CHECK_NOTIFY(ring, notify);
drivers/scsi/xen-scsifront.c
390
rp = READ_ONCE(info->ring.sring->rsp_prod);
drivers/scsi/xen-scsifront.c
392
if (RING_RESPONSE_PROD_OVERFLOW(&info->ring, rp)) {
drivers/scsi/xen-scsifront.c
396
for (i = info->ring.rsp_cons; i != rp; i++) {
drivers/scsi/xen-scsifront.c
397
RING_COPY_RESPONSE(&info->ring, i, &ring_rsp);
drivers/scsi/xen-scsifront.c
404
info->ring.rsp_cons = i;
drivers/scsi/xen-scsifront.c
406
if (i != info->ring.req_prod_pvt)
drivers/scsi/xen-scsifront.c
407
RING_FINAL_CHECK_FOR_RESPONSES(&info->ring, more_to_do);
drivers/scsi/xen-scsifront.c
409
info->ring.sring->rsp_event = i + 1;
drivers/scsi/xen-scsifront.c
802
XEN_FRONT_RING_INIT(&info->ring, sring, PAGE_SIZE);
drivers/scsi/xen-scsifront.c
839
xenbus_teardown_ring((void **)&info->ring.sring, 1, &info->ring_ref);
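The xen-scsifront.c lines above follow the usual Xen front-ring discipline: requests are staged against the private req_prod_pvt index and only published to the shared ring when the batch is pushed, while responses are consumed from rsp_cons up to the backend's rsp_prod. The sketch below models only the producer side of that idea; it is not the real RING_* macro machinery, and the types and names are invented for illustration.

#include <stdio.h>

#define RING_SIZE 8

struct demo_sring {
	unsigned int req_prod;      /* shared with the backend */
	unsigned int rsp_prod;      /* written by the backend */
	int req[RING_SIZE];
};

struct demo_front_ring {
	unsigned int req_prod_pvt;  /* frontend-private staging index */
	unsigned int rsp_cons;
	struct demo_sring *sring;
};

int main(void)
{
	struct demo_sring sring = { 0 };
	struct demo_front_ring ring = { .sring = &sring };
	int i;

	for (i = 0; i < 3; i++) {
		/* stage each request against the private producer index */
		sring.req[ring.req_prod_pvt % RING_SIZE] = i;
		ring.req_prod_pvt++;
	}
	/* publish the whole batch at once, so the backend never sees a
	 * half-built request */
	sring.req_prod = ring.req_prod_pvt;
	printf("published %u requests\n", sring.req_prod);
	return 0;
}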
drivers/soc/fsl/qbman/bman.c
123
struct bm_rcr_entry *ring, *cursor;
drivers/soc/fsl/qbman/bman.c
387
rcr->ring = portal->addr.ce + BM_CL_RCR;
drivers/soc/fsl/qbman/bman.c
390
rcr->cursor = rcr->ring + pi;
drivers/soc/fsl/qbman/qman.c
198
struct qm_eqcr_entry *ring, *cursor;
drivers/soc/fsl/qbman/qman.c
207
const struct qm_dqrr_entry *ring, *cursor;
drivers/soc/fsl/qbman/qman.c
217
union qm_mr_entry *ring, *cursor;
drivers/soc/fsl/qbman/qman.c
430
eqcr->ring = portal->addr.ce + QM_CL_EQCR;
drivers/soc/fsl/qbman/qman.c
434
eqcr->cursor = eqcr->ring + pi;
drivers/soc/fsl/qbman/qman.c
614
dqrr->ring = portal->addr.ce + QM_CL_DQRR;
drivers/soc/fsl/qbman/qman.c
617
dqrr->cursor = dqrr->ring + dqrr->ci;
drivers/soc/fsl/qbman/qman.c
629
dpaa_invalidate(qm_cl(dqrr->ring, cfg));
drivers/soc/fsl/qbman/qman.c
675
struct qm_dqrr_entry *res = qm_cl(dqrr->ring, dqrr->pi);
drivers/soc/fsl/qbman/qman.c
701
DPAA_ASSERT((dqrr->ring + idx) == dq);
drivers/soc/fsl/qbman/qman.c
768
mr->ring = portal->addr.ce + QM_CL_MR;
drivers/soc/fsl/qbman/qman.c
771
mr->cursor = mr->ring + mr->ci;
drivers/soc/fsl/qbman/qman.c
815
union qm_mr_entry *res = qm_cl(mr->ring, mr->pi);
drivers/soc/ti/k3-ringacc.c
1000
static int k3_ringacc_ring_push_tail_proxy(struct k3_ring *ring, void *elem)
drivers/soc/ti/k3-ringacc.c
1002
return k3_ringacc_ring_access_proxy(ring, elem,
drivers/soc/ti/k3-ringacc.c
1006
static int k3_ringacc_ring_pop_head_proxy(struct k3_ring *ring, void *elem)
drivers/soc/ti/k3-ringacc.c
1008
return k3_ringacc_ring_access_proxy(ring, elem,
drivers/soc/ti/k3-ringacc.c
1012
static int k3_ringacc_ring_pop_tail_proxy(struct k3_ring *ring, void *elem)
drivers/soc/ti/k3-ringacc.c
1014
return k3_ringacc_ring_access_proxy(ring, elem,
drivers/soc/ti/k3-ringacc.c
1018
static int k3_ringacc_ring_access_io(struct k3_ring *ring, void *elem,
drivers/soc/ti/k3-ringacc.c
1026
ptr = (void __iomem *)&ring->fifos->head_data;
drivers/soc/ti/k3-ringacc.c
1030
ptr = (void __iomem *)&ring->fifos->tail_data;
drivers/soc/ti/k3-ringacc.c
1036
ptr += k3_ringacc_ring_get_fifo_pos(ring);
drivers/soc/ti/k3-ringacc.c
1041
dev_dbg(ring->parent->dev,
drivers/soc/ti/k3-ringacc.c
1044
memcpy_fromio(elem, ptr, (4 << ring->elm_size));
drivers/soc/ti/k3-ringacc.c
1045
ring->state.occ--;
drivers/soc/ti/k3-ringacc.c
1049
dev_dbg(ring->parent->dev,
drivers/soc/ti/k3-ringacc.c
1052
memcpy_toio(ptr, elem, (4 << ring->elm_size));
drivers/soc/ti/k3-ringacc.c
1053
ring->state.free--;
drivers/soc/ti/k3-ringacc.c
1059
dev_dbg(ring->parent->dev, "free%d index%d occ%d index%d\n",
drivers/soc/ti/k3-ringacc.c
1060
ring->state.free, ring->state.windex, ring->state.occ,
drivers/soc/ti/k3-ringacc.c
1061
ring->state.rindex);
drivers/soc/ti/k3-ringacc.c
1065
static int k3_ringacc_ring_push_head_io(struct k3_ring *ring, void *elem)
drivers/soc/ti/k3-ringacc.c
1067
return k3_ringacc_ring_access_io(ring, elem,
drivers/soc/ti/k3-ringacc.c
1071
static int k3_ringacc_ring_push_io(struct k3_ring *ring, void *elem)
drivers/soc/ti/k3-ringacc.c
1073
return k3_ringacc_ring_access_io(ring, elem,
drivers/soc/ti/k3-ringacc.c
1077
static int k3_ringacc_ring_pop_io(struct k3_ring *ring, void *elem)
drivers/soc/ti/k3-ringacc.c
1079
return k3_ringacc_ring_access_io(ring, elem,
drivers/soc/ti/k3-ringacc.c
1083
static int k3_ringacc_ring_pop_tail_io(struct k3_ring *ring, void *elem)
drivers/soc/ti/k3-ringacc.c
1085
return k3_ringacc_ring_access_io(ring, elem,
drivers/soc/ti/k3-ringacc.c
1099
static int k3_dmaring_fwd_pop(struct k3_ring *ring, void *elem)
drivers/soc/ti/k3-ringacc.c
1110
ring->state.occ = k3_ringacc_ring_read_occ(ring);
drivers/soc/ti/k3-ringacc.c
1111
if (ring->state.windex >= ring->state.occ)
drivers/soc/ti/k3-ringacc.c
1112
elem_idx = ring->state.windex - ring->state.occ;
drivers/soc/ti/k3-ringacc.c
1114
elem_idx = ring->size - (ring->state.occ - ring->state.windex);
drivers/soc/ti/k3-ringacc.c
1116
elem_ptr = k3_ringacc_get_elm_addr(ring, elem_idx);
drivers/soc/ti/k3-ringacc.c
1117
memcpy(elem, elem_ptr, (4 << ring->elm_size));
drivers/soc/ti/k3-ringacc.c
1120
ring->state.occ--;
drivers/soc/ti/k3-ringacc.c
1121
writel(-1, &ring->rt->db);
drivers/soc/ti/k3-ringacc.c
1123
dev_dbg(ring->parent->dev, "%s: occ%d Windex%d Rindex%d pos_ptr%px\n",
drivers/soc/ti/k3-ringacc.c
1124
__func__, ring->state.occ, ring->state.windex, elem_idx,
drivers/soc/ti/k3-ringacc.c
1129
static int k3_dmaring_reverse_pop(struct k3_ring *ring, void *elem)
drivers/soc/ti/k3-ringacc.c
1133
elem_ptr = k3_ringacc_get_elm_addr(ring, ring->state.rindex);
drivers/soc/ti/k3-ringacc.c
1135
if (ring->state.occ) {
drivers/soc/ti/k3-ringacc.c
1136
memcpy(elem, elem_ptr, (4 << ring->elm_size));
drivers/soc/ti/k3-ringacc.c
1139
ring->state.rindex = (ring->state.rindex + 1) % ring->size;
drivers/soc/ti/k3-ringacc.c
114
int (*push_tail)(struct k3_ring *ring, void *elm);
drivers/soc/ti/k3-ringacc.c
1140
ring->state.occ--;
drivers/soc/ti/k3-ringacc.c
1141
writel(-1 & K3_DMARING_RT_DB_ENTRY_MASK, &ring->rt->db);
drivers/soc/ti/k3-ringacc.c
1142
} else if (ring->state.tdown_complete) {
drivers/soc/ti/k3-ringacc.c
1146
writel(K3_DMARING_RT_DB_TDOWN_ACK, &ring->rt->db);
drivers/soc/ti/k3-ringacc.c
1147
ring->state.tdown_complete = false;
drivers/soc/ti/k3-ringacc.c
115
int (*push_head)(struct k3_ring *ring, void *elm);
drivers/soc/ti/k3-ringacc.c
1150
dev_dbg(ring->parent->dev, "%s: occ%d index%d pos_ptr%px\n",
drivers/soc/ti/k3-ringacc.c
1151
__func__, ring->state.occ, ring->state.rindex, elem_ptr);
drivers/soc/ti/k3-ringacc.c
1155
static int k3_ringacc_ring_push_mem(struct k3_ring *ring, void *elem)
drivers/soc/ti/k3-ringacc.c
1159
elem_ptr = k3_ringacc_get_elm_addr(ring, ring->state.windex);
drivers/soc/ti/k3-ringacc.c
116
int (*pop_tail)(struct k3_ring *ring, void *elm);
drivers/soc/ti/k3-ringacc.c
1161
memcpy(elem_ptr, elem, (4 << ring->elm_size));
drivers/soc/ti/k3-ringacc.c
1162
if (ring->parent->dma_rings) {
drivers/soc/ti/k3-ringacc.c
1165
*addr |= ((u64)ring->asel << K3_ADDRESS_ASEL_SHIFT);
drivers/soc/ti/k3-ringacc.c
1168
ring->state.windex = (ring->state.windex + 1) % ring->size;
drivers/soc/ti/k3-ringacc.c
1169
ring->state.free--;
drivers/soc/ti/k3-ringacc.c
117
int (*pop_head)(struct k3_ring *ring, void *elm);
drivers/soc/ti/k3-ringacc.c
1170
writel(1, &ring->rt->db);
drivers/soc/ti/k3-ringacc.c
1172
dev_dbg(ring->parent->dev, "ring_push_mem: free%d index%d\n",
drivers/soc/ti/k3-ringacc.c
1173
ring->state.free, ring->state.windex);
drivers/soc/ti/k3-ringacc.c
1178
static int k3_ringacc_ring_pop_mem(struct k3_ring *ring, void *elem)
drivers/soc/ti/k3-ringacc.c
1182
elem_ptr = k3_ringacc_get_elm_addr(ring, ring->state.rindex);
drivers/soc/ti/k3-ringacc.c
1184
memcpy(elem, elem_ptr, (4 << ring->elm_size));
drivers/soc/ti/k3-ringacc.c
1186
ring->state.rindex = (ring->state.rindex + 1) % ring->size;
drivers/soc/ti/k3-ringacc.c
1187
ring->state.occ--;
drivers/soc/ti/k3-ringacc.c
1188
writel(-1, &ring->rt->db);
drivers/soc/ti/k3-ringacc.c
1190
dev_dbg(ring->parent->dev, "ring_pop_mem: occ%d index%d pos_ptr%p\n",
drivers/soc/ti/k3-ringacc.c
1191
ring->state.occ, ring->state.rindex, elem_ptr);
drivers/soc/ti/k3-ringacc.c
1195
int k3_ringacc_ring_push(struct k3_ring *ring, void *elem)
drivers/soc/ti/k3-ringacc.c
1199
if (!ring || !(ring->flags & K3_RING_FLAG_BUSY))
drivers/soc/ti/k3-ringacc.c
1202
dev_dbg(ring->parent->dev, "ring_push: free%d index%d\n",
drivers/soc/ti/k3-ringacc.c
1203
ring->state.free, ring->state.windex);
drivers/soc/ti/k3-ringacc.c
1205
if (k3_ringacc_ring_is_full(ring))
drivers/soc/ti/k3-ringacc.c
1208
if (ring->ops && ring->ops->push_tail)
drivers/soc/ti/k3-ringacc.c
1209
ret = ring->ops->push_tail(ring, elem);
drivers/soc/ti/k3-ringacc.c
1215
int k3_ringacc_ring_push_head(struct k3_ring *ring, void *elem)
drivers/soc/ti/k3-ringacc.c
1219
if (!ring || !(ring->flags & K3_RING_FLAG_BUSY))
drivers/soc/ti/k3-ringacc.c
1222
dev_dbg(ring->parent->dev, "ring_push_head: free%d index%d\n",
drivers/soc/ti/k3-ringacc.c
1223
ring->state.free, ring->state.windex);
drivers/soc/ti/k3-ringacc.c
1225
if (k3_ringacc_ring_is_full(ring))
drivers/soc/ti/k3-ringacc.c
1228
if (ring->ops && ring->ops->push_head)
drivers/soc/ti/k3-ringacc.c
1229
ret = ring->ops->push_head(ring, elem);
drivers/soc/ti/k3-ringacc.c
1235
int k3_ringacc_ring_pop(struct k3_ring *ring, void *elem)
drivers/soc/ti/k3-ringacc.c
1239
if (!ring || !(ring->flags & K3_RING_FLAG_BUSY))
drivers/soc/ti/k3-ringacc.c
1242
if (!ring->state.occ)
drivers/soc/ti/k3-ringacc.c
1243
k3_ringacc_ring_update_occ(ring);
drivers/soc/ti/k3-ringacc.c
1245
dev_dbg(ring->parent->dev, "ring_pop: occ%d index%d\n", ring->state.occ,
drivers/soc/ti/k3-ringacc.c
1246
ring->state.rindex);
drivers/soc/ti/k3-ringacc.c
1248
if (!ring->state.occ && !ring->state.tdown_complete)
drivers/soc/ti/k3-ringacc.c
1251
if (ring->ops && ring->ops->pop_head)
drivers/soc/ti/k3-ringacc.c
1252
ret = ring->ops->pop_head(ring, elem);
drivers/soc/ti/k3-ringacc.c
1258
int k3_ringacc_ring_pop_tail(struct k3_ring *ring, void *elem)
drivers/soc/ti/k3-ringacc.c
1262
if (!ring || !(ring->flags & K3_RING_FLAG_BUSY))
drivers/soc/ti/k3-ringacc.c
1265
if (!ring->state.occ)
drivers/soc/ti/k3-ringacc.c
1266
k3_ringacc_ring_update_occ(ring);
drivers/soc/ti/k3-ringacc.c
1268
dev_dbg(ring->parent->dev, "ring_pop_tail: occ%d index%d\n",
drivers/soc/ti/k3-ringacc.c
1269
ring->state.occ, ring->state.rindex);
drivers/soc/ti/k3-ringacc.c
1271
if (!ring->state.occ)
drivers/soc/ti/k3-ringacc.c
1274
if (ring->ops && ring->ops->pop_tail)
drivers/soc/ti/k3-ringacc.c
1275
ret = ring->ops->pop_tail(ring, elem);
drivers/soc/ti/k3-ringacc.c
1498
struct k3_ring *ring = &ringacc->rings[i];
drivers/soc/ti/k3-ringacc.c
1500
ring->rt = base_rt + K3_DMARING_RT_REGS_STEP * i;
drivers/soc/ti/k3-ringacc.c
1501
ring->parent = ringacc;
drivers/soc/ti/k3-ringacc.c
1502
ring->ring_id = i;
drivers/soc/ti/k3-ringacc.c
1503
ring->proxy_id = K3_RINGACC_PROXY_NOT_USED;
drivers/soc/ti/k3-ringacc.c
1505
ring = &ringacc->rings[ringacc->num_rings + i];
drivers/soc/ti/k3-ringacc.c
1506
ring->rt = base_rt + K3_DMARING_RT_REGS_STEP * i +
drivers/soc/ti/k3-ringacc.c
1508
ring->parent = ringacc;
drivers/soc/ti/k3-ringacc.c
1509
ring->ring_id = i;
drivers/soc/ti/k3-ringacc.c
1510
ring->proxy_id = K3_RINGACC_PROXY_NOT_USED;
drivers/soc/ti/k3-ringacc.c
1511
ring->flags = K3_RING_FLAG_REVERSE;
drivers/soc/ti/k3-ringacc.c
240
static int k3_ringacc_ring_read_occ(struct k3_ring *ring)
drivers/soc/ti/k3-ringacc.c
242
return readl(&ring->rt->occ) & K3_RINGACC_RT_OCC_MASK;
drivers/soc/ti/k3-ringacc.c
245
static void k3_ringacc_ring_update_occ(struct k3_ring *ring)
drivers/soc/ti/k3-ringacc.c
249
val = readl(&ring->rt->occ);
drivers/soc/ti/k3-ringacc.c
251
ring->state.occ = val & K3_RINGACC_RT_OCC_MASK;
drivers/soc/ti/k3-ringacc.c
252
ring->state.tdown_complete = !!(val & K3_DMARING_RT_OCC_TDOWN_COMPLETE);
drivers/soc/ti/k3-ringacc.c
255
static long k3_ringacc_ring_get_fifo_pos(struct k3_ring *ring)
drivers/soc/ti/k3-ringacc.c
258
(4 << ring->elm_size);
drivers/soc/ti/k3-ringacc.c
261
static void *k3_ringacc_get_elm_addr(struct k3_ring *ring, u32 idx)
drivers/soc/ti/k3-ringacc.c
263
return (ring->ring_mem_virt + idx * (4 << ring->elm_size));
drivers/soc/ti/k3-ringacc.c
266
static int k3_ringacc_ring_push_mem(struct k3_ring *ring, void *elem);
drivers/soc/ti/k3-ringacc.c
267
static int k3_ringacc_ring_pop_mem(struct k3_ring *ring, void *elem);
drivers/soc/ti/k3-ringacc.c
268
static int k3_dmaring_fwd_pop(struct k3_ring *ring, void *elem);
drivers/soc/ti/k3-ringacc.c
269
static int k3_dmaring_reverse_pop(struct k3_ring *ring, void *elem);
drivers/soc/ti/k3-ringacc.c
286
static int k3_ringacc_ring_push_io(struct k3_ring *ring, void *elem);
drivers/soc/ti/k3-ringacc.c
287
static int k3_ringacc_ring_pop_io(struct k3_ring *ring, void *elem);
drivers/soc/ti/k3-ringacc.c
288
static int k3_ringacc_ring_push_head_io(struct k3_ring *ring, void *elem);
drivers/soc/ti/k3-ringacc.c
289
static int k3_ringacc_ring_pop_tail_io(struct k3_ring *ring, void *elem);
drivers/soc/ti/k3-ringacc.c
298
static int k3_ringacc_ring_push_head_proxy(struct k3_ring *ring, void *elem);
drivers/soc/ti/k3-ringacc.c
299
static int k3_ringacc_ring_push_tail_proxy(struct k3_ring *ring, void *elem);
drivers/soc/ti/k3-ringacc.c
300
static int k3_ringacc_ring_pop_head_proxy(struct k3_ring *ring, void *elem);
drivers/soc/ti/k3-ringacc.c
301
static int k3_ringacc_ring_pop_tail_proxy(struct k3_ring *ring, void *elem);
drivers/soc/ti/k3-ringacc.c
310
static void k3_ringacc_ring_dump(struct k3_ring *ring)
drivers/soc/ti/k3-ringacc.c
312
struct device *dev = ring->parent->dev;
drivers/soc/ti/k3-ringacc.c
314
dev_dbg(dev, "dump ring: %d\n", ring->ring_id);
drivers/soc/ti/k3-ringacc.c
315
dev_dbg(dev, "dump mem virt %p, dma %pad\n", ring->ring_mem_virt,
drivers/soc/ti/k3-ringacc.c
316
&ring->ring_mem_dma);
drivers/soc/ti/k3-ringacc.c
318
ring->elm_size, ring->size, ring->mode, ring->proxy_id);
drivers/soc/ti/k3-ringacc.c
319
dev_dbg(dev, "dump flags %08X\n", ring->flags);
drivers/soc/ti/k3-ringacc.c
321
dev_dbg(dev, "dump ring_rt_regs: db%08x\n", readl(&ring->rt->db));
drivers/soc/ti/k3-ringacc.c
322
dev_dbg(dev, "dump occ%08x\n", readl(&ring->rt->occ));
drivers/soc/ti/k3-ringacc.c
323
dev_dbg(dev, "dump indx%08x\n", readl(&ring->rt->indx));
drivers/soc/ti/k3-ringacc.c
324
dev_dbg(dev, "dump hwocc%08x\n", readl(&ring->rt->hwocc));
drivers/soc/ti/k3-ringacc.c
325
dev_dbg(dev, "dump hwindx%08x\n", readl(&ring->rt->hwindx));
drivers/soc/ti/k3-ringacc.c
327
if (ring->ring_mem_virt)
drivers/soc/ti/k3-ringacc.c
329
16, 1, ring->ring_mem_virt, 16 * 8, false);
drivers/soc/ti/k3-ringacc.c
463
static void k3_ringacc_ring_reset_sci(struct k3_ring *ring)
drivers/soc/ti/k3-ringacc.c
466
struct k3_ringacc *ringacc = ring->parent;
drivers/soc/ti/k3-ringacc.c
470
ring_cfg.index = ring->ring_id;
drivers/soc/ti/k3-ringacc.c
472
ring_cfg.count = ring->size;
drivers/soc/ti/k3-ringacc.c
477
ret, ring->ring_id);
drivers/soc/ti/k3-ringacc.c
480
void k3_ringacc_ring_reset(struct k3_ring *ring)
drivers/soc/ti/k3-ringacc.c
482
if (!ring || !(ring->flags & K3_RING_FLAG_BUSY))
drivers/soc/ti/k3-ringacc.c
485
memset(&ring->state, 0, sizeof(ring->state));
drivers/soc/ti/k3-ringacc.c
487
k3_ringacc_ring_reset_sci(ring);
drivers/soc/ti/k3-ringacc.c
491
static void k3_ringacc_ring_reconfig_qmode_sci(struct k3_ring *ring,
drivers/soc/ti/k3-ringacc.c
495
struct k3_ringacc *ringacc = ring->parent;
drivers/soc/ti/k3-ringacc.c
499
ring_cfg.index = ring->ring_id;
drivers/soc/ti/k3-ringacc.c
506
ret, ring->ring_id);
drivers/soc/ti/k3-ringacc.c
509
void k3_ringacc_ring_reset_dma(struct k3_ring *ring, u32 occ)
drivers/soc/ti/k3-ringacc.c
511
if (!ring || !(ring->flags & K3_RING_FLAG_BUSY))
drivers/soc/ti/k3-ringacc.c
514
if (!ring->parent->dma_ring_reset_quirk)
drivers/soc/ti/k3-ringacc.c
518
occ = k3_ringacc_ring_read_occ(ring);
drivers/soc/ti/k3-ringacc.c
523
dev_dbg(ring->parent->dev, "%s %u occ: %u\n", __func__,
drivers/soc/ti/k3-ringacc.c
524
ring->ring_id, occ);
drivers/soc/ti/k3-ringacc.c
526
k3_ringacc_ring_reset_sci(ring);
drivers/soc/ti/k3-ringacc.c
532
if (ring->mode != K3_RINGACC_RING_MODE_RING)
drivers/soc/ti/k3-ringacc.c
534
ring, K3_RINGACC_RING_MODE_RING);
drivers/soc/ti/k3-ringacc.c
553
writel(db_ring_cnt_cur, &ring->rt->db);
drivers/soc/ti/k3-ringacc.c
558
if (ring->mode != K3_RINGACC_RING_MODE_RING)
drivers/soc/ti/k3-ringacc.c
559
k3_ringacc_ring_reconfig_qmode_sci(ring, ring->mode);
drivers/soc/ti/k3-ringacc.c
564
k3_ringacc_ring_reset(ring);
drivers/soc/ti/k3-ringacc.c
568
static void k3_ringacc_ring_free_sci(struct k3_ring *ring)
drivers/soc/ti/k3-ringacc.c
571
struct k3_ringacc *ringacc = ring->parent;
drivers/soc/ti/k3-ringacc.c
575
ring_cfg.index = ring->ring_id;
drivers/soc/ti/k3-ringacc.c
581
ret, ring->ring_id);
drivers/soc/ti/k3-ringacc.c
584
int k3_ringacc_ring_free(struct k3_ring *ring)
drivers/soc/ti/k3-ringacc.c
588
if (!ring)
drivers/soc/ti/k3-ringacc.c
591
ringacc = ring->parent;
drivers/soc/ti/k3-ringacc.c
597
if (ringacc->dma_rings && (ring->flags & K3_RING_FLAG_REVERSE))
drivers/soc/ti/k3-ringacc.c
600
dev_dbg(ring->parent->dev, "flags: 0x%08x\n", ring->flags);
drivers/soc/ti/k3-ringacc.c
602
if (!test_bit(ring->ring_id, ringacc->rings_inuse))
drivers/soc/ti/k3-ringacc.c
607
if (--ring->use_count)
drivers/soc/ti/k3-ringacc.c
610
if (!(ring->flags & K3_RING_FLAG_BUSY))
drivers/soc/ti/k3-ringacc.c
613
k3_ringacc_ring_free_sci(ring);
drivers/soc/ti/k3-ringacc.c
615
dma_free_coherent(ring->dma_dev,
drivers/soc/ti/k3-ringacc.c
616
ring->size * (4 << ring->elm_size),
drivers/soc/ti/k3-ringacc.c
617
ring->ring_mem_virt, ring->ring_mem_dma);
drivers/soc/ti/k3-ringacc.c
618
ring->flags = 0;
drivers/soc/ti/k3-ringacc.c
619
ring->ops = NULL;
drivers/soc/ti/k3-ringacc.c
620
ring->dma_dev = NULL;
drivers/soc/ti/k3-ringacc.c
621
ring->asel = 0;
drivers/soc/ti/k3-ringacc.c
623
if (ring->proxy_id != K3_RINGACC_PROXY_NOT_USED) {
drivers/soc/ti/k3-ringacc.c
624
clear_bit(ring->proxy_id, ringacc->proxy_inuse);
drivers/soc/ti/k3-ringacc.c
625
ring->proxy = NULL;
drivers/soc/ti/k3-ringacc.c
626
ring->proxy_id = K3_RINGACC_PROXY_NOT_USED;
drivers/soc/ti/k3-ringacc.c
630
clear_bit(ring->ring_id, ringacc->rings_inuse);
drivers/soc/ti/k3-ringacc.c
640
u32 k3_ringacc_get_ring_id(struct k3_ring *ring)
drivers/soc/ti/k3-ringacc.c
642
if (!ring)
drivers/soc/ti/k3-ringacc.c
645
return ring->ring_id;
drivers/soc/ti/k3-ringacc.c
649
u32 k3_ringacc_get_tisci_dev_id(struct k3_ring *ring)
drivers/soc/ti/k3-ringacc.c
651
if (!ring)
drivers/soc/ti/k3-ringacc.c
654
return ring->parent->tisci_dev_id;
drivers/soc/ti/k3-ringacc.c
658
int k3_ringacc_get_ring_irq_num(struct k3_ring *ring)
drivers/soc/ti/k3-ringacc.c
662
if (!ring)
drivers/soc/ti/k3-ringacc.c
665
irq_num = msi_get_virq(ring->parent->dev, ring->ring_id);
drivers/soc/ti/k3-ringacc.c
672
static int k3_ringacc_ring_cfg_sci(struct k3_ring *ring)
drivers/soc/ti/k3-ringacc.c
675
struct k3_ringacc *ringacc = ring->parent;
drivers/soc/ti/k3-ringacc.c
682
ring_cfg.index = ring->ring_id;
drivers/soc/ti/k3-ringacc.c
684
ring_cfg.addr_lo = lower_32_bits(ring->ring_mem_dma);
drivers/soc/ti/k3-ringacc.c
685
ring_cfg.addr_hi = upper_32_bits(ring->ring_mem_dma);
drivers/soc/ti/k3-ringacc.c
686
ring_cfg.count = ring->size;
drivers/soc/ti/k3-ringacc.c
687
ring_cfg.mode = ring->mode;
drivers/soc/ti/k3-ringacc.c
688
ring_cfg.size = ring->elm_size;
drivers/soc/ti/k3-ringacc.c
689
ring_cfg.asel = ring->asel;
drivers/soc/ti/k3-ringacc.c
694
ret, ring->ring_id);
drivers/soc/ti/k3-ringacc.c
699
static int k3_dmaring_cfg(struct k3_ring *ring, struct k3_ring_cfg *cfg)
drivers/soc/ti/k3-ringacc.c
710
ringacc = ring->parent;
drivers/soc/ti/k3-ringacc.c
716
if (ringacc->dma_rings && (ring->flags & K3_RING_FLAG_REVERSE))
drivers/soc/ti/k3-ringacc.c
719
if (!test_bit(ring->ring_id, ringacc->rings_inuse))
drivers/soc/ti/k3-ringacc.c
722
ring->size = cfg->size;
drivers/soc/ti/k3-ringacc.c
723
ring->elm_size = cfg->elm_size;
drivers/soc/ti/k3-ringacc.c
724
ring->mode = cfg->mode;
drivers/soc/ti/k3-ringacc.c
725
ring->asel = cfg->asel;
drivers/soc/ti/k3-ringacc.c
726
ring->dma_dev = cfg->dma_dev;
drivers/soc/ti/k3-ringacc.c
727
if (!ring->dma_dev) {
drivers/soc/ti/k3-ringacc.c
729
ring->ring_id);
drivers/soc/ti/k3-ringacc.c
730
ring->dma_dev = ringacc->dev;
drivers/soc/ti/k3-ringacc.c
733
memset(&ring->state, 0, sizeof(ring->state));
drivers/soc/ti/k3-ringacc.c
735
ring->ops = &k3_dmaring_fwd_ops;
drivers/soc/ti/k3-ringacc.c
737
ring->ring_mem_virt = dma_alloc_coherent(ring->dma_dev,
drivers/soc/ti/k3-ringacc.c
738
ring->size * (4 << ring->elm_size),
drivers/soc/ti/k3-ringacc.c
739
&ring->ring_mem_dma, GFP_KERNEL);
drivers/soc/ti/k3-ringacc.c
740
if (!ring->ring_mem_virt) {
drivers/soc/ti/k3-ringacc.c
746
ret = k3_ringacc_ring_cfg_sci(ring);
drivers/soc/ti/k3-ringacc.c
750
ring->flags |= K3_RING_FLAG_BUSY;
drivers/soc/ti/k3-ringacc.c
752
k3_ringacc_ring_dump(ring);
drivers/soc/ti/k3-ringacc.c
755
reverse_ring = &ringacc->rings[ring->ring_id + ringacc->num_rings];
drivers/soc/ti/k3-ringacc.c
763
reverse_ring->ring_mem_virt = ring->ring_mem_virt;
drivers/soc/ti/k3-ringacc.c
764
reverse_ring->ring_mem_dma = ring->ring_mem_dma;
drivers/soc/ti/k3-ringacc.c
771
dma_free_coherent(ring->dma_dev,
drivers/soc/ti/k3-ringacc.c
772
ring->size * (4 << ring->elm_size),
drivers/soc/ti/k3-ringacc.c
773
ring->ring_mem_virt,
drivers/soc/ti/k3-ringacc.c
774
ring->ring_mem_dma);
drivers/soc/ti/k3-ringacc.c
776
ring->ops = NULL;
drivers/soc/ti/k3-ringacc.c
777
ring->proxy = NULL;
drivers/soc/ti/k3-ringacc.c
778
ring->dma_dev = NULL;
drivers/soc/ti/k3-ringacc.c
779
ring->asel = 0;
drivers/soc/ti/k3-ringacc.c
783
int k3_ringacc_ring_cfg(struct k3_ring *ring, struct k3_ring_cfg *cfg)
drivers/soc/ti/k3-ringacc.c
788
if (!ring || !cfg)
drivers/soc/ti/k3-ringacc.c
791
ringacc = ring->parent;
drivers/soc/ti/k3-ringacc.c
794
return k3_dmaring_cfg(ring, cfg);
drivers/soc/ti/k3-ringacc.c
799
!test_bit(ring->ring_id, ringacc->rings_inuse))
drivers/soc/ti/k3-ringacc.c
803
ring->proxy_id == K3_RINGACC_PROXY_NOT_USED &&
drivers/soc/ti/k3-ringacc.c
807
4 << ring->elm_size);
drivers/soc/ti/k3-ringacc.c
819
if (ring->use_count != 1)
drivers/soc/ti/k3-ringacc.c
822
ring->size = cfg->size;
drivers/soc/ti/k3-ringacc.c
823
ring->elm_size = cfg->elm_size;
drivers/soc/ti/k3-ringacc.c
824
ring->mode = cfg->mode;
drivers/soc/ti/k3-ringacc.c
825
memset(&ring->state, 0, sizeof(ring->state));
drivers/soc/ti/k3-ringacc.c
827
if (ring->proxy_id != K3_RINGACC_PROXY_NOT_USED)
drivers/soc/ti/k3-ringacc.c
828
ring->proxy = ringacc->proxy_target_base +
drivers/soc/ti/k3-ringacc.c
829
ring->proxy_id * K3_RINGACC_PROXY_TARGET_STEP;
drivers/soc/ti/k3-ringacc.c
831
switch (ring->mode) {
drivers/soc/ti/k3-ringacc.c
833
ring->ops = &k3_ring_mode_ring_ops;
drivers/soc/ti/k3-ringacc.c
834
ring->dma_dev = cfg->dma_dev;
drivers/soc/ti/k3-ringacc.c
835
if (!ring->dma_dev)
drivers/soc/ti/k3-ringacc.c
836
ring->dma_dev = ringacc->dev;
drivers/soc/ti/k3-ringacc.c
839
ring->dma_dev = ringacc->dev;
drivers/soc/ti/k3-ringacc.c
840
if (ring->proxy)
drivers/soc/ti/k3-ringacc.c
841
ring->ops = &k3_ring_mode_proxy_ops;
drivers/soc/ti/k3-ringacc.c
843
ring->ops = &k3_ring_mode_msg_ops;
drivers/soc/ti/k3-ringacc.c
846
ring->ops = NULL;
drivers/soc/ti/k3-ringacc.c
851
ring->ring_mem_virt = dma_alloc_coherent(ring->dma_dev,
drivers/soc/ti/k3-ringacc.c
852
ring->size * (4 << ring->elm_size),
drivers/soc/ti/k3-ringacc.c
853
&ring->ring_mem_dma, GFP_KERNEL);
drivers/soc/ti/k3-ringacc.c
854
if (!ring->ring_mem_virt) {
drivers/soc/ti/k3-ringacc.c
860
ret = k3_ringacc_ring_cfg_sci(ring);
drivers/soc/ti/k3-ringacc.c
865
ring->flags |= K3_RING_FLAG_BUSY;
drivers/soc/ti/k3-ringacc.c
866
ring->flags |= (cfg->flags & K3_RINGACC_RING_SHARED) ?
drivers/soc/ti/k3-ringacc.c
869
k3_ringacc_ring_dump(ring);
drivers/soc/ti/k3-ringacc.c
874
dma_free_coherent(ring->dma_dev,
drivers/soc/ti/k3-ringacc.c
875
ring->size * (4 << ring->elm_size),
drivers/soc/ti/k3-ringacc.c
876
ring->ring_mem_virt,
drivers/soc/ti/k3-ringacc.c
877
ring->ring_mem_dma);
drivers/soc/ti/k3-ringacc.c
879
ring->ops = NULL;
drivers/soc/ti/k3-ringacc.c
880
ring->dma_dev = NULL;
drivers/soc/ti/k3-ringacc.c
882
ring->proxy = NULL;
drivers/soc/ti/k3-ringacc.c
887
u32 k3_ringacc_ring_get_size(struct k3_ring *ring)
drivers/soc/ti/k3-ringacc.c
889
if (!ring || !(ring->flags & K3_RING_FLAG_BUSY))
drivers/soc/ti/k3-ringacc.c
892
return ring->size;
drivers/soc/ti/k3-ringacc.c
896
u32 k3_ringacc_ring_get_free(struct k3_ring *ring)
drivers/soc/ti/k3-ringacc.c
898
if (!ring || !(ring->flags & K3_RING_FLAG_BUSY))
drivers/soc/ti/k3-ringacc.c
901
if (!ring->state.free)
drivers/soc/ti/k3-ringacc.c
902
ring->state.free = ring->size - k3_ringacc_ring_read_occ(ring);
drivers/soc/ti/k3-ringacc.c
904
return ring->state.free;
drivers/soc/ti/k3-ringacc.c
908
u32 k3_ringacc_ring_get_occ(struct k3_ring *ring)
drivers/soc/ti/k3-ringacc.c
910
if (!ring || !(ring->flags & K3_RING_FLAG_BUSY))
drivers/soc/ti/k3-ringacc.c
913
return k3_ringacc_ring_read_occ(ring);
drivers/soc/ti/k3-ringacc.c
917
u32 k3_ringacc_ring_is_full(struct k3_ring *ring)
drivers/soc/ti/k3-ringacc.c
919
return !k3_ringacc_ring_get_free(ring);
drivers/soc/ti/k3-ringacc.c
934
static int k3_ringacc_ring_cfg_proxy(struct k3_ring *ring,
drivers/soc/ti/k3-ringacc.c
939
val = ring->ring_id;
drivers/soc/ti/k3-ringacc.c
941
val |= K3_RINGACC_PROXY_ELSIZE(ring->elm_size);
drivers/soc/ti/k3-ringacc.c
942
writel(val, &ring->proxy->control);
drivers/soc/ti/k3-ringacc.c
946
static int k3_ringacc_ring_access_proxy(struct k3_ring *ring, void *elem,
drivers/soc/ti/k3-ringacc.c
951
ptr = (void __iomem *)&ring->proxy->data;
drivers/soc/ti/k3-ringacc.c
956
k3_ringacc_ring_cfg_proxy(ring, PROXY_ACCESS_MODE_HEAD);
drivers/soc/ti/k3-ringacc.c
960
k3_ringacc_ring_cfg_proxy(ring, PROXY_ACCESS_MODE_TAIL);
drivers/soc/ti/k3-ringacc.c
966
ptr += k3_ringacc_ring_get_fifo_pos(ring);
drivers/soc/ti/k3-ringacc.c
971
dev_dbg(ring->parent->dev,
drivers/soc/ti/k3-ringacc.c
974
memcpy_fromio(elem, ptr, (4 << ring->elm_size));
drivers/soc/ti/k3-ringacc.c
975
ring->state.occ--;
drivers/soc/ti/k3-ringacc.c
979
dev_dbg(ring->parent->dev,
drivers/soc/ti/k3-ringacc.c
982
memcpy_toio(ptr, elem, (4 << ring->elm_size));
drivers/soc/ti/k3-ringacc.c
983
ring->state.free--;
drivers/soc/ti/k3-ringacc.c
989
dev_dbg(ring->parent->dev, "proxy: free%d occ%d\n", ring->state.free,
drivers/soc/ti/k3-ringacc.c
990
ring->state.occ);
drivers/soc/ti/k3-ringacc.c
994
static int k3_ringacc_ring_push_head_proxy(struct k3_ring *ring, void *elem)
drivers/soc/ti/k3-ringacc.c
996
return k3_ringacc_ring_access_proxy(ring, elem,
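For orientation between file groups: the drivers/soc/ti/k3-ringacc.c lines above show the memory-mode access path, where an element of (4 << elm_size) bytes is copied at the write index, the index wraps modulo the ring size, the free/occ counters are adjusted, and the doorbell register is written. Below is a minimal, self-contained userspace sketch of that index arithmetic only; the names model_ring, model_push and model_pop are invented for illustration, the doorbell writes are reduced to comments, and (unlike the driver, which leaves occupancy tracking to the hardware) both counters are kept in software so the sketch runs on its own.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct model_ring {
	uint8_t *mem;        /* ring memory: size * (4 << elm_size) bytes */
	uint32_t size;       /* number of element slots */
	uint32_t elm_size;   /* element size encoding: bytes = 4 << elm_size */
	uint32_t windex;     /* next slot to write (push) */
	uint32_t rindex;     /* next slot to read (pop) */
	uint32_t free;       /* free slots */
	uint32_t occ;        /* occupied slots (hardware-owned in the real driver) */
};

static int model_push(struct model_ring *r, const void *elem)
{
	if (!r->free)
		return -1;                                    /* ring full */
	memcpy(r->mem + r->windex * (4u << r->elm_size), elem, 4u << r->elm_size);
	r->windex = (r->windex + 1) % r->size;            /* wrap the write index */
	r->free--;
	r->occ++;
	/* the driver would ring the doorbell (+1 element) here */
	return 0;
}

static int model_pop(struct model_ring *r, void *elem)
{
	if (!r->occ)
		return -1;                                    /* ring empty */
	memcpy(elem, r->mem + r->rindex * (4u << r->elm_size), 4u << r->elm_size);
	r->rindex = (r->rindex + 1) % r->size;            /* wrap the read index */
	r->occ--;
	r->free++;
	/* the driver would ring the doorbell (-1 element) here */
	return 0;
}

int main(void)
{
	struct model_ring r = { .size = 8, .elm_size = 1 };  /* 8 slots of 8 bytes */
	uint64_t in = 0xabcdef, out = 0;

	r.mem = calloc(r.size, 4u << r.elm_size);
	r.free = r.size;
	model_push(&r, &in);
	model_pop(&r, &out);
	printf("popped %#llx\n", (unsigned long long)out);
	free(r.mem);
	return 0;
}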
drivers/spi/spi-pic32-sqi.c
145
struct ring_desc *ring;
drivers/spi/spi-pic32-sqi.c
470
sqi->ring = kcalloc(PESQI_BD_COUNT, sizeof(*rdesc), GFP_KERNEL);
drivers/spi/spi-pic32-sqi.c
471
if (!sqi->ring) {
drivers/spi/spi-pic32-sqi.c
484
for (i = 0, rdesc = sqi->ring; i < PESQI_BD_COUNT; i++, rdesc++) {
drivers/spi/spi-pic32-sqi.c
492
for (i = 0, rdesc = sqi->ring; i < PESQI_BD_COUNT - 1; i++)
drivers/spi/spi-pic32-sqi.c
504
kfree(sqi->ring);
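The spi-pic32-sqi.c lines above only show the shape of its descriptor ring: allocate a fixed-size array of descriptors, initialise each one, chain each descriptor to its successor, and free the whole array in one call. A small sketch of that shape follows; struct ring_desc here is a stand-in with invented fields, and BD_COUNT stands in for PESQI_BD_COUNT, so this is not the driver's real layout.

#include <stddef.h>
#include <stdlib.h>

#define BD_COUNT 256                         /* stand-in for PESQI_BD_COUNT */

struct ring_desc {                           /* illustrative fields only */
	void *buf;
	struct ring_desc *next;
};

struct ring_desc *alloc_desc_ring(void)
{
	struct ring_desc *ring, *rdesc;
	int i;

	ring = calloc(BD_COUNT, sizeof(*rdesc)); /* zeroed array of descriptors */
	if (!ring)
		return NULL;

	for (i = 0, rdesc = ring; i < BD_COUNT; i++, rdesc++)
		rdesc->buf = NULL;                   /* per-descriptor init */

	for (i = 0; i < BD_COUNT - 1; i++)
		ring[i].next = &ring[i + 1];         /* chain each descriptor to the next */

	return ring;                             /* freed later with a single free(ring) */
}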
drivers/thunderbolt/ctl.c
352
static void tb_ctl_tx_callback(struct tb_ring *ring, struct ring_frame *frame,
drivers/thunderbolt/ctl.c
445
static void tb_ctl_rx_callback(struct tb_ring *ring, struct ring_frame *frame,
drivers/thunderbolt/dma_test.c
141
struct tb_ring *ring;
drivers/thunderbolt/dma_test.c
152
ring = tb_ring_alloc_tx(xd->tb->nhi, -1, DMA_TEST_TX_RING_SIZE,
drivers/thunderbolt/dma_test.c
154
if (!ring)
drivers/thunderbolt/dma_test.c
157
dt->tx_ring = ring;
drivers/thunderbolt/dma_test.c
158
e2e_tx_hop = ring->hop;
drivers/thunderbolt/dma_test.c
175
ring = tb_ring_alloc_rx(xd->tb->nhi, -1, DMA_TEST_RX_RING_SIZE,
drivers/thunderbolt/dma_test.c
178
if (!ring) {
drivers/thunderbolt/dma_test.c
183
dt->rx_ring = ring;
drivers/thunderbolt/dma_test.c
230
static void dma_test_rx_callback(struct tb_ring *ring, struct ring_frame *frame,
drivers/thunderbolt/dma_test.c
299
static void dma_test_tx_callback(struct tb_ring *ring, struct ring_frame *frame,
drivers/thunderbolt/nhi.c
101
if (ring->is_tx)
drivers/thunderbolt/nhi.c
102
index = ring->hop;
drivers/thunderbolt/nhi.c
104
index = ring->hop + ring->nhi->hop_count;
drivers/thunderbolt/nhi.c
117
misc = ioread32(ring->nhi->iobase + REG_DMA_MISC);
drivers/thunderbolt/nhi.c
118
if (ring->nhi->quirks & QUIRK_AUTO_CLEAR_INT)
drivers/thunderbolt/nhi.c
124
ring->nhi->iobase + REG_DMA_MISC);
drivers/thunderbolt/nhi.c
126
ivr_base = ring->nhi->iobase + REG_INT_VEC_ALLOC_BASE;
drivers/thunderbolt/nhi.c
132
ivr |= ring->vector << shift;
drivers/thunderbolt/nhi.c
136
old = ioread32(ring->nhi->iobase + reg);
drivers/thunderbolt/nhi.c
142
dev_dbg(&ring->nhi->pdev->dev,
drivers/thunderbolt/nhi.c
147
dev_WARN(&ring->nhi->pdev->dev,
drivers/thunderbolt/nhi.c
149
RING_TYPE(ring), ring->hop,
drivers/thunderbolt/nhi.c
153
iowrite32(new, ring->nhi->iobase + reg);
drivers/thunderbolt/nhi.c
155
nhi_mask_interrupt(ring->nhi, mask, index);
drivers/thunderbolt/nhi.c
177
static void __iomem *ring_desc_base(struct tb_ring *ring)
drivers/thunderbolt/nhi.c
179
void __iomem *io = ring->nhi->iobase;
drivers/thunderbolt/nhi.c
180
io += ring->is_tx ? REG_TX_RING_BASE : REG_RX_RING_BASE;
drivers/thunderbolt/nhi.c
181
io += ring->hop * 16;
drivers/thunderbolt/nhi.c
185
static void __iomem *ring_options_base(struct tb_ring *ring)
drivers/thunderbolt/nhi.c
187
void __iomem *io = ring->nhi->iobase;
drivers/thunderbolt/nhi.c
188
io += ring->is_tx ? REG_TX_OPTIONS_BASE : REG_RX_OPTIONS_BASE;
drivers/thunderbolt/nhi.c
189
io += ring->hop * 32;
drivers/thunderbolt/nhi.c
193
static void ring_iowrite_cons(struct tb_ring *ring, u16 cons)
drivers/thunderbolt/nhi.c
200
iowrite32(cons, ring_desc_base(ring) + 8);
drivers/thunderbolt/nhi.c
203
static void ring_iowrite_prod(struct tb_ring *ring, u16 prod)
drivers/thunderbolt/nhi.c
206
iowrite32(prod << 16, ring_desc_base(ring) + 8);
drivers/thunderbolt/nhi.c
209
static void ring_iowrite32desc(struct tb_ring *ring, u32 value, u32 offset)
drivers/thunderbolt/nhi.c
211
iowrite32(value, ring_desc_base(ring) + offset);
drivers/thunderbolt/nhi.c
214
static void ring_iowrite64desc(struct tb_ring *ring, u64 value, u32 offset)
drivers/thunderbolt/nhi.c
216
iowrite32(value, ring_desc_base(ring) + offset);
drivers/thunderbolt/nhi.c
217
iowrite32(value >> 32, ring_desc_base(ring) + offset + 4);
drivers/thunderbolt/nhi.c
220
static void ring_iowrite32options(struct tb_ring *ring, u32 value, u32 offset)
drivers/thunderbolt/nhi.c
222
iowrite32(value, ring_options_base(ring) + offset);
drivers/thunderbolt/nhi.c
225
static bool ring_full(struct tb_ring *ring)
drivers/thunderbolt/nhi.c
227
return ((ring->head + 1) % ring->size) == ring->tail;
drivers/thunderbolt/nhi.c
230
static bool ring_empty(struct tb_ring *ring)
drivers/thunderbolt/nhi.c
232
return ring->head == ring->tail;
drivers/thunderbolt/nhi.c
240
static void ring_write_descriptors(struct tb_ring *ring)
drivers/thunderbolt/nhi.c
244
list_for_each_entry_safe(frame, n, &ring->queue, list) {
drivers/thunderbolt/nhi.c
245
if (ring_full(ring))
drivers/thunderbolt/nhi.c
247
list_move_tail(&frame->list, &ring->in_flight);
drivers/thunderbolt/nhi.c
248
descriptor = &ring->descriptors[ring->head];
drivers/thunderbolt/nhi.c
252
if (ring->is_tx) {
drivers/thunderbolt/nhi.c
257
ring->head = (ring->head + 1) % ring->size;
drivers/thunderbolt/nhi.c
258
if (ring->is_tx)
drivers/thunderbolt/nhi.c
259
ring_iowrite_prod(ring, ring->head);
drivers/thunderbolt/nhi.c
261
ring_iowrite_cons(ring, ring->head);
drivers/thunderbolt/nhi.c
276
struct tb_ring *ring = container_of(work, typeof(*ring), work);
drivers/thunderbolt/nhi.c
282
spin_lock_irqsave(&ring->lock, flags);
drivers/thunderbolt/nhi.c
284
if (!ring->running) {
drivers/thunderbolt/nhi.c
286
list_splice_tail_init(&ring->in_flight, &done);
drivers/thunderbolt/nhi.c
287
list_splice_tail_init(&ring->queue, &done);
drivers/thunderbolt/nhi.c
29
#define RING_TYPE(ring) ((ring)->is_tx ? "TX ring" : "RX ring")
drivers/thunderbolt/nhi.c
292
while (!ring_empty(ring)) {
drivers/thunderbolt/nhi.c
293
if (!(ring->descriptors[ring->tail].flags
drivers/thunderbolt/nhi.c
296
frame = list_first_entry(&ring->in_flight, typeof(*frame),
drivers/thunderbolt/nhi.c
299
if (!ring->is_tx) {
drivers/thunderbolt/nhi.c
300
frame->size = ring->descriptors[ring->tail].length;
drivers/thunderbolt/nhi.c
301
frame->eof = ring->descriptors[ring->tail].eof;
drivers/thunderbolt/nhi.c
302
frame->sof = ring->descriptors[ring->tail].sof;
drivers/thunderbolt/nhi.c
303
frame->flags = ring->descriptors[ring->tail].flags;
drivers/thunderbolt/nhi.c
305
ring->tail = (ring->tail + 1) % ring->size;
drivers/thunderbolt/nhi.c
307
ring_write_descriptors(ring);
drivers/thunderbolt/nhi.c
311
spin_unlock_irqrestore(&ring->lock, flags);
drivers/thunderbolt/nhi.c
320
frame->callback(ring, frame, canceled);
drivers/thunderbolt/nhi.c
324
int __tb_ring_enqueue(struct tb_ring *ring, struct ring_frame *frame)
drivers/thunderbolt/nhi.c
329
spin_lock_irqsave(&ring->lock, flags);
drivers/thunderbolt/nhi.c
330
if (ring->running) {
drivers/thunderbolt/nhi.c
331
list_add_tail(&frame->list, &ring->queue);
drivers/thunderbolt/nhi.c
332
ring_write_descriptors(ring);
drivers/thunderbolt/nhi.c
336
spin_unlock_irqrestore(&ring->lock, flags);
drivers/thunderbolt/nhi.c
352
struct ring_frame *tb_ring_poll(struct tb_ring *ring)
drivers/thunderbolt/nhi.c
357
spin_lock_irqsave(&ring->lock, flags);
drivers/thunderbolt/nhi.c
358
if (!ring->running)
drivers/thunderbolt/nhi.c
360
if (ring_empty(ring))
drivers/thunderbolt/nhi.c
363
if (ring->descriptors[ring->tail].flags & RING_DESC_COMPLETED) {
drivers/thunderbolt/nhi.c
364
frame = list_first_entry(&ring->in_flight, typeof(*frame),
drivers/thunderbolt/nhi.c
368
if (!ring->is_tx) {
drivers/thunderbolt/nhi.c
369
frame->size = ring->descriptors[ring->tail].length;
drivers/thunderbolt/nhi.c
370
frame->eof = ring->descriptors[ring->tail].eof;
drivers/thunderbolt/nhi.c
371
frame->sof = ring->descriptors[ring->tail].sof;
drivers/thunderbolt/nhi.c
372
frame->flags = ring->descriptors[ring->tail].flags;
drivers/thunderbolt/nhi.c
375
ring->tail = (ring->tail + 1) % ring->size;
drivers/thunderbolt/nhi.c
379
spin_unlock_irqrestore(&ring->lock, flags);
drivers/thunderbolt/nhi.c
384
static void __ring_interrupt_mask(struct tb_ring *ring, bool mask)
drivers/thunderbolt/nhi.c
386
int idx = ring_interrupt_index(ring);
drivers/thunderbolt/nhi.c
391
val = ioread32(ring->nhi->iobase + reg);
drivers/thunderbolt/nhi.c
396
iowrite32(val, ring->nhi->iobase + reg);
drivers/thunderbolt/nhi.c
400
static void __ring_interrupt(struct tb_ring *ring)
drivers/thunderbolt/nhi.c
402
if (!ring->running)
drivers/thunderbolt/nhi.c
405
if (ring->start_poll) {
drivers/thunderbolt/nhi.c
406
__ring_interrupt_mask(ring, true);
drivers/thunderbolt/nhi.c
407
ring->start_poll(ring->poll_data);
drivers/thunderbolt/nhi.c
409
schedule_work(&ring->work);
drivers/thunderbolt/nhi.c
420
void tb_ring_poll_complete(struct tb_ring *ring)
drivers/thunderbolt/nhi.c
424
spin_lock_irqsave(&ring->nhi->lock, flags);
drivers/thunderbolt/nhi.c
425
spin_lock(&ring->lock);
drivers/thunderbolt/nhi.c
426
if (ring->start_poll)
drivers/thunderbolt/nhi.c
427
__ring_interrupt_mask(ring, false);
drivers/thunderbolt/nhi.c
428
spin_unlock(&ring->lock);
drivers/thunderbolt/nhi.c
429
spin_unlock_irqrestore(&ring->nhi->lock, flags);
drivers/thunderbolt/nhi.c
433
static void ring_clear_msix(const struct tb_ring *ring)
drivers/thunderbolt/nhi.c
437
if (ring->nhi->quirks & QUIRK_AUTO_CLEAR_INT)
drivers/thunderbolt/nhi.c
440
bit = ring_interrupt_index(ring) & 31;
drivers/thunderbolt/nhi.c
441
if (ring->is_tx)
drivers/thunderbolt/nhi.c
442
iowrite32(BIT(bit), ring->nhi->iobase + REG_RING_INT_CLEAR);
drivers/thunderbolt/nhi.c
444
iowrite32(BIT(bit), ring->nhi->iobase + REG_RING_INT_CLEAR +
drivers/thunderbolt/nhi.c
445
4 * (ring->nhi->hop_count / 32));
drivers/thunderbolt/nhi.c
450
struct tb_ring *ring = data;
drivers/thunderbolt/nhi.c
452
spin_lock(&ring->nhi->lock);
drivers/thunderbolt/nhi.c
453
ring_clear_msix(ring);
drivers/thunderbolt/nhi.c
454
spin_lock(&ring->lock);
drivers/thunderbolt/nhi.c
455
__ring_interrupt(ring);
drivers/thunderbolt/nhi.c
456
spin_unlock(&ring->lock);
drivers/thunderbolt/nhi.c
457
spin_unlock(&ring->nhi->lock);
drivers/thunderbolt/nhi.c
462
static int ring_request_msix(struct tb_ring *ring, bool no_suspend)
drivers/thunderbolt/nhi.c
464
struct tb_nhi *nhi = ring->nhi;
drivers/thunderbolt/nhi.c
475
ring->vector = ret;
drivers/thunderbolt/nhi.c
477
ret = pci_irq_vector(ring->nhi->pdev, ring->vector);
drivers/thunderbolt/nhi.c
481
ring->irq = ret;
drivers/thunderbolt/nhi.c
484
ret = request_irq(ring->irq, ring_msix, irqflags, "thunderbolt", ring);
drivers/thunderbolt/nhi.c
491
ida_free(&nhi->msix_ida, ring->vector);
drivers/thunderbolt/nhi.c
496
static void ring_release_msix(struct tb_ring *ring)
drivers/thunderbolt/nhi.c
498
if (ring->irq <= 0)
drivers/thunderbolt/nhi.c
501
free_irq(ring->irq, ring);
drivers/thunderbolt/nhi.c
502
ida_free(&ring->nhi->msix_ida, ring->vector);
drivers/thunderbolt/nhi.c
503
ring->vector = 0;
drivers/thunderbolt/nhi.c
504
ring->irq = 0;
drivers/thunderbolt/nhi.c
507
static int nhi_alloc_hop(struct tb_nhi *nhi, struct tb_ring *ring)
drivers/thunderbolt/nhi.c
514
if (ring->flags & RING_FLAG_E2E && !ring->is_tx) {
drivers/thunderbolt/nhi.c
516
ring->e2e_tx_hop, RING_E2E_RESERVED_HOPID);
drivers/thunderbolt/nhi.c
517
ring->e2e_tx_hop = RING_E2E_RESERVED_HOPID;
drivers/thunderbolt/nhi.c
523
if (ring->hop < 0) {
drivers/thunderbolt/nhi.c
531
if (ring->is_tx) {
drivers/thunderbolt/nhi.c
533
ring->hop = i;
drivers/thunderbolt/nhi.c
538
ring->hop = i;
drivers/thunderbolt/nhi.c
54
static int ring_interrupt_index(const struct tb_ring *ring)
drivers/thunderbolt/nhi.c
545
if (ring->hop > 0 && ring->hop < start_hop) {
drivers/thunderbolt/nhi.c
546
dev_warn(&nhi->pdev->dev, "invalid hop: %d\n", ring->hop);
drivers/thunderbolt/nhi.c
550
if (ring->hop < 0 || ring->hop >= nhi->hop_count) {
drivers/thunderbolt/nhi.c
551
dev_warn(&nhi->pdev->dev, "invalid hop: %d\n", ring->hop);
drivers/thunderbolt/nhi.c
555
if (ring->is_tx && nhi->tx_rings[ring->hop]) {
drivers/thunderbolt/nhi.c
557
ring->hop);
drivers/thunderbolt/nhi.c
56
int bit = ring->hop;
drivers/thunderbolt/nhi.c
561
if (!ring->is_tx && nhi->rx_rings[ring->hop]) {
drivers/thunderbolt/nhi.c
563
ring->hop);
drivers/thunderbolt/nhi.c
568
if (ring->is_tx)
drivers/thunderbolt/nhi.c
569
nhi->tx_rings[ring->hop] = ring;
drivers/thunderbolt/nhi.c
57
if (!ring->is_tx)
drivers/thunderbolt/nhi.c
571
nhi->rx_rings[ring->hop] = ring;
drivers/thunderbolt/nhi.c
58
bit += ring->nhi->hop_count;
drivers/thunderbolt/nhi.c
585
struct tb_ring *ring = NULL;
drivers/thunderbolt/nhi.c
590
ring = kzalloc(sizeof(*ring), GFP_KERNEL);
drivers/thunderbolt/nhi.c
591
if (!ring)
drivers/thunderbolt/nhi.c
594
spin_lock_init(&ring->lock);
drivers/thunderbolt/nhi.c
595
INIT_LIST_HEAD(&ring->queue);
drivers/thunderbolt/nhi.c
596
INIT_LIST_HEAD(&ring->in_flight);
drivers/thunderbolt/nhi.c
597
INIT_WORK(&ring->work, ring_work);
drivers/thunderbolt/nhi.c
599
ring->nhi = nhi;
drivers/thunderbolt/nhi.c
600
ring->hop = hop;
drivers/thunderbolt/nhi.c
601
ring->is_tx = transmit;
drivers/thunderbolt/nhi.c
602
ring->size = size;
drivers/thunderbolt/nhi.c
603
ring->flags = flags;
drivers/thunderbolt/nhi.c
604
ring->e2e_tx_hop = e2e_tx_hop;
drivers/thunderbolt/nhi.c
605
ring->sof_mask = sof_mask;
drivers/thunderbolt/nhi.c
606
ring->eof_mask = eof_mask;
drivers/thunderbolt/nhi.c
607
ring->head = 0;
drivers/thunderbolt/nhi.c
608
ring->tail = 0;
drivers/thunderbolt/nhi.c
609
ring->running = false;
drivers/thunderbolt/nhi.c
610
ring->start_poll = start_poll;
drivers/thunderbolt/nhi.c
611
ring->poll_data = poll_data;
drivers/thunderbolt/nhi.c
613
ring->descriptors = dma_alloc_coherent(&ring->nhi->pdev->dev,
drivers/thunderbolt/nhi.c
614
size * sizeof(*ring->descriptors),
drivers/thunderbolt/nhi.c
615
&ring->descriptors_dma, GFP_KERNEL | __GFP_ZERO);
drivers/thunderbolt/nhi.c
616
if (!ring->descriptors)
drivers/thunderbolt/nhi.c
619
if (ring_request_msix(ring, flags & RING_FLAG_NO_SUSPEND))
drivers/thunderbolt/nhi.c
62
static void nhi_mask_interrupt(struct tb_nhi *nhi, int mask, int ring)
drivers/thunderbolt/nhi.c
622
if (nhi_alloc_hop(nhi, ring))
drivers/thunderbolt/nhi.c
625
return ring;
drivers/thunderbolt/nhi.c
628
ring_release_msix(ring);
drivers/thunderbolt/nhi.c
630
dma_free_coherent(&ring->nhi->pdev->dev,
drivers/thunderbolt/nhi.c
631
ring->size * sizeof(*ring->descriptors),
drivers/thunderbolt/nhi.c
632
ring->descriptors, ring->descriptors_dma);
drivers/thunderbolt/nhi.c
634
kfree(ring);
drivers/thunderbolt/nhi.c
67
val = ioread32(nhi->iobase + REG_RING_INTERRUPT_BASE + ring);
drivers/thunderbolt/nhi.c
68
iowrite32(val & ~mask, nhi->iobase + REG_RING_INTERRUPT_BASE + ring);
drivers/thunderbolt/nhi.c
687
void tb_ring_start(struct tb_ring *ring)
drivers/thunderbolt/nhi.c
692
spin_lock_irq(&ring->nhi->lock);
drivers/thunderbolt/nhi.c
693
spin_lock(&ring->lock);
drivers/thunderbolt/nhi.c
694
if (ring->nhi->going_away)
drivers/thunderbolt/nhi.c
696
if (ring->running) {
drivers/thunderbolt/nhi.c
697
dev_WARN(&ring->nhi->pdev->dev, "ring already started\n");
drivers/thunderbolt/nhi.c
70
iowrite32(mask, nhi->iobase + REG_RING_INTERRUPT_MASK_CLEAR_BASE + ring);
drivers/thunderbolt/nhi.c
700
dev_dbg(&ring->nhi->pdev->dev, "starting %s %d\n",
drivers/thunderbolt/nhi.c
701
RING_TYPE(ring), ring->hop);
drivers/thunderbolt/nhi.c
703
if (ring->flags & RING_FLAG_FRAME) {
drivers/thunderbolt/nhi.c
712
ring_iowrite64desc(ring, ring->descriptors_dma, 0);
drivers/thunderbolt/nhi.c
713
if (ring->is_tx) {
drivers/thunderbolt/nhi.c
714
ring_iowrite32desc(ring, ring->size, 12);
drivers/thunderbolt/nhi.c
715
ring_iowrite32options(ring, 0, 4);
drivers/thunderbolt/nhi.c
716
ring_iowrite32options(ring, flags, 0);
drivers/thunderbolt/nhi.c
718
u32 sof_eof_mask = ring->sof_mask << 16 | ring->eof_mask;
drivers/thunderbolt/nhi.c
720
ring_iowrite32desc(ring, (frame_size << 16) | ring->size, 12);
drivers/thunderbolt/nhi.c
721
ring_iowrite32options(ring, sof_eof_mask, 4);
drivers/thunderbolt/nhi.c
722
ring_iowrite32options(ring, flags, 0);
drivers/thunderbolt/nhi.c
729
if (ring->flags & RING_FLAG_E2E) {
drivers/thunderbolt/nhi.c
730
if (!ring->is_tx) {
drivers/thunderbolt/nhi.c
733
hop = ring->e2e_tx_hop << REG_RX_OPTIONS_E2E_HOP_SHIFT;
drivers/thunderbolt/nhi.c
737
dev_dbg(&ring->nhi->pdev->dev,
drivers/thunderbolt/nhi.c
739
RING_TYPE(ring), ring->hop, ring->e2e_tx_hop);
drivers/thunderbolt/nhi.c
74
static void nhi_clear_interrupt(struct tb_nhi *nhi, int ring)
drivers/thunderbolt/nhi.c
741
dev_dbg(&ring->nhi->pdev->dev, "enabling E2E for %s %d\n",
drivers/thunderbolt/nhi.c
742
RING_TYPE(ring), ring->hop);
drivers/thunderbolt/nhi.c
746
ring_iowrite32options(ring, flags, 0);
drivers/thunderbolt/nhi.c
749
ring_interrupt_active(ring, true);
drivers/thunderbolt/nhi.c
750
ring->running = true;
drivers/thunderbolt/nhi.c
752
spin_unlock(&ring->lock);
drivers/thunderbolt/nhi.c
753
spin_unlock_irq(&ring->nhi->lock);
drivers/thunderbolt/nhi.c
77
ioread32(nhi->iobase + REG_RING_NOTIFY_BASE + ring);
drivers/thunderbolt/nhi.c
771
void tb_ring_stop(struct tb_ring *ring)
drivers/thunderbolt/nhi.c
773
spin_lock_irq(&ring->nhi->lock);
drivers/thunderbolt/nhi.c
774
spin_lock(&ring->lock);
drivers/thunderbolt/nhi.c
775
dev_dbg(&ring->nhi->pdev->dev, "stopping %s %d\n",
drivers/thunderbolt/nhi.c
776
RING_TYPE(ring), ring->hop);
drivers/thunderbolt/nhi.c
777
if (ring->nhi->going_away)
drivers/thunderbolt/nhi.c
779
if (!ring->running) {
drivers/thunderbolt/nhi.c
780
dev_WARN(&ring->nhi->pdev->dev, "%s %d already stopped\n",
drivers/thunderbolt/nhi.c
781
RING_TYPE(ring), ring->hop);
drivers/thunderbolt/nhi.c
784
ring_interrupt_active(ring, false);
drivers/thunderbolt/nhi.c
786
ring_iowrite32options(ring, 0, 0);
drivers/thunderbolt/nhi.c
787
ring_iowrite64desc(ring, 0, 0);
drivers/thunderbolt/nhi.c
788
ring_iowrite32desc(ring, 0, 8);
drivers/thunderbolt/nhi.c
789
ring_iowrite32desc(ring, 0, 12);
drivers/thunderbolt/nhi.c
79
iowrite32(~0, nhi->iobase + REG_RING_INT_CLEAR + ring);
drivers/thunderbolt/nhi.c
790
ring->head = 0;
drivers/thunderbolt/nhi.c
791
ring->tail = 0;
drivers/thunderbolt/nhi.c
792
ring->running = false;
drivers/thunderbolt/nhi.c
795
spin_unlock(&ring->lock);
drivers/thunderbolt/nhi.c
796
spin_unlock_irq(&ring->nhi->lock);
drivers/thunderbolt/nhi.c
801
schedule_work(&ring->work);
drivers/thunderbolt/nhi.c
802
flush_work(&ring->work);
drivers/thunderbolt/nhi.c
816
void tb_ring_free(struct tb_ring *ring)
drivers/thunderbolt/nhi.c
818
spin_lock_irq(&ring->nhi->lock);
drivers/thunderbolt/nhi.c
823
if (ring->is_tx)
drivers/thunderbolt/nhi.c
824
ring->nhi->tx_rings[ring->hop] = NULL;
drivers/thunderbolt/nhi.c
826
ring->nhi->rx_rings[ring->hop] = NULL;
drivers/thunderbolt/nhi.c
828
if (ring->running) {
drivers/thunderbolt/nhi.c
829
dev_WARN(&ring->nhi->pdev->dev, "%s %d still running\n",
drivers/thunderbolt/nhi.c
830
RING_TYPE(ring), ring->hop);
drivers/thunderbolt/nhi.c
832
spin_unlock_irq(&ring->nhi->lock);
drivers/thunderbolt/nhi.c
834
ring_release_msix(ring);
drivers/thunderbolt/nhi.c
836
dma_free_coherent(&ring->nhi->pdev->dev,
drivers/thunderbolt/nhi.c
837
ring->size * sizeof(*ring->descriptors),
drivers/thunderbolt/nhi.c
838
ring->descriptors, ring->descriptors_dma);
drivers/thunderbolt/nhi.c
840
ring->descriptors = NULL;
drivers/thunderbolt/nhi.c
841
ring->descriptors_dma = 0;
drivers/thunderbolt/nhi.c
844
dev_dbg(&ring->nhi->pdev->dev, "freeing %s %d\n", RING_TYPE(ring),
drivers/thunderbolt/nhi.c
845
ring->hop);
drivers/thunderbolt/nhi.c
852
flush_work(&ring->work);
drivers/thunderbolt/nhi.c
853
kfree(ring);
drivers/thunderbolt/nhi.c
87
static void ring_interrupt_active(struct tb_ring *ring, bool active)
drivers/thunderbolt/nhi.c
89
int index = ring_interrupt_index(ring) / 32 * 4;
drivers/thunderbolt/nhi.c
91
int interrupt_bit = ring_interrupt_index(ring) & 31;
drivers/thunderbolt/nhi.c
922
struct tb_ring *ring;
drivers/thunderbolt/nhi.c
949
ring = nhi->tx_rings[hop];
drivers/thunderbolt/nhi.c
95
if (ring->irq > 0) {
drivers/thunderbolt/nhi.c
951
ring = nhi->rx_rings[hop];
drivers/thunderbolt/nhi.c
952
if (ring == NULL) {
drivers/thunderbolt/nhi.c
960
spin_lock(&ring->lock);
drivers/thunderbolt/nhi.c
961
__ring_interrupt(ring);
drivers/thunderbolt/nhi.c
962
spin_unlock(&ring->lock);
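The drivers/thunderbolt/nhi.c lines above revolve around a classic producer/consumer descriptor ring: the ring is full when (head + 1) % size == tail and empty when head == tail, the producer advances head as frames are posted, and the completion path advances tail only while a per-descriptor "completed" flag is set. Below is a self-contained sketch of just that head/tail discipline; desc_ring, DESC_DONE, post() and reap() are invented names, and the hardware producer/consumer register writes are reduced to comments.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

#define DESC_DONE 0x1                        /* stand-in completion flag, set by the device */

struct desc {
	uint64_t addr;
	uint32_t len;
	uint32_t flags;
};

struct desc_ring {
	struct desc *descriptors;
	size_t size;
	size_t head;                             /* producer index */
	size_t tail;                             /* consumer index */
};

static bool ring_full(const struct desc_ring *r)
{
	return ((r->head + 1) % r->size) == r->tail;
}

static bool ring_empty(const struct desc_ring *r)
{
	return r->head == r->tail;
}

/* Producer: post one frame if there is room, then advance head. */
bool post(struct desc_ring *r, uint64_t addr, uint32_t len)
{
	if (ring_full(r))
		return false;
	r->descriptors[r->head] = (struct desc){ .addr = addr, .len = len };
	r->head = (r->head + 1) % r->size;
	/* the hardware producer index register would be written here */
	return true;
}

/* Consumer: reap one completed descriptor, if any, and advance tail. */
bool reap(struct desc_ring *r, struct desc *out)
{
	if (ring_empty(r) || !(r->descriptors[r->tail].flags & DESC_DONE))
		return false;
	*out = r->descriptors[r->tail];
	r->tail = (r->tail + 1) % r->size;
	/* the hardware consumer index register would be written here */
	return true;
}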
drivers/tty/serial/atmel_serial.c
1104
struct circ_buf *ring = &atmel_port->rx_ring;
drivers/tty/serial/atmel_serial.c
1135
ring->head = ATMEL_SERIAL_RX_SIZE - state.residue;
drivers/tty/serial/atmel_serial.c
1136
BUG_ON(ring->head > ATMEL_SERIAL_RX_SIZE);
drivers/tty/serial/atmel_serial.c
1149
if (ring->head < ring->tail) {
drivers/tty/serial/atmel_serial.c
1150
count = ATMEL_SERIAL_RX_SIZE - ring->tail;
drivers/tty/serial/atmel_serial.c
1152
tty_insert_flip_string(tport, ring->buf + ring->tail, count);
drivers/tty/serial/atmel_serial.c
1153
ring->tail = 0;
drivers/tty/serial/atmel_serial.c
1158
if (ring->tail < ring->head) {
drivers/tty/serial/atmel_serial.c
1159
count = ring->head - ring->tail;
drivers/tty/serial/atmel_serial.c
1161
tty_insert_flip_string(tport, ring->buf + ring->tail, count);
drivers/tty/serial/atmel_serial.c
1163
if (ring->head >= ATMEL_SERIAL_RX_SIZE)
drivers/tty/serial/atmel_serial.c
1164
ring->head = 0;
drivers/tty/serial/atmel_serial.c
1165
ring->tail = ring->head;
drivers/tty/serial/atmel_serial.c
1185
struct circ_buf *ring;
drivers/tty/serial/atmel_serial.c
1189
ring = &atmel_port->rx_ring;
drivers/tty/serial/atmel_serial.c
1205
BUG_ON(!PAGE_ALIGNED(ring->buf));
drivers/tty/serial/atmel_serial.c
1206
atmel_port->rx_phys = dma_map_single(port->dev, ring->buf,
drivers/tty/serial/atmel_serial.c
1215
ATMEL_SERIAL_RX_SIZE, ring->buf, &atmel_port->rx_phys);
drivers/tty/serial/atmel_serial.c
1509
struct circ_buf *ring = &atmel_port->rx_ring;
drivers/tty/serial/atmel_serial.c
1513
while (ring->head != ring->tail) {
drivers/tty/serial/atmel_serial.c
1519
c = ((struct atmel_uart_char *)ring->buf)[ring->tail];
drivers/tty/serial/atmel_serial.c
1521
ring->tail = (ring->tail + 1) & (ATMEL_SERIAL_RINGSIZE - 1);
drivers/tty/serial/atmel_serial.c
741
struct circ_buf *ring = &atmel_port->rx_ring;
drivers/tty/serial/atmel_serial.c
744
if (!CIRC_SPACE(ring->head, ring->tail, ATMEL_SERIAL_RINGSIZE))
drivers/tty/serial/atmel_serial.c
748
c = &((struct atmel_uart_char *)ring->buf)[ring->head];
drivers/tty/serial/atmel_serial.c
755
ring->head = (ring->head + 1) & (ATMEL_SERIAL_RINGSIZE - 1);
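The atmel_serial.c lines above follow the kernel circ_buf convention with a power-of-two ring size, so indices advance by masking with (RINGSIZE - 1) and free space comes from head/tail arithmetic (this is what CIRC_SPACE computes). A tiny standalone model of that masking convention follows; rb, rb_put and rb_get are invented names, and one slot is deliberately left unused so head == tail unambiguously means "empty".

#include <stdbool.h>
#include <stddef.h>

#define RINGSIZE 1024                        /* must be a power of two */

struct rb {
	char buf[RINGSIZE];
	size_t head;                             /* write position */
	size_t tail;                             /* read position */
};

/* Free space, keeping one slot empty so head == tail means "empty" (like CIRC_SPACE). */
static size_t rb_space(const struct rb *r)
{
	return (r->tail - r->head - 1) & (RINGSIZE - 1);
}

bool rb_put(struct rb *r, char c)
{
	if (!rb_space(r))
		return false;                        /* full */
	r->buf[r->head] = c;
	r->head = (r->head + 1) & (RINGSIZE - 1); /* wrap by masking */
	return true;
}

bool rb_get(struct rb *r, char *c)
{
	if (r->head == r->tail)
		return false;                        /* empty */
	*c = r->buf[r->tail];
	r->tail = (r->tail + 1) & (RINGSIZE - 1);
	return true;
}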
drivers/tty/serial/fsl_lpuart.c
1076
struct circ_buf *ring = &sport->rx_ring;
drivers/tty/serial/fsl_lpuart.c
1079
if (ring->head < ring->tail) {
drivers/tty/serial/fsl_lpuart.c
1080
count = sport->rx_sgl.length - ring->tail;
drivers/tty/serial/fsl_lpuart.c
1082
ring->buf + ring->tail, count);
drivers/tty/serial/fsl_lpuart.c
1083
ring->tail = 0;
drivers/tty/serial/fsl_lpuart.c
1086
if (ring->head > ring->tail) {
drivers/tty/serial/fsl_lpuart.c
1087
count = ring->head - ring->tail;
drivers/tty/serial/fsl_lpuart.c
1089
ring->buf + ring->tail, count);
drivers/tty/serial/fsl_lpuart.c
1090
ring->tail = ring->head;
drivers/tty/serial/fsl_lpuart.c
1111
struct circ_buf *ring = &sport->rx_ring;
drivers/tty/serial/fsl_lpuart.c
1189
ring->head = sport->rx_sgl.length - state.residue;
drivers/tty/serial/fsl_lpuart.c
1190
BUG_ON(ring->head > sport->rx_sgl.length);
drivers/tty/serial/fsl_lpuart.c
1212
if (ring->head < ring->tail) {
drivers/tty/serial/fsl_lpuart.c
1213
count = sport->rx_sgl.length - ring->tail;
drivers/tty/serial/fsl_lpuart.c
1215
copied = lpuart_tty_insert_flip_string(port, ring->buf + ring->tail,
drivers/tty/serial/fsl_lpuart.c
1219
ring->tail = 0;
drivers/tty/serial/fsl_lpuart.c
1224
if (ring->tail < ring->head) {
drivers/tty/serial/fsl_lpuart.c
1225
count = ring->head - ring->tail;
drivers/tty/serial/fsl_lpuart.c
1226
copied = lpuart_tty_insert_flip_string(port, ring->buf + ring->tail,
drivers/tty/serial/fsl_lpuart.c
1231
if (ring->head >= sport->rx_sgl.length)
drivers/tty/serial/fsl_lpuart.c
1232
ring->head = 0;
drivers/tty/serial/fsl_lpuart.c
1233
ring->tail = ring->head;
drivers/tty/serial/fsl_lpuart.c
1261
struct circ_buf *ring = &sport->rx_ring;
drivers/tty/serial/fsl_lpuart.c
1271
ring->head = sport->rx_sgl.length - state.residue;
drivers/tty/serial/fsl_lpuart.c
1272
count = CIRC_CNT(ring->head, ring->tail, sport->rx_sgl.length);
drivers/tty/serial/fsl_lpuart.c
1313
struct circ_buf *ring = &sport->rx_ring;
drivers/tty/serial/fsl_lpuart.c
1324
ring->head = sport->rx_sgl.length - state.residue;
drivers/tty/serial/fsl_lpuart.c
1325
count = CIRC_CNT(ring->head, ring->tail, sport->rx_sgl.length);
drivers/tty/serial/fsl_lpuart.c
1343
struct circ_buf *ring = &sport->rx_ring;
drivers/tty/serial/fsl_lpuart.c
1372
ring->buf = kzalloc(sport->rx_dma_rng_buf_len, GFP_ATOMIC);
drivers/tty/serial/fsl_lpuart.c
1373
if (!ring->buf)
drivers/tty/serial/fsl_lpuart.c
1376
sg_init_one(&sport->rx_sgl, ring->buf, sport->rx_dma_rng_buf_len);
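In the fsl_lpuart.c lines above the DMA engine reports how much of the receive buffer is still unwritten (the residue), the driver derives the new head as buffer_length - residue, and then hands the data to the tty layer in at most two contiguous chunks (tail to end of buffer, then start of buffer to head) when the data wrapped. A standalone sketch of that residue-to-chunks step follows; flush_dma_rx is an invented name and push_to_tty merely stands in for tty_insert_flip_string().

#include <stddef.h>

/* Stand-in for handing bytes to the tty layer. */
static void push_to_tty(const char *data, size_t count)
{
	(void)data;
	(void)count;
}

/*
 * buf/len describe the DMA receive buffer, *tail is the driver's read
 * position, and residue is how many bytes of the current DMA cycle are
 * still unwritten (as reported by the dmaengine tx status).
 */
void flush_dma_rx(const char *buf, size_t len, size_t *tail, size_t residue)
{
	size_t head = len - residue;             /* DMA write position */

	if (head < *tail) {                      /* data wrapped: flush tail..end first */
		push_to_tty(buf + *tail, len - *tail);
		*tail = 0;
	}
	if (*tail < head) {                      /* then the contiguous part up to head */
		push_to_tty(buf + *tail, head - *tail);
		*tail = (head == len) ? 0 : head;    /* wrap the tail when head hit the end */
	}
}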
drivers/usb/cdns3/cdnsp-gadget.c
697
if ((pdev->eps[i - 1].ring && !(ctrl_ctx->drop_flags & le32)) ||
drivers/usb/cdns3/cdnsp-gadget.h
1510
struct cdnsp_ring *ring,
drivers/usb/cdns3/cdnsp-gadget.h
1535
bool cdnsp_last_trb_on_ring(struct cdnsp_ring *ring,
drivers/usb/cdns3/cdnsp-gadget.h
1542
void cdnsp_initialize_ring_info(struct cdnsp_ring *ring);
drivers/usb/cdns3/cdnsp-gadget.h
1566
void cdnsp_inc_deq(struct cdnsp_device *pdev, struct cdnsp_ring *ring);
drivers/usb/cdns3/cdnsp-gadget.h
836
struct cdnsp_ring *ring;
drivers/usb/cdns3/cdnsp-mem.c
1000
ep_ctx->deq = cpu_to_le64(pep->ring->first_seg->dma |
drivers/usb/cdns3/cdnsp-mem.c
1001
pep->ring->cycle_state);
drivers/usb/cdns3/cdnsp-mem.c
137
struct cdnsp_ring *ring,
drivers/usb/cdns3/cdnsp-mem.c
144
if (!ring || !first || !last)
drivers/usb/cdns3/cdnsp-mem.c
147
next = ring->enq_seg->next;
drivers/usb/cdns3/cdnsp-mem.c
148
cdnsp_link_segments(pdev, ring->enq_seg, first, ring->type);
drivers/usb/cdns3/cdnsp-mem.c
149
cdnsp_link_segments(pdev, last, next, ring->type);
drivers/usb/cdns3/cdnsp-mem.c
150
ring->num_segs += num_segs;
drivers/usb/cdns3/cdnsp-mem.c
151
ring->num_trbs_free += (TRBS_PER_SEGMENT - 1) * num_segs;
drivers/usb/cdns3/cdnsp-mem.c
153
if (ring->type != TYPE_EVENT && ring->enq_seg == ring->last_seg) {
drivers/usb/cdns3/cdnsp-mem.c
154
ring->last_seg->trbs[TRBS_PER_SEGMENT - 1].link.control &=
drivers/usb/cdns3/cdnsp-mem.c
158
ring->last_seg = last;
drivers/usb/cdns3/cdnsp-mem.c
194
struct cdnsp_ring *ring,
drivers/usb/cdns3/cdnsp-mem.c
211
ret = radix_tree_insert(trb_address_map, key, ring);
drivers/usb/cdns3/cdnsp-mem.c
228
struct cdnsp_ring *ring,
drivers/usb/cdns3/cdnsp-mem.c
239
ret = cdnsp_insert_segment_mapping(trb_address_map, ring, seg,
drivers/usb/cdns3/cdnsp-mem.c
263
static void cdnsp_remove_stream_mapping(struct cdnsp_ring *ring)
drivers/usb/cdns3/cdnsp-mem.c
267
seg = ring->first_seg;
drivers/usb/cdns3/cdnsp-mem.c
269
cdnsp_remove_segment_mapping(ring->trb_address_map, seg);
drivers/usb/cdns3/cdnsp-mem.c
271
} while (seg != ring->first_seg);
drivers/usb/cdns3/cdnsp-mem.c
274
static int cdnsp_update_stream_mapping(struct cdnsp_ring *ring)
drivers/usb/cdns3/cdnsp-mem.c
276
return cdnsp_update_stream_segment_mapping(ring->trb_address_map, ring,
drivers/usb/cdns3/cdnsp-mem.c
277
ring->first_seg, ring->last_seg, GFP_ATOMIC);
drivers/usb/cdns3/cdnsp-mem.c
280
static void cdnsp_ring_free(struct cdnsp_device *pdev, struct cdnsp_ring *ring)
drivers/usb/cdns3/cdnsp-mem.c
282
if (!ring)
drivers/usb/cdns3/cdnsp-mem.c
285
trace_cdnsp_ring_free(ring);
drivers/usb/cdns3/cdnsp-mem.c
287
if (ring->first_seg) {
drivers/usb/cdns3/cdnsp-mem.c
288
if (ring->type == TYPE_STREAM)
drivers/usb/cdns3/cdnsp-mem.c
289
cdnsp_remove_stream_mapping(ring);
drivers/usb/cdns3/cdnsp-mem.c
291
cdnsp_free_segments_for_ring(pdev, ring->first_seg);
drivers/usb/cdns3/cdnsp-mem.c
294
kfree(ring);
drivers/usb/cdns3/cdnsp-mem.c
297
void cdnsp_initialize_ring_info(struct cdnsp_ring *ring)
drivers/usb/cdns3/cdnsp-mem.c
299
ring->enqueue = ring->first_seg->trbs;
drivers/usb/cdns3/cdnsp-mem.c
300
ring->enq_seg = ring->first_seg;
drivers/usb/cdns3/cdnsp-mem.c
301
ring->dequeue = ring->enqueue;
drivers/usb/cdns3/cdnsp-mem.c
302
ring->deq_seg = ring->first_seg;
drivers/usb/cdns3/cdnsp-mem.c
312
ring->cycle_state = 1;
drivers/usb/cdns3/cdnsp-mem.c
318
ring->num_trbs_free = ring->num_segs * (TRBS_PER_SEGMENT - 1) - 1;
drivers/usb/cdns3/cdnsp-mem.c
376
struct cdnsp_ring *ring;
drivers/usb/cdns3/cdnsp-mem.c
379
ring = kzalloc(sizeof(*ring), flags);
drivers/usb/cdns3/cdnsp-mem.c
380
if (!ring)
drivers/usb/cdns3/cdnsp-mem.c
383
ring->num_segs = num_segs;
drivers/usb/cdns3/cdnsp-mem.c
384
ring->bounce_buf_len = max_packet;
drivers/usb/cdns3/cdnsp-mem.c
385
INIT_LIST_HEAD(&ring->td_list);
drivers/usb/cdns3/cdnsp-mem.c
386
ring->type = type;
drivers/usb/cdns3/cdnsp-mem.c
389
return ring;
drivers/usb/cdns3/cdnsp-mem.c
391
ret = cdnsp_alloc_segments_for_ring(pdev, &ring->first_seg,
drivers/usb/cdns3/cdnsp-mem.c
392
&ring->last_seg, num_segs,
drivers/usb/cdns3/cdnsp-mem.c
399
ring->last_seg->trbs[TRBS_PER_SEGMENT - 1].link.control |=
drivers/usb/cdns3/cdnsp-mem.c
402
cdnsp_initialize_ring_info(ring);
drivers/usb/cdns3/cdnsp-mem.c
403
trace_cdnsp_ring_alloc(ring);
drivers/usb/cdns3/cdnsp-mem.c
404
return ring;
drivers/usb/cdns3/cdnsp-mem.c
406
kfree(ring);
drivers/usb/cdns3/cdnsp-mem.c
412
cdnsp_ring_free(pdev, pep->ring);
drivers/usb/cdns3/cdnsp-mem.c
413
pep->ring = NULL;
drivers/usb/cdns3/cdnsp-mem.c
422
struct cdnsp_ring *ring,
drivers/usb/cdns3/cdnsp-mem.c
436
num_segs = max(ring->num_segs, num_segs_needed);
drivers/usb/cdns3/cdnsp-mem.c
439
ring->cycle_state, ring->type,
drivers/usb/cdns3/cdnsp-mem.c
440
ring->bounce_buf_len, flags);
drivers/usb/cdns3/cdnsp-mem.c
444
if (ring->type == TYPE_STREAM)
drivers/usb/cdns3/cdnsp-mem.c
445
ret = cdnsp_update_stream_segment_mapping(ring->trb_address_map,
drivers/usb/cdns3/cdnsp-mem.c
446
ring, first,
drivers/usb/cdns3/cdnsp-mem.c
455
cdnsp_link_rings(pdev, ring, first, last, num_segs);
drivers/usb/cdns3/cdnsp-mem.c
456
trace_cdnsp_ring_expansion(ring);
drivers/usb/cdns3/cdnsp-mem.c
549
return pep->ring;
drivers/usb/cdns3/cdnsp-mem.c
695
pdev->eps[0].ring = cdnsp_ring_alloc(pdev, 2, TYPE_CTRL, 0, GFP_ATOMIC);
drivers/usb/cdns3/cdnsp-mem.c
696
if (!pdev->eps[0].ring)
drivers/usb/cdns3/cdnsp-mem.c
717
struct cdnsp_ring *ep_ring = pdev->eps[0].ring;
drivers/usb/cdns3/cdnsp-mem.c
768
ep0_ctx->deq = cpu_to_le64(pdev->eps[0].ring->first_seg->dma |
drivers/usb/cdns3/cdnsp-mem.c
769
pdev->eps[0].ring->cycle_state);
drivers/usb/cdns3/cdnsp-mem.c
988
pep->ring = cdnsp_ring_alloc(pdev, 2, ring_type, max_packet, mem_flags);
drivers/usb/cdns3/cdnsp-mem.c
989
if (!pep->ring)
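The cdnsp-mem.c lines above build rings out of fixed-size TRB segments: the segments are linked into a loop (the last one pointing back to the first through a link TRB), enqueue and dequeue start at the first segment's TRB array, the cycle state starts at 1, and num_trbs_free accounts for the link TRB slot in every segment. Below is a minimal userspace sketch of that segment/ring setup; trb, seg and trb_ring are stand-ins, not the driver's real types, and error unwinding is intentionally omitted.

#include <stdint.h>
#include <stdlib.h>

#define TRBS_PER_SEG 64                      /* stand-in for TRBS_PER_SEGMENT */

struct trb { uint32_t field[4]; };

struct seg {
	struct trb trbs[TRBS_PER_SEG];           /* last entry plays the role of the link TRB */
	struct seg *next;
};

struct trb_ring {
	struct seg *first_seg;
	struct seg *enq_seg, *deq_seg;
	struct trb *enqueue, *dequeue;
	unsigned int num_segs;
	unsigned int num_trbs_free;
	unsigned int cycle_state;
};

struct trb_ring *ring_alloc(unsigned int num_segs)
{
	struct trb_ring *ring;
	struct seg *prev = NULL, *first = NULL;
	unsigned int i;

	if (!num_segs)
		return NULL;
	ring = calloc(1, sizeof(*ring));
	if (!ring)
		return NULL;

	for (i = 0; i < num_segs; i++) {
		struct seg *s = calloc(1, sizeof(*s));

		if (!s)
			abort();                         /* sketch only: no unwinding */
		if (!first)
			first = s;
		else
			prev->next = s;                  /* previous segment links forward to s */
		prev = s;
	}
	prev->next = first;                      /* close the loop back to the first segment */

	ring->first_seg = first;
	ring->num_segs = num_segs;
	ring->enqueue = ring->dequeue = first->trbs;
	ring->enq_seg = ring->deq_seg = first;
	ring->cycle_state = 1;                   /* producer starts writing cycle bit = 1 */
	/* one TRB per segment is the link TRB; minus one keeps enqueue != dequeue */
	ring->num_trbs_free = num_segs * (TRBS_PER_SEG - 1) - 1;
	return ring;
}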
drivers/usb/cdns3/cdnsp-ring.c
101
return cdnsp_last_trb_on_seg(seg, trb) && (seg->next == ring->first_seg);
drivers/usb/cdns3/cdnsp-ring.c
130
struct cdnsp_ring *ring,
drivers/usb/cdns3/cdnsp-ring.c
146
void cdnsp_inc_deq(struct cdnsp_device *pdev, struct cdnsp_ring *ring)
drivers/usb/cdns3/cdnsp-ring.c
149
if (ring->type == TYPE_EVENT) {
drivers/usb/cdns3/cdnsp-ring.c
150
if (!cdnsp_last_trb_on_seg(ring->deq_seg, ring->dequeue)) {
drivers/usb/cdns3/cdnsp-ring.c
151
ring->dequeue++;
drivers/usb/cdns3/cdnsp-ring.c
155
if (cdnsp_last_trb_on_ring(ring, ring->deq_seg, ring->dequeue))
drivers/usb/cdns3/cdnsp-ring.c
156
ring->cycle_state ^= 1;
drivers/usb/cdns3/cdnsp-ring.c
158
ring->deq_seg = ring->deq_seg->next;
drivers/usb/cdns3/cdnsp-ring.c
159
ring->dequeue = ring->deq_seg->trbs;
drivers/usb/cdns3/cdnsp-ring.c
1607
static void cdnsp_queue_trb(struct cdnsp_device *pdev, struct cdnsp_ring *ring,
drivers/usb/cdns3/cdnsp-ring.c
1613
trb = &ring->enqueue->generic;
drivers/usb/cdns3/cdnsp-ring.c
1620
trace_cdnsp_queue_trb(ring, trb);
drivers/usb/cdns3/cdnsp-ring.c
1621
cdnsp_inc_enq(pdev, ring, more_trbs_coming);
drivers/usb/cdns3/cdnsp-ring.c
164
if (!cdnsp_trb_is_link(ring->dequeue)) {
drivers/usb/cdns3/cdnsp-ring.c
165
ring->dequeue++;
drivers/usb/cdns3/cdnsp-ring.c
166
ring->num_trbs_free++;
drivers/usb/cdns3/cdnsp-ring.c
168
while (cdnsp_trb_is_link(ring->dequeue)) {
drivers/usb/cdns3/cdnsp-ring.c
169
ring->deq_seg = ring->deq_seg->next;
drivers/usb/cdns3/cdnsp-ring.c
170
ring->dequeue = ring->deq_seg->trbs;
drivers/usb/cdns3/cdnsp-ring.c
173
trace_cdnsp_inc_deq(ring);
drivers/usb/cdns3/cdnsp-ring.c
1874
struct cdnsp_ring *ring;
drivers/usb/cdns3/cdnsp-ring.c
188
struct cdnsp_ring *ring,
drivers/usb/cdns3/cdnsp-ring.c
1881
ring = cdnsp_request_to_transfer_ring(pdev, preq);
drivers/usb/cdns3/cdnsp-ring.c
1882
if (!ring)
drivers/usb/cdns3/cdnsp-ring.c
1934
start_trb = &ring->enqueue->generic;
drivers/usb/cdns3/cdnsp-ring.c
1935
start_cycle = ring->cycle_state;
drivers/usb/cdns3/cdnsp-ring.c
194
chain = le32_to_cpu(ring->enqueue->generic.field[3]) & TRB_CHAIN;
drivers/usb/cdns3/cdnsp-ring.c
1955
field |= ring->cycle_state;
drivers/usb/cdns3/cdnsp-ring.c
1964
if (cdnsp_trb_is_link(ring->enqueue + 1)) {
drivers/usb/cdns3/cdnsp-ring.c
1967
ring->enq_seg)) {
drivers/usb/cdns3/cdnsp-ring.c
1968
send_addr = ring->enq_seg->bounce_dma;
drivers/usb/cdns3/cdnsp-ring.c
197
if (!cdnsp_trb_is_link(ring->enqueue))
drivers/usb/cdns3/cdnsp-ring.c
1970
preq->td.bounce_seg = ring->enq_seg;
drivers/usb/cdns3/cdnsp-ring.c
198
ring->num_trbs_free--;
drivers/usb/cdns3/cdnsp-ring.c
1984
preq->td.last_trb = ring->enqueue;
drivers/usb/cdns3/cdnsp-ring.c
199
next = ++(ring->enqueue);
drivers/usb/cdns3/cdnsp-ring.c
2001
cdnsp_queue_trb(pdev, ring, more_trbs_coming,
drivers/usb/cdns3/cdnsp-ring.c
2024
if (cdnsp_trb_is_link(ring->enqueue + 1)) {
drivers/usb/cdns3/cdnsp-ring.c
2026
if (!ring->cycle_state)
drivers/usb/cdns3/cdnsp-ring.c
2029
pep->wa1_nop_trb = ring->enqueue;
drivers/usb/cdns3/cdnsp-ring.c
2031
cdnsp_queue_trb(pdev, ring, 0, 0x0, 0x0,
drivers/usb/cdns3/cdnsp-ring.c
222
ring->cycle_state ^= 1;
drivers/usb/cdns3/cdnsp-ring.c
2238
ep_ring = preq->pep->ring;
drivers/usb/cdns3/cdnsp-ring.c
224
ring->enq_seg = ring->enq_seg->next;
drivers/usb/cdns3/cdnsp-ring.c
225
ring->enqueue = ring->enq_seg->trbs;
drivers/usb/cdns3/cdnsp-ring.c
226
next = ring->enqueue;
drivers/usb/cdns3/cdnsp-ring.c
229
trace_cdnsp_inc_enq(ring);
drivers/usb/cdns3/cdnsp-ring.c
237
struct cdnsp_ring *ring,
drivers/usb/cdns3/cdnsp-ring.c
242
if (ring->num_trbs_free < num_trbs)
drivers/usb/cdns3/cdnsp-ring.c
245
if (ring->type != TYPE_COMMAND && ring->type != TYPE_EVENT) {
drivers/usb/cdns3/cdnsp-ring.c
246
num_trbs_in_deq_seg = ring->dequeue - ring->deq_seg->trbs;
drivers/usb/cdns3/cdnsp-ring.c
248
if (ring->num_trbs_free < num_trbs + num_trbs_in_deq_seg)
drivers/usb/cdns3/cdnsp-ring.c
328
return pep->ring;
drivers/usb/cdns3/cdnsp-ring.c
360
if (pep->ring && !list_empty(&pep->ring->td_list))
drivers/usb/cdns3/cdnsp-ring.c
483
if (new_deq == pep->ring->dequeue) {
drivers/usb/cdns3/cdnsp-ring.c
599
struct cdnsp_ring *ring,
drivers/usb/cdns3/cdnsp-ring.c
616
ring->bounce_buf_len, DMA_TO_DEVICE);
drivers/usb/cdns3/cdnsp-ring.c
620
dma_unmap_single(pdev->dev, seg->bounce_dma, ring->bounce_buf_len,
drivers/usb/cdns3/cdnsp-ring.c
954
struct cdnsp_ring *ring,
drivers/usb/cdns3/cdnsp-ring.c
957
struct cdnsp_segment *seg = ring->deq_seg;
drivers/usb/cdns3/cdnsp-ring.c
958
union cdnsp_trb *trb = ring->dequeue;
drivers/usb/cdns3/cdnsp-ring.c
961
for (sum = 0; trb != stop_trb; cdnsp_next_trb(pdev, ring, &seg, &trb)) {
drivers/usb/cdns3/cdnsp-ring.c
97
bool cdnsp_last_trb_on_ring(struct cdnsp_ring *ring,
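The cdnsp-ring.c lines above advance the enqueue and dequeue pointers within a segment, hop to the next segment's TRB array when they land on a link TRB, and toggle cycle_state when wrapping past the end of the whole ring so the consumer can tell fresh TRBs from stale ones. Building on the same stand-in types as the sketch after the cdnsp-mem.c group, here is a simplified producer-side advance step; is_link_trb() and tying the cycle toggle to the first segment are simplifications, not the driver's exact rules.

#include <stdbool.h>
#include <stdint.h>

#define TRBS_PER_SEG 64

struct trb { uint32_t field[4]; };
struct seg { struct trb trbs[TRBS_PER_SEG]; struct seg *next; };
struct trb_ring {
	struct seg *first_seg, *enq_seg;
	struct trb *enqueue;
	unsigned int cycle_state;
	unsigned int num_trbs_free;
};

/* In this model the last TRB of every segment is the link TRB. */
static bool is_link_trb(const struct seg *s, const struct trb *t)
{
	return t == &s->trbs[TRBS_PER_SEG - 1];
}

/* Advance the producer by one TRB, following link TRBs between segments. */
void inc_enq(struct trb_ring *ring)
{
	ring->num_trbs_free--;
	ring->enqueue++;

	while (is_link_trb(ring->enq_seg, ring->enqueue)) {
		/* Wrapping past the last segment flips the producer's cycle bit. */
		if (ring->enq_seg->next == ring->first_seg)
			ring->cycle_state ^= 1;
		ring->enq_seg = ring->enq_seg->next; /* hop to the next segment */
		ring->enqueue = ring->enq_seg->trbs;
	}
}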
drivers/usb/cdns3/cdnsp-trace.h
332
TP_PROTO(struct cdnsp_ring *ring, struct cdnsp_generic_trb *trb),
drivers/usb/cdns3/cdnsp-trace.h
333
TP_ARGS(ring, trb),
drivers/usb/cdns3/cdnsp-trace.h
344
__entry->type = ring->type;
drivers/usb/cdns3/cdnsp-trace.h
350
__entry->trb_dma = cdnsp_trb_virt_to_dma(ring->deq_seg,
drivers/usb/cdns3/cdnsp-trace.h
363
TP_PROTO(struct cdnsp_ring *ring, struct cdnsp_generic_trb *trb),
drivers/usb/cdns3/cdnsp-trace.h
364
TP_ARGS(ring, trb)
drivers/usb/cdns3/cdnsp-trace.h
368
TP_PROTO(struct cdnsp_ring *ring, struct cdnsp_generic_trb *trb),
drivers/usb/cdns3/cdnsp-trace.h
369
TP_ARGS(ring, trb)
drivers/usb/cdns3/cdnsp-trace.h
373
TP_PROTO(struct cdnsp_ring *ring, struct cdnsp_generic_trb *trb),
drivers/usb/cdns3/cdnsp-trace.h
374
TP_ARGS(ring, trb)
drivers/usb/cdns3/cdnsp-trace.h
378
TP_PROTO(struct cdnsp_ring *ring, struct cdnsp_generic_trb *trb),
drivers/usb/cdns3/cdnsp-trace.h
379
TP_ARGS(ring, trb)
drivers/usb/cdns3/cdnsp-trace.h
383
TP_PROTO(struct cdnsp_ring *ring, struct cdnsp_generic_trb *trb),
drivers/usb/cdns3/cdnsp-trace.h
384
TP_ARGS(ring, trb)
drivers/usb/cdns3/cdnsp-trace.h
388
TP_PROTO(struct cdnsp_ring *ring, struct cdnsp_generic_trb *trb),
drivers/usb/cdns3/cdnsp-trace.h
389
TP_ARGS(ring, trb)
drivers/usb/cdns3/cdnsp-trace.h
393
TP_PROTO(struct cdnsp_ring *ring, struct cdnsp_generic_trb *trb),
drivers/usb/cdns3/cdnsp-trace.h
394
TP_ARGS(ring, trb)
drivers/usb/cdns3/cdnsp-trace.h
673
TP_PROTO(struct cdnsp_ring *ring),
drivers/usb/cdns3/cdnsp-trace.h
674
TP_ARGS(ring),
drivers/usb/cdns3/cdnsp-trace.h
677
__field(void *, ring)
drivers/usb/cdns3/cdnsp-trace.h
689
__entry->ring = ring;
drivers/usb/cdns3/cdnsp-trace.h
690
__entry->type = ring->type;
drivers/usb/cdns3/cdnsp-trace.h
691
__entry->num_segs = ring->num_segs;
drivers/usb/cdns3/cdnsp-trace.h
692
__entry->stream_id = ring->stream_id;
drivers/usb/cdns3/cdnsp-trace.h
693
__entry->enq_seg = ring->enq_seg->dma;
drivers/usb/cdns3/cdnsp-trace.h
694
__entry->deq_seg = ring->deq_seg->dma;
drivers/usb/cdns3/cdnsp-trace.h
695
__entry->cycle_state = ring->cycle_state;
drivers/usb/cdns3/cdnsp-trace.h
696
__entry->num_trbs_free = ring->num_trbs_free;
drivers/usb/cdns3/cdnsp-trace.h
697
__entry->bounce_buf_len = ring->bounce_buf_len;
drivers/usb/cdns3/cdnsp-trace.h
698
__entry->enq = cdnsp_trb_virt_to_dma(ring->enq_seg,
drivers/usb/cdns3/cdnsp-trace.h
699
ring->enqueue);
drivers/usb/cdns3/cdnsp-trace.h
700
__entry->deq = cdnsp_trb_virt_to_dma(ring->deq_seg,
drivers/usb/cdns3/cdnsp-trace.h
701
ring->dequeue);
drivers/usb/cdns3/cdnsp-trace.h
705
cdnsp_ring_type_string(__entry->type), __entry->ring,
drivers/usb/cdns3/cdnsp-trace.h
717
TP_PROTO(struct cdnsp_ring *ring),
drivers/usb/cdns3/cdnsp-trace.h
718
TP_ARGS(ring)
drivers/usb/cdns3/cdnsp-trace.h
722
TP_PROTO(struct cdnsp_ring *ring),
drivers/usb/cdns3/cdnsp-trace.h
723
TP_ARGS(ring)
drivers/usb/cdns3/cdnsp-trace.h
727
TP_PROTO(struct cdnsp_ring *ring),
drivers/usb/cdns3/cdnsp-trace.h
728
TP_ARGS(ring)
drivers/usb/cdns3/cdnsp-trace.h
732
TP_PROTO(struct cdnsp_ring *ring),
drivers/usb/cdns3/cdnsp-trace.h
733
TP_ARGS(ring)
drivers/usb/cdns3/cdnsp-trace.h
737
TP_PROTO(struct cdnsp_ring *ring),
drivers/usb/cdns3/cdnsp-trace.h
738
TP_ARGS(ring)
drivers/usb/cdns3/cdnsp-trace.h
742
TP_PROTO(struct cdnsp_ring *ring),
drivers/usb/cdns3/cdnsp-trace.h
743
TP_ARGS(ring)
drivers/usb/early/xhci-dbc.c
177
xdbc_alloc_ring(struct xdbc_segment *seg, struct xdbc_ring *ring)
drivers/usb/early/xhci-dbc.c
183
ring->segment = seg;
drivers/usb/early/xhci-dbc.c
188
static void __init xdbc_free_ring(struct xdbc_ring *ring)
drivers/usb/early/xhci-dbc.c
190
struct xdbc_segment *seg = ring->segment;
drivers/usb/early/xhci-dbc.c
196
ring->segment = NULL;
drivers/usb/early/xhci-dbc.c
199
static void xdbc_reset_ring(struct xdbc_ring *ring)
drivers/usb/early/xhci-dbc.c
201
struct xdbc_segment *seg = ring->segment;
drivers/usb/early/xhci-dbc.c
206
ring->enqueue = seg->trbs;
drivers/usb/early/xhci-dbc.c
207
ring->dequeue = seg->trbs;
drivers/usb/early/xhci-dbc.c
208
ring->cycle_state = 1;
drivers/usb/early/xhci-dbc.c
210
if (ring != &xdbc.evt_ring) {
drivers/usb/early/xhci-dbc.c
390
xdbc_queue_trb(struct xdbc_ring *ring, u32 field1, u32 field2, u32 field3, u32 field4)
drivers/usb/early/xhci-dbc.c
394
trb = ring->enqueue;
drivers/usb/early/xhci-dbc.c
400
++(ring->enqueue);
drivers/usb/early/xhci-dbc.c
401
if (ring->enqueue >= &ring->segment->trbs[TRBS_PER_SEGMENT - 1]) {
drivers/usb/early/xhci-dbc.c
402
link_trb = ring->enqueue;
drivers/usb/early/xhci-dbc.c
403
if (ring->cycle_state)
drivers/usb/early/xhci-dbc.c
408
ring->enqueue = ring->segment->trbs;
drivers/usb/early/xhci-dbc.c
409
ring->cycle_state ^= 1;
drivers/usb/early/xhci-dbc.c
466
struct xdbc_ring *ring;
drivers/usb/early/xhci-dbc.c
486
ring = (read ? &xdbc.in_ring : &xdbc.out_ring);
drivers/usb/early/xhci-dbc.c
487
trb = ring->enqueue;
drivers/usb/early/xhci-dbc.c
488
cycle = ring->cycle_state;
drivers/usb/early/xhci-dbc.c
507
xdbc_queue_trb(ring, lower_32_bits(addr), upper_32_bits(addr), length, control);
drivers/usb/gadget/udc/cdns2/cdns2-debug.h
102
struct cdns2_ring *ring = &pep->ring;
drivers/usb/gadget/udc/cdns2/cdns2-debug.h
110
trb = &trbs[ring->dequeue];
drivers/usb/gadget/udc/cdns2/cdns2-debug.h
114
ring->dequeue, trb, &dma);
drivers/usb/gadget/udc/cdns2/cdns2-debug.h
116
trb = &trbs[ring->enqueue];
drivers/usb/gadget/udc/cdns2/cdns2-debug.h
120
ring->enqueue, trb, &dma);
drivers/usb/gadget/udc/cdns2/cdns2-debug.h
124
ring->free_trbs, ring->ccs, ring->pcs);
drivers/usb/gadget/udc/cdns2/cdns2-debug.h
132
dma = ring->dma;
drivers/usb/gadget/udc/cdns2/cdns2-ep0.c
46
struct cdns2_ring *ring = &pep->ring;
drivers/usb/gadget/udc/cdns2/cdns2-ep0.c
466
trace_cdns2_complete_trb(pep, pep->ring.trbs);
drivers/usb/gadget/udc/cdns2/cdns2-ep0.c
470
TRB_LEN(le32_to_cpu(pep->ring.trbs->length));
drivers/usb/gadget/udc/cdns2/cdns2-ep0.c
48
ring->trbs[0].buffer = cpu_to_le32(TRB_BUFFER(dma_addr));
drivers/usb/gadget/udc/cdns2/cdns2-ep0.c
49
ring->trbs[0].length = cpu_to_le32(TRB_LEN(length));
drivers/usb/gadget/udc/cdns2/cdns2-ep0.c
52
ring->trbs[0].control = cpu_to_le32(TRB_CYCLE |
drivers/usb/gadget/udc/cdns2/cdns2-ep0.c
54
ring->trbs[1].buffer = cpu_to_le32(TRB_BUFFER(dma_addr));
drivers/usb/gadget/udc/cdns2/cdns2-ep0.c
55
ring->trbs[1].length = cpu_to_le32(TRB_LEN(0));
drivers/usb/gadget/udc/cdns2/cdns2-ep0.c
56
ring->trbs[1].control = cpu_to_le32(TRB_CYCLE | TRB_IOC |
drivers/usb/gadget/udc/cdns2/cdns2-ep0.c
59
ring->trbs[0].control = cpu_to_le32(TRB_CYCLE | TRB_IOC |
drivers/usb/gadget/udc/cdns2/cdns2-ep0.c
61
ring->trbs[1].control = 0;
drivers/usb/gadget/udc/cdns2/cdns2-ep0.c
64
trace_cdns2_queue_trb(pep, ring->trbs);
drivers/usb/gadget/udc/cdns2/cdns2-ep0.c
72
writel(pep->ring.dma, &regs->ep_traddr);
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
1006
trb = pep->ring.trbs + pep->ring.dequeue;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
1019
cdns2_ep_inc_deq(&pep->ring);
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
103
u32 offset = (char *)trb - (char *)pep->ring.trbs;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
105
return pep->ring.dma + offset;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
111
struct cdns2_ring *ring = &pep->ring;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
113
if (pep->ring.trbs) {
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
114
dma_pool_free(pdev->eps_dma_pool, ring->trbs, ring->dma);
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
115
memset(ring, 0, sizeof(*ring));
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
124
struct cdns2_ring *ring;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
126
ring = &pep->ring;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
128
if (!ring->trbs) {
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
129
ring->trbs = dma_pool_alloc(pdev->eps_dma_pool,
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
131
&ring->dma);
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
132
if (!ring->trbs)
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
136
memset(ring->trbs, 0, TR_SEG_SIZE);
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
142
link_trb = (ring->trbs + (TRBS_PER_SEGMENT - 1));
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
143
link_trb->buffer = cpu_to_le32(TRB_BUFFER(ring->dma));
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
1595
pep->ring.enqueue = 0;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
1596
pep->ring.dequeue = 0;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
1598
pep->ring.pcs = !!DMA_EP_STS_CCS(reg);
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
1599
pep->ring.ccs = !!DMA_EP_STS_CCS(reg);
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
1601
writel(pep->ring.dma, &pdev->adma_regs->ep_traddr);
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
1604
pep->ring.free_trbs = TRBS_PER_SEGMENT - 1;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
1795
buffer = cpu_to_le32(TRB_BUFFER(pep->ring.dma +
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
186
static void cdns2_ep_inc_enq(struct cdns2_ring *ring)
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
188
ring->free_trbs--;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
189
cdns2_ep_inc_trb(&ring->enqueue, &ring->pcs, TRBS_PER_SEGMENT);
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
192
static void cdns2_ep_inc_deq(struct cdns2_ring *ring)
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
194
ring->free_trbs++;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
195
cdns2_ep_inc_trb(&ring->dequeue, &ring->ccs, TRBS_PER_SEGMENT);
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
231
if (trb == (pep->ring.trbs + (TRBS_PER_SEGMENT - 1)))
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
232
return pep->ring.trbs;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
292
pep->wa1_cycle_bit = pep->ring.pcs ? TRB_CYCLE : 0;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
295
pep->wa1_trb_index = pep->ring.enqueue;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
322
struct cdns2_ring *ring;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
325
ring = &pep->ring;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
327
if (num_trbs > ring->free_trbs) {
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
333
if ((ring->enqueue + num_trbs) >= (TRBS_PER_SEGMENT - 1)) {
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
344
link_trb = ring->trbs + (TRBS_PER_SEGMENT - 1);
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
357
link_trb->control = cpu_to_le32(((ring->pcs) ? TRB_CYCLE : 0) |
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
367
struct cdns2_trb *link_trb = pep->ring.trbs + (TRBS_PER_SEGMENT - 1);
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
375
trb = pep->ring.trbs;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
575
if (pep->ring.pcs == 0)
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
578
control |= pep->ring.pcs;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
587
trb = pep->ring.trbs + pep->ring.enqueue;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
608
preq->end_trb = pep->ring.enqueue;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
610
cdns2_ep_inc_enq(&pep->ring);
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
620
struct cdns2_ring *ring;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
636
ring = &pep->ring;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
639
control = TRB_TYPE(TRB_NORMAL) | ring->pcs | TRB_ISP;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
640
trb = pep->ring.trbs + ring->enqueue;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
643
preq->end_trb = ring->enqueue;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
644
control = ring->pcs | TRB_TYPE(TRB_LINK) | TRB_CHAIN
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
646
cdns2_ep_inc_enq(&pep->ring);
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
648
if (ring->enqueue == 0)
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
652
trb->buffer = cpu_to_le32(pep->ring.dma +
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
653
(ring->enqueue * TRB_SIZE));
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
684
preq->end_trb = ring->enqueue;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
685
cdns2_ep_inc_enq(&pep->ring);
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
724
writel(pep->ring.dma + pep->ring.dequeue,
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
733
buffer = pep->ring.dma + pep->ring.dequeue * TRB_SIZE;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
736
trb = &pep->ring.trbs[TRBS_PER_SEGMENT];
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
751
if (hw_ccs != pep->ring.ccs)
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
755
writel(pep->ring.dma + (TRBS_PER_SEGMENT * TRB_SIZE),
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
766
struct cdns2_ring *ring;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
783
ring = &pep->ring;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
784
preq->start_trb = ring->enqueue;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
785
preq->trb = ring->trbs + ring->enqueue;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
790
togle_pcs = cdns2_wa1_update_guard(pep, ring->trbs + ring->enqueue);
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
80
dma_index = readl(&pdev->adma_regs->ep_traddr) - pep->ring.dma;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
883
struct cdns2_ring *ring;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
889
ring = &pep->ring;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
903
if (ring->dequeue > preq->end_trb)
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
906
if (ring->dequeue < preq->start_trb)
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
910
if (preq->start_trb > preq->end_trb && ring->dequeue > preq->end_trb &&
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
911
ring->dequeue < preq->start_trb)
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
914
if (preq->start_trb == preq->end_trb && ring->dequeue != preq->end_trb)
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
917
trb = &ring->trbs[ring->dequeue];
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
919
if ((le32_to_cpu(trb->control) & TRB_CYCLE) != ring->ccs)
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
922
if (doorbell == 1 && current_index == ring->dequeue)
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
931
if (ring->enqueue == ring->dequeue &&
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
932
ring->free_trbs == 0) {
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
934
} else if (ring->dequeue < current_index) {
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
936
!ring->dequeue)
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
940
} else if (ring->dequeue > current_index) {
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
957
trb = pep->ring.trbs + pep->ring.dequeue;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
962
cdns2_ep_inc_deq(&pep->ring);
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
981
trb = pep->ring.trbs + pep->ring.dequeue;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
990
cdns2_ep_inc_deq(&pep->ring);
drivers/usb/gadget/udc/cdns2/cdns2-gadget.c
991
trb = pep->ring.trbs + pep->ring.dequeue;
drivers/usb/gadget/udc/cdns2/cdns2-gadget.h
559
struct cdns2_ring ring;
drivers/usb/gadget/udc/cdns2/cdns2-trace.h
386
memcpy(__get_dynamic_array(tr_seg), pep->ring.trbs,
drivers/usb/gadget/udc/cdns2/cdns2-trace.h
420
__entry->enqueue = pep->ring.enqueue;
drivers/usb/gadget/udc/cdns2/cdns2-trace.h
421
__entry->dequeue = pep->ring.dequeue;
drivers/usb/gadget/udc/cdns2/cdns2-trace.h
468
__entry->dequeue_idx = priv_req->pep->ring.dequeue;
drivers/usb/gadget/udc/cdns2/cdns2-trace.h
469
__entry->enqueue_idx = priv_req->pep->ring.enqueue;
drivers/usb/host/xhci-dbgcap.c
264
xhci_dbc_queue_trb(struct xhci_ring *ring, u32 field1,
drivers/usb/host/xhci-dbgcap.c
269
trb = ring->enqueue;
drivers/usb/host/xhci-dbgcap.c
275
trace_xhci_dbc_gadget_ep_queue(ring, &trb->generic,
drivers/usb/host/xhci-dbgcap.c
276
xhci_trb_virt_to_dma(ring->enq_seg,
drivers/usb/host/xhci-dbgcap.c
277
ring->enqueue));
drivers/usb/host/xhci-dbgcap.c
278
ring->num_trbs_free--;
drivers/usb/host/xhci-dbgcap.c
279
next = ++(ring->enqueue);
drivers/usb/host/xhci-dbgcap.c
282
ring->enqueue = ring->enq_seg->trbs;
drivers/usb/host/xhci-dbgcap.c
283
ring->cycle_state ^= 1;
drivers/usb/host/xhci-dbgcap.c
294
struct xhci_ring *ring = dep->ring;
drivers/usb/host/xhci-dbgcap.c
299
if (ring->num_trbs_free < num_trbs)
drivers/usb/host/xhci-dbgcap.c
303
trb = ring->enqueue;
drivers/usb/host/xhci-dbgcap.c
304
cycle = ring->cycle_state;
drivers/usb/host/xhci-dbgcap.c
313
req->trb = ring->enqueue;
drivers/usb/host/xhci-dbgcap.c
314
req->trb_dma = xhci_trb_virt_to_dma(ring->enq_seg, ring->enqueue);
drivers/usb/host/xhci-dbgcap.c
315
xhci_dbc_queue_trb(ring,
drivers/usb/host/xhci-dbgcap.c
406
dep->ring = direction ? dbc->ring_in : dbc->ring_out;
drivers/usb/host/xhci-dbgcap.c
463
static void xhci_dbc_ring_init(struct xhci_ring *ring)
drivers/usb/host/xhci-dbgcap.c
465
struct xhci_segment *seg = ring->first_seg;
drivers/usb/host/xhci-dbgcap.c
47
static void dbc_ring_free(struct device *dev, struct xhci_ring *ring)
drivers/usb/host/xhci-dbgcap.c
471
if (ring->type != TYPE_EVENT) {
drivers/usb/host/xhci-dbgcap.c
474
trb->link.segment_ptr = cpu_to_le64(ring->first_seg->dma);
drivers/usb/host/xhci-dbgcap.c
477
xhci_initialize_ring_info(ring);
drivers/usb/host/xhci-dbgcap.c
482
struct xhci_ring *in_ring = dbc->eps[BULK_IN].ring;
drivers/usb/host/xhci-dbgcap.c
483
struct xhci_ring *out_ring = dbc->eps[BULK_OUT].ring;
drivers/usb/host/xhci-dbgcap.c
49
if (!ring)
drivers/usb/host/xhci-dbgcap.c
502
struct xhci_ring *ring;
drivers/usb/host/xhci-dbgcap.c
506
ring = kzalloc_obj(*ring, flags);
drivers/usb/host/xhci-dbgcap.c
507
if (!ring)
drivers/usb/host/xhci-dbgcap.c
510
ring->num_segs = 1;
drivers/usb/host/xhci-dbgcap.c
511
ring->type = type;
drivers/usb/host/xhci-dbgcap.c
517
ring->first_seg = seg;
drivers/usb/host/xhci-dbgcap.c
518
ring->last_seg = seg;
drivers/usb/host/xhci-dbgcap.c
52
if (ring->first_seg) {
drivers/usb/host/xhci-dbgcap.c
527
INIT_LIST_HEAD(&ring->td_list);
drivers/usb/host/xhci-dbgcap.c
529
xhci_dbc_ring_init(ring);
drivers/usb/host/xhci-dbgcap.c
531
return ring;
drivers/usb/host/xhci-dbgcap.c
535
kfree(ring);
drivers/usb/host/xhci-dbgcap.c
54
ring->first_seg->trbs,
drivers/usb/host/xhci-dbgcap.c
55
ring->first_seg->dma);
drivers/usb/host/xhci-dbgcap.c
56
kfree(ring->first_seg);
drivers/usb/host/xhci-dbgcap.c
58
kfree(ring);
drivers/usb/host/xhci-dbgcap.c
774
struct xhci_ring *ring;
drivers/usb/host/xhci-dbgcap.c
789
ring = dep->ring;
drivers/usb/host/xhci-dbgcap.c
799
ring->num_trbs_free++;
drivers/usb/host/xhci-dbgcap.c
809
trace_xhci_dbc_handle_transfer(ring, &req->trb->generic, req->trb_dma);
drivers/usb/host/xhci-dbgcap.c
864
ring->num_trbs_free++;
drivers/usb/host/xhci-dbgcap.c
869
static void inc_evt_deq(struct xhci_ring *ring)
drivers/usb/host/xhci-dbgcap.c
872
if (ring->dequeue == &ring->deq_seg->trbs[TRBS_PER_SEGMENT - 1]) {
drivers/usb/host/xhci-dbgcap.c
873
ring->cycle_state ^= 1;
drivers/usb/host/xhci-dbgcap.c
874
ring->dequeue = ring->deq_seg->trbs;
drivers/usb/host/xhci-dbgcap.c
877
ring->dequeue++;
drivers/usb/host/xhci-dbgcap.h
106
struct xhci_ring *ring;
drivers/usb/host/xhci-debugfs.c
168
struct xhci_ring *ring = *(struct xhci_ring **)s->private;
drivers/usb/host/xhci-debugfs.c
170
dma = xhci_trb_virt_to_dma(ring->enq_seg, ring->enqueue);
drivers/usb/host/xhci-debugfs.c
179
struct xhci_ring *ring = *(struct xhci_ring **)s->private;
drivers/usb/host/xhci-debugfs.c
181
dma = xhci_trb_virt_to_dma(ring->deq_seg, ring->dequeue);
drivers/usb/host/xhci-debugfs.c
189
struct xhci_ring *ring = *(struct xhci_ring **)s->private;
drivers/usb/host/xhci-debugfs.c
191
seq_printf(s, "%d\n", ring->cycle_state);
drivers/usb/host/xhci-debugfs.c
217
struct xhci_ring *ring = *(struct xhci_ring **)s->private;
drivers/usb/host/xhci-debugfs.c
218
struct xhci_segment *seg = ring->first_seg;
drivers/usb/host/xhci-debugfs.c
220
xhci_for_each_ring_seg(ring->first_seg, seg)
drivers/usb/host/xhci-debugfs.c
441
struct xhci_ring **ring,
drivers/usb/host/xhci-debugfs.c
449
ring, dir, &xhci_ring_fops);
drivers/usb/host/xhci-debugfs.c
483
epriv->show_ring = dev->eps[ep_index].ring;
drivers/usb/host/xhci-debugfs.c
628
xhci_debugfs_create_ring_dir(xhci, &dev->eps[0].ring,
drivers/usb/host/xhci-hub.c
479
if (virt_dev->eps[i].ring && virt_dev->eps[i].ring->dequeue) {
drivers/usb/host/xhci-hub.c
542
} else if (ep->ring && ep->ring->dequeue) {
drivers/usb/host/xhci-mem.c
1010
dev->eps[0].ring = xhci_ring_alloc(xhci, 2, TYPE_CTRL, 0, flags);
drivers/usb/host/xhci-mem.c
1011
if (!dev->eps[0].ring)
drivers/usb/host/xhci-mem.c
1048
ep_ring = virt_dev->eps[0].ring;
drivers/usb/host/xhci-mem.c
116
static void xhci_initialize_ring_segments(struct xhci_hcd *xhci, struct xhci_ring *ring)
drivers/usb/host/xhci-mem.c
1195
ep0_ctx->deq = cpu_to_le64(dev->eps[0].ring->first_seg->dma |
drivers/usb/host/xhci-mem.c
1196
dev->eps[0].ring->cycle_state);
drivers/usb/host/xhci-mem.c
121
if (ring->type == TYPE_EVENT)
drivers/usb/host/xhci-mem.c
124
chain_links = xhci_link_chain_quirk(xhci, ring->type);
drivers/usb/host/xhci-mem.c
125
xhci_for_each_ring_seg(ring->first_seg, seg)
drivers/usb/host/xhci-mem.c
129
ring->last_seg->trbs[TRBS_PER_SEGMENT - 1].link.control |= cpu_to_le32(LINK_TOGGLE);
drivers/usb/host/xhci-mem.c
207
struct xhci_ring *ring,
drivers/usb/host/xhci-mem.c
223
key, ring);
drivers/usb/host/xhci-mem.c
240
struct xhci_ring *ring,
drivers/usb/host/xhci-mem.c
253
ring, seg, mem_flags);
drivers/usb/host/xhci-mem.c
271
static void xhci_remove_stream_mapping(struct xhci_ring *ring)
drivers/usb/host/xhci-mem.c
275
if (WARN_ON_ONCE(ring->trb_address_map == NULL))
drivers/usb/host/xhci-mem.c
278
xhci_for_each_ring_seg(ring->first_seg, seg)
drivers/usb/host/xhci-mem.c
279
xhci_remove_segment_mapping(ring->trb_address_map, seg);
drivers/usb/host/xhci-mem.c
282
static int xhci_update_stream_mapping(struct xhci_ring *ring, gfp_t mem_flags)
drivers/usb/host/xhci-mem.c
284
return xhci_update_stream_segment_mapping(ring->trb_address_map, ring,
drivers/usb/host/xhci-mem.c
285
ring->first_seg, mem_flags);
drivers/usb/host/xhci-mem.c
289
void xhci_ring_free(struct xhci_hcd *xhci, struct xhci_ring *ring)
drivers/usb/host/xhci-mem.c
291
if (!ring)
drivers/usb/host/xhci-mem.c
294
trace_xhci_ring_free(ring);
drivers/usb/host/xhci-mem.c
296
if (ring->first_seg) {
drivers/usb/host/xhci-mem.c
297
if (ring->type == TYPE_STREAM)
drivers/usb/host/xhci-mem.c
298
xhci_remove_stream_mapping(ring);
drivers/usb/host/xhci-mem.c
299
xhci_ring_segments_free(xhci, ring);
drivers/usb/host/xhci-mem.c
302
kfree(ring);
drivers/usb/host/xhci-mem.c
305
void xhci_initialize_ring_info(struct xhci_ring *ring)
drivers/usb/host/xhci-mem.c
308
ring->enqueue = ring->first_seg->trbs;
drivers/usb/host/xhci-mem.c
309
ring->enq_seg = ring->first_seg;
drivers/usb/host/xhci-mem.c
310
ring->dequeue = ring->enqueue;
drivers/usb/host/xhci-mem.c
311
ring->deq_seg = ring->first_seg;
drivers/usb/host/xhci-mem.c
319
ring->cycle_state = 1;
drivers/usb/host/xhci-mem.c
325
ring->num_trbs_free = ring->num_segs * (TRBS_PER_SEGMENT - 1) - 1;
drivers/usb/host/xhci-mem.c
330
static int xhci_alloc_segments_for_ring(struct xhci_hcd *xhci, struct xhci_ring *ring, gfp_t flags)
drivers/usb/host/xhci-mem.c
335
prev = xhci_segment_alloc(xhci, ring->bounce_buf_len, num, flags);
drivers/usb/host/xhci-mem.c
340
ring->first_seg = prev;
drivers/usb/host/xhci-mem.c
341
while (num < ring->num_segs) {
drivers/usb/host/xhci-mem.c
344
next = xhci_segment_alloc(xhci, ring->bounce_buf_len, num, flags);
drivers/usb/host/xhci-mem.c
352
ring->last_seg = prev;
drivers/usb/host/xhci-mem.c
354
ring->last_seg->next = ring->first_seg;
drivers/usb/host/xhci-mem.c
358
ring->last_seg = prev;
drivers/usb/host/xhci-mem.c
359
xhci_ring_segments_free(xhci, ring);
drivers/usb/host/xhci-mem.c
373
struct xhci_ring *ring;
drivers/usb/host/xhci-mem.c
377
ring = kzalloc_node(sizeof(*ring), flags, dev_to_node(dev));
drivers/usb/host/xhci-mem.c
378
if (!ring)
drivers/usb/host/xhci-mem.c
381
ring->num_segs = num_segs;
drivers/usb/host/xhci-mem.c
382
ring->bounce_buf_len = max_packet;
drivers/usb/host/xhci-mem.c
383
INIT_LIST_HEAD(&ring->td_list);
drivers/usb/host/xhci-mem.c
384
ring->type = type;
drivers/usb/host/xhci-mem.c
386
return ring;
drivers/usb/host/xhci-mem.c
388
ret = xhci_alloc_segments_for_ring(xhci, ring, flags);
drivers/usb/host/xhci-mem.c
392
xhci_initialize_ring_segments(xhci, ring);
drivers/usb/host/xhci-mem.c
393
xhci_initialize_ring_info(ring);
drivers/usb/host/xhci-mem.c
394
trace_xhci_ring_alloc(ring);
drivers/usb/host/xhci-mem.c
395
return ring;
drivers/usb/host/xhci-mem.c
398
kfree(ring);
drivers/usb/host/xhci-mem.c
406
xhci_ring_free(xhci, virt_dev->eps[ep_index].ring);
drivers/usb/host/xhci-mem.c
407
virt_dev->eps[ep_index].ring = NULL;
drivers/usb/host/xhci-mem.c
414
int xhci_ring_expansion(struct xhci_hcd *xhci, struct xhci_ring *ring,
drivers/usb/host/xhci-mem.c
424
new_ring.bounce_buf_len = ring->bounce_buf_len;
drivers/usb/host/xhci-mem.c
425
new_ring.type = ring->type;
drivers/usb/host/xhci-mem.c
432
if (ring->type == TYPE_STREAM) {
drivers/usb/host/xhci-mem.c
433
ret = xhci_update_stream_segment_mapping(ring->trb_address_map, ring,
drivers/usb/host/xhci-mem.c
439
xhci_link_rings(xhci, &new_ring, ring);
drivers/usb/host/xhci-mem.c
440
trace_xhci_ring_expansion(ring);
drivers/usb/host/xhci-mem.c
443
ring->num_segs);
drivers/usb/host/xhci-mem.c
598
return ep->ring;
drivers/usb/host/xhci-mem.c
74
static void xhci_ring_segments_free(struct xhci_hcd *xhci, struct xhci_ring *ring)
drivers/usb/host/xhci-mem.c
754
addr = xhci_trb_virt_to_dma(ep->ring->deq_seg, ep->ring->dequeue);
drivers/usb/host/xhci-mem.c
755
ep_ctx->deq = cpu_to_le64(addr | ep->ring->cycle_state);
drivers/usb/host/xhci-mem.c
78
ring->last_seg->next = NULL;
drivers/usb/host/xhci-mem.c
79
seg = ring->first_seg;
drivers/usb/host/xhci-mem.c
889
if (dev->eps[i].ring)
drivers/usb/host/xhci-mem.c
890
xhci_ring_free(xhci, dev->eps[i].ring);
drivers/usb/host/xhci-ring.c
1040
struct xhci_ring *ring;
drivers/usb/host/xhci-ring.c
1061
ring = xhci_urb_to_transfer_ring(xhci, td->urb);
drivers/usb/host/xhci-ring.c
1062
if (!ring) {
drivers/usb/host/xhci-ring.c
1163
if (!list_empty(&ep->ring->td_list)) { /* Not streams compatible */
drivers/usb/host/xhci-ring.c
1166
td = list_first_entry(&ep->ring->td_list, struct xhci_td, td_list);
drivers/usb/host/xhci-ring.c
117
static bool last_trb_on_ring(struct xhci_ring *ring,
drivers/usb/host/xhci-ring.c
120
return last_trb_on_seg(seg, trb) && (seg->next == ring->first_seg);
drivers/usb/host/xhci-ring.c
1306
static void xhci_kill_ring_urbs(struct xhci_hcd *xhci, struct xhci_ring *ring)
drivers/usb/host/xhci-ring.c
1311
list_for_each_entry_safe(cur_td, tmp, &ring->td_list, td_list) {
drivers/usb/host/xhci-ring.c
1317
xhci_unmap_td_bounce_buffer(xhci, ring, cur_td);
drivers/usb/host/xhci-ring.c
1331
struct xhci_ring *ring;
drivers/usb/host/xhci-ring.c
1343
ring = ep->stream_info->stream_rings[stream_id];
drivers/usb/host/xhci-ring.c
1344
if (!ring)
drivers/usb/host/xhci-ring.c
135
static bool unhandled_event_trb(struct xhci_ring *ring)
drivers/usb/host/xhci-ring.c
1350
xhci_kill_ring_urbs(xhci, ring);
drivers/usb/host/xhci-ring.c
1353
ring = ep->ring;
drivers/usb/host/xhci-ring.c
1354
if (!ring)
drivers/usb/host/xhci-ring.c
1359
xhci_kill_ring_urbs(xhci, ring);
drivers/usb/host/xhci-ring.c
137
return ((le32_to_cpu(ring->dequeue->event_cmd.flags) & TRB_CYCLE) ==
drivers/usb/host/xhci-ring.c
138
ring->cycle_state);
drivers/usb/host/xhci-ring.c
186
void inc_deq(struct xhci_hcd *xhci, struct xhci_ring *ring)
drivers/usb/host/xhci-ring.c
191
if (ring->type == TYPE_EVENT) {
drivers/usb/host/xhci-ring.c
192
if (!last_trb_on_seg(ring->deq_seg, ring->dequeue)) {
drivers/usb/host/xhci-ring.c
193
ring->dequeue++;
drivers/usb/host/xhci-ring.c
196
if (last_trb_on_ring(ring, ring->deq_seg, ring->dequeue))
drivers/usb/host/xhci-ring.c
197
ring->cycle_state ^= 1;
drivers/usb/host/xhci-ring.c
198
ring->deq_seg = ring->deq_seg->next;
drivers/usb/host/xhci-ring.c
199
ring->dequeue = ring->deq_seg->trbs;
drivers/usb/host/xhci-ring.c
201
trace_xhci_inc_deq(ring);
drivers/usb/host/xhci-ring.c
207
if (!trb_is_link(ring->dequeue)) {
drivers/usb/host/xhci-ring.c
208
if (last_trb_on_seg(ring->deq_seg, ring->dequeue))
drivers/usb/host/xhci-ring.c
211
ring->dequeue++;
drivers/usb/host/xhci-ring.c
214
while (trb_is_link(ring->dequeue)) {
drivers/usb/host/xhci-ring.c
215
ring->deq_seg = ring->deq_seg->next;
drivers/usb/host/xhci-ring.c
216
ring->dequeue = ring->deq_seg->trbs;
drivers/usb/host/xhci-ring.c
218
trace_xhci_inc_deq(ring);
drivers/usb/host/xhci-ring.c
220
if (link_trb_count++ > ring->num_segs) {
drivers/usb/host/xhci-ring.c
232
static void inc_enq_past_link(struct xhci_hcd *xhci, struct xhci_ring *ring, u32 chain)
drivers/usb/host/xhci-ring.c
236
while (trb_is_link(ring->enqueue)) {
drivers/usb/host/xhci-ring.c
247
if (!xhci_link_chain_quirk(xhci, ring->type)) {
drivers/usb/host/xhci-ring.c
248
ring->enqueue->link.control &= cpu_to_le32(~TRB_CHAIN);
drivers/usb/host/xhci-ring.c
249
ring->enqueue->link.control |= cpu_to_le32(chain);
drivers/usb/host/xhci-ring.c
2512
xhci_dequeue_td(xhci, td, ep->ring, status);
drivers/usb/host/xhci-ring.c
254
ring->enqueue->link.control ^= cpu_to_le32(TRB_CYCLE);
drivers/usb/host/xhci-ring.c
257
if (link_trb_toggles_cycle(ring->enqueue))
drivers/usb/host/xhci-ring.c
258
ring->cycle_state ^= 1;
drivers/usb/host/xhci-ring.c
260
ring->enq_seg = ring->enq_seg->next;
drivers/usb/host/xhci-ring.c
261
ring->enqueue = ring->enq_seg->trbs;
drivers/usb/host/xhci-ring.c
2613
struct xhci_ring *ring)
drivers/usb/host/xhci-ring.c
2615
switch (ring->old_trb_comp_code) {
drivers/usb/host/xhci-ring.c
2622
ring->type == TYPE_ISOC;
drivers/usb/host/xhci-ring.c
263
trace_xhci_inc_enq(ring);
drivers/usb/host/xhci-ring.c
265
if (link_trb_count++ > ring->num_segs) {
drivers/usb/host/xhci-ring.c
283
static void inc_enq(struct xhci_hcd *xhci, struct xhci_ring *ring,
drivers/usb/host/xhci-ring.c
288
chain = le32_to_cpu(ring->enqueue->generic.field[3]) & TRB_CHAIN;
drivers/usb/host/xhci-ring.c
290
if (last_trb_on_seg(ring->enq_seg, ring->enqueue)) {
drivers/usb/host/xhci-ring.c
295
ring->enqueue++;
drivers/usb/host/xhci-ring.c
303
if (trb_is_link(ring->enqueue) && (chain || more_trbs_coming))
drivers/usb/host/xhci-ring.c
304
inc_enq_past_link(xhci, ring, chain);
drivers/usb/host/xhci-ring.c
3148
struct xhci_ring *ring, struct xhci_interrupter *ir)
drivers/usb/host/xhci-ring.c
3167
ring->cycle_state = le32_to_cpu(current_trb->event_cmd.flags) & TRB_CYCLE;
drivers/usb/host/xhci-ring.c
3239
static void queue_trb(struct xhci_hcd *xhci, struct xhci_ring *ring,
drivers/usb/host/xhci-ring.c
3245
trb = &ring->enqueue->generic;
drivers/usb/host/xhci-ring.c
3253
trace_xhci_queue_trb(ring, trb,
drivers/usb/host/xhci-ring.c
3254
xhci_trb_virt_to_dma(ring->enq_seg, ring->enqueue));
drivers/usb/host/xhci-ring.c
3256
inc_enq(xhci, ring, more_trbs_coming);
drivers/usb/host/xhci-ring.c
343
static unsigned int xhci_num_trbs_free(struct xhci_ring *ring)
drivers/usb/host/xhci-ring.c
345
struct xhci_segment *enq_seg = ring->enq_seg;
drivers/usb/host/xhci-ring.c
346
union xhci_trb *enq = ring->enqueue;
drivers/usb/host/xhci-ring.c
358
if (enq == ring->dequeue)
drivers/usb/host/xhci-ring.c
359
return ring->num_segs * (TRBS_PER_SEGMENT - 1);
drivers/usb/host/xhci-ring.c
3614
struct xhci_ring *ring;
drivers/usb/host/xhci-ring.c
362
if (ring->deq_seg == enq_seg && ring->dequeue >= enq)
drivers/usb/host/xhci-ring.c
3629
ring = xhci_urb_to_transfer_ring(xhci, urb);
drivers/usb/host/xhci-ring.c
363
return free + (ring->dequeue - enq);
drivers/usb/host/xhci-ring.c
3630
if (!ring)
drivers/usb/host/xhci-ring.c
3665
start_trb = &ring->enqueue->generic;
drivers/usb/host/xhci-ring.c
3666
start_cycle = ring->cycle_state;
drivers/usb/host/xhci-ring.c
368
} while (i++ < ring->num_segs);
drivers/usb/host/xhci-ring.c
3687
field |= ring->cycle_state;
drivers/usb/host/xhci-ring.c
3694
if (trb_is_link(ring->enqueue + 1)) {
drivers/usb/host/xhci-ring.c
3697
ring->enq_seg)) {
drivers/usb/host/xhci-ring.c
3698
send_addr = ring->enq_seg->bounce_dma;
drivers/usb/host/xhci-ring.c
3700
td->bounce_seg = ring->enq_seg;
drivers/usb/host/xhci-ring.c
3708
td->end_trb = ring->enqueue;
drivers/usb/host/xhci-ring.c
3709
td->end_seg = ring->enq_seg;
drivers/usb/host/xhci-ring.c
3730
queue_trb(xhci, ring, more_trbs_coming | need_zero_pkt,
drivers/usb/host/xhci-ring.c
3757
urb_priv->td[1].end_trb = ring->enqueue;
drivers/usb/host/xhci-ring.c
3758
urb_priv->td[1].end_seg = ring->enq_seg;
drivers/usb/host/xhci-ring.c
3759
field = TRB_TYPE(TRB_NORMAL) | ring->cycle_state | TRB_IOC;
drivers/usb/host/xhci-ring.c
3760
queue_trb(xhci, ring, 0, 0, 0, TRB_INTR_TARGET(0), field);
drivers/usb/host/xhci-ring.c
379
static unsigned int xhci_ring_expansion_needed(struct xhci_hcd *xhci, struct xhci_ring *ring,
drivers/usb/host/xhci-ring.c
387
enq_used = ring->enqueue - ring->enq_seg->trbs;
drivers/usb/host/xhci-ring.c
402
if (trb_is_link(ring->enqueue) && ring->enq_seg->next->trbs == ring->dequeue)
drivers/usb/host/xhci-ring.c
406
seg = ring->enq_seg;
drivers/usb/host/xhci-ring.c
4097
ep_ring = xhci->devs[slot_id]->eps[ep_index].ring;
drivers/usb/host/xhci-ring.c
410
if (seg == ring->deq_seg) {
drivers/usb/host/xhci-ring.c
4284
ep_ring = xdev->eps[ep_index].ring;
drivers/usb/host/xhci-ring.c
587
if (ep->ring && !(list_empty(&ep->ring->td_list)))
drivers/usb/host/xhci-ring.c
634
return ep->ring;
drivers/usb/host/xhci-ring.c
742
if (new_deq == ep->ring->dequeue) {
drivers/usb/host/xhci-ring.c
809
unchain_links = !xhci_link_chain_quirk(xhci, ep->ring ? ep->ring->type : TYPE_STREAM);
drivers/usb/host/xhci-ring.c
846
struct xhci_ring *ring, struct xhci_td *td)
drivers/usb/host/xhci-ring.c
853
if (!ring || !seg || !urb)
drivers/usb/host/xhci-ring.c
857
dma_unmap_single(dev, seg->bounce_dma, ring->bounce_buf_len,
drivers/usb/host/xhci-ring.c
862
dma_unmap_single(dev, seg->bounce_dma, ring->bounce_buf_len,
drivers/usb/host/xhci-ring.c
926
static void xhci_dequeue_td(struct xhci_hcd *xhci, struct xhci_td *td, struct xhci_ring *ring,
drivers/usb/host/xhci-ring.c
929
ring->dequeue = td->end_trb;
drivers/usb/host/xhci-ring.c
930
ring->deq_seg = td->end_seg;
drivers/usb/host/xhci-ring.c
931
inc_deq(xhci, ring);
drivers/usb/host/xhci-ring.c
933
xhci_td_cleanup(xhci, td, ring, status);
drivers/usb/host/xhci-ring.c
939
struct xhci_ring *ring;
drivers/usb/host/xhci-ring.c
945
ring = xhci_urb_to_transfer_ring(ep->xhci, td->urb);
drivers/usb/host/xhci-ring.c
950
xhci_td_cleanup(ep->xhci, td, ring, td->status);
drivers/usb/host/xhci-sideband.c
18
xhci_ring_to_sgtable(struct xhci_sideband *sb, struct xhci_ring *ring)
drivers/usb/host/xhci-sideband.c
252
if (!ep || !ep->ring || !ep->sideband || ep->sideband != sb)
drivers/usb/host/xhci-sideband.c
255
return xhci_ring_to_sgtable(sb, ep->ring);
drivers/usb/host/xhci-sideband.c
29
sz = ring->num_segs * TRB_SEGMENT_SIZE;
drivers/usb/host/xhci-sideband.c
41
seg = ring->first_seg;
drivers/usb/host/xhci-sideband.c
50
for (i = 0; i < ring->num_segs; i++) {
drivers/usb/host/xhci-sideband.c
65
sg_dma_address(sgt->sgl) = ring->first_seg->dma;
drivers/usb/host/xhci-trace.h
101
TP_PROTO(struct xhci_ring *ring, struct xhci_generic_trb *trb, dma_addr_t dma),
drivers/usb/host/xhci-trace.h
102
TP_ARGS(ring, trb, dma),
drivers/usb/host/xhci-trace.h
113
__entry->type = ring->type;
drivers/usb/host/xhci-trace.h
127
TP_PROTO(struct xhci_ring *ring, struct xhci_generic_trb *trb, dma_addr_t dma),
drivers/usb/host/xhci-trace.h
128
TP_ARGS(ring, trb, dma)
drivers/usb/host/xhci-trace.h
132
TP_PROTO(struct xhci_ring *ring, struct xhci_generic_trb *trb, dma_addr_t dma),
drivers/usb/host/xhci-trace.h
133
TP_ARGS(ring, trb, dma)
drivers/usb/host/xhci-trace.h
137
TP_PROTO(struct xhci_ring *ring, struct xhci_generic_trb *trb, dma_addr_t dma),
drivers/usb/host/xhci-trace.h
138
TP_ARGS(ring, trb, dma)
drivers/usb/host/xhci-trace.h
142
TP_PROTO(struct xhci_ring *ring, struct xhci_generic_trb *trb, dma_addr_t dma),
drivers/usb/host/xhci-trace.h
143
TP_ARGS(ring, trb, dma)
drivers/usb/host/xhci-trace.h
148
TP_PROTO(struct xhci_ring *ring, struct xhci_generic_trb *trb, dma_addr_t dma),
drivers/usb/host/xhci-trace.h
149
TP_ARGS(ring, trb, dma)
drivers/usb/host/xhci-trace.h
153
TP_PROTO(struct xhci_ring *ring, struct xhci_generic_trb *trb, dma_addr_t dma),
drivers/usb/host/xhci-trace.h
154
TP_ARGS(ring, trb, dma)
drivers/usb/host/xhci-trace.h
158
TP_PROTO(struct xhci_ring *ring, struct xhci_generic_trb *trb, dma_addr_t dma),
drivers/usb/host/xhci-trace.h
159
TP_ARGS(ring, trb, dma)
drivers/usb/host/xhci-trace.h
475
TP_PROTO(struct xhci_ring *ring),
drivers/usb/host/xhci-trace.h
476
TP_ARGS(ring),
drivers/usb/host/xhci-trace.h
479
__field(void *, ring)
drivers/usb/host/xhci-trace.h
488
__entry->ring = ring;
drivers/usb/host/xhci-trace.h
489
__entry->type = ring->type;
drivers/usb/host/xhci-trace.h
490
__entry->num_segs = ring->num_segs;
drivers/usb/host/xhci-trace.h
491
__entry->stream_id = ring->stream_id;
drivers/usb/host/xhci-trace.h
492
__entry->cycle_state = ring->cycle_state;
drivers/usb/host/xhci-trace.h
493
__entry->bounce_buf_len = ring->bounce_buf_len;
drivers/usb/host/xhci-trace.h
494
__entry->enq = xhci_trb_virt_to_dma(ring->enq_seg, ring->enqueue);
drivers/usb/host/xhci-trace.h
495
__entry->deq = xhci_trb_virt_to_dma(ring->deq_seg, ring->dequeue);
drivers/usb/host/xhci-trace.h
498
xhci_ring_type_string(__entry->type), __entry->ring,
drivers/usb/host/xhci-trace.h
509
TP_PROTO(struct xhci_ring *ring),
drivers/usb/host/xhci-trace.h
510
TP_ARGS(ring)
drivers/usb/host/xhci-trace.h
514
TP_PROTO(struct xhci_ring *ring),
drivers/usb/host/xhci-trace.h
515
TP_ARGS(ring)
drivers/usb/host/xhci-trace.h
519
TP_PROTO(struct xhci_ring *ring),
drivers/usb/host/xhci-trace.h
520
TP_ARGS(ring)
drivers/usb/host/xhci-trace.h
524
TP_PROTO(struct xhci_ring *ring),
drivers/usb/host/xhci-trace.h
525
TP_ARGS(ring)
drivers/usb/host/xhci-trace.h
529
TP_PROTO(struct xhci_ring *ring),
drivers/usb/host/xhci-trace.h
530
TP_ARGS(ring)
drivers/usb/host/xhci.c
1947
if (xhci->devs[udev->slot_id]->eps[ep_index].ring != NULL)
drivers/usb/host/xhci.c
2032
if (virt_dev->eps[ep_index].ring &&
drivers/usb/host/xhci.c
2912
if (ep->ring) {
drivers/usb/host/xhci.c
3130
if ((virt_dev->eps[i-1].ring && !(ctrl_ctx->drop_flags & le32))
drivers/usb/host/xhci.c
3163
if (virt_dev->eps[i].ring) {
drivers/usb/host/xhci.c
3167
virt_dev->eps[i].ring = virt_dev->eps[i].new_ring;
drivers/usb/host/xhci.c
3392
if (!list_empty(&ep->ring->td_list)) {
drivers/usb/host/xhci.c
3485
if (!list_empty(&xhci->devs[slot_id]->eps[ep_index].ring->td_list)) {
drivers/usb/host/xhci.c
3876
if (virt_dev->eps[i].ring) {
drivers/usb/host/xhci.c
4061
if (ep->ring) {
drivers/usb/host/xhci.c
57
static bool td_on_ring(struct xhci_td *td, struct xhci_ring *ring)
drivers/usb/host/xhci.c
64
xhci_for_each_ring_seg(ring->first_seg, seg) {
drivers/usb/host/xhci.c
864
struct xhci_ring *ring;
drivers/usb/host/xhci.c
867
ring = xhci->cmd_ring;
drivers/usb/host/xhci.c
868
xhci_for_each_ring_seg(ring->first_seg, seg) {
drivers/usb/host/xhci.c
875
xhci_initialize_ring_info(ring);
drivers/usb/host/xhci.h
1822
void xhci_ring_free(struct xhci_hcd *xhci, struct xhci_ring *ring);
drivers/usb/host/xhci.h
1823
int xhci_ring_expansion(struct xhci_hcd *xhci, struct xhci_ring *ring,
drivers/usb/host/xhci.h
1825
void xhci_initialize_ring_info(struct xhci_ring *ring);
drivers/usb/host/xhci.h
1866
struct xhci_ring *ring,
drivers/usb/host/xhci.h
1952
void inc_deq(struct xhci_hcd *xhci, struct xhci_ring *ring);
drivers/usb/host/xhci.h
655
struct xhci_ring *ring;
drivers/usb/mtu3/mtu3_debugfs.c
206
struct mtu3_gpd_ring *ring;
drivers/usb/mtu3/mtu3_debugfs.c
209
ring = &mep->gpd_ring;
drivers/usb/mtu3/mtu3_debugfs.c
213
&ring->dma, ring->start, ring->end,
drivers/usb/mtu3/mtu3_debugfs.c
214
ring->enqueue, ring->dequeue);
drivers/usb/mtu3/mtu3_debugfs.c
224
struct mtu3_gpd_ring *ring;
drivers/usb/mtu3/mtu3_debugfs.c
231
ring = &mep->gpd_ring;
drivers/usb/mtu3/mtu3_debugfs.c
232
gpd = ring->start;
drivers/usb/mtu3/mtu3_debugfs.c
239
dma = ring->dma + i * sizeof(*gpd);
drivers/usb/mtu3/mtu3_qmu.c
119
static struct qmu_gpd *gpd_dma_to_virt(struct mtu3_gpd_ring *ring,
drivers/usb/mtu3/mtu3_qmu.c
122
dma_addr_t dma_base = ring->dma;
drivers/usb/mtu3/mtu3_qmu.c
123
struct qmu_gpd *gpd_head = ring->start;
drivers/usb/mtu3/mtu3_qmu.c
132
static dma_addr_t gpd_virt_to_dma(struct mtu3_gpd_ring *ring,
drivers/usb/mtu3/mtu3_qmu.c
135
dma_addr_t dma_base = ring->dma;
drivers/usb/mtu3/mtu3_qmu.c
136
struct qmu_gpd *gpd_head = ring->start;
drivers/usb/mtu3/mtu3_qmu.c
146
static void gpd_ring_init(struct mtu3_gpd_ring *ring, struct qmu_gpd *gpd)
drivers/usb/mtu3/mtu3_qmu.c
148
ring->start = gpd;
drivers/usb/mtu3/mtu3_qmu.c
149
ring->enqueue = gpd;
drivers/usb/mtu3/mtu3_qmu.c
150
ring->dequeue = gpd;
drivers/usb/mtu3/mtu3_qmu.c
151
ring->end = gpd + MAX_GPD_NUM - 1;
drivers/usb/mtu3/mtu3_qmu.c
156
struct mtu3_gpd_ring *ring = &mep->gpd_ring;
drivers/usb/mtu3/mtu3_qmu.c
157
struct qmu_gpd *gpd = ring->start;
drivers/usb/mtu3/mtu3_qmu.c
161
gpd_ring_init(ring, gpd);
drivers/usb/mtu3/mtu3_qmu.c
168
struct mtu3_gpd_ring *ring = &mep->gpd_ring;
drivers/usb/mtu3/mtu3_qmu.c
171
gpd = dma_pool_zalloc(mep->mtu->qmu_gpd_pool, GFP_ATOMIC, &ring->dma);
drivers/usb/mtu3/mtu3_qmu.c
175
gpd_ring_init(ring, gpd);
drivers/usb/mtu3/mtu3_qmu.c
182
struct mtu3_gpd_ring *ring = &mep->gpd_ring;
drivers/usb/mtu3/mtu3_qmu.c
185
ring->start, ring->dma);
drivers/usb/mtu3/mtu3_qmu.c
186
memset(ring, 0, sizeof(*ring));
drivers/usb/mtu3/mtu3_qmu.c
203
static struct qmu_gpd *advance_enq_gpd(struct mtu3_gpd_ring *ring)
drivers/usb/mtu3/mtu3_qmu.c
205
if (ring->enqueue < ring->end)
drivers/usb/mtu3/mtu3_qmu.c
206
ring->enqueue++;
drivers/usb/mtu3/mtu3_qmu.c
208
ring->enqueue = ring->start;
drivers/usb/mtu3/mtu3_qmu.c
210
return ring->enqueue;
drivers/usb/mtu3/mtu3_qmu.c
214
static struct qmu_gpd *advance_deq_gpd(struct mtu3_gpd_ring *ring)
drivers/usb/mtu3/mtu3_qmu.c
216
if (ring->dequeue < ring->end)
drivers/usb/mtu3/mtu3_qmu.c
217
ring->dequeue++;
drivers/usb/mtu3/mtu3_qmu.c
219
ring->dequeue = ring->start;
drivers/usb/mtu3/mtu3_qmu.c
221
return ring->dequeue;
drivers/usb/mtu3/mtu3_qmu.c
225
static bool gpd_ring_empty(struct mtu3_gpd_ring *ring)
drivers/usb/mtu3/mtu3_qmu.c
227
struct qmu_gpd *enq = ring->enqueue;
drivers/usb/mtu3/mtu3_qmu.c
230
if (ring->enqueue < ring->end)
drivers/usb/mtu3/mtu3_qmu.c
233
next = ring->start;
drivers/usb/mtu3/mtu3_qmu.c
236
return next == ring->dequeue;
drivers/usb/mtu3/mtu3_qmu.c
247
struct mtu3_gpd_ring *ring = &mep->gpd_ring;
drivers/usb/mtu3/mtu3_qmu.c
248
struct qmu_gpd *gpd = ring->enqueue;
drivers/usb/mtu3/mtu3_qmu.c
260
enq = advance_enq_gpd(ring);
drivers/usb/mtu3/mtu3_qmu.c
261
enq_dma = gpd_virt_to_dma(ring, enq);
drivers/usb/mtu3/mtu3_qmu.c
290
struct mtu3_gpd_ring *ring = &mep->gpd_ring;
drivers/usb/mtu3/mtu3_qmu.c
291
struct qmu_gpd *gpd = ring->enqueue;
drivers/usb/mtu3/mtu3_qmu.c
303
enq = advance_enq_gpd(ring);
drivers/usb/mtu3/mtu3_qmu.c
304
enq_dma = gpd_virt_to_dma(ring, enq);
drivers/usb/mtu3/mtu3_qmu.c
335
struct mtu3_gpd_ring *ring = &mep->gpd_ring;
drivers/usb/mtu3/mtu3_qmu.c
340
write_txq_start_addr(mbase, epnum, ring->dma);
drivers/usb/mtu3/mtu3_qmu.c
354
write_rxq_start_addr(mbase, epnum, ring->dma);
drivers/usb/mtu3/mtu3_qmu.c
428
struct mtu3_gpd_ring *ring = &mep->gpd_ring;
drivers/usb/mtu3/mtu3_qmu.c
441
gpd_current = gpd_dma_to_virt(ring, cur_gpd_dma);
drivers/usb/mtu3/mtu3_qmu.c
477
struct mtu3_gpd_ring *ring = &mep->gpd_ring;
drivers/usb/mtu3/mtu3_qmu.c
483
gpd_current = gpd_dma_to_virt(ring, cur_gpd_dma);
drivers/usb/mtu3/mtu3_qmu.c
511
struct mtu3_gpd_ring *ring = &mep->gpd_ring;
drivers/usb/mtu3/mtu3_qmu.c
513
struct qmu_gpd *gpd = ring->dequeue;
drivers/usb/mtu3/mtu3_qmu.c
521
gpd_current = gpd_dma_to_virt(ring, cur_gpd_dma);
drivers/usb/mtu3/mtu3_qmu.c
524
__func__, epnum, gpd, gpd_current, ring->enqueue);
drivers/usb/mtu3/mtu3_qmu.c
540
gpd = advance_deq_gpd(ring);
drivers/usb/mtu3/mtu3_qmu.c
544
__func__, epnum, ring->dequeue, ring->enqueue);
drivers/usb/mtu3/mtu3_qmu.c
551
struct mtu3_gpd_ring *ring = &mep->gpd_ring;
drivers/usb/mtu3/mtu3_qmu.c
553
struct qmu_gpd *gpd = ring->dequeue;
drivers/usb/mtu3/mtu3_qmu.c
560
gpd_current = gpd_dma_to_virt(ring, cur_gpd_dma);
drivers/usb/mtu3/mtu3_qmu.c
563
__func__, epnum, gpd, gpd_current, ring->enqueue);
drivers/usb/mtu3/mtu3_qmu.c
579
gpd = advance_deq_gpd(ring);
drivers/usb/mtu3/mtu3_qmu.c
583
__func__, epnum, ring->dequeue, ring->enqueue);
drivers/usb/musb/musb_host.c
2103
list_add_tail(&qh->ring, head);
drivers/usb/musb/musb_host.c
2167
INIT_LIST_HEAD(&qh->ring);
drivers/usb/musb/musb_host.c
2409
list_del(&qh->ring);
drivers/usb/musb/musb_host.c
2466
list_del(&qh->ring);
drivers/usb/musb/musb_host.c
364
head = qh->ring.prev;
drivers/usb/musb/musb_host.c
365
list_del(&qh->ring);
drivers/usb/musb/musb_host.c
951
list_move_tail(&cur_qh->ring, &musb->in_bulk);
drivers/usb/musb/musb_host.c
960
list_move_tail(&cur_qh->ring, &musb->out_bulk);
drivers/usb/musb/musb_host.h
21
struct list_head ring; /* of musb_qh */
drivers/usb/musb/musb_host.h
50
return list_entry(q->next, struct musb_qh, ring);
drivers/vfio/platform/reset/vfio_platform_bcmflexrm.c
35
static int vfio_platform_bcmflexrm_shutdown(void __iomem *ring)
drivers/vfio/platform/reset/vfio_platform_bcmflexrm.c
40
writel_relaxed(0x0, ring + RING_CONTROL);
drivers/vfio/platform/reset/vfio_platform_bcmflexrm.c
44
writel_relaxed(BIT(CONTROL_FLUSH_SHIFT), ring + RING_CONTROL);
drivers/vfio/platform/reset/vfio_platform_bcmflexrm.c
46
if (readl_relaxed(ring + RING_FLUSH_DONE) &
drivers/vfio/platform/reset/vfio_platform_bcmflexrm.c
56
writel_relaxed(0x0, ring + RING_CONTROL);
drivers/vfio/platform/reset/vfio_platform_bcmflexrm.c
58
if (!(readl_relaxed(ring + RING_FLUSH_DONE) &
drivers/vfio/platform/reset/vfio_platform_bcmflexrm.c
71
void __iomem *ring;
drivers/vfio/platform/reset/vfio_platform_bcmflexrm.c
85
for (ring = reg->ioaddr;
drivers/vfio/platform/reset/vfio_platform_bcmflexrm.c
86
ring < (reg->ioaddr + reg->size); ring += RING_REGS_SIZE) {
drivers/vfio/platform/reset/vfio_platform_bcmflexrm.c
87
if (readl_relaxed(ring + RING_VER) == RING_VER_MAGIC) {
drivers/vfio/platform/reset/vfio_platform_bcmflexrm.c
88
rc = vfio_platform_bcmflexrm_shutdown(ring);
drivers/vhost/net.c
1482
struct ptr_ring *ring;
drivers/vhost/net.c
1483
ring = tun_get_tx_ring(file);
drivers/vhost/net.c
1484
if (!IS_ERR(ring))
drivers/vhost/net.c
1486
ring = tap_get_ptr_ring(file);
drivers/vhost/net.c
1487
if (!IS_ERR(ring))
drivers/vhost/net.c
1489
ring = NULL;
drivers/vhost/net.c
1491
return ring;
drivers/vhost/vhost.c
1466
return vhost_copy_to_user(vq, vq->used->ring + idx, head,
drivers/vhost/vhost.c
1558
&vq->avail->ring[idx & (vq->num - 1)]);
drivers/vhost/vhost.c
2846
&vq->avail->ring[last_avail_idx % vq->num]);
drivers/vhost/vhost.c
3006
used = vq->used->ring + start;
drivers/vhost/vhost.c
3060
used = vq->used->ring + start;
drivers/vhost/vhost.c
562
return size_add(struct_size(vq->avail, ring, num), event);
drivers/vhost/vhost.c
57
#define vhost_used_event(vq) ((__virtio16 __user *)&vq->avail->ring[vq->num])
drivers/vhost/vhost.c
571
return size_add(struct_size(vq->used, ring, num), event);
drivers/vhost/vhost.c
58
#define vhost_avail_event(vq) ((__virtio16 __user *)&vq->used->ring[vq->num])
drivers/vhost/vringh.c
468
err = putused(vrh, &used_ring->ring[off], used, part);
drivers/vhost/vringh.c
470
err = putused(vrh, &used_ring->ring[0], used + part,
drivers/vhost/vringh.c
473
err = putused(vrh, &used_ring->ring[off], used, num_used);
drivers/vhost/vringh.c
477
num_used, off, &used_ring->ring[off]);
drivers/vhost/vringh.c
61
err = getu16(vrh, &head, &vrh->vring.avail->ring[i]);
drivers/vhost/vringh.c
64
*last_avail_idx, &vrh->vring.avail->ring[i]);
drivers/virtio/virtio_ring.c
1007
vq->split.vring.used->ring[last_used_idx].id);
drivers/virtio/virtio_ring.c
1009
vq->split.vring.used->ring[last_used_idx].len);
drivers/virtio/virtio_ring.c
1183
vq->split.vring.avail->ring[num] = 0;
drivers/virtio/virtio_ring.c
1189
*(__virtio16 *)&(vq->split.vring.used->ring[num]) = 0;
drivers/virtio/virtio_ring.c
2396
struct vring_packed_desc *ring;
drivers/virtio/virtio_ring.c
2403
ring = vring_alloc_queue(vdev, ring_size_in_bytes,
drivers/virtio/virtio_ring.c
2407
if (!ring)
drivers/virtio/virtio_ring.c
2410
vring_packed->vring.desc = ring;
drivers/virtio/virtio_ring.c
752
vq->split.vring.avail->ring[avail] = cpu_to_virtio16(vq->vq.vdev, head);
drivers/virtio/virtio_ring.c
943
vq->split.vring.used->ring[last_used].id);
drivers/virtio/virtio_ring.c
945
vq->split.vring.used->ring[last_used].len);
drivers/xen/evtchn.c
105
return u->ring + evtchn_ring_offset(u, idx);
drivers/xen/evtchn.c
261
copy_to_user(&buf[bytes1], &u->ring[0], bytes2)))
drivers/xen/evtchn.c
339
old_ring = u->ring;
drivers/xen/evtchn.c
359
memcpy(new_ring, old_ring, u->ring_size * sizeof(*u->ring));
drivers/xen/evtchn.c
361
u->ring_size * sizeof(*u->ring));
drivers/xen/evtchn.c
363
u->ring = new_ring;
drivers/xen/evtchn.c
68
evtchn_port_t *ring;
drivers/xen/evtchn.c
681
evtchn_free_ring(u->ring);
drivers/xen/evtchn.c
91
static void evtchn_free_ring(evtchn_port_t *ring)
drivers/xen/evtchn.c
93
kvfree(ring);
drivers/xen/pvcalls-back.c
170
struct pvcalls_data_intf *intf = map->ring;
drivers/xen/pvcalls-back.c
280
rsp = RING_GET_RESPONSE(&fedata->ring, fedata->ring.rsp_prod_pvt++);
drivers/xen/pvcalls-back.c
341
map->ring = page;
drivers/xen/pvcalls-back.c
342
map->ring_order = map->ring->ring_order;
drivers/xen/pvcalls-back.c
350
ret = xenbus_map_ring_valloc(fedata->dev, map->ring->ref,
drivers/xen/pvcalls-back.c
42
struct xen_pvcalls_back_ring ring;
drivers/xen/pvcalls-back.c
427
rsp = RING_GET_RESPONSE(&fedata->ring, fedata->ring.rsp_prod_pvt++);
drivers/xen/pvcalls-back.c
452
xenbus_unmap_ring_vfree(dev, (void *)map->ring);
drivers/xen/pvcalls-back.c
508
rsp = RING_GET_RESPONSE(&fedata->ring, fedata->ring.rsp_prod_pvt++);
drivers/xen/pvcalls-back.c
578
rsp = RING_GET_RESPONSE(&fedata->ring, fedata->ring.rsp_prod_pvt++);
drivers/xen/pvcalls-back.c
583
RING_PUSH_RESPONSES_AND_CHECK_NOTIFY(&fedata->ring, notify);
drivers/xen/pvcalls-back.c
606
rsp = RING_GET_RESPONSE(&fedata->ring,
drivers/xen/pvcalls-back.c
607
fedata->ring.rsp_prod_pvt++);
drivers/xen/pvcalls-back.c
61
struct pvcalls_data_intf *ring;
drivers/xen/pvcalls-back.c
616
RING_PUSH_RESPONSES_AND_CHECK_NOTIFY(&fedata->ring, notify);
drivers/xen/pvcalls-back.c
682
rsp = RING_GET_RESPONSE(&fedata->ring, fedata->ring.rsp_prod_pvt++);
drivers/xen/pvcalls-back.c
709
rsp = RING_GET_RESPONSE(&fedata->ring, fedata->ring.rsp_prod_pvt++);
drivers/xen/pvcalls-back.c
754
rsp = RING_GET_RESPONSE(&fedata->ring, fedata->ring.rsp_prod_pvt++);
drivers/xen/pvcalls-back.c
810
rsp = RING_GET_RESPONSE(&fedata->ring, fedata->ring.rsp_prod_pvt++);
drivers/xen/pvcalls-back.c
852
&fedata->ring, fedata->ring.rsp_prod_pvt++);
drivers/xen/pvcalls-back.c
869
while (RING_HAS_UNCONSUMED_REQUESTS(&fedata->ring)) {
drivers/xen/pvcalls-back.c
870
RING_COPY_REQUEST(&fedata->ring,
drivers/xen/pvcalls-back.c
871
fedata->ring.req_cons++,
drivers/xen/pvcalls-back.c
876
&fedata->ring, notify);
drivers/xen/pvcalls-back.c
886
RING_FINAL_CHECK_FOR_REQUESTS(&fedata->ring, more);
drivers/xen/pvcalls-back.c
974
BACK_RING_INIT(&fedata->ring, fedata->sring, XEN_PAGE_SIZE * 1);
drivers/xen/pvcalls-back.c
99
struct pvcalls_data_intf *intf = map->ring;
drivers/xen/pvcalls-front.c
1045
req = RING_GET_REQUEST(&bedata->ring, req_id);
drivers/xen/pvcalls-front.c
1050
bedata->ring.req_prod_pvt++;
drivers/xen/pvcalls-front.c
1051
RING_PUSH_REQUESTS_AND_CHECK_NOTIFY(&bedata->ring, notify);
drivers/xen/pvcalls-front.c
1064
map->active.ring->in_error = -EBADF;
drivers/xen/pvcalls-front.c
1120
map->active.ring->in_error = -EBADF;
drivers/xen/pvcalls-front.c
1139
kfree(bedata->ring.sring);
drivers/xen/pvcalls-front.c
1202
FRONT_RING_INIT(&bedata->ring, sring, XEN_PAGE_SIZE);
drivers/xen/pvcalls-front.c
127
*req_id = bedata->ring.req_prod_pvt & (RING_SIZE(&bedata->ring) - 1);
drivers/xen/pvcalls-front.c
128
if (RING_FULL(&bedata->ring) ||
drivers/xen/pvcalls-front.c
136
struct pvcalls_data_intf *intf = map->active.ring;
drivers/xen/pvcalls-front.c
153
struct pvcalls_data_intf *intf = map->active.ring;
drivers/xen/pvcalls-front.c
184
while (RING_HAS_UNCONSUMED_RESPONSES(&bedata->ring)) {
drivers/xen/pvcalls-front.c
185
rsp = RING_GET_RESPONSE(&bedata->ring, bedata->ring.rsp_cons);
drivers/xen/pvcalls-front.c
216
bedata->ring.rsp_cons++;
drivers/xen/pvcalls-front.c
219
RING_FINAL_CHECK_FOR_RESPONSES(&bedata->ring, more);
drivers/xen/pvcalls-front.c
245
gnttab_end_foreign_access(map->active.ring->ref[i], NULL);
drivers/xen/pvcalls-front.c
319
req = RING_GET_REQUEST(&bedata->ring, req_id);
drivers/xen/pvcalls-front.c
32
struct xen_pvcalls_front_ring ring;
drivers/xen/pvcalls-front.c
327
bedata->ring.req_prod_pvt++;
drivers/xen/pvcalls-front.c
328
RING_PUSH_REQUESTS_AND_CHECK_NOTIFY(&bedata->ring, notify);
drivers/xen/pvcalls-front.c
348
if (!map->active.ring)
drivers/xen/pvcalls-front.c
352
PAGE_SIZE << map->active.ring->ring_order);
drivers/xen/pvcalls-front.c
353
free_page((unsigned long)map->active.ring);
drivers/xen/pvcalls-front.c
360
map->active.ring = (struct pvcalls_data_intf *)
drivers/xen/pvcalls-front.c
362
if (!map->active.ring)
drivers/xen/pvcalls-front.c
365
map->active.ring->ring_order = PVCALLS_RING_ORDER;
drivers/xen/pvcalls-front.c
392
map->active.ring->ref[i] = gnttab_grant_foreign_access(
drivers/xen/pvcalls-front.c
398
pfn_to_gfn(virt_to_pfn((void *)map->active.ring)), 0);
drivers/xen/pvcalls-front.c
461
req = RING_GET_REQUEST(&bedata->ring, req_id);
drivers/xen/pvcalls-front.c
473
bedata->ring.req_prod_pvt++;
drivers/xen/pvcalls-front.c
474
RING_PUSH_REQUESTS_AND_CHECK_NOTIFY(&bedata->ring, notify);
drivers/xen/pvcalls-front.c
569
sent = __write_ring(map->active.ring,
drivers/xen/pvcalls-front.c
65
struct pvcalls_data_intf *ring;
drivers/xen/pvcalls-front.c
658
ret = __read_ring(map->active.ring, &map->active.data,
drivers/xen/pvcalls-front.c
696
req = RING_GET_REQUEST(&bedata->ring, req_id);
drivers/xen/pvcalls-front.c
708
bedata->ring.req_prod_pvt++;
drivers/xen/pvcalls-front.c
709
RING_PUSH_REQUESTS_AND_CHECK_NOTIFY(&bedata->ring, notify);
drivers/xen/pvcalls-front.c
752
req = RING_GET_REQUEST(&bedata->ring, req_id);
drivers/xen/pvcalls-front.c
758
bedata->ring.req_prod_pvt++;
drivers/xen/pvcalls-front.c
759
RING_PUSH_REQUESTS_AND_CHECK_NOTIFY(&bedata->ring, notify);
drivers/xen/pvcalls-front.c
861
req = RING_GET_REQUEST(&bedata->ring, req_id);
drivers/xen/pvcalls-front.c
870
bedata->ring.req_prod_pvt++;
drivers/xen/pvcalls-front.c
871
RING_PUSH_REQUESTS_AND_CHECK_NOTIFY(&bedata->ring, notify);
drivers/xen/pvcalls-front.c
958
req = RING_GET_REQUEST(&bedata->ring, req_id);
drivers/xen/pvcalls-front.c
963
bedata->ring.req_prod_pvt++;
drivers/xen/pvcalls-front.c
964
RING_PUSH_REQUESTS_AND_CHECK_NOTIFY(&bedata->ring, notify);
drivers/xen/pvcalls-front.c
980
struct pvcalls_data_intf *intf = map->active.ring;
drivers/xen/xen-scsiback.c
359
ring_res = RING_GET_RESPONSE(&info->ring, info->ring.rsp_prod_pvt);
drivers/xen/xen-scsiback.c
360
info->ring.rsp_prod_pvt++;
drivers/xen/xen-scsiback.c
378
RING_PUSH_RESPONSES_AND_CHECK_NOTIFY(&info->ring, notify);
drivers/xen/xen-scsiback.c
611
xenbus_unmap_ring_vfree(info->dev, info->ring.sring);
drivers/xen/xen-scsiback.c
671
static struct vscsibk_pend *scsiback_get_pend_req(struct vscsiif_back_ring *ring,
drivers/xen/xen-scsiback.c
698
struct vscsiif_back_ring *ring,
drivers/xen/xen-scsiback.c
731
pending_req = scsiback_get_pend_req(ring, v2p);
drivers/xen/xen-scsiback.c
749
struct vscsiif_back_ring *ring = &info->ring;
drivers/xen/xen-scsiback.c
756
rc = ring->req_cons;
drivers/xen/xen-scsiback.c
757
rp = ring->sring->req_prod;
drivers/xen/xen-scsiback.c
760
if (RING_REQUEST_PROD_OVERFLOW(ring, rp)) {
drivers/xen/xen-scsiback.c
761
rc = ring->rsp_prod_pvt;
drivers/xen/xen-scsiback.c
770
if (RING_REQUEST_CONS_OVERFLOW(ring, rc))
drivers/xen/xen-scsiback.c
773
RING_COPY_REQUEST(ring, rc, &ring_req);
drivers/xen/xen-scsiback.c
774
ring->req_cons = ++rc;
drivers/xen/xen-scsiback.c
776
pending_req = prepare_pending_reqs(info, ring, &ring_req);
drivers/xen/xen-scsiback.c
823
RING_FINAL_CHECK_FOR_REQUESTS(&info->ring, more_to_do);
drivers/xen/xen-scsiback.c
858
BACK_RING_INIT(&info->ring, sring, PAGE_SIZE);
drivers/xen/xen-scsiback.c
91
struct vscsiif_back_ring ring;
fs/afs/cm_security.c
123
struct key *ring;
fs/afs/cm_security.c
128
ring = keyring_alloc("kafs",
fs/afs/cm_security.c
134
if (IS_ERR(ring))
fs/afs/cm_security.c
135
return PTR_ERR(ring);
fs/afs/cm_security.c
137
ret = rxrpc_sock_set_security_keyring(socket->sk, ring);
fs/afs/cm_security.c
158
key = key_create(make_key_ref(ring, true),
fs/afs/cm_security.c
172
key_put(ring);
fs/aio.c
1017
struct aio_ring *ring;
fs/aio.c
1029
ring = folio_address(ctx->ring_folios[0]);
fs/aio.c
1030
head = ring->head;
fs/aio.c
1076
struct aio_ring __user *ring = (void __user *)ctx_id;
fs/aio.c
1082
if (get_user(id, &ring->id))
fs/aio.c
1123
struct aio_ring *ring;
fs/aio.c
1159
ring = folio_address(ctx->ring_folios[0]);
fs/aio.c
1160
head = ring->head;
fs/aio.c
1161
ring->tail = tail;
fs/aio.c
1220
struct aio_ring *ring;
fs/aio.c
1235
ring = folio_address(ctx->ring_folios[0]);
fs/aio.c
1236
head = ring->head;
fs/aio.c
1237
tail = ring->tail;
fs/aio.c
1283
ring = folio_address(ctx->ring_folios[0]);
fs/aio.c
1284
ring->head = head;
fs/aio.c
484
struct aio_ring *ring;
fs/aio.c
566
ring = folio_address(ctx->ring_folios[0]);
fs/aio.c
567
ring->nr = nr_events; /* user copy */
fs/aio.c
568
ring->id = ~0U;
fs/aio.c
569
ring->head = ring->tail = 0;
fs/aio.c
570
ring->magic = AIO_RING_MAGIC;
fs/aio.c
571
ring->compat_features = AIO_RING_COMPAT_FEATURES;
fs/aio.c
572
ring->incompat_features = AIO_RING_INCOMPAT_FEATURES;
fs/aio.c
573
ring->header_length = sizeof(struct aio_ring);
fs/aio.c
670
struct aio_ring *ring;
fs/aio.c
687
ring = folio_address(ctx->ring_folios[0]);
fs/aio.c
688
ring->id = ctx->id;
fs/fuse/dev.c
949
cs->ring.copied_sz += ncpy;
fs/fuse/dev_uring.c
1003
WRITE_ONCE(ring->ready, true);
fs/fuse/dev_uring.c
1039
struct fuse_ring *ring = queue->ring;
fs/fuse/dev_uring.c
1059
if (payload_size < ring->max_payload_sz) {
fs/fuse/dev_uring.c
1076
atomic_inc(&ring->queue_refs);
fs/fuse/dev_uring.c
1089
struct fuse_ring *ring = smp_load_acquire(&fc->ring);
fs/fuse/dev_uring.c
1096
if (!ring) {
fs/fuse/dev_uring.c
1097
ring = fuse_uring_create(fc);
fs/fuse/dev_uring.c
1098
if (!ring)
fs/fuse/dev_uring.c
1102
if (qid >= ring->nr_queues) {
fs/fuse/dev_uring.c
1107
queue = ring->queues[qid];
fs/fuse/dev_uring.c
1109
queue = fuse_uring_create_queue(ring, qid);
fs/fuse/dev_uring.c
122
void fuse_uring_abort_end_requests(struct fuse_ring *ring)
fs/fuse/dev_uring.c
1239
static struct fuse_ring_queue *fuse_uring_task_to_queue(struct fuse_ring *ring)
fs/fuse/dev_uring.c
1246
if (WARN_ONCE(qid >= ring->nr_queues,
fs/fuse/dev_uring.c
1248
ring->nr_queues))
fs/fuse/dev_uring.c
1251
queue = ring->queues[qid];
fs/fuse/dev_uring.c
126
struct fuse_conn *fc = ring->fc;
fs/fuse/dev_uring.c
1269
struct fuse_ring *ring = fc->ring;
fs/fuse/dev_uring.c
1275
queue = fuse_uring_task_to_queue(ring);
fs/fuse/dev_uring.c
128
for (qid = 0; qid < ring->nr_queues; qid++) {
fs/fuse/dev_uring.c
129
queue = READ_ONCE(ring->queues[qid]);
fs/fuse/dev_uring.c
1312
struct fuse_ring *ring = fc->ring;
fs/fuse/dev_uring.c
1316
queue = fuse_uring_task_to_queue(ring);
fs/fuse/dev_uring.c
135
WARN_ON_ONCE(ring->fc->max_background != UINT_MAX);
fs/fuse/dev_uring.c
162
struct fuse_ring *ring = fc->ring;
fs/fuse/dev_uring.c
166
if (!ring)
fs/fuse/dev_uring.c
169
for (qid = 0; qid < ring->nr_queues; qid++) {
fs/fuse/dev_uring.c
170
queue = READ_ONCE(ring->queues[qid]);
fs/fuse/dev_uring.c
190
struct fuse_ring *ring = fc->ring;
fs/fuse/dev_uring.c
193
if (!ring)
fs/fuse/dev_uring.c
196
for (qid = 0; qid < ring->nr_queues; qid++) {
fs/fuse/dev_uring.c
197
struct fuse_ring_queue *queue = ring->queues[qid];
fs/fuse/dev_uring.c
216
ring->queues[qid] = NULL;
fs/fuse/dev_uring.c
219
kfree(ring->queues);
fs/fuse/dev_uring.c
220
kfree(ring);
fs/fuse/dev_uring.c
221
fc->ring = NULL;
fs/fuse/dev_uring.c
229
struct fuse_ring *ring;
fs/fuse/dev_uring.c
234
ring = kzalloc_obj(*fc->ring, GFP_KERNEL_ACCOUNT);
fs/fuse/dev_uring.c
235
if (!ring)
fs/fuse/dev_uring.c
238
ring->queues = kzalloc_objs(struct fuse_ring_queue *, nr_queues,
fs/fuse/dev_uring.c
240
if (!ring->queues)
fs/fuse/dev_uring.c
247
if (fc->ring) {
fs/fuse/dev_uring.c
250
res = fc->ring;
fs/fuse/dev_uring.c
254
init_waitqueue_head(&ring->stop_waitq);
fs/fuse/dev_uring.c
256
ring->nr_queues = nr_queues;
fs/fuse/dev_uring.c
257
ring->fc = fc;
fs/fuse/dev_uring.c
258
ring->max_payload_sz = max_payload_size;
fs/fuse/dev_uring.c
259
smp_store_release(&fc->ring, ring);
fs/fuse/dev_uring.c
262
return ring;
fs/fuse/dev_uring.c
265
kfree(ring->queues);
fs/fuse/dev_uring.c
266
kfree(ring);
fs/fuse/dev_uring.c
270
static struct fuse_ring_queue *fuse_uring_create_queue(struct fuse_ring *ring,
fs/fuse/dev_uring.c
273
struct fuse_conn *fc = ring->fc;
fs/fuse/dev_uring.c
287
queue->ring = ring;
fs/fuse/dev_uring.c
302
if (ring->queues[qid]) {
fs/fuse/dev_uring.c
306
return ring->queues[qid];
fs/fuse/dev_uring.c
312
WRITE_ONCE(ring->queues[qid], queue);
fs/fuse/dev_uring.c
366
struct fuse_ring *ring = queue->ring;
fs/fuse/dev_uring.c
387
queue_refs = atomic_dec_return(&ring->queue_refs);
fs/fuse/dev_uring.c
403
static void fuse_uring_log_ent_state(struct fuse_ring *ring)
fs/fuse/dev_uring.c
408
for (qid = 0; qid < ring->nr_queues; qid++) {
fs/fuse/dev_uring.c
409
struct fuse_ring_queue *queue = ring->queues[qid];
fs/fuse/dev_uring.c
421
ring, qid, ent, ent->state);
fs/fuse/dev_uring.c
425
ring, qid, ent, ent->state);
fs/fuse/dev_uring.c
429
ring->stop_debug_log = 1;
fs/fuse/dev_uring.c
435
struct fuse_ring *ring =
fs/fuse/dev_uring.c
439
for (qid = 0; qid < ring->nr_queues; qid++) {
fs/fuse/dev_uring.c
440
struct fuse_ring_queue *queue = READ_ONCE(ring->queues[qid]);
fs/fuse/dev_uring.c
455
if (atomic_read(&ring->queue_refs) > 0) {
fs/fuse/dev_uring.c
457
ring->teardown_time + FUSE_URING_TEARDOWN_TIMEOUT))
fs/fuse/dev_uring.c
458
fuse_uring_log_ent_state(ring);
fs/fuse/dev_uring.c
460
schedule_delayed_work(&ring->async_teardown_work,
fs/fuse/dev_uring.c
463
wake_up_all(&ring->stop_waitq);
fs/fuse/dev_uring.c
470
void fuse_uring_stop_queues(struct fuse_ring *ring)
fs/fuse/dev_uring.c
474
for (qid = 0; qid < ring->nr_queues; qid++) {
fs/fuse/dev_uring.c
475
struct fuse_ring_queue *queue = READ_ONCE(ring->queues[qid]);
fs/fuse/dev_uring.c
483
if (atomic_read(&ring->queue_refs) > 0) {
fs/fuse/dev_uring.c
484
ring->teardown_time = jiffies;
fs/fuse/dev_uring.c
485
INIT_DELAYED_WORK(&ring->async_teardown_work,
fs/fuse/dev_uring.c
487
schedule_delayed_work(&ring->async_teardown_work,
fs/fuse/dev_uring.c
490
wake_up_all(&ring->stop_waitq);
fs/fuse/dev_uring.c
53
struct fuse_ring *ring = queue->ring;
fs/fuse/dev_uring.c
54
struct fuse_conn *fc = ring->fc;
fs/fuse/dev_uring.c
578
static int fuse_uring_copy_from_ring(struct fuse_ring *ring,
fs/fuse/dev_uring.c
593
err = import_ubuf(ITER_SOURCE, ent->payload, ring->max_payload_sz,
fs/fuse/dev_uring.c
610
static int fuse_uring_args_to_ring(struct fuse_ring *ring, struct fuse_req *req,
fs/fuse/dev_uring.c
624
err = import_ubuf(ITER_DEST, ent->payload, ring->max_payload_sz, &iter);
fs/fuse/dev_uring.c
661
ent_in_out.payload_sz = cs.ring.copied_sz;
fs/fuse/dev_uring.c
671
struct fuse_ring *ring = queue->ring;
fs/fuse/dev_uring.c
686
err = fuse_uring_args_to_ring(ring, req, ent);
fs/fuse/dev_uring.c
818
struct fuse_ring *ring = ent->queue->ring;
fs/fuse/dev_uring.c
819
struct fuse_conn *fc = ring->fc;
fs/fuse/dev_uring.c
83
struct fuse_ring *ring = queue->ring;
fs/fuse/dev_uring.c
835
err = fuse_uring_copy_from_ring(ring, req, ent);
fs/fuse/dev_uring.c
84
struct fuse_conn *fc = ring->fc;
fs/fuse/dev_uring.c
886
struct fuse_ring *ring = fc->ring;
fs/fuse/dev_uring.c
894
if (!ring)
fs/fuse/dev_uring.c
897
if (qid >= ring->nr_queues)
fs/fuse/dev_uring.c
900
queue = ring->queues[qid];
fs/fuse/dev_uring.c
954
static bool is_ring_ready(struct fuse_ring *ring, int current_qid)
fs/fuse/dev_uring.c
960
for (qid = 0; qid < ring->nr_queues && ready; qid++) {
fs/fuse/dev_uring.c
964
queue = ring->queues[qid];
fs/fuse/dev_uring.c
987
struct fuse_ring *ring = queue->ring;
fs/fuse/dev_uring.c
988
struct fuse_conn *fc = ring->fc;
fs/fuse/dev_uring.c
998
if (!ring->ready) {
fs/fuse/dev_uring.c
999
bool ready = is_ring_ready(ring, queue->qid);
fs/fuse/dev_uring_i.h
140
void fuse_uring_stop_queues(struct fuse_ring *ring);
fs/fuse/dev_uring_i.h
141
void fuse_uring_abort_end_requests(struct fuse_ring *ring);
fs/fuse/dev_uring_i.h
150
struct fuse_ring *ring = fc->ring;
fs/fuse/dev_uring_i.h
152
if (ring == NULL)
fs/fuse/dev_uring_i.h
155
if (atomic_read(&ring->queue_refs) > 0) {
fs/fuse/dev_uring_i.h
156
fuse_uring_abort_end_requests(ring);
fs/fuse/dev_uring_i.h
157
fuse_uring_stop_queues(ring);
fs/fuse/dev_uring_i.h
163
struct fuse_ring *ring = fc->ring;
fs/fuse/dev_uring_i.h
165
if (ring)
fs/fuse/dev_uring_i.h
166
wait_event(ring->stop_waitq,
fs/fuse/dev_uring_i.h
167
atomic_read(&ring->queue_refs) == 0);
fs/fuse/dev_uring_i.h
172
return fc->ring && fc->ring->ready;
fs/fuse/dev_uring_i.h
64
struct fuse_ring *ring;
fs/fuse/fuse_dev_i.h
39
} ring;
fs/fuse/fuse_i.h
987
struct fuse_ring *ring;
include/linux/fsl/ntmp.h
55
struct netc_cbdr *ring;
include/linux/if_tap.h
73
struct ptr_ring ring;
include/linux/kvm_dirty_ring.h
45
static inline int kvm_dirty_ring_alloc(struct kvm *kvm, struct kvm_dirty_ring *ring,
include/linux/kvm_dirty_ring.h
52
struct kvm_dirty_ring *ring,
include/linux/kvm_dirty_ring.h
63
static inline struct page *kvm_dirty_ring_get_page(struct kvm_dirty_ring *ring,
include/linux/kvm_dirty_ring.h
69
static inline void kvm_dirty_ring_free(struct kvm_dirty_ring *ring)
include/linux/kvm_dirty_ring.h
79
int kvm_dirty_ring_alloc(struct kvm *kvm, struct kvm_dirty_ring *ring,
include/linux/kvm_dirty_ring.h
81
int kvm_dirty_ring_reset(struct kvm *kvm, struct kvm_dirty_ring *ring,
include/linux/kvm_dirty_ring.h
88
struct page *kvm_dirty_ring_get_page(struct kvm_dirty_ring *ring, u32 offset);
include/linux/kvm_dirty_ring.h
90
void kvm_dirty_ring_free(struct kvm_dirty_ring *ring);
include/linux/libata.h
689
struct ata_ering_entry ring[ATA_ERING_SIZE];
include/linux/net/intel/libie/fwlog.h
69
struct libie_fwlog_ring ring;
include/linux/platform_data/cros_ec_sensorhub.h
159
struct cros_ec_sensors_ring_sample *ring;
include/linux/skb_array.h
103
return ptr_ring_consume(&a->ring);
include/linux/skb_array.h
109
return ptr_ring_consume_batched(&a->ring, (void **)array, n);
include/linux/skb_array.h
114
return ptr_ring_consume_irq(&a->ring);
include/linux/skb_array.h
120
return ptr_ring_consume_batched_irq(&a->ring, (void **)array, n);
include/linux/skb_array.h
125
return ptr_ring_consume_any(&a->ring);
include/linux/skb_array.h
131
return ptr_ring_consume_batched_any(&a->ring, (void **)array, n);
include/linux/skb_array.h
137
return ptr_ring_consume_bh(&a->ring);
include/linux/skb_array.h
143
return ptr_ring_consume_batched_bh(&a->ring, (void **)array, n);
include/linux/skb_array.h
162
return PTR_RING_PEEK_CALL(&a->ring, __skb_array_len_with_tag);
include/linux/skb_array.h
167
return PTR_RING_PEEK_CALL_IRQ(&a->ring, __skb_array_len_with_tag);
include/linux/skb_array.h
172
return PTR_RING_PEEK_CALL_BH(&a->ring, __skb_array_len_with_tag);
include/linux/skb_array.h
177
return PTR_RING_PEEK_CALL_ANY(&a->ring, __skb_array_len_with_tag);
include/linux/skb_array.h
182
return ptr_ring_init_noprof(&a->ring, size, gfp);
include/linux/skb_array.h
194
ptr_ring_unconsume(&a->ring, (void **)skbs, n, __skb_array_destroy_skb);
include/linux/skb_array.h
199
return ptr_ring_resize(&a->ring, size, gfp, __skb_array_destroy_skb);
include/linux/skb_array.h
207
BUILD_BUG_ON(offsetof(struct skb_array, ring));
include/linux/skb_array.h
217
ptr_ring_cleanup(&a->ring, __skb_array_destroy_skb);
include/linux/skb_array.h
26
struct ptr_ring ring;
include/linux/skb_array.h
34
return __ptr_ring_full(&a->ring);
include/linux/skb_array.h
39
return ptr_ring_full(&a->ring);
include/linux/skb_array.h
44
return ptr_ring_produce(&a->ring, skb);
include/linux/skb_array.h
49
return ptr_ring_produce_irq(&a->ring, skb);
include/linux/skb_array.h
54
return ptr_ring_produce_bh(&a->ring, skb);
include/linux/skb_array.h
59
return ptr_ring_produce_any(&a->ring, skb);
include/linux/skb_array.h
68
return __ptr_ring_empty(&a->ring);
include/linux/skb_array.h
73
return __ptr_ring_peek(&a->ring);
include/linux/skb_array.h
78
return ptr_ring_empty(&a->ring);
include/linux/skb_array.h
83
return ptr_ring_empty_bh(&a->ring);
include/linux/skb_array.h
88
return ptr_ring_empty_irq(&a->ring);
include/linux/skb_array.h
93
return ptr_ring_empty_any(&a->ring);
include/linux/skb_array.h
98
return __ptr_ring_consume(&a->ring);
include/linux/soc/mediatek/mtk_wed.h
121
struct mtk_wed_ring ring;
include/linux/soc/mediatek/mtk_wed.h
197
int (*tx_ring_setup)(struct mtk_wed_device *dev, int ring,
include/linux/soc/mediatek/mtk_wed.h
199
int (*rx_ring_setup)(struct mtk_wed_device *dev, int ring,
include/linux/soc/mediatek/mtk_wed.h
222
void (*rro_rx_ring_setup)(struct mtk_wed_device *dev, int ring,
include/linux/soc/mediatek/mtk_wed.h
224
void (*msdu_pg_rx_ring_setup)(struct mtk_wed_device *dev, int ring,
include/linux/soc/ti/k3-ringacc.h
127
void k3_ringacc_ring_reset(struct k3_ring *ring);
include/linux/soc/ti/k3-ringacc.h
135
void k3_ringacc_ring_reset_dma(struct k3_ring *ring, u32 occ);
include/linux/soc/ti/k3-ringacc.h
143
int k3_ringacc_ring_free(struct k3_ring *ring);
include/linux/soc/ti/k3-ringacc.h
151
u32 k3_ringacc_get_ring_id(struct k3_ring *ring);
include/linux/soc/ti/k3-ringacc.h
159
int k3_ringacc_get_ring_irq_num(struct k3_ring *ring);
include/linux/soc/ti/k3-ringacc.h
169
int k3_ringacc_ring_cfg(struct k3_ring *ring, struct k3_ring_cfg *cfg);
include/linux/soc/ti/k3-ringacc.h
177
u32 k3_ringacc_ring_get_size(struct k3_ring *ring);
include/linux/soc/ti/k3-ringacc.h
185
u32 k3_ringacc_ring_get_free(struct k3_ring *ring);
include/linux/soc/ti/k3-ringacc.h
193
u32 k3_ringacc_ring_get_occ(struct k3_ring *ring);
include/linux/soc/ti/k3-ringacc.h
201
u32 k3_ringacc_ring_is_full(struct k3_ring *ring);
include/linux/soc/ti/k3-ringacc.h
213
int k3_ringacc_ring_push(struct k3_ring *ring, void *elem);
include/linux/soc/ti/k3-ringacc.h
225
int k3_ringacc_ring_pop(struct k3_ring *ring, void *elem);
include/linux/soc/ti/k3-ringacc.h
238
int k3_ringacc_ring_push_head(struct k3_ring *ring, void *elem);
include/linux/soc/ti/k3-ringacc.h
251
int k3_ringacc_ring_pop_tail(struct k3_ring *ring, void *elem);
include/linux/soc/ti/k3-ringacc.h
253
u32 k3_ringacc_get_tisci_dev_id(struct k3_ring *ring);
include/linux/thunderbolt.h
620
void tb_ring_start(struct tb_ring *ring);
include/linux/thunderbolt.h
621
void tb_ring_stop(struct tb_ring *ring);
include/linux/thunderbolt.h
622
void tb_ring_free(struct tb_ring *ring);
include/linux/thunderbolt.h
624
int __tb_ring_enqueue(struct tb_ring *ring, struct ring_frame *frame);
include/linux/thunderbolt.h
642
static inline int tb_ring_rx(struct tb_ring *ring, struct ring_frame *frame)
include/linux/thunderbolt.h
644
WARN_ON(ring->is_tx);
include/linux/thunderbolt.h
645
return __tb_ring_enqueue(ring, frame);
include/linux/thunderbolt.h
663
static inline int tb_ring_tx(struct tb_ring *ring, struct ring_frame *frame)
include/linux/thunderbolt.h
665
WARN_ON(!ring->is_tx);
include/linux/thunderbolt.h
666
return __tb_ring_enqueue(ring, frame);
include/linux/thunderbolt.h
670
struct ring_frame *tb_ring_poll(struct tb_ring *ring);
include/linux/thunderbolt.h
671
void tb_ring_poll_complete(struct tb_ring *ring);
include/linux/thunderbolt.h
682
static inline struct device *tb_ring_dma_device(struct tb_ring *ring)
include/linux/thunderbolt.h
684
return &ring->nhi->pdev->dev;
include/net/page_pool/types.h
130
u64 ring;
include/net/page_pool/types.h
224
struct ptr_ring ring;
include/trace/events/kvm.h
299
TP_PROTO(struct kvm_dirty_ring *ring, u32 slot, u64 offset),
include/trace/events/kvm.h
300
TP_ARGS(ring, slot, offset),
include/trace/events/kvm.h
311
__entry->index = ring->index;
include/trace/events/kvm.h
312
__entry->dirty_index = ring->dirty_index;
include/trace/events/kvm.h
313
__entry->reset_index = ring->reset_index;
include/trace/events/kvm.h
326
TP_PROTO(struct kvm_dirty_ring *ring),
include/trace/events/kvm.h
327
TP_ARGS(ring),
include/trace/events/kvm.h
336
__entry->index = ring->index;
include/trace/events/kvm.h
337
__entry->dirty_index = ring->dirty_index;
include/trace/events/kvm.h
338
__entry->reset_index = ring->reset_index;
include/uapi/drm/amdgpu_drm.h
1026
__u32 ring;
include/uapi/drm/amdgpu_drm.h
1032
__u32 ring;
include/uapi/drm/amdgpu_drm.h
763
__u32 ring;
include/uapi/drm/amdgpu_drm.h
781
__u32 ring;
include/uapi/linux/genwqe/genwqe_card.h
53
#define IO_EXTENDED_DIAG_MAP(ring) (0x00000500 | ((ring) << 3))
include/uapi/linux/genwqe/genwqe_card.h
55
#define GENWQE_EXTENDED_DIAG_SELECTOR(ring, trace) (((ring) << 8) | (trace))
include/uapi/linux/virtio_ring.h
114
__virtio16 ring[];
include/uapi/linux/virtio_ring.h
131
vring_used_elem_t ring[];
include/uapi/linux/virtio_ring.h
193
#define vring_used_event(vr) ((vr)->avail->ring[(vr)->num])
include/uapi/linux/virtio_ring.h
194
#define vring_avail_event(vr) (*(__virtio16 *)&(vr)->used->ring[(vr)->num])
include/uapi/linux/virtio_ring.h
202
vr->used = (void *)(((unsigned long)&vr->avail->ring[num] + sizeof(__virtio16)
include/xen/interface/io/console.h
15
#define MASK_XENCONS_IDX(idx, ring) ((idx) & (sizeof(ring)-1))
include/xen/interface/io/ring.h
194
(&((_r)->sring->ring[((_idx) & (RING_SIZE(_r) - 1))].req))
include/xen/interface/io/ring.h
197
(&((_r)->sring->ring[((_idx) & (RING_SIZE(_r) - 1))].rsp))
include/xen/interface/io/ring.h
46
(__RD32(((_sz) - offsetof(struct _s##_sring, ring)) / \
include/xen/interface/io/ring.h
47
sizeof(((struct _s##_sring *)0)->ring[0])))
include/xen/interface/io/ring.h
52
(__RD32(((_sz) - (long)(_s)->ring + (long)(_s)) / sizeof((_s)->ring[0])))
include/xen/interface/io/ring.h
98
union __name##_sring_entry ring[]; \
kernel/bpf/cpumap.c
119
static void __cpu_map_ring_cleanup(struct ptr_ring *ring)
kernel/bpf/cpumap.c
128
while ((ptr = ptr_ring_consume(ring))) {
kernel/power/swap.c
1205
unsigned ring = 0, pg = 0, ring_size = 0,
kernel/power/swap.c
1342
ret = swap_read_page(handle, page[ring], &hb);
kernel/power/swap.c
1356
if (++ring >= ring_size)
kernel/power/swap.c
1357
ring = 0;
net/9p/trans_xen.c
105
static bool p9_xen_write_todo(struct xen_9pfs_dataring *ring, RING_IDX size)
net/9p/trans_xen.c
109
cons = ring->intf->out_cons;
net/9p/trans_xen.c
110
prod = ring->intf->out_prod;
net/9p/trans_xen.c
113
return XEN_9PFS_RING_SIZE(ring) -
net/9p/trans_xen.c
114
xen_9pfs_queued(prod, cons, XEN_9PFS_RING_SIZE(ring)) >= size;
net/9p/trans_xen.c
123
struct xen_9pfs_dataring *ring;
net/9p/trans_xen.c
136
ring = &priv->rings[num];
net/9p/trans_xen.c
139
while (io_wait_event_killable(ring->wq,
net/9p/trans_xen.c
140
p9_xen_write_todo(ring, size)) != 0)
net/9p/trans_xen.c
143
spin_lock_irqsave(&ring->lock, flags);
net/9p/trans_xen.c
144
cons = ring->intf->out_cons;
net/9p/trans_xen.c
145
prod = ring->intf->out_prod;
net/9p/trans_xen.c
148
if (XEN_9PFS_RING_SIZE(ring) -
net/9p/trans_xen.c
149
xen_9pfs_queued(prod, cons, XEN_9PFS_RING_SIZE(ring)) < size) {
net/9p/trans_xen.c
150
spin_unlock_irqrestore(&ring->lock, flags);
net/9p/trans_xen.c
154
masked_prod = xen_9pfs_mask(prod, XEN_9PFS_RING_SIZE(ring));
net/9p/trans_xen.c
155
masked_cons = xen_9pfs_mask(cons, XEN_9PFS_RING_SIZE(ring));
net/9p/trans_xen.c
157
xen_9pfs_write_packet(ring->data.out, p9_req->tc.sdata, size,
net/9p/trans_xen.c
159
XEN_9PFS_RING_SIZE(ring));
net/9p/trans_xen.c
164
ring->intf->out_prod = prod;
net/9p/trans_xen.c
165
spin_unlock_irqrestore(&ring->lock, flags);
net/9p/trans_xen.c
166
notify_remote_via_irq(ring->irq);
net/9p/trans_xen.c
175
struct xen_9pfs_dataring *ring;
net/9p/trans_xen.c
181
ring = container_of(work, struct xen_9pfs_dataring, work);
net/9p/trans_xen.c
182
priv = ring->priv;
net/9p/trans_xen.c
185
cons = ring->intf->in_cons;
net/9p/trans_xen.c
186
prod = ring->intf->in_prod;
net/9p/trans_xen.c
189
if (xen_9pfs_queued(prod, cons, XEN_9PFS_RING_SIZE(ring)) <
net/9p/trans_xen.c
191
notify_remote_via_irq(ring->irq);
net/9p/trans_xen.c
195
masked_prod = xen_9pfs_mask(prod, XEN_9PFS_RING_SIZE(ring));
net/9p/trans_xen.c
196
masked_cons = xen_9pfs_mask(cons, XEN_9PFS_RING_SIZE(ring));
net/9p/trans_xen.c
199
xen_9pfs_read_packet(&h, ring->data.in, sizeof(h),
net/9p/trans_xen.c
201
XEN_9PFS_RING_SIZE(ring));
net/9p/trans_xen.c
208
ring->intf->in_cons = cons;
net/9p/trans_xen.c
225
masked_cons = xen_9pfs_mask(cons, XEN_9PFS_RING_SIZE(ring));
net/9p/trans_xen.c
227
xen_9pfs_read_packet(req->rc.sdata, ring->data.in, h.size,
net/9p/trans_xen.c
229
XEN_9PFS_RING_SIZE(ring));
net/9p/trans_xen.c
234
ring->intf->in_cons = cons;
net/9p/trans_xen.c
245
struct xen_9pfs_dataring *ring = r;
net/9p/trans_xen.c
247
if (!ring || !ring->priv->client) {
net/9p/trans_xen.c
25
#define XEN_9PFS_RING_SIZE(ring) XEN_FLEX_RING_SIZE(ring->intf->ring_order)
net/9p/trans_xen.c
252
wake_up_interruptible(&ring->wq);
net/9p/trans_xen.c
253
schedule_work(&ring->work);
net/9p/trans_xen.c
282
struct xen_9pfs_dataring *ring = &priv->rings[i];
net/9p/trans_xen.c
284
cancel_work_sync(&ring->work);
net/9p/trans_xen.c
289
unbind_from_irqhandler(priv->rings[i].irq, ring);
net/9p/trans_xen.c
330
struct xen_9pfs_dataring *ring,
net/9p/trans_xen.c
337
init_waitqueue_head(&ring->wq);
net/9p/trans_xen.c
338
spin_lock_init(&ring->lock);
net/9p/trans_xen.c
339
INIT_WORK(&ring->work, p9_xen_response);
net/9p/trans_xen.c
341
ring->intf = (struct xen_9pfs_data_intf *)get_zeroed_page(GFP_KERNEL);
net/9p/trans_xen.c
342
if (!ring->intf)
net/9p/trans_xen.c
345
virt_to_gfn(ring->intf), 0);
net/9p/trans_xen.c
348
ring->ref = ret;
net/9p/trans_xen.c
360
ring->intf->ref[i] = ret;
net/9p/trans_xen.c
362
ring->intf->ring_order = order;
net/9p/trans_xen.c
363
ring->data.in = bytes;
net/9p/trans_xen.c
364
ring->data.out = bytes + XEN_FLEX_RING_SIZE(order);
net/9p/trans_xen.c
366
ret = xenbus_alloc_evtchn(dev, &ring->evtchn);
net/9p/trans_xen.c
369
ring->irq = bind_evtchn_to_irqhandler(ring->evtchn,
net/9p/trans_xen.c
371
0, "xen_9pfs-frontend", ring);
net/9p/trans_xen.c
372
if (ring->irq >= 0)
net/9p/trans_xen.c
375
xenbus_free_evtchn(dev, ring->evtchn);
net/9p/trans_xen.c
376
ret = ring->irq;
net/9p/trans_xen.c
380
gnttab_end_foreign_access(ring->intf->ref[i], NULL);
net/9p/trans_xen.c
383
gnttab_end_foreign_access(ring->ref, NULL);
net/9p/trans_xen.c
384
free_page((unsigned long)ring->intf);
net/core/page_pool.c
107
stats->recycle_stats.ring += pcpu->ring;
net/core/page_pool.c
1111
while ((netmem = (__force netmem_ref)ptr_ring_consume_bh(&pool->ring))) {
net/core/page_pool.c
147
*data++ = pool_stats->recycle_stats.ring;
net/core/page_pool.c
162
__acquires(&pool->ring.producer_lock)
net/core/page_pool.c
167
spin_lock(&pool->ring.producer_lock);
net/core/page_pool.c
169
spin_lock_bh(&pool->ring.producer_lock);
net/core/page_pool.c
176
__releases(&pool->ring.producer_lock)
net/core/page_pool.c
179
spin_unlock(&pool->ring.producer_lock);
net/core/page_pool.c
181
spin_unlock_bh(&pool->ring.producer_lock);
net/core/page_pool.c
262
if (ptr_ring_init(&pool->ring, ring_qsize, GFP_KERNEL) < 0) {
net/core/page_pool.c
312
ptr_ring_cleanup(&pool->ring, NULL);
net/core/page_pool.c
323
ptr_ring_cleanup(&pool->ring, NULL);
net/core/page_pool.c
380
struct ptr_ring *r = &pool->ring;
net/core/page_pool.c
786
ret = !__ptr_ring_produce(&pool->ring, (__force void *)netmem);
net/core/page_pool.c
788
recycle_stat_inc(pool, ring);
net/core/page_pool.c
934
if (__ptr_ring_produce(&pool->ring, (__force void *)bulk[i])) {
net/core/page_pool.c
942
recycle_stat_add(pool, ring, i);
net/core/page_pool_user.c
156
stats.recycle_stats.ring) ||
net/ethtool/common.c
713
u64 ring = rule_info.fs.ring_cookie;
net/ethtool/common.c
720
ring += ethtool_get_rss_ctx_max_channel(ctx);
net/ethtool/common.c
722
max_ring = max_t(u64, max_ring, ring);
net/ipv6/seg6_hmac.c
105
char *ring, *off;
net/ipv6/seg6_hmac.c
126
ring = this_cpu_ptr(hmac_storage.hmac_ring);
net/ipv6/seg6_hmac.c
127
off = ring;
net/ipv6/seg6_hmac.c
151
hmac_sha1(&hinfo->key.sha1, ring, plen, output);
net/ipv6/seg6_hmac.c
157
hmac_sha256(&hinfo->key.sha256, ring, plen, output);
net/packet/diag.c
72
static int pdiag_put_ring(struct packet_ring_buffer *ring, int ver, int nl_type,
net/packet/diag.c
77
if (!ring->pg_vec)
net/packet/diag.c
80
pdr.pdr_block_size = ring->pg_vec_pages << PAGE_SHIFT;
net/packet/diag.c
81
pdr.pdr_block_nr = ring->pg_vec_len;
net/packet/diag.c
82
pdr.pdr_frame_size = ring->frame_size;
net/packet/diag.c
83
pdr.pdr_frame_nr = ring->frame_max + 1;
net/packet/diag.c
86
pdr.pdr_retire_tmo = ktime_to_ms(ring->prb_bdqc.interval_ktime);
net/packet/diag.c
87
pdr.pdr_sizeof_priv = ring->prb_bdqc.blk_sizeof_priv;
net/packet/diag.c
88
pdr.pdr_features = ring->prb_bdqc.feature_req_word;
net/rds/ib.h
411
void rds_ib_ring_init(struct rds_ib_work_ring *ring, u32 nr);
net/rds/ib.h
412
void rds_ib_ring_resize(struct rds_ib_work_ring *ring, u32 nr);
net/rds/ib.h
413
u32 rds_ib_ring_alloc(struct rds_ib_work_ring *ring, u32 val, u32 *pos);
net/rds/ib.h
414
void rds_ib_ring_free(struct rds_ib_work_ring *ring, u32 val);
net/rds/ib.h
415
void rds_ib_ring_unalloc(struct rds_ib_work_ring *ring, u32 val);
net/rds/ib.h
416
int rds_ib_ring_empty(struct rds_ib_work_ring *ring);
net/rds/ib.h
417
int rds_ib_ring_low(struct rds_ib_work_ring *ring);
net/rds/ib.h
418
u32 rds_ib_ring_oldest(struct rds_ib_work_ring *ring);
net/rds/ib.h
419
u32 rds_ib_ring_completed(struct rds_ib_work_ring *ring, u32 wr_id, u32 oldest);
net/rds/ib_ring.c
101
avail = ring->w_nr - __rds_ib_ring_used(ring);
net/rds/ib_ring.c
103
rdsdebug("ring %p val %u next %u free %u\n", ring, val,
net/rds/ib_ring.c
104
ring->w_alloc_ptr, avail);
net/rds/ib_ring.c
108
*pos = ring->w_alloc_ptr;
net/rds/ib_ring.c
110
ring->w_alloc_ptr = (ring->w_alloc_ptr + ret) % ring->w_nr;
net/rds/ib_ring.c
111
ring->w_alloc_ctr += ret;
net/rds/ib_ring.c
117
void rds_ib_ring_free(struct rds_ib_work_ring *ring, u32 val)
net/rds/ib_ring.c
119
ring->w_free_ptr = (ring->w_free_ptr + val) % ring->w_nr;
net/rds/ib_ring.c
120
atomic_add(val, &ring->w_free_ctr);
net/rds/ib_ring.c
122
if (__rds_ib_ring_empty(ring) &&
net/rds/ib_ring.c
127
void rds_ib_ring_unalloc(struct rds_ib_work_ring *ring, u32 val)
net/rds/ib_ring.c
129
ring->w_alloc_ptr = (ring->w_alloc_ptr - val) % ring->w_nr;
net/rds/ib_ring.c
130
ring->w_alloc_ctr -= val;
net/rds/ib_ring.c
133
int rds_ib_ring_empty(struct rds_ib_work_ring *ring)
net/rds/ib_ring.c
135
return __rds_ib_ring_empty(ring);
net/rds/ib_ring.c
138
int rds_ib_ring_low(struct rds_ib_work_ring *ring)
net/rds/ib_ring.c
140
return __rds_ib_ring_used(ring) <= (ring->w_nr >> 1);
net/rds/ib_ring.c
147
u32 rds_ib_ring_oldest(struct rds_ib_work_ring *ring)
net/rds/ib_ring.c
149
return ring->w_free_ptr;
net/rds/ib_ring.c
156
u32 rds_ib_ring_completed(struct rds_ib_work_ring *ring, u32 wr_id, u32 oldest)
net/rds/ib_ring.c
163
ret = ring->w_nr - oldest + (unsigned long long)wr_id + 1;
net/rds/ib_ring.c
165
rdsdebug("ring %p ret %u wr_id %u oldest %u\n", ring, ret,
net/rds/ib_ring.c
66
void rds_ib_ring_init(struct rds_ib_work_ring *ring, u32 nr)
net/rds/ib_ring.c
68
memset(ring, 0, sizeof(*ring));
net/rds/ib_ring.c
69
ring->w_nr = nr;
net/rds/ib_ring.c
70
rdsdebug("ring %p nr %u\n", ring, ring->w_nr);
net/rds/ib_ring.c
73
static inline u32 __rds_ib_ring_used(struct rds_ib_work_ring *ring)
net/rds/ib_ring.c
78
diff = ring->w_alloc_ctr - (u32) atomic_read(&ring->w_free_ctr);
net/rds/ib_ring.c
79
BUG_ON(diff > ring->w_nr);
net/rds/ib_ring.c
84
void rds_ib_ring_resize(struct rds_ib_work_ring *ring, u32 nr)
net/rds/ib_ring.c
88
BUG_ON(__rds_ib_ring_used(ring));
net/rds/ib_ring.c
89
ring->w_nr = nr;
net/rds/ib_ring.c
92
static int __rds_ib_ring_empty(struct rds_ib_work_ring *ring)
net/rds/ib_ring.c
94
return __rds_ib_ring_used(ring) == 0;
net/rds/ib_ring.c
97
u32 rds_ib_ring_alloc(struct rds_ib_work_ring *ring, u32 val, u32 *pos)
net/sched/sch_generic.c
824
if (!q->ring.queue)
net/sched/sch_generic.c
891
if (!q->ring.queue)
net/sched/sch_generic.c
896
ptr_ring_cleanup(&q->ring, NULL);
net/xdp/xsk.c
1506
xs->tx->ring->flags |= XDP_RING_NEED_WAKEUP;
net/xdp/xsk.c
1598
static void xsk_enter_rxtx_offsets(struct xdp_ring_offset_v1 *ring)
net/xdp/xsk.c
1600
ring->producer = offsetof(struct xdp_rxtx_ring, ptrs.producer);
net/xdp/xsk.c
1601
ring->consumer = offsetof(struct xdp_rxtx_ring, ptrs.consumer);
net/xdp/xsk.c
1602
ring->desc = offsetof(struct xdp_rxtx_ring, desc);
net/xdp/xsk.c
1605
static void xsk_enter_umem_offsets(struct xdp_ring_offset_v1 *ring)
net/xdp/xsk.c
1607
ring->producer = offsetof(struct xdp_umem_ring, ptrs.producer);
net/xdp/xsk.c
1608
ring->consumer = offsetof(struct xdp_umem_ring, ptrs.consumer);
net/xdp/xsk.c
1609
ring->desc = offsetof(struct xdp_umem_ring, desc);
net/xdp/xsk.c
1784
return remap_vmalloc_range(vma, q->ring, 0);
net/xdp/xsk.c
51
pool->fq->ring->flags |= XDP_RING_NEED_WAKEUP;
net/xdp/xsk.c
65
xs->tx->ring->flags |= XDP_RING_NEED_WAKEUP;
net/xdp/xsk.c
78
pool->fq->ring->flags &= ~XDP_RING_NEED_WAKEUP;
net/xdp/xsk.c
92
xs->tx->ring->flags &= ~XDP_RING_NEED_WAKEUP;
net/xdp/xsk_queue.c
49
q->ring = vmalloc_user(size);
net/xdp/xsk_queue.c
50
if (!q->ring) {
net/xdp/xsk_queue.c
64
vfree(q->ring);
net/xdp/xsk_queue.h
127
struct xdp_umem_ring *ring = (struct xdp_umem_ring *)q->ring;
net/xdp/xsk_queue.h
130
*addr = ring->desc[idx];
net/xdp/xsk_queue.h
243
struct xdp_rxtx_ring *ring = (struct xdp_rxtx_ring *)q->ring;
net/xdp/xsk_queue.h
246
*desc = ring->desc[idx];
net/xdp/xsk_queue.h
278
struct xdp_rxtx_ring *ring = (struct xdp_rxtx_ring *)q->ring;
net/xdp/xsk_queue.h
282
descs[nb_entries] = ring->desc[idx];
net/xdp/xsk_queue.h
311
smp_store_release(&q->ring->consumer, q->cached_cons); /* D, matchees A */
net/xdp/xsk_queue.h
317
q->cached_prod = smp_load_acquire(&q->ring->producer); /* C, matches B */
net/xdp/xsk_queue.h
372
return READ_ONCE(q->ring->producer) - READ_ONCE(q->ring->consumer);
net/xdp/xsk_queue.h
379
return READ_ONCE(q->ring->producer);
net/xdp/xsk_queue.h
390
q->cached_cons = READ_ONCE(q->ring->consumer);
net/xdp/xsk_queue.h
418
struct xdp_umem_ring *ring = (struct xdp_umem_ring *)q->ring;
net/xdp/xsk_queue.h
424
ring->desc[q->cached_prod++ & q->ring_mask] = addr;
net/xdp/xsk_queue.h
430
struct xdp_umem_ring *ring = (struct xdp_umem_ring *)q->ring;
net/xdp/xsk_queue.h
432
ring->desc[idx & q->ring_mask] = addr;
net/xdp/xsk_queue.h
438
struct xdp_umem_ring *ring = (struct xdp_umem_ring *)q->ring;
net/xdp/xsk_queue.h
444
ring->desc[cached_prod++ & q->ring_mask] = descs[i].addr;
net/xdp/xsk_queue.h
45
struct xdp_ring *ring;
net/xdp/xsk_queue.h
451
struct xdp_rxtx_ring *ring = (struct xdp_rxtx_ring *)q->ring;
net/xdp/xsk_queue.h
459
ring->desc[idx].addr = addr;
net/xdp/xsk_queue.h
460
ring->desc[idx].len = len;
net/xdp/xsk_queue.h
461
ring->desc[idx].options = flags;
net/xdp/xsk_queue.h
468
smp_store_release(&q->ring->producer, idx); /* B, matches C */
net/xdp/xsk_queue.h
478
__xskq_prod_submit(q, q->ring->producer + nb_entries);
net/xdp/xsk_queue.h
484
return READ_ONCE(q->ring->consumer) == READ_ONCE(q->ring->producer);
sound/soc/codecs/cs42l43-jack.c
740
unsigned int tip, ring;
sound/soc/codecs/cs42l43-jack.c
756
ring = (sts >> CS42L43_RINGSENSE_PLUG_DB_STS_SHIFT) & CS42L43_JACK_PRESENT;
sound/soc/codecs/cs42l43-jack.c
764
if (priv->use_ring_sense && ring == CS42L43_JACK_ABSENT) {
sound/xen/xen_snd_front.c
31
req = RING_GET_REQUEST(&evtchnl->u.req.ring,
sound/xen/xen_snd_front.c
32
evtchnl->u.req.ring.req_prod_pvt);
sound/xen/xen_snd_front_evtchnl.c
134
channel->u.req.ring.req_prod_pvt++;
sound/xen/xen_snd_front_evtchnl.c
135
RING_PUSH_REQUESTS_AND_CHECK_NOTIFY(&channel->u.req.ring, notify);
sound/xen/xen_snd_front_evtchnl.c
146
page = channel->u.req.ring.sring;
sound/xen/xen_snd_front_evtchnl.c
223
XEN_FRONT_RING_INIT(&channel->u.req.ring, sring, XEN_PAGE_SIZE);
sound/xen/xen_snd_front_evtchnl.c
34
rp = channel->u.req.ring.sring->rsp_prod;
sound/xen/xen_snd_front_evtchnl.c
43
for (i = channel->u.req.ring.rsp_cons; i != rp; i++) {
sound/xen/xen_snd_front_evtchnl.c
44
resp = RING_GET_RESPONSE(&channel->u.req.ring, i);
sound/xen/xen_snd_front_evtchnl.c
71
channel->u.req.ring.rsp_cons = i;
sound/xen/xen_snd_front_evtchnl.c
72
if (i != channel->u.req.ring.req_prod_pvt) {
sound/xen/xen_snd_front_evtchnl.c
75
RING_FINAL_CHECK_FOR_RESPONSES(&channel->u.req.ring,
sound/xen/xen_snd_front_evtchnl.c
80
channel->u.req.ring.sring->rsp_event = i + 1;
sound/xen/xen_snd_front_evtchnl.h
48
struct xen_sndif_front_ring ring;
tools/hv/hv_fcopy_uio_daemon.c
455
void *ring;
tools/hv/hv_fcopy_uio_daemon.c
511
ring = vmbus_uio_map(&fcopy_fd, ring_size);
tools/hv/hv_fcopy_uio_daemon.c
512
if (!ring) {
tools/hv/hv_fcopy_uio_daemon.c
517
vmbus_br_setup(&txbr, ring, ring_size);
tools/hv/hv_fcopy_uio_daemon.c
518
vmbus_br_setup(&rxbr, (char *)ring + ring_size, ring_size);
tools/include/io_uring/mini_liburing.h
140
struct io_uring *ring,
tools/include/io_uring/mini_liburing.h
145
memset(ring, 0, sizeof(*ring));
tools/include/io_uring/mini_liburing.h
150
ret = io_uring_mmap(fd, p, &ring->sq, &ring->cq);
tools/include/io_uring/mini_liburing.h
152
ring->ring_fd = fd;
tools/include/io_uring/mini_liburing.h
153
ring->flags = p->flags;
tools/include/io_uring/mini_liburing.h
161
struct io_uring *ring,
tools/include/io_uring/mini_liburing.h
169
return io_uring_queue_init_params(entries, ring, &p);
tools/include/io_uring/mini_liburing.h
173
static inline struct io_uring_sqe *io_uring_get_sqe(struct io_uring *ring)
tools/include/io_uring/mini_liburing.h
175
struct io_uring_sq *sq = &ring->sq;
tools/include/io_uring/mini_liburing.h
182
static inline int io_uring_wait_cqe(struct io_uring *ring,
tools/include/io_uring/mini_liburing.h
185
struct io_uring_cq *cq = &ring->cq;
tools/include/io_uring/mini_liburing.h
197
ret = io_uring_enter(ring->ring_fd, 0, 1,
tools/include/io_uring/mini_liburing.h
206
static inline int io_uring_submit(struct io_uring *ring)
tools/include/io_uring/mini_liburing.h
208
struct io_uring_sq *sq = &ring->sq;
tools/include/io_uring/mini_liburing.h
224
if (!(ring->flags & IORING_SETUP_NO_SQARRAY)) {
tools/include/io_uring/mini_liburing.h
244
ret = io_uring_enter(ring->ring_fd, submitted, 0,
tools/include/io_uring/mini_liburing.h
249
static inline void io_uring_queue_exit(struct io_uring *ring)
tools/include/io_uring/mini_liburing.h
251
struct io_uring_sq *sq = &ring->sq;
tools/include/io_uring/mini_liburing.h
255
close(ring->ring_fd);
tools/include/io_uring/mini_liburing.h
276
static inline int io_uring_register_buffers(struct io_uring *ring,
tools/include/io_uring/mini_liburing.h
282
ret = syscall(__NR_io_uring_register, ring->ring_fd,
tools/include/io_uring/mini_liburing.h
307
static inline void io_uring_cqe_seen(struct io_uring *ring)
tools/include/io_uring/mini_liburing.h
309
*(&ring->cq)->khead += 1;
tools/lib/bpf/libbpf.h
1414
struct ring;
tools/lib/bpf/libbpf.h
1447
LIBBPF_API struct ring *ring_buffer__ring(struct ring_buffer *rb,
tools/lib/bpf/libbpf.h
1457
LIBBPF_API unsigned long ring__consumer_pos(const struct ring *r);
tools/lib/bpf/libbpf.h
1466
LIBBPF_API unsigned long ring__producer_pos(const struct ring *r);
tools/lib/bpf/libbpf.h
1477
LIBBPF_API size_t ring__avail_data_size(const struct ring *r);
tools/lib/bpf/libbpf.h
1487
LIBBPF_API size_t ring__size(const struct ring *r);
tools/lib/bpf/libbpf.h
1496
LIBBPF_API int ring__map_fd(const struct ring *r);
tools/lib/bpf/libbpf.h
1506
LIBBPF_API int ring__consume(struct ring *r);
tools/lib/bpf/libbpf.h
1517
LIBBPF_API int ring__consume_n(struct ring *r, size_t n);
tools/lib/bpf/ringbuf.c
234
static int64_t ringbuf_process_ring(struct ring *r, size_t n)
tools/lib/bpf/ringbuf.c
293
struct ring *ring = rb->rings[i];
tools/lib/bpf/ringbuf.c
295
err = ringbuf_process_ring(ring, n);
tools/lib/bpf/ringbuf.c
318
struct ring *ring = rb->rings[i];
tools/lib/bpf/ringbuf.c
320
err = ringbuf_process_ring(ring, INT_MAX);
tools/lib/bpf/ringbuf.c
347
struct ring *ring = rb->rings[ring_id];
tools/lib/bpf/ringbuf.c
349
err = ringbuf_process_ring(ring, INT_MAX);
tools/lib/bpf/ringbuf.c
365
struct ring *ring_buffer__ring(struct ring_buffer *rb, unsigned int idx)
tools/lib/bpf/ringbuf.c
37
struct ring **rings;
tools/lib/bpf/ringbuf.c
373
unsigned long ring__consumer_pos(const struct ring *r)
tools/lib/bpf/ringbuf.c
379
unsigned long ring__producer_pos(const struct ring *r)
tools/lib/bpf/ringbuf.c
387
size_t ring__avail_data_size(const struct ring *r)
tools/lib/bpf/ringbuf.c
396
size_t ring__size(const struct ring *r)
tools/lib/bpf/ringbuf.c
401
int ring__map_fd(const struct ring *r)
tools/lib/bpf/ringbuf.c
406
int ring__consume_n(struct ring *r, size_t n)
tools/lib/bpf/ringbuf.c
417
int ring__consume(struct ring *r)
tools/lib/bpf/ringbuf.c
60
static void ringbuf_free_ring(struct ring_buffer *rb, struct ring *r)
tools/lib/bpf/ringbuf.c
81
struct ring *r;
tools/sched_ext/scx_qmap.bpf.c
202
void *ring;
tools/sched_ext/scx_qmap.bpf.c
262
ring = bpf_map_lookup_elem(&queue_arr, &idx);
tools/sched_ext/scx_qmap.bpf.c
263
if (!ring) {
tools/sched_ext/scx_qmap.bpf.c
269
if (bpf_map_push_elem(ring, &pid, 0)) {
tools/testing/selftests/bpf/prog_tests/ringbuf.c
150
struct ring *ring;
tools/testing/selftests/bpf/prog_tests/ringbuf.c
224
ring = ring_buffer__ring(ringbuf, 0);
tools/testing/selftests/bpf/prog_tests/ringbuf.c
225
if (!ASSERT_OK_PTR(ring, "ring_buffer__ring_idx_0"))
tools/testing/selftests/bpf/prog_tests/ringbuf.c
228
map_fd = ring__map_fd(ring);
tools/testing/selftests/bpf/prog_tests/ringbuf.c
248
avail_data = ring__avail_data_size(ring);
tools/testing/selftests/bpf/prog_tests/ringbuf.c
250
ring_size = ring__size(ring);
tools/testing/selftests/bpf/prog_tests/ringbuf.c
252
cons_pos = ring__consumer_pos(ring);
tools/testing/selftests/bpf/prog_tests/ringbuf.c
254
prod_pos = ring__producer_pos(ring);
tools/testing/selftests/bpf/prog_tests/ringbuf.c
364
err = ring__consume(ring);
tools/testing/selftests/bpf/prog_tests/ringbuf_multi.c
103
ring = ring_buffer__ring(ringbuf, 0);
tools/testing/selftests/bpf/prog_tests/ringbuf_multi.c
104
if (!ASSERT_EQ(ring, ring_old, "ring_buffer__ring_again"))
tools/testing/selftests/bpf/prog_tests/ringbuf_multi.c
18
int ring = (unsigned long)ctx;
tools/testing/selftests/bpf/prog_tests/ringbuf_multi.c
23
CHECK(ring != 1, "sample1_ring", "exp %d, got %d\n", 1, ring);
tools/testing/selftests/bpf/prog_tests/ringbuf_multi.c
28
CHECK(ring != 2, "sample2_ring", "exp %d, got %d\n", 2, ring);
tools/testing/selftests/bpf/prog_tests/ringbuf_multi.c
45
struct ring *ring_old;
tools/testing/selftests/bpf/prog_tests/ringbuf_multi.c
46
struct ring *ring;
tools/testing/selftests/bpf/prog_tests/ringbuf_multi.c
90
ring = ring_buffer__ring(ringbuf, 0);
tools/testing/selftests/bpf/prog_tests/ringbuf_multi.c
91
if (!ASSERT_OK_PTR(ring, "ring_buffer__ring_idx_0"))
tools/testing/selftests/bpf/prog_tests/ringbuf_multi.c
93
ring_old = ring;
tools/testing/selftests/bpf/prog_tests/ringbuf_multi.c
94
ring = ring_buffer__ring(ringbuf, 1);
tools/testing/selftests/bpf/prog_tests/ringbuf_multi.c
95
ASSERT_ERR_PTR(ring, "ring_buffer__ring_idx_1");
tools/testing/selftests/bpf/prog_tests/sockopt.c
1000
err = io_uring_queue_init(1, &ring, 0);
tools/testing/selftests/bpf/prog_tests/sockopt.c
1004
sqe = io_uring_get_sqe(&ring);
tools/testing/selftests/bpf/prog_tests/sockopt.c
1012
err = io_uring_submit(&ring);
tools/testing/selftests/bpf/prog_tests/sockopt.c
1016
err = io_uring_wait_cqe(&ring, &cqe);
tools/testing/selftests/bpf/prog_tests/sockopt.c
1023
io_uring_queue_exit(&ring);
tools/testing/selftests/bpf/prog_tests/sockopt.c
997
struct io_uring ring;
tools/testing/selftests/bpf/prog_tests/test_xsk.c
188
ret = set_hw_ring_size(ifobj->ifname, &ifobj->ring);
tools/testing/selftests/bpf/prog_tests/test_xsk.c
204
ifobj->ring.tx_pending = ifobj->set_ring.default_tx;
tools/testing/selftests/bpf/prog_tests/test_xsk.c
205
ifobj->ring.rx_pending = ifobj->set_ring.default_rx;
tools/testing/selftests/bpf/prog_tests/test_xsk.c
2427
test->ifobj_tx->ring.tx_pending = DEFAULT_BATCH_SIZE;
tools/testing/selftests/bpf/prog_tests/test_xsk.c
2428
test->ifobj_tx->ring.rx_pending = DEFAULT_BATCH_SIZE * 2;
tools/testing/selftests/bpf/prog_tests/test_xsk.c
2448
test->ifobj_tx->ring.tx_pending = test->ifobj_tx->ring.tx_max_pending;
tools/testing/selftests/bpf/prog_tests/test_xsk.c
2449
test->ifobj_tx->ring.rx_pending = test->ifobj_tx->ring.rx_max_pending;
tools/testing/selftests/bpf/prog_tests/test_xsk.c
2463
test->ifobj_tx->xsk->batch_size = test->ifobj_tx->ring.tx_max_pending - 8;
tools/testing/selftests/bpf/prog_tests/test_xsk.c
2464
test->ifobj_rx->xsk->batch_size = test->ifobj_tx->ring.tx_max_pending - 8;
tools/testing/selftests/bpf/prog_tests/test_xsk.h
132
struct ethtool_ringparam ring;
tools/testing/selftests/bpf/prog_tests/xsk.c
81
ret = get_hw_ring_size(ifobj_tx->ifname, &ifobj_tx->ring);
tools/testing/selftests/bpf/prog_tests/xsk.c
84
ifobj_tx->set_ring.default_tx = ifobj_tx->ring.tx_pending;
tools/testing/selftests/bpf/prog_tests/xsk.c
85
ifobj_tx->set_ring.default_rx = ifobj_tx->ring.rx_pending;
tools/testing/selftests/bpf/xsk.c
198
fill->ring = map + off.fr.desc;
tools/testing/selftests/bpf/xsk.c
214
comp->ring = map + off.cr.desc;
tools/testing/selftests/bpf/xsk.c
492
munmap(ctx->fill->ring - off.fr.desc, off.fr.desc + umem->config.fill_size *
tools/testing/selftests/bpf/xsk.c
494
munmap(ctx->comp->ring - off.cr.desc, off.cr.desc + umem->config.comp_size *
tools/testing/selftests/bpf/xsk.c
639
rx->ring = rx_map + off.rx.desc;
tools/testing/selftests/bpf/xsk.c
660
tx->ring = tx_map + off.tx.desc;
tools/testing/selftests/bpf/xsk.c
734
munmap(umem->fill_save->ring - off.fr.desc,
tools/testing/selftests/bpf/xsk.c
736
munmap(umem->comp_save->ring - off.cr.desc,
tools/testing/selftests/bpf/xsk.c
765
munmap(xsk->rx->ring - off.rx.desc,
tools/testing/selftests/bpf/xsk.c
769
munmap(xsk->tx->ring - off.tx.desc,
tools/testing/selftests/bpf/xsk.h
35
void *ring; \
tools/testing/selftests/bpf/xsk.h
52
__u64 *addrs = (__u64 *)fill->ring;
tools/testing/selftests/bpf/xsk.h
60
const __u64 *addrs = (const __u64 *)comp->ring;
tools/testing/selftests/bpf/xsk.h
68
struct xdp_desc *descs = (struct xdp_desc *)tx->ring;
tools/testing/selftests/bpf/xsk.h
76
const struct xdp_desc *descs = (const struct xdp_desc *)rx->ring;
tools/testing/selftests/bpf/xskxceiver.c
404
ret = get_hw_ring_size(ifobj_tx->ifname, &ifobj_tx->ring);
tools/testing/selftests/bpf/xskxceiver.c
407
ifobj_tx->set_ring.default_tx = ifobj_tx->ring.tx_pending;
tools/testing/selftests/bpf/xskxceiver.c
408
ifobj_tx->set_ring.default_rx = ifobj_tx->ring.rx_pending;
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
141
static void setup_zcrx(struct io_uring *ring)
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
203
ret = io_uring_register_ifq(ring, (void *)&reg);
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
221
static void add_accept(struct io_uring *ring, int sockfd)
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
225
sqe = io_uring_get_sqe(ring);
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
231
static void add_recvzc(struct io_uring *ring, int sockfd)
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
235
sqe = io_uring_get_sqe(ring);
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
242
static void add_recvzc_oneshot(struct io_uring *ring, int sockfd, size_t len)
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
246
sqe = io_uring_get_sqe(ring);
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
253
static void process_accept(struct io_uring *ring, struct io_uring_cqe *cqe)
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
262
add_recvzc_oneshot(ring, connfd, page_size);
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
264
add_recvzc(ring, connfd);
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
267
static void process_recvzc(struct io_uring *ring, struct io_uring_cqe *cqe)
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
288
add_recvzc_oneshot(ring, connfd, page_size);
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
292
add_recvzc(ring, connfd);
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
313
static void server_loop(struct io_uring *ring)
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
320
io_uring_submit_and_wait(ring, 1);
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
322
io_uring_for_each_cqe(ring, head, cqe) {
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
324
process_accept(ring, cqe);
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
326
process_recvzc(ring, cqe);
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
331
io_uring_cq_advance(ring, count);
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
337
struct io_uring ring;
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
363
io_uring_queue_init(512, &ring, flags);
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
365
setup_zcrx(&ring);
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
369
add_accept(&ring, fd);
tools/testing/selftests/drivers/net/hw/iou-zcrx.c
373
server_loop(&ring);
tools/testing/selftests/drivers/net/hw/toeplitz.c
218
static char *recv_frame(const struct ring_state *ring, char *frame)
tools/testing/selftests/drivers/net/hw/toeplitz.c
224
ring->cpu);
tools/testing/selftests/drivers/net/hw/toeplitz.c
232
static bool recv_block(struct ring_state *ring)
tools/testing/selftests/drivers/net/hw/toeplitz.c
238
block = (void *)(ring->mmap + ring->idx * ring_block_sz);
tools/testing/selftests/drivers/net/hw/toeplitz.c
246
frame = recv_frame(ring, frame);
tools/testing/selftests/drivers/net/hw/toeplitz.c
251
ring->idx = (ring->idx + 1) % ring_block_nr;
tools/testing/selftests/drivers/net/hw/toeplitz.c
290
void *ring;
tools/testing/selftests/drivers/net/hw/toeplitz.c
308
ring = mmap(0, req3.tp_block_size * req3.tp_block_nr,
tools/testing/selftests/drivers/net/hw/toeplitz.c
311
if (ring == MAP_FAILED)
tools/testing/selftests/drivers/net/hw/toeplitz.c
314
return ring;
tools/testing/selftests/drivers/net/hw/toeplitz.c
366
static int create_ring(char **ring)
tools/testing/selftests/drivers/net/hw/toeplitz.c
383
*ring = setup_ring(fd);
tools/testing/selftests/kvm/coalesced_io_test.c
117
struct kvm_coalesced_mmio_ring *ring = io->ring;
tools/testing/selftests/kvm/coalesced_io_test.c
122
TEST_ASSERT((ring->last + 1) % io->ring_size == ring->first,
tools/testing/selftests/kvm/coalesced_io_test.c
124
ring->first, ring->last, io->ring_size, ring_start);
tools/testing/selftests/kvm/coalesced_io_test.c
127
uint32_t idx = (ring->first + i) % io->ring_size;
tools/testing/selftests/kvm/coalesced_io_test.c
128
struct kvm_coalesced_mmio *entry = &ring->coalesced_mmio[idx];
tools/testing/selftests/kvm/coalesced_io_test.c
153
struct kvm_coalesced_mmio_ring *ring = io->ring;
tools/testing/selftests/kvm/coalesced_io_test.c
16
struct kvm_coalesced_mmio_ring *ring;
tools/testing/selftests/kvm/coalesced_io_test.c
169
WRITE_ONCE(ring->first, ring_start);
tools/testing/selftests/kvm/coalesced_io_test.c
170
WRITE_ONCE(ring->last, ring_start);
tools/testing/selftests/kvm/coalesced_io_test.c
173
TEST_ASSERT_EQ(ring->first, ring_start);
tools/testing/selftests/kvm/coalesced_io_test.c
174
TEST_ASSERT_EQ(ring->last, ring_start);
tools/testing/selftests/kvm/coalesced_io_test.c
206
.ring = (void *)vcpu->run +
tools/testing/selftests/kvm/coalesced_io_test.c
77
struct kvm_coalesced_mmio_ring *ring = io->ring;
tools/testing/selftests/kvm/coalesced_io_test.c
81
WRITE_ONCE(ring->first, ring_start);
tools/testing/selftests/kvm/coalesced_io_test.c
82
WRITE_ONCE(ring->last, ring_start);
tools/testing/selftests/mm/cow.c
416
struct io_uring ring;
tools/testing/selftests/mm/cow.c
446
ret = io_uring_queue_init(1, &ring, 0);
tools/testing/selftests/mm/cow.c
462
ret = io_uring_register_buffers(&ring, &iov, 1);
tools/testing/selftests/mm/cow.c
517
sqe = io_uring_get_sqe(&ring);
tools/testing/selftests/mm/cow.c
525
ret = io_uring_submit(&ring);
tools/testing/selftests/mm/cow.c
532
ret = io_uring_wait_cqe(&ring, &cqe);
tools/testing/selftests/mm/cow.c
544
io_uring_cqe_seen(&ring, cqe);
tools/testing/selftests/mm/cow.c
572
io_uring_unregister_buffers(&ring);
tools/testing/selftests/mm/cow.c
574
io_uring_queue_exit(&ring);
tools/testing/selftests/mm/gup_longterm.c
231
struct io_uring ring;
tools/testing/selftests/mm/gup_longterm.c
244
ret = io_uring_queue_init(1, &ring, 0);
tools/testing/selftests/mm/gup_longterm.c
257
ret = io_uring_register_buffers(&ring, &iov, 1);
tools/testing/selftests/mm/gup_longterm.c
283
io_uring_unregister_buffers(&ring);
tools/testing/selftests/mm/gup_longterm.c
286
io_uring_queue_exit(&ring);
tools/testing/selftests/mm/page_frag/page_frag_test.c
103
ret = __ptr_ring_produce(ring, va);
tools/testing/selftests/mm/page_frag/page_frag_test.c
49
struct ptr_ring *ring = arg;
tools/testing/selftests/mm/page_frag/page_frag_test.c
55
void *obj = __ptr_ring_consume(ring);
tools/testing/selftests/mm/page_frag/page_frag_test.c
79
struct ptr_ring *ring = arg;
tools/testing/selftests/net/io_uring_zerocopy_tx.c
107
ret = io_uring_queue_init(512, &ring, 0);
tools/testing/selftests/net/io_uring_zerocopy_tx.c
114
ret = io_uring_register_buffers(&ring, &iov, 1);
tools/testing/selftests/net/io_uring_zerocopy_tx.c
132
sqe = io_uring_get_sqe(&ring);
tools/testing/selftests/net/io_uring_zerocopy_tx.c
150
ret = io_uring_submit(&ring);
tools/testing/selftests/net/io_uring_zerocopy_tx.c
157
ret = io_uring_wait_cqe(&ring, &cqe);
tools/testing/selftests/net/io_uring_zerocopy_tx.c
172
io_uring_cqe_seen(&ring);
tools/testing/selftests/net/io_uring_zerocopy_tx.c
186
io_uring_cqe_seen(&ring);
tools/testing/selftests/net/io_uring_zerocopy_tx.c
191
ret = io_uring_wait_cqe(&ring, &cqe);
tools/testing/selftests/net/io_uring_zerocopy_tx.c
199
io_uring_cqe_seen(&ring);
tools/testing/selftests/net/io_uring_zerocopy_tx.c
99
struct io_uring ring;
tools/testing/selftests/net/psock_fanout.c
226
char *ring;
tools/testing/selftests/net/psock_fanout.c
240
ring = mmap(0, req.tp_block_size * req.tp_block_nr,
tools/testing/selftests/net/psock_fanout.c
242
if (ring == MAP_FAILED) {
tools/testing/selftests/net/psock_fanout.c
247
return ring;
tools/testing/selftests/net/psock_fanout.c
250
static int sock_fanout_read_ring(int fd, void *ring)
tools/testing/selftests/net/psock_fanout.c
252
struct tpacket2_hdr *header = ring;
tools/testing/selftests/net/psock_fanout.c
257
header = ring + (count * getpagesize());
tools/testing/selftests/net/psock_tpacket.c
220
static void walk_v1_v2_rx(int sock, struct ring *ring)
tools/testing/selftests/net/psock_tpacket.c
227
bug_on(ring->type != PACKET_RX_RING);
tools/testing/selftests/net/psock_tpacket.c
239
while (__v1_v2_rx_kernel_ready(ring->rd[frame_num].iov_base,
tools/testing/selftests/net/psock_tpacket.c
240
ring->version)) {
tools/testing/selftests/net/psock_tpacket.c
241
ppd.raw = ring->rd[frame_num].iov_base;
tools/testing/selftests/net/psock_tpacket.c
243
switch (ring->version) {
tools/testing/selftests/net/psock_tpacket.c
260
__v1_v2_rx_user_ready(ppd.raw, ring->version);
tools/testing/selftests/net/psock_tpacket.c
262
frame_num = (frame_num + 1) % ring->rd_num;
tools/testing/selftests/net/psock_tpacket.c
272
ring->version, total_packets, NUM_PACKETS);
tools/testing/selftests/net/psock_tpacket.c
354
static inline void *get_next_frame(struct ring *ring, int n)
tools/testing/selftests/net/psock_tpacket.c
356
uint8_t *f0 = ring->rd[0].iov_base;
tools/testing/selftests/net/psock_tpacket.c
358
switch (ring->version) {
tools/testing/selftests/net/psock_tpacket.c
361
return ring->rd[n].iov_base;
tools/testing/selftests/net/psock_tpacket.c
363
return f0 + (n * ring->req3.tp_frame_size);
tools/testing/selftests/net/psock_tpacket.c
369
static void walk_tx(int sock, struct ring *ring)
tools/testing/selftests/net/psock_tpacket.c
387
if (ring->version <= TPACKET_V2)
tools/testing/selftests/net/psock_tpacket.c
388
nframes = ring->rd_num;
tools/testing/selftests/net/psock_tpacket.c
390
nframes = ring->req3.tp_frame_nr;
tools/testing/selftests/net/psock_tpacket.c
392
bug_on(ring->type != PACKET_TX_RING);
tools/testing/selftests/net/psock_tpacket.c
419
void *next = get_next_frame(ring, frame_num);
tools/testing/selftests/net/psock_tpacket.c
421
while (__tx_kernel_ready(next, ring->version) &&
tools/testing/selftests/net/psock_tpacket.c
425
switch (ring->version) {
tools/testing/selftests/net/psock_tpacket.c
463
__tx_user_ready(next, ring->version);
tools/testing/selftests/net/psock_tpacket.c
493
ring->version, total_packets, NUM_PACKETS);
tools/testing/selftests/net/psock_tpacket.c
500
static void walk_v1_v2(int sock, struct ring *ring)
tools/testing/selftests/net/psock_tpacket.c
502
if (ring->type == PACKET_RX_RING)
tools/testing/selftests/net/psock_tpacket.c
503
walk_v1_v2_rx(sock, ring);
tools/testing/selftests/net/psock_tpacket.c
505
walk_tx(sock, ring);
tools/testing/selftests/net/psock_tpacket.c
581
static void walk_v3_rx(int sock, struct ring *ring)
tools/testing/selftests/net/psock_tpacket.c
588
bug_on(ring->type != PACKET_RX_RING);
tools/testing/selftests/net/psock_tpacket.c
600
pbd = (struct block_desc *) ring->rd[block_num].iov_base;
tools/testing/selftests/net/psock_tpacket.c
608
block_num = (block_num + 1) % ring->rd_num;
tools/testing/selftests/net/psock_tpacket.c
622
static void walk_v3(int sock, struct ring *ring)
tools/testing/selftests/net/psock_tpacket.c
624
if (ring->type == PACKET_RX_RING)
tools/testing/selftests/net/psock_tpacket.c
625
walk_v3_rx(sock, ring);
tools/testing/selftests/net/psock_tpacket.c
627
walk_tx(sock, ring);
tools/testing/selftests/net/psock_tpacket.c
630
static void __v1_v2_fill(struct ring *ring, unsigned int blocks)
tools/testing/selftests/net/psock_tpacket.c
632
ring->req.tp_block_size = getpagesize() << 2;
tools/testing/selftests/net/psock_tpacket.c
633
ring->req.tp_frame_size = TPACKET_ALIGNMENT << 7;
tools/testing/selftests/net/psock_tpacket.c
634
ring->req.tp_block_nr = blocks;
tools/testing/selftests/net/psock_tpacket.c
636
ring->req.tp_frame_nr = ring->req.tp_block_size /
tools/testing/selftests/net/psock_tpacket.c
637
ring->req.tp_frame_size *
tools/testing/selftests/net/psock_tpacket.c
638
ring->req.tp_block_nr;
tools/testing/selftests/net/psock_tpacket.c
640
ring->mm_len = ring->req.tp_block_size * ring->req.tp_block_nr;
tools/testing/selftests/net/psock_tpacket.c
641
ring->walk = walk_v1_v2;
tools/testing/selftests/net/psock_tpacket.c
642
ring->rd_num = ring->req.tp_frame_nr;
tools/testing/selftests/net/psock_tpacket.c
643
ring->flen = ring->req.tp_frame_size;
tools/testing/selftests/net/psock_tpacket.c
646
static void __v3_fill(struct ring *ring, unsigned int blocks, int type)
tools/testing/selftests/net/psock_tpacket.c
649
ring->req3.tp_retire_blk_tov = 64;
tools/testing/selftests/net/psock_tpacket.c
650
ring->req3.tp_sizeof_priv = 0;
tools/testing/selftests/net/psock_tpacket.c
651
ring->req3.tp_feature_req_word = TP_FT_REQ_FILL_RXHASH;
tools/testing/selftests/net/psock_tpacket.c
653
ring->req3.tp_block_size = getpagesize() << 2;
tools/testing/selftests/net/psock_tpacket.c
654
ring->req3.tp_frame_size = TPACKET_ALIGNMENT << 7;
tools/testing/selftests/net/psock_tpacket.c
655
ring->req3.tp_block_nr = blocks;
tools/testing/selftests/net/psock_tpacket.c
657
ring->req3.tp_frame_nr = ring->req3.tp_block_size /
tools/testing/selftests/net/psock_tpacket.c
658
ring->req3.tp_frame_size *
tools/testing/selftests/net/psock_tpacket.c
659
ring->req3.tp_block_nr;
tools/testing/selftests/net/psock_tpacket.c
661
ring->mm_len = ring->req3.tp_block_size * ring->req3.tp_block_nr;
tools/testing/selftests/net/psock_tpacket.c
662
ring->walk = walk_v3;
tools/testing/selftests/net/psock_tpacket.c
663
ring->rd_num = ring->req3.tp_block_nr;
tools/testing/selftests/net/psock_tpacket.c
664
ring->flen = ring->req3.tp_block_size;
tools/testing/selftests/net/psock_tpacket.c
667
static void setup_ring(int sock, struct ring *ring, int version, int type)
tools/testing/selftests/net/psock_tpacket.c
672
ring->type = type;
tools/testing/selftests/net/psock_tpacket.c
673
ring->version = version;
tools/testing/selftests/net/psock_tpacket.c
680
__v1_v2_fill(ring, blocks);
tools/testing/selftests/net/psock_tpacket.c
681
ret = setsockopt(sock, SOL_PACKET, type, &ring->req,
tools/testing/selftests/net/psock_tpacket.c
682
sizeof(ring->req));
tools/testing/selftests/net/psock_tpacket.c
686
__v3_fill(ring, blocks, type);
tools/testing/selftests/net/psock_tpacket.c
687
ret = setsockopt(sock, SOL_PACKET, type, &ring->req3,
tools/testing/selftests/net/psock_tpacket.c
688
sizeof(ring->req3));
tools/testing/selftests/net/psock_tpacket.c
697
ring->rd_len = ring->rd_num * sizeof(*ring->rd);
tools/testing/selftests/net/psock_tpacket.c
698
ring->rd = malloc(ring->rd_len);
tools/testing/selftests/net/psock_tpacket.c
699
if (ring->rd == NULL) {
tools/testing/selftests/net/psock_tpacket.c
708
static void mmap_ring(int sock, struct ring *ring)
tools/testing/selftests/net/psock_tpacket.c
71
void (*walk)(int sock, struct ring *ring);
tools/testing/selftests/net/psock_tpacket.c
712
ring->mm_space = mmap(0, ring->mm_len, PROT_READ | PROT_WRITE,
tools/testing/selftests/net/psock_tpacket.c
714
if (ring->mm_space == MAP_FAILED) {
tools/testing/selftests/net/psock_tpacket.c
719
memset(ring->rd, 0, ring->rd_len);
tools/testing/selftests/net/psock_tpacket.c
720
for (i = 0; i < ring->rd_num; ++i) {
tools/testing/selftests/net/psock_tpacket.c
721
ring->rd[i].iov_base = ring->mm_space + (i * ring->flen);
tools/testing/selftests/net/psock_tpacket.c
722
ring->rd[i].iov_len = ring->flen;
tools/testing/selftests/net/psock_tpacket.c
726
static void bind_ring(int sock, struct ring *ring)
tools/testing/selftests/net/psock_tpacket.c
732
ring->ll.sll_family = PF_PACKET;
tools/testing/selftests/net/psock_tpacket.c
733
ring->ll.sll_protocol = htons(ETH_P_ALL);
tools/testing/selftests/net/psock_tpacket.c
734
ring->ll.sll_ifindex = if_nametoindex("lo");
tools/testing/selftests/net/psock_tpacket.c
735
ring->ll.sll_hatype = 0;
tools/testing/selftests/net/psock_tpacket.c
736
ring->ll.sll_pkttype = 0;
tools/testing/selftests/net/psock_tpacket.c
737
ring->ll.sll_halen = 0;
tools/testing/selftests/net/psock_tpacket.c
739
ret = bind(sock, (struct sockaddr *) &ring->ll, sizeof(ring->ll));
tools/testing/selftests/net/psock_tpacket.c
746
static void walk_ring(int sock, struct ring *ring)
tools/testing/selftests/net/psock_tpacket.c
748
ring->walk(sock, ring);
tools/testing/selftests/net/psock_tpacket.c
751
static void unmap_ring(int sock, struct ring *ring)
tools/testing/selftests/net/psock_tpacket.c
753
munmap(ring->mm_space, ring->mm_len);
tools/testing/selftests/net/psock_tpacket.c
754
free(ring->rd);
tools/testing/selftests/net/psock_tpacket.c
805
struct ring ring;
tools/testing/selftests/net/psock_tpacket.c
820
memset(&ring, 0, sizeof(ring));
tools/testing/selftests/net/psock_tpacket.c
821
setup_ring(sock, &ring, version, type);
tools/testing/selftests/net/psock_tpacket.c
822
mmap_ring(sock, &ring);
tools/testing/selftests/net/psock_tpacket.c
823
bind_ring(sock, &ring);
tools/testing/selftests/net/psock_tpacket.c
824
walk_ring(sock, &ring);
tools/testing/selftests/net/psock_tpacket.c
825
unmap_ring(sock, &ring);
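
The psock_tpacket.c entries above trace the test's full ring lifecycle: setup_ring, mmap_ring, bind_ring, walk_ring, unmap_ring. Below is a minimal, hypothetical sketch of that PACKET_RX_RING sequence on a raw AF_PACKET socket; the block/frame sizes, the TPACKET_V2 choice, and the main() wrapper are illustrative assumptions, not values taken from the test (it also requires CAP_NET_RAW to run).

#include <arpa/inet.h>
#include <linux/if_ether.h>
#include <linux/if_packet.h>
#include <net/if.h>
#include <sys/mman.h>
#include <sys/socket.h>
#include <unistd.h>

int main(void)
{
	struct tpacket_req req = {
		.tp_block_size = 1 << 16,	/* illustrative sizes */
		.tp_frame_size = 1 << 11,
		.tp_block_nr   = 4,
	};
	struct sockaddr_ll ll = { 0 };
	int ver = TPACKET_V2;
	size_t mm_len;
	void *map;
	int sock;

	/* Frame count must cover every block exactly, as in the test. */
	req.tp_frame_nr = req.tp_block_size / req.tp_frame_size * req.tp_block_nr;
	mm_len = (size_t)req.tp_block_size * req.tp_block_nr;

	sock = socket(PF_PACKET, SOCK_RAW, htons(ETH_P_ALL));
	if (sock < 0)
		return 1;

	/* Pick the ring version first, then request the RX ring itself. */
	if (setsockopt(sock, SOL_PACKET, PACKET_VERSION, &ver, sizeof(ver)) ||
	    setsockopt(sock, SOL_PACKET, PACKET_RX_RING, &req, sizeof(req)))
		return 1;

	/* One contiguous shared mapping covering all blocks of the ring. */
	map = mmap(NULL, mm_len, PROT_READ | PROT_WRITE, MAP_SHARED, sock, 0);
	if (map == MAP_FAILED)
		return 1;

	/* Bind to loopback so the ring starts receiving frames. */
	ll.sll_family   = PF_PACKET;
	ll.sll_protocol = htons(ETH_P_ALL);
	ll.sll_ifindex  = if_nametoindex("lo");
	if (bind(sock, (struct sockaddr *)&ll, sizeof(ll)))
		return 1;

	munmap(map, mm_len);
	close(sock);
	return 0;
}
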
tools/testing/selftests/net/txring_overwrite.c
114
*ring = mmap(0, req.tp_block_size * req.tp_block_nr,
tools/testing/selftests/net/txring_overwrite.c
116
if (*ring == MAP_FAILED)
tools/testing/selftests/net/txring_overwrite.c
161
char *ring;
tools/testing/selftests/net/txring_overwrite.c
165
fdt = setup_tx(&ring);
tools/testing/selftests/net/txring_overwrite.c
167
send_pkt(fdt, ring, payload_patterns[0]);
tools/testing/selftests/net/txring_overwrite.c
168
send_pkt(fdt, ring, payload_patterns[1]);
tools/testing/selftests/net/txring_overwrite.c
86
static int setup_tx(char **ring)
tools/testing/selftests/ublk/batch.c
169
io_uring_free_buf_ring(&t->ring, t->fetch[i].br, 1, i);
tools/testing/selftests/ublk/batch.c
200
t->fetch[i].br = io_uring_setup_buf_ring(&t->ring, 1,
tools/testing/selftests/ublk/kublk.c
390
ret = ublk_setup_ring(&dev->ring, UBLK_CTRL_RING_DEPTH,
tools/testing/selftests/ublk/kublk.c
436
io_uring_unregister_buffers(&t->ring);
tools/testing/selftests/ublk/kublk.c
440
io_uring_unregister_ring_fd(&t->ring);
tools/testing/selftests/ublk/kublk.c
442
if (t->ring.ring_fd > 0) {
tools/testing/selftests/ublk/kublk.c
443
io_uring_unregister_files(&t->ring);
tools/testing/selftests/ublk/kublk.c
444
close(t->ring.ring_fd);
tools/testing/selftests/ublk/kublk.c
445
t->ring.ring_fd = -1;
tools/testing/selftests/ublk/kublk.c
527
ret = ublk_setup_ring(&t->ring, ring_depth, cq_depth,
tools/testing/selftests/ublk/kublk.c
551
ret = io_uring_register_buffers_sparse(&t->ring, t->nr_bufs);
tools/testing/selftests/ublk/kublk.c
568
io_uring_register_ring_fd(&t->ring);
tools/testing/selftests/ublk/kublk.c
573
ret = io_uring_register_files(&t->ring, &dev->fds[1], dev->nr_fds - 1);
tools/testing/selftests/ublk/kublk.c
579
ret = io_uring_register_files(&t->ring, dev->fds, dev->nr_fds);
tools/testing/selftests/ublk/kublk.c
727
if (io_uring_sq_space_left(&t->ring) < 1)
tools/testing/selftests/ublk/kublk.c
728
io_uring_submit(&t->ring);
tools/testing/selftests/ublk/kublk.c
78
sqe = io_uring_get_sqe(&dev->ring);
tools/testing/selftests/ublk/kublk.c
817
return !io_uring_sq_ready(&t->ring) && !t->io_inflight;
tools/testing/selftests/ublk/kublk.c
86
ret = io_uring_submit(&dev->ring);
tools/testing/selftests/ublk/kublk.c
916
io_uring_for_each_cqe(&t->ring, head, cqe) {
tools/testing/selftests/ublk/kublk.c
92
ret = io_uring_wait_cqe(&dev->ring, &cqe);
tools/testing/selftests/ublk/kublk.c
920
io_uring_cq_advance(&t->ring, count);
tools/testing/selftests/ublk/kublk.c
931
t->idx, io_uring_sq_ready(&t->ring),
tools/testing/selftests/ublk/kublk.c
938
ret = io_uring_submit_and_wait(&t->ring, 1);
tools/testing/selftests/ublk/kublk.c
97
io_uring_cqe_seen(&dev->ring, cqe);
tools/testing/selftests/ublk/kublk.h
247
struct io_uring ring;
tools/testing/selftests/ublk/kublk.h
260
struct io_uring ring;
tools/testing/selftests/ublk/kublk.h
377
struct io_uring *ring = &t->ring;
tools/testing/selftests/ublk/kublk.h
378
unsigned left = io_uring_sq_space_left(ring);
tools/testing/selftests/ublk/kublk.h
382
io_uring_submit(ring);
tools/testing/selftests/ublk/kublk.h
385
sqes[i] = io_uring_get_sqe(ring);
tools/testing/selftests/x86/lam.c
605
int handle_uring_sq(struct io_ring *ring, struct file_io *fi, unsigned long lam)
tools/testing/selftests/x86/lam.c
608
struct io_uring_queue *sring = &ring->sq_ring;
tools/testing/selftests/x86/lam.c
638
index = tail & *ring->sq_ring.ring_mask;
tools/testing/selftests/x86/lam.c
640
sqe = &ring->sq_ring.queue.sqes[index];
tools/testing/selftests/x86/lam.c
657
if (sys_uring_enter(ring->ring_fd, 1, 1, IORING_ENTER_GETEVENTS) < 0)
tools/testing/selftests/x86/lam.c
669
struct io_ring *ring;
tools/testing/selftests/x86/lam.c
698
ring = malloc(sizeof(*ring));
tools/testing/selftests/x86/lam.c
699
if (!ring) {
tools/testing/selftests/x86/lam.c
704
memset(ring, 0, sizeof(struct io_ring));
tools/testing/selftests/x86/lam.c
706
if (setup_io_uring(ring))
tools/testing/selftests/x86/lam.c
709
if (handle_uring_sq(ring, fi, lam))
tools/testing/selftests/x86/lam.c
712
ret = handle_uring_cq(ring);
tools/testing/selftests/x86/lam.c
715
free(ring);
tools/testing/vsock/vsock_uring_test.c
102
io_uring_cqe_seen(&ring, cqe);
tools/testing/vsock/vsock_uring_test.c
107
io_uring_queue_exit(&ring);
tools/testing/vsock/vsock_uring_test.c
117
struct io_uring ring;
tools/testing/vsock/vsock_uring_test.c
137
if (io_uring_queue_init(RING_ENTRIES_NUM, &ring, 0))
tools/testing/vsock/vsock_uring_test.c
147
sqe = io_uring_get_sqe(&ring);
tools/testing/vsock/vsock_uring_test.c
153
if (io_uring_submit(&ring) != 1)
tools/testing/vsock/vsock_uring_test.c
156
if (io_uring_wait_cqe(&ring, &cqe))
tools/testing/vsock/vsock_uring_test.c
160
io_uring_cqe_seen(&ring, cqe);
tools/testing/vsock/vsock_uring_test.c
178
io_uring_queue_exit(&ring);
tools/testing/vsock/vsock_uring_test.c
64
struct io_uring ring;
tools/testing/vsock/vsock_uring_test.c
80
if (io_uring_queue_init(RING_ENTRIES_NUM, &ring, 0))
tools/testing/vsock/vsock_uring_test.c
83
if (io_uring_register_buffers(&ring, iovec, test_data->vecs_cnt))
tools/testing/vsock/vsock_uring_test.c
89
sqe = io_uring_get_sqe(&ring);
tools/testing/vsock/vsock_uring_test.c
96
if (io_uring_submit(&ring) != 1)
tools/testing/vsock/vsock_uring_test.c
99
if (io_uring_wait_cqe(&ring, &cqe))
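
The vsock_uring_test.c and kublk entries follow the usual liburing cycle: io_uring_queue_init, io_uring_get_sqe, io_uring_submit, io_uring_wait_cqe, io_uring_cqe_seen, io_uring_queue_exit. A minimal, self-contained sketch of that cycle using a NOP request; the queue depth of 8 is an arbitrary illustrative value, and liburing is assumed to be installed.

#include <liburing.h>
#include <stdio.h>

int main(void)
{
	struct io_uring ring;
	struct io_uring_sqe *sqe;
	struct io_uring_cqe *cqe;

	/* Create the SQ/CQ pair. */
	if (io_uring_queue_init(8, &ring, 0))
		return 1;

	/* Grab one submission slot and queue a no-op request. */
	sqe = io_uring_get_sqe(&ring);
	if (!sqe)
		return 1;
	io_uring_prep_nop(sqe);

	/* Submit exactly one SQE, then block until its completion arrives. */
	if (io_uring_submit(&ring) != 1)
		return 1;
	if (io_uring_wait_cqe(&ring, &cqe))
		return 1;

	printf("cqe res=%d\n", cqe->res);
	io_uring_cqe_seen(&ring, cqe);	/* mark the CQE as consumed */
	io_uring_queue_exit(&ring);
	return 0;
}
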
tools/virtio/ringtest/ring.c
101
ring[i] = desc;
tools/virtio/ringtest/ring.c
125
ring[head].addr = (unsigned long)(void*)buf;
tools/virtio/ringtest/ring.c
126
ring[head].len = len;
tools/virtio/ringtest/ring.c
133
index = ring[head].index;
tools/virtio/ringtest/ring.c
138
ring[head].flags = DESC_HW;
tools/virtio/ringtest/ring.c
149
if (ring[head].flags & DESC_HW)
tools/virtio/ringtest/ring.c
153
*lenp = ring[head].len;
tools/virtio/ringtest/ring.c
154
index = ring[head].index & (ring_size - 1);
tools/virtio/ringtest/ring.c
168
return (ring[head].flags & DESC_HW);
tools/virtio/ringtest/ring.c
223
return !(ring[head].flags & DESC_HW);
tools/virtio/ringtest/ring.c
230
if (!(ring[head].flags & DESC_HW))
tools/virtio/ringtest/ring.c
241
ring[head].len--;
tools/virtio/ringtest/ring.c
249
ring[head].flags = 0;
tools/virtio/ringtest/ring.c
56
struct desc *ring;
tools/virtio/ringtest/ring.c
82
ret = posix_memalign((void **)&ring, 0x1000, ring_size * sizeof *ring);
tools/virtio/ringtest/virtio_ring_0_9.c
117
desc = ring.desc;
tools/virtio/ringtest/virtio_ring_0_9.c
136
ring.avail->ring[avail & (ring_size - 1)] =
tools/virtio/ringtest/virtio_ring_0_9.c
143
ring.avail->ring[avail] = head;
tools/virtio/ringtest/virtio_ring_0_9.c
148
ring.avail->idx = guest.avail_idx;
tools/virtio/ringtest/virtio_ring_0_9.c
160
index = ring.used->ring[head].id;
tools/virtio/ringtest/virtio_ring_0_9.c
167
if (ring.used->idx == guest.last_used_idx)
tools/virtio/ringtest/virtio_ring_0_9.c
176
index = ring.used->ring[head].id;
tools/virtio/ringtest/virtio_ring_0_9.c
181
*lenp = ring.desc[index].len;
tools/virtio/ringtest/virtio_ring_0_9.c
183
*lenp = ring.used->ring[head].len;
tools/virtio/ringtest/virtio_ring_0_9.c
186
*bufp = (void*)(unsigned long)ring.desc[index].addr;
tools/virtio/ringtest/virtio_ring_0_9.c
189
ring.desc[index].next = guest.free_head;
tools/virtio/ringtest/virtio_ring_0_9.c
202
unsigned index = ring.used->ring[head].id;
tools/virtio/ringtest/virtio_ring_0_9.c
206
return ring.used->idx == last_used_idx;
tools/virtio/ringtest/virtio_ring_0_9.c
219
vring_used_event(&ring) = guest.last_used_idx;
tools/virtio/ringtest/virtio_ring_0_9.c
22
struct vring ring;
tools/virtio/ringtest/virtio_ring_0_9.c
233
need = vring_need_event(vring_avail_event(&ring),
tools/virtio/ringtest/virtio_ring_0_9.c
252
vring_avail_event(&ring) = host.used_idx;
tools/virtio/ringtest/virtio_ring_0_9.c
262
unsigned index = ring.avail->ring[head & (ring_size - 1)];
tools/virtio/ringtest/virtio_ring_0_9.c
266
return head == ring.avail->idx;
tools/virtio/ringtest/virtio_ring_0_9.c
277
head = ring.avail->ring[used_idx & (ring_size - 1)];
tools/virtio/ringtest/virtio_ring_0_9.c
284
desc = &ring.desc[head & (ring_size - 1)];
tools/virtio/ringtest/virtio_ring_0_9.c
286
if (used_idx == ring.avail->idx)
tools/virtio/ringtest/virtio_ring_0_9.c
296
head = ring.avail->ring[used_idx];
tools/virtio/ringtest/virtio_ring_0_9.c
298
desc = &ring.desc[head];
tools/virtio/ringtest/virtio_ring_0_9.c
308
ring.used->ring[used_idx].id = head;
tools/virtio/ringtest/virtio_ring_0_9.c
309
ring.used->ring[used_idx].len = desc->len - 1;
tools/virtio/ringtest/virtio_ring_0_9.c
314
ring.used->idx = host.used_idx;
tools/virtio/ringtest/virtio_ring_0_9.c
326
need = vring_need_event(vring_used_event(&ring),
tools/virtio/ringtest/virtio_ring_0_9.c
76
vring_init(&ring, ring_size, p, 0x1000);
tools/virtio/ringtest/virtio_ring_0_9.c
86
ring.desc[i].next = i + 1;
tools/virtio/vhost_net_test.c
183
memset(info->ring, 0, vring_size(num, 4096));
tools/virtio/vhost_net_test.c
184
vring_init(&info->vring, num, info->ring, 4096);
tools/virtio/vhost_net_test.c
186
info->ring, vq_notify, NULL, "test");
tools/virtio/vhost_net_test.c
199
r = posix_memalign(&info->ring, 4096, vring_size(num, 4096));
tools/virtio/vhost_net_test.c
39
void *ring;
tools/virtio/virtio_test.c
103
memset(info->ring, 0, vring_size(num, 4096));
tools/virtio/virtio_test.c
104
vring_init(&info->vring, num, info->ring, 4096);
tools/virtio/virtio_test.c
106
info->ring, vq_notify, vq_callback, "test");
tools/virtio/virtio_test.c
118
r = posix_memalign(&info->ring, 4096, vring_size(num, 4096));
tools/virtio/virtio_test.c
32
void *ring;
tools/virtio/vringh_test.c
128
err = get_user(*head, &vrh->vring.avail->ring[i]);
virt/kvm/coalesced_mmio.c
48
struct kvm_coalesced_mmio_ring *ring = dev->kvm->coalesced_mmio_ring;
virt/kvm/coalesced_mmio.c
62
insert = READ_ONCE(ring->last);
virt/kvm/coalesced_mmio.c
64
(insert + 1) % KVM_COALESCED_MMIO_MAX == READ_ONCE(ring->first)) {
virt/kvm/coalesced_mmio.c
71
ring->coalesced_mmio[insert].phys_addr = addr;
virt/kvm/coalesced_mmio.c
72
ring->coalesced_mmio[insert].len = len;
virt/kvm/coalesced_mmio.c
73
memcpy(ring->coalesced_mmio[insert].data, val, len);
virt/kvm/coalesced_mmio.c
74
ring->coalesced_mmio[insert].pio = dev->zone.pio;
virt/kvm/coalesced_mmio.c
76
ring->last = (insert + 1) % KVM_COALESCED_MMIO_MAX;
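
The coalesced_mmio.c entries show the classic fixed-size FIFO test in which the ring is treated as full when advancing the producer index would collide with the consumer index. A hedged sketch of that pattern with a generic structure; demo_ring, RING_MAX, and the push/pop helpers are hypothetical names, not the KVM types.

#include <stdbool.h>
#include <stdio.h>

#define RING_MAX 16	/* illustrative capacity, not KVM's value */

/* One slot is sacrificed so first == last means "empty" while
 * (last + 1) % RING_MAX == first means "full". */
struct demo_ring {
	unsigned int first;	/* consumer index */
	unsigned int last;	/* producer index */
	int data[RING_MAX];
};

static bool demo_ring_push(struct demo_ring *r, int val)
{
	unsigned int insert = r->last;

	if ((insert + 1) % RING_MAX == r->first)
		return false;	/* full: refuse rather than overwrite */

	r->data[insert] = val;
	r->last = (insert + 1) % RING_MAX;
	return true;
}

static bool demo_ring_pop(struct demo_ring *r, int *val)
{
	if (r->first == r->last)
		return false;	/* empty */

	*val = r->data[r->first];
	r->first = (r->first + 1) % RING_MAX;
	return true;
}

int main(void)
{
	struct demo_ring r = { 0 };
	int v;

	demo_ring_push(&r, 42);
	if (demo_ring_pop(&r, &v))
		printf("popped %d\n", v);
	return 0;
}
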
virt/kvm/dirty_ring.c
105
int kvm_dirty_ring_reset(struct kvm *kvm, struct kvm_dirty_ring *ring,
virt/kvm/dirty_ring.c
137
entry = &ring->dirty_gfns[ring->reset_index & (ring->size - 1)];
virt/kvm/dirty_ring.c
148
ring->reset_index++;
virt/kvm/dirty_ring.c
213
trace_kvm_dirty_ring_reset(ring);
virt/kvm/dirty_ring.c
220
struct kvm_dirty_ring *ring = &vcpu->dirty_ring;
virt/kvm/dirty_ring.c
224
WARN_ON_ONCE(kvm_dirty_ring_full(ring));
virt/kvm/dirty_ring.c
226
entry = &ring->dirty_gfns[ring->dirty_index & (ring->size - 1)];
virt/kvm/dirty_ring.c
236
ring->dirty_index++;
virt/kvm/dirty_ring.c
237
trace_kvm_dirty_ring_push(ring, slot, offset);
virt/kvm/dirty_ring.c
239
if (kvm_dirty_ring_soft_full(ring))
virt/kvm/dirty_ring.c
262
struct page *kvm_dirty_ring_get_page(struct kvm_dirty_ring *ring, u32 offset)
virt/kvm/dirty_ring.c
264
return vmalloc_to_page((void *)ring->dirty_gfns + offset * PAGE_SIZE);
virt/kvm/dirty_ring.c
267
void kvm_dirty_ring_free(struct kvm_dirty_ring *ring)
virt/kvm/dirty_ring.c
269
vfree(ring->dirty_gfns);
virt/kvm/dirty_ring.c
270
ring->dirty_gfns = NULL;
virt/kvm/dirty_ring.c
38
static u32 kvm_dirty_ring_used(struct kvm_dirty_ring *ring)
virt/kvm/dirty_ring.c
40
return READ_ONCE(ring->dirty_index) - READ_ONCE(ring->reset_index);
virt/kvm/dirty_ring.c
43
static bool kvm_dirty_ring_soft_full(struct kvm_dirty_ring *ring)
virt/kvm/dirty_ring.c
45
return kvm_dirty_ring_used(ring) >= ring->soft_limit;
virt/kvm/dirty_ring.c
48
static bool kvm_dirty_ring_full(struct kvm_dirty_ring *ring)
virt/kvm/dirty_ring.c
50
return kvm_dirty_ring_used(ring) >= ring->size;
virt/kvm/dirty_ring.c
74
int kvm_dirty_ring_alloc(struct kvm *kvm, struct kvm_dirty_ring *ring,
virt/kvm/dirty_ring.c
77
ring->dirty_gfns = vzalloc(size);
virt/kvm/dirty_ring.c
78
if (!ring->dirty_gfns)
virt/kvm/dirty_ring.c
81
ring->size = size / sizeof(struct kvm_dirty_gfn);
virt/kvm/dirty_ring.c
82
ring->soft_limit = ring->size - kvm_dirty_ring_get_rsvd_entries(kvm);
virt/kvm/dirty_ring.c
83
ring->dirty_index = 0;
virt/kvm/dirty_ring.c
84
ring->reset_index = 0;
virt/kvm/dirty_ring.c
85
ring->index = index;
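
The dirty_ring.c entries compute occupancy as the difference of two free-running unsigned indices (dirty_index - reset_index) and compare it against soft_limit and size. A small sketch of that index arithmetic under the same assumptions; demo_dirty_ring and the demo_* helpers are hypothetical names, and the size/soft_limit values are illustrative.

#include <stdbool.h>
#include <stdio.h>

/* Both indices increase without wrapping, so unsigned subtraction gives the
 * number of entries pushed but not yet reset, even after the 32-bit counters
 * overflow. Slots are addressed with index & (size - 1). */
struct demo_dirty_ring {
	unsigned int dirty_index;	/* producer: advanced on push */
	unsigned int reset_index;	/* consumer: advanced on reset */
	unsigned int size;		/* capacity in entries (power of two) */
	unsigned int soft_limit;	/* size minus reserved entries */
};

static unsigned int demo_used(const struct demo_dirty_ring *r)
{
	return r->dirty_index - r->reset_index;
}

static bool demo_soft_full(const struct demo_dirty_ring *r)
{
	return demo_used(r) >= r->soft_limit;
}

static bool demo_full(const struct demo_dirty_ring *r)
{
	return demo_used(r) >= r->size;
}

int main(void)
{
	struct demo_dirty_ring r = { .size = 8, .soft_limit = 6 };

	for (int i = 0; i < 7; i++)
		r.dirty_index++;	/* push seven entries */

	printf("used=%u soft_full=%d full=%d\n",
	       demo_used(&r), demo_soft_full(&r), demo_full(&r));
	return 0;
}
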