
Searched for refs:e (Results 1 – 25 of 3174), sorted by relevance


/linux/drivers/net/ethernet/chelsio/cxgb4/
l2t.c
65 static inline unsigned int vlan_prio(const struct l2t_entry *e) in vlan_prio() argument
67 return e->vlan >> VLAN_PRIO_SHIFT; in vlan_prio()
70 static inline void l2t_hold(struct l2t_data *d, struct l2t_entry *e) in l2t_hold() argument
72 if (atomic_add_return(1, &e->refcnt) == 1) /* 0 -> 1 transition */ in l2t_hold()
118 static int addreq(const struct l2t_entry *e, const u32 *addr) in addreq() argument
120 if (e->v6) in addreq()
121 return (e->addr[0] ^ addr[0]) | (e->addr[1] ^ addr[1]) | in addreq()
122 (e->addr[2] ^ addr[2]) | (e->addr[3] ^ addr[3]); in addreq()
123 return e->addr[0] ^ addr[0]; in addreq()
126 static void neigh_replace(struct l2t_entry *e, struct neighbour *n) in neigh_replace() argument
[all …]
smt.c
68 struct smt_entry *e, *end; in find_or_alloc_smte() local
70 for (e = &s->smtab[0], end = &s->smtab[s->smt_size]; e != end; ++e) { in find_or_alloc_smte()
71 if (e->refcnt == 0) { in find_or_alloc_smte()
73 first_free = e; in find_or_alloc_smte()
75 if (e->state == SMT_STATE_SWITCHING) { in find_or_alloc_smte()
79 if (memcmp(e->src_mac, smac, ETH_ALEN) == 0) in find_or_alloc_smte()
86 e = first_free; in find_or_alloc_smte()
92 e->state = SMT_STATE_UNUSED; in find_or_alloc_smte()
95 return e; in find_or_alloc_smte()
98 static void t4_smte_free(struct smt_entry *e) in t4_smte_free() argument
[all …]
sched.c
47 struct sched_class *e; in t4_sched_class_fw_cmd() local
50 e = &s->tab[p->u.params.class]; in t4_sched_class_fw_cmd()
58 p->u.params.channel, e->idx, in t4_sched_class_fw_cmd()
125 struct sched_class *e, *end; in t4_sched_entry_lookup() local
130 for (e = &s->tab[0]; e != end; ++e) { in t4_sched_entry_lookup()
131 if (e->state == SCHED_STATE_UNUSED || in t4_sched_entry_lookup()
132 e->bind_type != type) in t4_sched_entry_lookup()
139 list_for_each_entry(qe, &e->entry_list, list) { in t4_sched_entry_lookup()
150 list_for_each_entry(fe, &e->entry_list, list) { in t4_sched_entry_lookup()
190 struct sched_class *e; in t4_sched_queue_unbind() local
[all …]
/linux/drivers/net/ethernet/chelsio/cxgb3/
l2t.c
63 static inline unsigned int vlan_prio(const struct l2t_entry *e) in vlan_prio() argument
65 return e->vlan >> 13; in vlan_prio()
74 static inline void neigh_replace(struct l2t_entry *e, struct neighbour *n) in neigh_replace() argument
77 if (e->neigh) in neigh_replace()
78 neigh_release(e->neigh); in neigh_replace()
79 e->neigh = n; in neigh_replace()
88 struct l2t_entry *e) in setup_l2e_send_pending() argument
101 OPCODE_TID(req) = htonl(MK_OPCODE_TID(CPL_L2T_WRITE_REQ, e->idx)); in setup_l2e_send_pending()
102 req->params = htonl(V_L2T_W_IDX(e->idx) | V_L2T_W_IFF(e->smt_idx) | in setup_l2e_send_pending()
103 V_L2T_W_VLAN(e->vlan & VLAN_VID_MASK) | in setup_l2e_send_pending()
[all …]
/linux/tools/testing/selftests/powerpc/pmu/
event.c
24 static void __event_init_opts(struct event *e, u64 config, in __event_init_opts() argument
27 memset(e, 0, sizeof(*e)); in __event_init_opts()
29 e->name = name; in __event_init_opts()
31 e->attr.type = type; in __event_init_opts()
32 e->attr.config = config; in __event_init_opts()
33 e->attr.size = sizeof(e->attr); in __event_init_opts()
35 e->attr.read_format = PERF_FORMAT_TOTAL_TIME_ENABLED | \ in __event_init_opts()
38 e->attr.sample_period = 1000; in __event_init_opts()
39 e->attr.sample_type = PERF_SAMPLE_REGS_INTR; in __event_init_opts()
40 e->attr.disabled = 1; in __event_init_opts()
[all …]
/linux/scripts/kconfig/
expr.c
16 static struct expr *expr_eliminate_yn(struct expr *e);
20 struct expr *e = xcalloc(1, sizeof(*e)); in expr_alloc_symbol() local
21 e->type = E_SYMBOL; in expr_alloc_symbol()
22 e->left.sym = sym; in expr_alloc_symbol()
23 return e; in expr_alloc_symbol()
28 struct expr *e = xcalloc(1, sizeof(*e)); in expr_alloc_one() local
29 e->type = type; in expr_alloc_one()
30 e->left.expr = ce; in expr_alloc_one()
31 return e; in expr_alloc_one()
36 struct expr *e = xcalloc(1, sizeof(*e)); in expr_alloc_two() local
[all …]
/linux/drivers/media/test-drivers/vidtv/
vidtv_s302m.c
165 static void vidtv_s302m_access_unit_destroy(struct vidtv_encoder *e) in vidtv_s302m_access_unit_destroy() argument
167 struct vidtv_access_unit *head = e->access_units; in vidtv_s302m_access_unit_destroy()
176 e->access_units = NULL; in vidtv_s302m_access_unit_destroy()
179 static void vidtv_s302m_alloc_au(struct vidtv_encoder *e) in vidtv_s302m_alloc_au() argument
184 if (e->sync && e->sync->is_video_encoder) { in vidtv_s302m_alloc_au()
185 sync_au = e->sync->access_units; in vidtv_s302m_alloc_au()
188 temp = vidtv_s302m_access_unit_init(e->access_units); in vidtv_s302m_alloc_au()
189 if (!e->access_units) in vidtv_s302m_alloc_au()
190 e->access_units = temp; in vidtv_s302m_alloc_au()
198 e->access_units = vidtv_s302m_access_unit_init(NULL); in vidtv_s302m_alloc_au()
[all …]
/linux/security/apparmor/
policy_unpack_test.c
50 struct aa_ext *e; member
58 struct aa_ext *e; in build_aa_ext_struct() local
63 e = kunit_kmalloc(test, sizeof(*e), GFP_USER); in build_aa_ext_struct()
64 KUNIT_EXPECT_NOT_ERR_OR_NULL(test, e); in build_aa_ext_struct()
66 e->start = buf; in build_aa_ext_struct()
67 e->end = e->start + buf_size; in build_aa_ext_struct()
68 e->pos = e->start; in build_aa_ext_struct()
72 strscpy(buf + 3, TEST_STRING_NAME, e->end - (void *)(buf + 3)); in build_aa_ext_struct()
74 buf = e->start + TEST_STRING_BUF_OFFSET; in build_aa_ext_struct()
77 strscpy(buf + 3, TEST_STRING_DATA, e->end - (void *)(buf + 3)); in build_aa_ext_struct()
[all …]
policy_unpack.c
63 const char *name, const char *info, struct aa_ext *e, in audit_iface() argument
68 if (e) in audit_iface()
69 ad.iface.pos = e->pos - e->start; in audit_iface()
162 VISIBLE_IF_KUNIT bool aa_inbounds(struct aa_ext *e, size_t size) in aa_inbounds() argument
164 return (size <= e->end - e->pos); in aa_inbounds()
175 VISIBLE_IF_KUNIT size_t aa_unpack_u16_chunk(struct aa_ext *e, char **chunk) in aa_unpack_u16_chunk() argument
178 void *pos = e->pos; in aa_unpack_u16_chunk()
180 if (!aa_inbounds(e, sizeof(u16))) in aa_unpack_u16_chunk()
182 size = le16_to_cpu(get_unaligned((__le16 *) e->pos)); in aa_unpack_u16_chunk()
183 e->pos += sizeof(__le16); in aa_unpack_u16_chunk()
[all …]
/linux/block/
elevator.c
63 struct elevator_queue *e = q->elevator; in elv_iosched_allow_bio_merge() local
65 if (e->type->ops.allow_merge) in elv_iosched_allow_bio_merge()
66 return e->type->ops.allow_merge(q, rq, bio); in elv_iosched_allow_bio_merge()
93 static bool elevator_match(const struct elevator_type *e, const char *name) in elevator_match() argument
95 return !strcmp(e->elevator_name, name) || in elevator_match()
96 (e->elevator_alias && !strcmp(e->elevator_alias, name)); in elevator_match()
101 struct elevator_type *e; in __elevator_find() local
103 list_for_each_entry(e, &elv_list, list) in __elevator_find()
104 if (elevator_match(e, name)) in __elevator_find()
105 return e; in __elevator_find()
[all …]
/linux/fs/
binfmt_misc.c
94 Node *e; in search_binfmt_handler() local
97 list_for_each_entry(e, &misc->entries, list) { in search_binfmt_handler()
102 if (!test_bit(Enabled, &e->flags)) in search_binfmt_handler()
106 if (!test_bit(Magic, &e->flags)) { in search_binfmt_handler()
107 if (p && !strcmp(e->magic, p + 1)) in search_binfmt_handler()
108 return e; in search_binfmt_handler()
113 s = bprm->buf + e->offset; in search_binfmt_handler()
114 if (e->mask) { in search_binfmt_handler()
115 for (j = 0; j < e->size; j++) in search_binfmt_handler()
116 if ((*s++ ^ e->magic[j]) & e->mask[j]) in search_binfmt_handler()
[all …]
/linux/drivers/md/
dm-cache-policy-smq.c
88 struct entry *e; in __get_entry() local
90 e = es->begin + block; in __get_entry()
91 BUG_ON(e >= es->end); in __get_entry()
93 return e; in __get_entry()
96 static unsigned int to_index(struct entry_space *es, struct entry *e) in to_index() argument
98 BUG_ON(e < es->begin || e >= es->end); in to_index()
99 return e - es->begin; in to_index()
133 static struct entry *l_next(struct entry_space *es, struct entry *e) in l_next() argument
135 return to_entry(es, e->next); in l_next()
138 static struct entry *l_prev(struct entry_space *es, struct entry *e) in l_prev() argument
[all …]
/linux/tools/testing/selftests/powerpc/pmu/ebb/
trace.c
78 struct trace_entry *e; in trace_alloc_entry() local
80 e = trace_alloc(tb, sizeof(*e) + payload_size); in trace_alloc_entry()
81 if (e) in trace_alloc_entry()
82 e->length = payload_size; in trace_alloc_entry()
84 return e; in trace_alloc_entry()
89 struct trace_entry *e; in trace_log_reg() local
92 e = trace_alloc_entry(tb, sizeof(reg) + sizeof(value)); in trace_log_reg()
93 if (!e) in trace_log_reg()
96 e->type = TRACE_TYPE_REG; in trace_log_reg()
97 p = (u64 *)e->data; in trace_log_reg()
[all …]
/linux/lib/
lru_cache.c
39 #define PARANOIA_LC_ELEMENT(lc, e) do { \ argument
41 struct lc_element *e_ = (e); \
84 struct lc_element *e; in lc_create() local
128 e = p + e_off; in lc_create()
129 e->lc_index = i; in lc_create()
130 e->lc_number = LC_FREE; in lc_create()
131 e->lc_new_number = LC_FREE; in lc_create()
132 list_add(&e->list, &lc->free); in lc_create()
133 element[i] = e; in lc_create()
202 struct lc_element *e = lc->lc_element[i]; in lc_reset() local
[all …]
/linux/drivers/net/ethernet/mellanox/mlx5/core/esw/
indir_table.c
97 struct mlx5_esw_indir_table_entry *e) in mlx5_esw_indir_table_rule_get() argument
109 if (e->recirc_rule) { in mlx5_esw_indir_table_rule_get()
110 refcount_inc(&e->recirc_rule->refcnt); in mlx5_esw_indir_table_rule_get()
139 flow_act.fg = e->recirc_grp; in mlx5_esw_indir_table_rule_get()
146 handle = mlx5_add_flow_rules(e->ft, NULL, &flow_act, &dest, 1); in mlx5_esw_indir_table_rule_get()
156 e->recirc_rule = rule; in mlx5_esw_indir_table_rule_get()
172 struct mlx5_esw_indir_table_entry *e) in mlx5_esw_indir_table_rule_put() argument
174 struct mlx5_esw_indir_table_rule *rule = e->recirc_rule; in mlx5_esw_indir_table_rule_put()
187 e->recirc_rule = NULL; in mlx5_esw_indir_table_rule_put()
190 static int mlx5_create_indir_recirc_group(struct mlx5_esw_indir_table_entry *e) in mlx5_create_indir_recirc_group() argument
[all …]
/linux/drivers/mtd/ubi/
wl.c
127 struct ubi_wl_entry *e, struct rb_root *root);
129 struct ubi_wl_entry *e);
139 static void wl_tree_add(struct ubi_wl_entry *e, struct rb_root *root) in wl_tree_add() argument
150 if (e->ec < e1->ec) in wl_tree_add()
152 else if (e->ec > e1->ec) in wl_tree_add()
155 ubi_assert(e->pnum != e1->pnum); in wl_tree_add()
156 if (e->pnum < e1->pnum) in wl_tree_add()
163 rb_link_node(&e->u.rb, parent, p); in wl_tree_add()
164 rb_insert_color(&e->u.rb, root); in wl_tree_add()
175 static void wl_entry_destroy(struct ubi_device *ubi, struct ubi_wl_entry *e) in wl_entry_destroy() argument
[all …]
/linux/arch/sparc/vdso/
vma.c
68 static void *one_section64(struct vdso_elfinfo64 *e, const char *name, in one_section64() argument
75 shdrs = (void *)e->hdr + e->hdr->e_shoff; in one_section64()
76 snames = (void *)e->hdr + shdrs[e->hdr->e_shstrndx].sh_offset; in one_section64()
77 for (i = 1; i < e->hdr->e_shnum; i++) { in one_section64()
81 return (void *)e->hdr + shdrs[i].sh_offset; in one_section64()
89 struct vdso_elfinfo64 *e = &_e->u.elf64; in find_sections64() local
91 e->hdr = image->data; in find_sections64()
92 e … in find_sections64()
102 find_sym64(const struct vdso_elfinfo64 *e, const char *name) in find_sym64() argument
119 struct vdso_elfinfo64 *e = &_e->u.elf64; in patchsym64() local
136 one_section32(struct vdso_elfinfo32 *e, const char *name, unsigned long *size) in one_section32() argument
157 struct vdso_elfinfo32 *e = &_e->u.elf32; in find_sections32() local
170 find_sym32(const struct vdso_elfinfo32 *e, const char *name) in find_sym32() argument
187 struct vdso_elfinfo32 *e = &_e->u.elf32; in patchsym32() local
204 find_sections(const struct vdso_image *image, struct vdso_elfinfo *e, bool elf64) in find_sections() argument
213 patch_one_symbol(struct vdso_elfinfo *e, const char *orig, const char *new_target, bool elf64) in patch_one_symbol() argument
222 stick_patch(const struct vdso_image *image, struct vdso_elfinfo *e, bool elf64) in stick_patch() argument
[all …]
/linux/tools/testing/selftests/powerpc/pmu/event_code_tests/
event_alternatives_tests_p10.c
27 struct event *e, events[5]; in event_alternatives_tests_p10() local
47 e = &events[0]; in event_alternatives_tests_p10()
48 event_init(e, 0x0001e); in event_alternatives_tests_p10()
50 e = &events[1]; in event_alternatives_tests_p10()
51 event_init(e, EventCode_1); in event_alternatives_tests_p10()
53 e = &events[2]; in event_alternatives_tests_p10()
54 event_init(e, EventCode_2); in event_alternatives_tests_p10()
56 e = &events[3]; in event_alternatives_tests_p10()
57 event_init(e, EventCode_3); in event_alternatives_tests_p10()
59 e = &events[4]; in event_alternatives_tests_p10()
[all …]
/linux/net/netfilter/ipset/
ip_set_list_set.c
55 struct set_elem *e; in list_set_ktest() local
63 list_for_each_entry_rcu(e, &map->members, list) { in list_set_ktest()
64 ret = ip_set_test(e->id, skb, par, opt); in list_set_ktest()
67 if (ip_set_match_extensions(set, ext, mext, flags, e)) in list_set_ktest()
79 struct set_elem *e; in list_set_kadd() local
82 list_for_each_entry(e, &map->members, list) { in list_set_kadd()
84 ip_set_timeout_expired(ext_timeout(e, set))) in list_set_kadd()
86 ret = ip_set_add(e->id, skb, par, opt); in list_set_kadd()
99 struct set_elem *e; in list_set_kdel() local
102 list_for_each_entry(e, in list_set_kdel()
145 struct set_elem *e = container_of(rcu, struct set_elem, rcu); in __list_set_del_rcu() local
153 list_set_del(struct ip_set *set, struct set_elem *e) in list_set_del() argument
164 list_set_replace(struct ip_set *set, struct set_elem *e, struct set_elem *old) in list_set_replace() argument
177 struct set_elem *e, *n; in set_cleanup_entries() local
190 struct set_elem *e, *next, *prev = NULL; in list_set_utest() local
218 list_set_init_extensions(struct ip_set *set, const struct ip_set_ext *ext, struct set_elem *e) in list_set_init_extensions() argument
237 struct set_elem *e, *n, *prev, *next; in list_set_uadd() local
319 struct set_elem *e, *next, *prev = NULL; in list_set_udel() local
351 struct set_adt_elem e = { .refid = IPSET_INVALID_ID }; in list_set_uadt() local
415 struct set_elem *e, *n; in list_set_flush() local
427 struct set_elem *e, *n; in list_set_destroy() local
444 struct set_elem *e; in list_set_memsize() local
487 struct set_elem *e; in list_set_list() local
[all …]
ip_set_hash_netnet.c
136 hash_netnet4_init(struct hash_netnet4_elem *e) in hash_netnet4_init() argument
138 e->cidr[0] = HOST_MASK; in hash_netnet4_init()
139 e->cidr[1] = HOST_MASK; in hash_netnet4_init()
149 struct hash_netnet4_elem e = { }; in hash_netnet4_kadt() local
152 e.cidr[0] = INIT_CIDR(h->nets[0].cidr[0], HOST_MASK); in hash_netnet4_kadt()
153 e.cidr[1] = INIT_CIDR(h->nets[0].cidr[1], HOST_MASK); in hash_netnet4_kadt()
155 e.ccmp = (HOST_MASK << (sizeof(e.cidr[0]) * 8)) | HOST_MASK; in hash_netnet4_kadt()
157 ip4addrptr(skb, opt->flags & IPSET_DIM_ONE_SRC, &e.ip[0]); in hash_netnet4_kadt()
158 ip4addrptr(skb, opt->flags & IPSET_DIM_TWO_SRC, &e.ip[1]); in hash_netnet4_kadt()
159 e.ip[0] &= (ip_set_netmask(e.cidr[0]) & h->bitmask.ip); in hash_netnet4_kadt()
[all …]
ip_set_hash_netportnet.c
144 hash_netportnet4_init(struct hash_netportnet4_elem *e) in hash_netportnet4_init() argument
146 e->cidr[0] = HOST_MASK; in hash_netportnet4_init()
147 e->cidr[1] = HOST_MASK; in hash_netportnet4_init()
157 struct hash_netportnet4_elem e = { }; in hash_netportnet4_kadt() local
160 e.cidr[0] = INIT_CIDR(h->nets[0].cidr[0], HOST_MASK); in hash_netportnet4_kadt()
161 e.cidr[1] = INIT_CIDR(h->nets[0].cidr[1], HOST_MASK); in hash_netportnet4_kadt()
163 e.ccmp = (HOST_MASK << (sizeof(e.cidr[0]) * 8)) | HOST_MASK; in hash_netportnet4_kadt()
166 &e.port, &e.proto)) in hash_netportnet4_kadt()
169 ip4addrptr(skb, opt->flags & IPSET_DIM_ONE_SRC, &e.ip[0]); in hash_netportnet4_kadt()
170 ip4addrptr(skb, opt->flags & IPSET_DIM_THREE_SRC, &e.ip[1]); in hash_netportnet4_kadt()
[all …]
/linux/drivers/edac/
edac_mc.c
56 static struct mem_ctl_info *error_desc_to_mci(struct edac_raw_error_desc *e) in error_desc_to_mci() argument
58 return container_of(e, struct mem_ctl_info, error_desc); in error_desc_to_mci()
804 static void edac_inc_ce_error(struct edac_raw_error_desc *e) in edac_inc_ce_error() argument
806 int pos[EDAC_MAX_LAYERS] = { e->top_layer, e->mid_layer, e->low_layer }; in edac_inc_ce_error()
807 struct mem_ctl_info *mci = error_desc_to_mci(e); in edac_inc_ce_error()
810 mci->ce_mc += e->error_count; in edac_inc_ce_error()
813 dimm->ce_count += e->error_count; in edac_inc_ce_error()
815 mci->ce_noinfo_count += e->error_count; in edac_inc_ce_error()
818 static void edac_inc_ue_error(struct edac_raw_error_desc *e) in edac_inc_ue_error() argument
820 int pos[EDAC_MAX_LAYERS] = { e->top_layer, e->mid_layer, e->low_layer }; in edac_inc_ue_error()
[all …]
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_ring_mux.c
78 struct amdgpu_mux_entry *e = NULL; in amdgpu_mux_resubmit_chunks() local
89 e = &mux->ring_entry[i]; in amdgpu_mux_resubmit_chunks()
94 if (!e) { in amdgpu_mux_resubmit_chunks()
99 last_seq = atomic_read(&e->ring->fence_drv.last_seq); in amdgpu_mux_resubmit_chunks()
103 list_for_each_entry(chunk, &e->list, entry) { in amdgpu_mux_resubmit_chunks()
105 amdgpu_fence_update_start_timestamp(e->ring, in amdgpu_mux_resubmit_chunks()
109 le32_to_cpu(*(e->ring->fence_drv.cpu_addr + 2))) { in amdgpu_mux_resubmit_chunks()
110 if (chunk->cntl_offset <= e->ring->buf_mask) in amdgpu_mux_resubmit_chunks()
111 amdgpu_ring_patch_cntl(e->ring, in amdgpu_mux_resubmit_chunks()
113 if (chunk->ce_offset <= e->ring->buf_mask) in amdgpu_mux_resubmit_chunks()
[all …]
amdgpu_sync.c
135 struct amdgpu_sync_entry *e; in amdgpu_sync_add_later() local
137 hash_for_each_possible(sync->fences, e, node, f->context) { in amdgpu_sync_add_later()
138 if (unlikely(e->fence->context != f->context)) in amdgpu_sync_add_later()
141 amdgpu_sync_keep_later(&e->fence, f); in amdgpu_sync_add_later()
157 struct amdgpu_sync_entry *e; in amdgpu_sync_fence() local
165 e = kmem_cache_alloc(amdgpu_sync_slab, GFP_KERNEL); in amdgpu_sync_fence()
166 if (!e) in amdgpu_sync_fence()
169 hash_add(sync->fences, &e->node, f->context); in amdgpu_sync_fence()
170 e->fence = dma_fence_get(f); in amdgpu_sync_fence()
264 static void amdgpu_sync_entry_free(struct amdgpu_sync_entry *e) in amdgpu_sync_entry_free() argument
[all …]
/linux/arch/arm64/kvm/vgic/
vgic-irqfd.c
18 static int vgic_irqfd_set_irq(struct kvm_kernel_irq_routing_entry *e, in vgic_irqfd_set_irq() argument
22 unsigned int spi_id = e->irqchip.pin + VGIC_NR_PRIVATE_IRQS; in vgic_irqfd_set_irq()
39 struct kvm_kernel_irq_routing_entry *e, in kvm_set_routing_entry() argument
46 e->set = vgic_irqfd_set_irq; in kvm_set_routing_entry()
47 e->irqchip.irqchip = ue->u.irqchip.irqchip; in kvm_set_routing_entry()
48 e->irqchip.pin = ue->u.irqchip.pin; in kvm_set_routing_entry()
49 if ((e->irqchip.pin >= KVM_IRQCHIP_NUM_PINS) || in kvm_set_routing_entry()
50 (e->irqchip.irqchip >= KVM_NR_IRQCHIPS)) in kvm_set_routing_entry()
54 e->set = kvm_set_msi; in kvm_set_routing_entry()
55 e->msi.address_lo = ue->u.msi.address_lo; in kvm_set_routing_entry()
[all …]
