There's GFP_KERNEL vs. GFP_ATOMIC confusion here:
@@ -5466,10 +5470,17 @@ static int ath10k_add_interface(struct ieee80211_hw *hw,
if (vif->type == NL80211_IFTYPE_ADHOC ||
vif->type == NL80211_IFTYPE_MESH_POINT ||
vif->type == NL80211_IFTYPE_AP) {
- arvif->beacon_buf = dma_alloc_coherent(ar->dev,
- IEEE80211_MAX_FRAME_LEN,
- &arvif->beacon_paddr,
- GFP_ATOMIC);
+ if (ar->bus_param.dev_type == ATH10K_DEV_TYPE_HL) {
+ arvif->beacon_buf = kmalloc(IEEE80211_MAX_FRAME_LEN,
+ GFP_KERNEL);
+ arvif->beacon_paddr = (dma_addr_t)arvif->beacon_buf;
+ } else {
+ arvif->beacon_buf =
+ dma_alloc_coherent(ar->dev,
+ IEEE80211_MAX_FRAME_LEN,
+ &arvif->beacon_paddr,
+ GFP_ATOMIC);
+ }
if (!arvif->beacon_buf) {
ret = -ENOMEM;
ath10k_warn(ar, "failed to allocate beacon buffer: %d\n",
I'd expect both allocations to use the same GFP_ flags.
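
Not a tested patch, just a sketch of what I had in mind, assuming this
path can sleep so GFP_KERNEL is safe for the dma_alloc_coherent() call
as well:

	if (ar->bus_param.dev_type == ATH10K_DEV_TYPE_HL) {
		arvif->beacon_buf = kmalloc(IEEE80211_MAX_FRAME_LEN,
					    GFP_KERNEL);
		arvif->beacon_paddr = (dma_addr_t)arvif->beacon_buf;
	} else {
		/* same flags as the HL branch above */
		arvif->beacon_buf = dma_alloc_coherent(ar->dev,
						       IEEE80211_MAX_FRAME_LEN,
						       &arvif->beacon_paddr,
						       GFP_KERNEL);
	}

If there's a reason the DMA path has to stay GFP_ATOMIC, a comment
explaining why would help.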