Searched refs:nskb (Results 1 - 25 of 36) sorted by relevance

/drivers/bluetooth/
hci_bcsp.c
182 struct sk_buff *nskb; local
232 nskb = alloc_skb((len + 6) * 2 + 2, GFP_ATOMIC);
233 if (!nskb)
236 bt_cb(nskb)->pkt_type = pkt_type;
238 bcsp_slip_msgdelim(nskb);
259 bcsp_slip_one_byte(nskb, hdr[i]);
267 bcsp_slip_one_byte(nskb, data[i]);
276 bcsp_slip_one_byte(nskb, (u8) ((bcsp_txmsg_crc >> 8) & 0x00ff));
277 bcsp_slip_one_byte(nskb, (u8) (bcsp_txmsg_crc & 0x00ff));
280 bcsp_slip_msgdelim(nskb);
296 struct sk_buff *nskb = bcsp_prepare_pkt(bcsp, skb->data, skb->len, bt_cb(skb)->pkt_type); local
315 struct sk_buff *nskb = bcsp_prepare_pkt(bcsp, skb->data, skb->len, local
338 struct sk_buff *nskb = bcsp_prepare_pkt(bcsp, NULL, 0, BCSP_ACK_PKT); local
411 struct sk_buff *nskb = alloc_skb(4, GFP_ATOMIC); local
[all...]
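
The hci_bcsp.c hits show the shape of this driver's TX path: nskb is sized for the worst case of SLIP byte stuffing ((len + 6) * 2 + 2 appears to cover a 4-byte header, 2-byte CRC and two frame delimiters, with every byte possibly doubling), then filled through skb_put() via the slip helpers. A minimal sketch of that sizing and escaping, assuming the standard SLIP escape codes and using illustrative function names rather than the driver's header/CRC handling:

#include <linux/skbuff.h>
#include <linux/string.h>

static void slip_one_byte(struct sk_buff *skb, u8 c)
{
    static const u8 esc_c0[2] = { 0xdb, 0xdc };    /* escaped frame delimiter */
    static const u8 esc_db[2] = { 0xdb, 0xdd };    /* escaped escape byte */

    switch (c) {
    case 0xc0:
        memcpy(skb_put(skb, 2), esc_c0, 2);
        break;
    case 0xdb:
        memcpy(skb_put(skb, 2), esc_db, 2);
        break;
    default:
        memcpy(skb_put(skb, 1), &c, 1);
    }
}

static struct sk_buff *slip_encode(const u8 *data, int len)
{
    static const u8 delim = 0xc0;
    struct sk_buff *nskb;
    int i;

    /* worst case: every byte escapes to two, plus header/CRC/delimiters */
    nskb = alloc_skb((len + 6) * 2 + 2, GFP_ATOMIC);
    if (!nskb)
        return NULL;

    memcpy(skb_put(nskb, 1), &delim, 1);    /* opening delimiter */
    for (i = 0; i < len; i++)
        slip_one_byte(nskb, data[i]);
    memcpy(skb_put(nskb, 1), &delim, 1);    /* closing delimiter */

    return nskb;
}
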
hci_h5.c
104 struct sk_buff *nskb; local
106 nskb = alloc_skb(3, GFP_ATOMIC);
107 if (!nskb)
110 bt_cb(nskb)->pkt_type = HCI_3WIRE_LINK_PKT;
112 memcpy(skb_put(nskb, len), data, len);
114 skb_queue_tail(&h5->unrel, nskb);
601 struct sk_buff *nskb; local
616 nskb = alloc_skb((len + 6) * 2 + 2, GFP_ATOMIC);
617 if (!nskb)
620 bt_cb(nskb)
658 struct sk_buff *skb, *nskb; local
[all...]
bfusb.c
476 struct sk_buff *nskb; local
503 nskb = bt_skb_alloc(count + 32, GFP_ATOMIC);
504 if (!nskb) {
509 nskb->dev = (void *) data;
518 memcpy(skb_put(nskb, 3), buf, 3);
519 skb_copy_from_linear_data_offset(skb, sent, skb_put(nskb, size), size);
526 if ((nskb->len % data->bulk_pkt_size) == 0) {
529 memcpy(skb_put(nskb, 2), buf, 2);
534 skb_queue_tail(&data->transmit_q, nskb);
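
The bfusb.c hits split one frame into bulk-endpoint-sized chunks, each prefixed with a small header and copied out of the source skb with skb_copy_from_linear_data_offset() before being queued for transmission. A rough sketch of that fragmentation loop, with an invented 3-byte header layout and illustrative names (MY_HDR_LEN, txq, max_frag), omitting the driver's padding rule for chunks that land exactly on the bulk packet size:

#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/skbuff.h>
#include <linux/string.h>

#define MY_HDR_LEN 3    /* illustrative per-chunk header length */

static int fragment_to_queue(struct sk_buff_head *txq, struct sk_buff *skb,
                             unsigned int max_frag)
{
    unsigned int sent = 0;

    while (sent < skb->len) {
        unsigned int size = min_t(unsigned int, max_frag, skb->len - sent);
        u8 hdr[MY_HDR_LEN] = { 0x01, (u8)(size & 0xff), (u8)((size >> 8) & 0xff) };
        struct sk_buff *nskb;

        nskb = alloc_skb(size + MY_HDR_LEN, GFP_ATOMIC);
        if (!nskb)
            return -ENOMEM;

        memcpy(skb_put(nskb, MY_HDR_LEN), hdr, MY_HDR_LEN);
        /* copy the next slice of the original linear payload */
        skb_copy_from_linear_data_offset(skb, sent,
                                         skb_put(nskb, size), size);

        skb_queue_tail(txq, nskb);    /* the consumer frees each chunk */
        sent += size;
    }
    return 0;
}
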
/drivers/net/wireless/ath/ath9k/
hif_usb.c
311 struct sk_buff *nskb = NULL; local
331 nskb = __skb_dequeue(&hif_dev->tx.tx_skb_queue);
334 BUG_ON(!nskb);
341 *hdr++ = cpu_to_le16(nskb->len);
344 memcpy(buf, nskb->data, nskb->len);
345 tx_buf->len = nskb->len + 4;
353 __skb_queue_tail(&tx_buf->skb_queue, nskb);
530 struct sk_buff *nskb, *skb_pool[MAX_PKT_NUM_IN_TRANSFER]; local
596 nskb
686 struct sk_buff *nskb; local
[all...]
/drivers/isdn/mISDN/
dsp_core.c
285 struct sk_buff *nskb; local
591 nskb = _alloc_mISDN_skb(PH_CONTROL_IND, MISDN_ID_ANY,
593 if (nskb) {
595 if (dsp->up->send(dsp->up, nskb))
596 dev_kfree_skb(nskb);
598 dev_kfree_skb(nskb);
743 struct sk_buff *nskb; local
749 nskb = _alloc_mISDN_skb(PH_CONTROL_IND,
752 if (nskb) {
755 dsp->up, nskb))
789 struct sk_buff *nskb; local
[all...]
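
The dsp_core.c hits illustrate the mISDN ownership rule around nskb: the buffer is allocated with _alloc_mISDN_skb(), and if the upper layer's send() rejects it (returns non-zero) the sender still owns it and must free it. A generic sketch of that "send or free" pattern, with an illustrative consumer struct standing in for the mISDN channel types:

#include <linux/skbuff.h>
#include <linux/string.h>

struct consumer {
    int (*send)(struct consumer *c, struct sk_buff *skb);
};

static void notify_consumer(struct consumer *up, const void *data, int len)
{
    struct sk_buff *nskb = alloc_skb(len, GFP_ATOMIC);

    if (!nskb)
        return;

    memcpy(skb_put(nskb, len), data, len);

    if (up && up->send) {
        if (up->send(up, nskb))    /* non-zero: not consumed */
            dev_kfree_skb(nskb);
    } else {
        dev_kfree_skb(nskb);       /* nobody to deliver to */
    }
}
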
dsp_cmx.c
1315 struct sk_buff *nskb, *txskb; local
1354 nskb = mI_alloc_skb(len + preload, GFP_ATOMIC);
1355 if (!nskb) {
1361 hh = mISDN_HEAD_P(nskb);
1370 d = skb_put(nskb, preload + len); /* result */
1584 skb_queue_tail(&dsp->sendq, nskb);
1598 memcpy(skb_put(txskb, len), nskb->data + preload,
1609 dsp_change_volume(nskb, dsp->tx_volume);
1612 dsp_pipeline_process_tx(&dsp->pipeline, nskb->data,
1613 nskb
1920 struct sk_buff *nskb = NULL; local
[all...]
layer2.c
247 struct sk_buff *nskb = skb; local
252 nskb = skb_dequeue(&l2->down_queue);
253 if (nskb) {
254 l2->down_id = mISDN_HEAD_ID(nskb);
255 if (l2down_skb(l2, nskb)) {
256 dev_kfree_skb(nskb);
272 nskb = skb_dequeue(&l2->down_queue);
273 if (nskb) {
274 l2->down_id = mISDN_HEAD_ID(nskb);
275 if (l2down_skb(l2, nskb)) {
1479 struct sk_buff *skb, *nskb, *oskb; local
[all...]
l1oip_core.c
365 struct sk_buff *nskb; local
396 nskb = mI_alloc_skb((remotecodec == 3) ? (len << 1) : len, GFP_ATOMIC);
397 if (!nskb) {
401 p = skb_put(nskb, (remotecodec == 3) ? (len << 1) : len);
414 dch->rx_skb = nskb;
444 hc->chan[channel].disorder_skb = nskb;
445 nskb = skb;
451 if (nskb)
453 queue_ch_frame(&bch->ch, PH_DATA_IND, rx_counter, nskb);
dsp_tones.c
440 struct sk_buff *nskb; local
443 nskb = _alloc_mISDN_skb(PH_CONTROL_REQ,
446 if (nskb) {
448 if (dsp->ch.recv(dsp->ch.peer, nskb))
449 dev_kfree_skb(nskb);
451 dev_kfree_skb(nskb);
/drivers/isdn/i4l/
isdn_v110.c
463 struct sk_buff *nskb; local
483 if (!(nskb = dev_alloc_skb(size + v->skbres + sizeof(int)))) {
487 skb_reserve(nskb, v->skbres + sizeof(int));
489 memcpy(skb_put(nskb, v->framelen), v->OnlineFrame, v->framelen);
490 *((int *)skb_push(nskb, sizeof(int))) = 0;
491 return nskb;
495 rbuf = skb_put(nskb, size);
512 skb_trim(nskb, olen);
513 *((int *)skb_push(nskb, sizeof(int))) = rlen;
514 return nskb;
[all...]
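
The isdn_v110.c hits build nskb by reserving headroom first, filling the body with skb_put(), and finally prepending a return-value word with skb_push(). A condensed sketch of that reserve/put/push sequence (illustrative parameter names, none of the V.110 framing):

#include <linux/skbuff.h>
#include <linux/string.h>

static struct sk_buff *build_with_status(const u8 *payload, int len,
                                         int hdr_room, int status)
{
    struct sk_buff *nskb;

    nskb = dev_alloc_skb(len + hdr_room + sizeof(int));
    if (!nskb)
        return NULL;

    skb_reserve(nskb, hdr_room + sizeof(int));      /* leave room in front */
    memcpy(skb_put(nskb, len), payload, len);       /* body */
    *((int *)skb_push(nskb, sizeof(int))) = status; /* prepend status word */

    return nskb;
}
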
isdn_common.c
2005 struct sk_buff *nskb = NULL; local
2011 nskb = isdn_v110_encode(dev->v110[idx], skb);
2013 if (!nskb)
2015 v110_ret = *((int *)nskb->data);
2016 skb_pull(nskb, sizeof(int));
2017 if (!nskb->len) {
2018 dev_kfree_skb(nskb);
2023 ret = dev->drv[drvidx]->interface->writebuf_skb(drvidx, chan, ack, nskb);
2066 dev_kfree_skb(nskb);
/drivers/net/
macvlan.c
238 struct sk_buff *nskb; local
256 nskb = skb_clone(skb, GFP_ATOMIC);
257 if (likely(nskb))
259 nskb, vlan, eth,
261 netif_rx_ni(nskb);
315 struct sk_buff *nskb; local
318 nskb = skb_clone(skb, GFP_ATOMIC);
319 if (!nskb)
324 __skb_queue_tail(&port->bc_queue, nskb);
336 kfree_skb(nskb);
364 struct sk_buff *nskb; local
[all...]
virtio_net.c
382 struct sk_buff *nskb = alloc_skb(0, GFP_ATOMIC); local
384 if (unlikely(!nskb))
387 skb_shinfo(curr_skb)->frag_list = nskb;
389 curr_skb->next = nskb;
390 curr_skb = nskb;
391 head_skb->truesize += nskb->truesize;
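
The virtio_net.c hit extends a receive skb past its own frag slots by hanging additional buffers off skb_shinfo()->frag_list and bumping truesize so socket memory accounting stays correct. A trimmed sketch of that chaining step:

#include <linux/skbuff.h>

static struct sk_buff *chain_empty_skb(struct sk_buff *head_skb,
                                       struct sk_buff *curr_skb)
{
    struct sk_buff *nskb = alloc_skb(0, GFP_ATOMIC);

    if (unlikely(!nskb))
        return NULL;

    if (curr_skb == head_skb)
        skb_shinfo(curr_skb)->frag_list = nskb;    /* first chained skb */
    else
        curr_skb->next = nskb;                     /* append to the chain */

    head_skb->truesize += nskb->truesize;          /* memory accounting */
    return nskb;                                   /* new current skb */
}
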
/drivers/net/irda/
stir4200.c
318 struct sk_buff *skb, *nskb; local
340 nskb = dev_alloc_skb(len + 1);
341 if (unlikely(!nskb)) {
345 skb_reserve(nskb, 1);
346 skb = nskb;
347 skb_copy_to_linear_data(nskb, rx_buff->data, len);
349 nskb = dev_alloc_skb(rx_buff->truesize);
350 if (unlikely(!nskb)) {
354 skb_reserve(nskb, 1);
356 rx_buff->skb = nskb;
[all...]
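
The stir4200.c hits are a receive copy-break: frames below a threshold are copied into a tightly sized new skb, while larger frames keep the original ring buffer and a fresh full-size skb is installed in its place. A sketch of that decision, with an illustrative threshold constant rather than the driver's value:

#include <linux/skbuff.h>

#define COPYBREAK 256    /* illustrative threshold */

static struct sk_buff *rx_copybreak(struct sk_buff **ring_skb, const u8 *data,
                                    unsigned int len, unsigned int bufsize)
{
    struct sk_buff *nskb, *skb;

    if (len < COPYBREAK) {
        nskb = dev_alloc_skb(len + 1);
        if (unlikely(!nskb))
            return NULL;                 /* drop; ring skb stays in place */
        skb_reserve(nskb, 1);            /* align the IP header */
        skb_copy_to_linear_data(nskb, data, len);
        skb_put(nskb, len);
        return nskb;                     /* hand up the small copy */
    }

    /* hand the full ring skb up and install a fresh one in its place */
    nskb = dev_alloc_skb(bufsize);
    if (unlikely(!nskb))
        return NULL;                     /* drop; keep the ring skb */
    skb_reserve(nskb, 1);

    skb = *ring_skb;
    skb_put(skb, len);
    *ring_skb = nskb;
    return skb;
}
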
/drivers/isdn/capi/
capi.c
385 struct sk_buff *nskb; local
386 nskb = alloc_skb(CAPI_DATA_B3_RESP_LEN, GFP_KERNEL);
387 if (nskb) {
389 unsigned char *s = skb_put(nskb, CAPI_DATA_B3_RESP_LEN);
398 return nskb;
405 struct sk_buff *nskb; local
439 nskb = gen_data_b3_resp_for(mp, skb);
440 if (!nskb) {
447 errcode = capi20_put_message(mp->ap, nskb);
457 kfree_skb(nskb);
[all...]
/drivers/s390/net/
netiucv.c
1192 struct sk_buff *nskb = skb; local
1201 nskb = alloc_skb(skb->len + NETIUCV_HDRLEN +
1203 if (!nskb) {
1208 skb_reserve(nskb, NETIUCV_HDRLEN);
1209 memcpy(skb_put(nskb, skb->len),
1217 header.next = nskb->len + NETIUCV_HDRLEN;
1218 memcpy(skb_push(nskb, NETIUCV_HDRLEN), &header, NETIUCV_HDRLEN);
1220 memcpy(skb_put(nskb, NETIUCV_HDRLEN), &header, NETIUCV_HDRLEN);
1228 nskb->data, nskb
[all...]
ctcm_main.c
471 struct sk_buff *nskb; local
518 nskb = alloc_skb(skb->len, GFP_ATOMIC | GFP_DMA);
519 if (!nskb) {
525 memcpy(skb_put(nskb, skb->len), skb->data, skb->len);
526 atomic_inc(&nskb->users);
529 skb = nskb;
670 struct sk_buff *nskb; local
727 nskb = __dev_alloc_skb(skb->len, GFP_ATOMIC | GFP_DMA);
728 if (!nskb) {
731 memcpy(skb_put(nskb, sk
[all...]
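
The ctcm_main.c and netiucv.c hits both copy the frame into a freshly allocated buffer because the original may not be usable by the hardware (not DMA-addressable, or lacking room for the link-layer header). A minimal sketch of the DMA-copy case; the drivers' header insertion and reference counting are left out:

#include <linux/skbuff.h>
#include <linux/string.h>

static struct sk_buff *copy_to_dma_skb(struct sk_buff *skb)
{
    struct sk_buff *nskb;

    nskb = alloc_skb(skb->len, GFP_ATOMIC | GFP_DMA);
    if (!nskb)
        return NULL;    /* caller keeps and handles the original */

    memcpy(skb_put(nskb, skb->len), skb->data, skb->len);
    return nskb;        /* caller decides when to free the original */
}
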
/drivers/net/xen-netback/
netback.c
1029 struct sk_buff *nskb = xenvif_alloc_skb(0); local
1030 if (unlikely(nskb == NULL)) {
1037 shinfo = skb_shinfo(nskb);
1049 skb_shinfo(skb)->frag_list = nskb;
1536 struct sk_buff *nskb = skb_shinfo(skb)->frag_list; local
1541 xenvif_fill_frags(queue, nskb);
1544 skb->len += nskb->len;
1545 skb->data_len += nskb->len;
1589 xenvif_skb_zerocopy_prepare(queue, nskb);
1590 kfree_skb(nskb);
1618 struct sk_buff *nskb = local
[all...]
/drivers/isdn/hisax/
isdnl1.c
206 struct sk_buff *skb, *nskb; local
238 if ((nskb = skb_clone(skb, GFP_ATOMIC)))
239 stptr->l1.l1l2(stptr, PH_DATA | INDICATION, nskb);
246 if ((nskb = skb_clone(skb, GFP_ATOMIC)))
247 stptr->l1.l1tei(stptr, PH_DATA | INDICATION, nskb);
callc.c
1746 struct sk_buff *nskb; local
1775 nskb = skb_clone(skb, GFP_ATOMIC);
1776 if (nskb) {
1777 nskb->truesize = nskb->len;
1779 nskb->pkt_type = PACKET_NOACK;
1781 st->l3.l3l2(st, DL_DATA | REQUEST, nskb);
1784 st->l2.l2l1(st, PH_DATA | REQUEST, nskb);
/drivers/scsi/fcoe/
fcoe_transport.c
337 struct sk_buff *nskb; local
340 nskb = skb_clone(skb, GFP_ATOMIC);
341 if (!nskb)
343 rc = dev_queue_xmit(nskb);
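
The fcoe_transport.c hit clones the outgoing frame so a copy can be handed to the stack for transmission; dev_queue_xmit() consumes the clone whether it succeeds or fails, so no extra free is needed on that path. A sketch with an illustrative wrapper name:

#include <linux/errno.h>
#include <linux/netdevice.h>
#include <linux/skbuff.h>

static int xmit_clone(struct net_device *dev, struct sk_buff *skb)
{
    struct sk_buff *nskb = skb_clone(skb, GFP_ATOMIC);

    if (!nskb)
        return -ENOMEM;

    nskb->dev = dev;               /* transmit the clone via this device */
    return dev_queue_xmit(nskb);   /* consumes nskb on success and error */
}
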
/drivers/net/ethernet/sun/
sunvnet.c
868 struct sk_buff *nskb; local
884 nskb = alloc_and_align_skb(skb->dev, skb->len);
885 skb_reserve(nskb, VNET_PACKET_SKIP);
886 if (skb_copy_bits(skb, 0, nskb->data, skb->len)) {
887 dev_kfree_skb(nskb);
891 (void)skb_put(nskb, skb->len);
893 skb = nskb;
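
The sunvnet.c hits replace an unsuitable skb with an aligned linear copy: allocate, reserve the required headroom, pull the bytes over with skb_copy_bits() (which also handles paged data), then continue with the copy. A sketch of that realignment, with an illustrative headroom parameter and plain alloc_skb() in place of the driver's alignment helper:

#include <linux/skbuff.h>

static struct sk_buff *realign_skb(struct sk_buff *skb, unsigned int headroom)
{
    struct sk_buff *nskb = alloc_skb(skb->len + headroom, GFP_ATOMIC);

    if (!nskb)
        return NULL;                /* caller keeps the original */

    skb_reserve(nskb, headroom);
    if (skb_copy_bits(skb, 0, nskb->data, skb->len)) {
        dev_kfree_skb(nskb);
        return NULL;
    }
    (void)skb_put(nskb, skb->len);  /* account for the copied bytes */

    dev_kfree_skb(skb);             /* the copy replaces the original */
    return nskb;
}
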
/drivers/isdn/isdnloop/
isdnloop.c
403 struct sk_buff *nskb; local
416 nskb = dev_alloc_skb(skb->len);
417 if (nskb) {
419 skb_put(nskb, len), len);
420 skb_queue_tail(&card->bqueue[channel], nskb);
/drivers/net/ethernet/broadcom/
bcm63xx_enet.c
386 struct sk_buff *nskb; local
388 nskb = netdev_alloc_skb_ip_align(dev, len);
389 if (!nskb) {
397 memcpy(nskb->data, skb->data, len);
400 skb = nskb;
602 struct sk_buff *nskb; local
604 nskb = skb_copy_expand(skb, 0, needed, GFP_ATOMIC);
605 if (!nskb) {
610 skb = nskb;
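
bcm63xx_enet.c uses nskb twice: a receive copy-break with netdev_alloc_skb_ip_align() for short packets, and skb_copy_expand() on transmit when a frame needs more tailroom than it has. A sketch of the second case, with an illustrative wrapper name:

#include <linux/skbuff.h>

static struct sk_buff *ensure_tailroom(struct sk_buff *skb, unsigned int needed)
{
    struct sk_buff *nskb;

    if (skb_tailroom(skb) >= (int)needed)
        return skb;        /* already large enough */

    nskb = skb_copy_expand(skb, 0, needed, GFP_ATOMIC);
    if (!nskb)
        return NULL;       /* caller still owns the original skb */

    dev_kfree_skb(skb);    /* the expanded copy replaces it */
    return nskb;
}
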
/drivers/net/wireless/
mac80211_hwsim.c
1014 struct sk_buff *nskb; local
1049 nskb = dev_alloc_skb(128);
1050 if (!nskb) {
1056 skb_add_rx_frag(nskb, 0, page, 0, skb->len, skb->len);
1058 nskb = skb_copy(skb, GFP_ATOMIC);
1059 if (!nskb)
1068 memcpy(IEEE80211_SKB_RXCB(nskb), &rx_status, sizeof(rx_status));
1069 ieee80211_rx_irqsafe(data2->hw, nskb);
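
The mac80211_hwsim.c hit hands the same frame to another virtual radio and, when it cannot attach the data as a page fragment, falls back to skb_copy(); unlike skb_clone(), a full copy lets the receiving path modify header and payload without touching the original. A minimal sketch of that deliver-a-copy step, with an illustrative rx callback:

#include <linux/errno.h>
#include <linux/skbuff.h>

static int deliver_copy(struct sk_buff *skb,
                        void (*rx)(struct sk_buff *nskb))
{
    struct sk_buff *nskb = skb_copy(skb, GFP_ATOMIC);

    if (!nskb)
        return -ENOMEM;

    /* receiver-specific status can be written into nskb->cb here
     * before handing the copy over; the receiver then owns nskb */
    rx(nskb);
    return 0;
}
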

Completed in 431 milliseconds
