This thread has been locked.

If you have a related question, please click the "Ask a related question" button in the top right corner. The newly created question will be automatically linked to this question.

RPMessage_send: no object for endpoint

Hi!

I'm working with the EVMK2H board (Rev 4.0), MCSDK v3.1.4.7 and IPC v3.36.02.13. I'm trying to have a very basic application for testing purposes. The goal is simply to send a message from DSP to ARM and then back from ARM to DSP. The configuration used on the DSP side is the same as the one provided in the image processing demo. Everything seems to work fine on the ARM side as I am able to open the DSP's queue and send it a message and receive one on the ARM's local queue. Receiving the sent message on the DSP side however doesn't seem to work. The error message I got is also given when trying to open the ARM's queue although this operation ends up working.

Here is the report:

root@k2hk-evm:~/keystone2_tests/arm/tests/hwi/hwi_test_arm_MessageQ# ./hwi_test_arm_MessageQ 
Initializing IPC module... done!
Creating receiving message queue... done.
Opening dsp queue... done!
Waiting for message from DSP... done!
RECEIVED ACK FOR INTERRUPT #0.
Sending message to dsp...
Message sent!

root@k2hk-evm:~/keystone2_tests/arm/tests/hwi/hwi_test_arm_MessageQ# cat /sys/kernel/debug/remoteproc/remoteproc1/trace0 
3 Resource entries at 0x800000
registering rpmsg-proto service on 61 with HOST
Opening arm queue... [t=0x271cc334] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 53
[t=0x27583acd] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 53
[t=0x2a67a916] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 0
[t=0x35eae895] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 0
[t=0x416f9fdf] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 0
[t=0x49dae1e9] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 0
[t=0x52b85e8d] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 0
[t=0x5e3aeb6d] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 0
[t=0x69bf7101] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 0
[t=0x7541e4a1] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 0
[t=0x80ca684d] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 0
[t=0x8c4d2173] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 0
[t=0x92927a50] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 0
[t=0x9d93f0a1] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 0
[t=0xa91a71cf] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 0
[t=0xb49d1e29] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 0
[t=0xc021e5b1] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 0
[t=0xcba48349] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 0
[t=0xd72ee151] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 0
done!
Creating MessageQ... done!
Sending message to ARM... done!
Waiting for message from ARM...
[t=0xdcef6b46] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 0
[t=0xe2b32f45] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 0
[t=0xe2b98e4d] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 53
[t=0xe2cc96db] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 53
 error.

The lad report seemed good at first sight but here it is in case I missed something. I also attach the DSP and ARM code.

Thanks in advance,

Thomas.

/* SysLink/IPC Headers: */
#include <ti/ipc/Std.h>
#include <ti/ipc/Ipc.h>
#include <ti/ipc/MessageQ.h>
#include <ti/ipc/transports/TransportRpmsg.h>

#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <stdint.h>

#include <sys/types.h>
#include <sys/stat.h>
#include <unistd.h>
#include <fcntl.h>
#include <sys/mman.h>

#include "mpmclient.h"

#include "hwi_tests_common.h"

#define DSP_OUT "./keystone2_tests.out"

#ifndef MSGQ_GET_TIMEOUT
#define MSGQ_GET_TIMEOUT 7500000
// MessageQ_FOREVER for infinite time
#endif // MSGQ_GET_TIMEOUT

/* Message payload exchanged between ARM and DSP.  The MessageQ header must
 * be the first member; the struct layout must match the DSP-side copy. */
typedef struct {
	MessageQ_MsgHeader header; // Required, usage reserved to the MessageQ module
	int16_t core_id;  // sender's core id (DSP sets DNUM; host sends 1337)
	uint8_t workload; // DSP-reported interrupt count (see "RECEIVED ACK" print)
} hwi_test_messageQ_t;

/*
 * Reset, load and run a single DSP core through the MPM client API.
 *
 * dsp_name:  MPM slave name, e.g. "dsp0".
 * prog_path: path of the DSP .out image to load.
 *
 * Returns 0 on success, a non-zero error code on the first failing step.
 * The original fell through all three steps even after a failure and could
 * return a stale error value; stop at the first failure instead — loading
 * or running a core whose reset failed is not meaningful.
 */
static inline int spawn_dsp(const char *dsp_name, const char *prog_path) {
	int err = 0;
	if (mpm_reset(dsp_name, &err) < 0) {
		fprintf(stderr, "mpm_reset error: %i\n", err);
		return err ? err : -1; /* guard: ensure a non-zero return even if err==0 */
	}
	if (mpm_load(dsp_name, prog_path, &err) < 0) {
		fprintf(stderr, "mpm_load error: %i\n",err);
		return err ? err : -1;
	}
	if (mpm_run(dsp_name, &err) < 0) {
		fprintf(stderr, "mpm_run error: %i\n", err);
		return err ? err : -1;
	}
	return 0;
}

/*
 * Launch all eight DSP cores ("dsp0".."dsp7") with the same image.
 *
 * Returns 0 on success, the first non-zero spawn_dsp() error otherwise.
 * The original ignored every return value and always returned 0, so the
 * caller's `if (spawn_dsps())` check could never fire.
 */
int spawn_dsps() {
	char name[8];
	int i;
	for (i = 0; i < 8; i++) {
		snprintf(name, sizeof name, "dsp%d", i);
		int err = spawn_dsp(name, DSP_OUT);
		if (err) {
			return err; /* stop at the first core that fails to start */
		}
	}
	return 0;
}

int main(void) {
	int8_t status = 0;
	int8_t ret = 0;
	
	// LAUNCHING DSP (HAVE TO BE DONE BEFORE INITIALIZING THE IPC MODULE!)
	if (spawn_dsps()) {
		return -1;
	}
	
	// IPC CONFIGURATION
	MessageQ_Handle msgQ;
	
	MessageQ_Params msg_params;
	
	// Configuring transport:
	Ipc_transportConfig(&TransportRpmsg_Factory);
	
	// Initializing IPC module:
	fprintf(stdout, "Initializing IPC module...");
	if (Ipc_start() < 0) {
		fprintf(stderr, "Ipc_init failed, exiting now.\n");
		return -1;
	} else {
		fprintf(stdout, " done!\n");
	}
	
	fprintf(stdout, "Creating receiving message queue...");
	/* Create the local Message Queue for receiving. */
	MessageQ_Params_init(&msg_params);
	msgQ = MessageQ_create("hwi_test_MQ_arm", &msg_params);
	if (msgQ == NULL) {
		fprintf(stderr, "Error in MessageQ_create\n");
		return -1;
	} else {
		fprintf(stdout, " done.\n");
	}
	
	// Open the DSP's Message Queue:
	fprintf(stdout, "Opening dsp queue... ");
	MessageQ_QueueId dsp_qid;
	do {
		status = MessageQ_open("hwi_test_MQ_dsp", &dsp_qid);
	} while (status == MessageQ_E_NOTFOUND || status == MessageQ_E_TIMEOUT);
	if (status < 0) {
		fprintf(stderr, " Error #%i while opening the MessageQ.\n", status);
		ret = -1;
		goto end;
	} else {
		fprintf(stdout, "done!\n");
	}
	
	MessageQ_Msg msg_proxy;
	hwi_test_messageQ_t *msg_dsp;
	
	// Wait for an ACK to come
	fprintf(stdout, "Waiting for message from DSP...");
	status = MessageQ_get(msgQ, &msg_proxy, MSGQ_GET_TIMEOUT);
	if (status < 0) {
		fprintf(stderr, "Error while getting message.\n");
		ret = -1;
		goto end;
	} else {
		fprintf(stdout, " done!\n");
	}
		
	msg_dsp = (hwi_test_messageQ_t *) msg_proxy;
	uint8_t dsp_isr_count = msg_dsp->workload;
	
	
	// Terminate application by sending a final message to the DSP:
	fprintf(stdout, "Sending message to dsp...\n");
	hwi_test_messageQ_t *msg = (hwi_test_messageQ_t *) MessageQ_alloc(0,128);
	if (msg == NULL) {
		fprintf(stderr, "MessageQ_alloc failed\n");
	}
	
	msg->core_id = 1337;
	msg->workload = 12;
	
	status = MessageQ_put(dsp_qid, (MessageQ_Msg) msg);
	if (status < 0) {
		fprintf(stderr, "MessageQ_put failed.\n");
		ret = -1;
		goto end;
	}
	fprintf(stdout, "Message sent!\n");
	
end:
	MessageQ_delete(&(msgQ));
	Ipc_stop();
	return ret;
}

#include <stdint.h>
#include <c6x.h>

#include <xdc/runtime/System.h>
#include <xdc/runtime/Error.h>
#include <ti/sysbios/hal/Hwi.h>

#include <ti/ipc/Ipc.h>
#include <ti/ipc/MessageQ.h>

#include "hwi_tests_common.h"

/* Message payload exchanged with the ARM host.  The MessageQ header must be
 * the first member; layout must match the ARM-side copy of this struct. */
typedef struct {
	MessageQ_MsgHeader header; // Required, usage reserved to the MessageQ module
	int16_t core_id;  // sending DSP core number (set to DNUM in send_msg)
	uint8_t workload; // interrupt count reported to the host (isr_count)
} hwi_test_messageQ_t;

static volatile unsigned int wait = 1;  // spin flag — NOTE(review): not referenced in this chunk; presumably cleared by an ISR elsewhere, confirm
static MessageQ_QueueId arm_qid;        // ARM host queue id, filled by MessageQ_open()
static uint8_t isr_count = 0;           // interrupt tally reported in send_msg()'s workload field

static void send_msg(void) {
	int status = -1;
	hwi_test_messageQ_t *msg = (hwi_test_messageQ_t *) MessageQ_alloc(0,
									  sizeof(hwi_test_messageQ_t));
	
	if (msg == NULL) {
		System_abort("MessageQ_alloc failed\n");
	}
	
	msg->core_id = DNUM;
	msg->workload = isr_count;
	
	status = MessageQ_put(arm_qid, (MessageQ_Msg) msg);
	if (status < 0) {
		System_abort("MessageQ_put failed\n");
	}
}

/*
 * DSP-side test entry point: open the ARM host queue, create the local
 * "hwi_test_MQ_dsp" queue, send one status message to the ARM and wait
 * for the reply.
 *
 * Returns 0 on success; aborts via System_abort on any failure.
 *
 * Fixes vs. the original: the received message is freed and the opened /
 * created queues are released (all were leaked).
 */
int hwi_test_arm_MessageQ(void) {

	if (DNUM != 1) {
		/* Only core 1 participates; park the other cores forever. */
		System_printf("Nothing to do!\n");
		while(1);
		//return 1;
	}

	int status = -1;

	MessageQ_Params msg_params;
	MessageQ_Handle msgQ;
	MessageQ_Msg msg_proxy;

	/* Open the ARM's queue, retrying until the host has created it.
	 * NOTE(review): each failed lookup triggers a NameServer round trip,
	 * which is what produces the benign "RPMessage_send: no object for
	 * endpoint: 53" trace lines — see the RPMessage.c excerpt in this
	 * thread. */
	System_printf("Opening arm queue... ");
	do {
		status = MessageQ_open("hwi_test_MQ_arm", &arm_qid);
	} while (status == MessageQ_E_NOTFOUND || status == MessageQ_E_TIMEOUT);
	if (status < 0) {
		System_printf("Unable to open the MessageQ: error #%i\n",
			      status);
		System_abort("Exiting program.\n");
	} else {
		System_printf("done!\n");
	}

	/* Create the local Message Queue for receiving. */
	MessageQ_Params_init(&msg_params);

	System_printf("Creating MessageQ...");
	msgQ = MessageQ_create("hwi_test_MQ_dsp", &msg_params);

	if (!msgQ) {
		System_abort(" error.\n");
	} else {
		System_printf(" done!\n");
	}

	System_printf("Sending message to ARM...");
	send_msg();
	System_printf(" done!\n");

	System_printf("Waiting for message from ARM...\n");
	/* Mask HWI 12 while blocked — presumably the interrupt under test,
	 * so it cannot fire during the get; TODO confirm the intent. */
	Hwi_disableInterrupt(12);
	/* NOTE(review): a finite timeout here was the cause of the original
	 * "message not received" failure (per this thread); MessageQ_FOREVER
	 * is the safe value if the ARM reply can be arbitrarily late. */
	status = MessageQ_get(msgQ, &msg_proxy, 7500000);
	if (status < 0) {
		System_abort(" error.\n");
	}
	Hwi_enableInterrupt(12);
	System_printf("Got one!\n");

	/* A message returned by MessageQ_get is ours to free (was leaked),
	 * and the queues must be released before returning. */
	MessageQ_free(msg_proxy);
	MessageQ_close(&arm_qid);
	MessageQ_delete(&msgQ);

	return 0;
}


1108.lad.txt

  • Setting the timeout for MessageQ_get to infinite resolves the issue of the message not being received, but I still get the "RPMessage_send: no object for endpoint: 53" warnings. I can also see them when running the basic image processing demo. Is this something to be expected?

    Thanks,

    Thomas.

  • Assigned this thread to factory team for response. Thank you for your patience.
  • Thank you :)
  • Hi!


    Do you have any news considering this issue?

    Thanks!


    Thomas.

  • Sorry for the late response

    Here is what I suggest. In an old K2H training we used to have an IPC lab that showed very slowly how to exchange messages between ARM and DSPs.  This is Lab number 9 in the Lab instructions that I attach here.  I suggest that you read the instructions and try to follow them.  The revision of the MCSDK and the IPC might be slightly different than what you use (or not), but I think that you will be able to figure it out.

    Then after you make the Lab example working, you can compare it to the demo code and see what is not working

    Ran

    /cfs-file/__key/communityserver-discussions-components-files/791/2553.Keystone-II_2D00_Lab-Manual.docx

  • Hi!

    Thank you for the link it's very interesting! However as I said now everything is working (the issue preventing messages to be received was just a bad timeout) but I still have the warning RPMessage_send: no object for endpoint as soon as I try to open a remote MessageQ. As I also see this warning when trying the vanilla image processing demo, I was just wondering what it meant and if it was to be expected.


    Thomas.

  • Can you try my example and see if you get the same error message?

    I will forward your question to LINUX expert and see what he can say

    Ran
  • I'll try it as soon as I can (I have other work priorities for the next few days) and keep you posted! Thank you for your help.

    Thomas.
  • Skinner,

    In addition to Ran's suggestion,
    please try out the ex44 compute example of the IPC package on K2H, which demonstrates IPC communication between the DSP and ARM cores. You can follow the readme.txt to build and run the example on Keystone-II devices.
  • Hi!

    Thanks for your answer, I'll try it as soon as I have some time to do so!

  • Skinner Sweet, did you ever get an answer about the "RPMessage_send: no object for endpoint: 53" error.  I am just integrating an ARM(Linux) to DSP(SYS/BIOS) IPC and we see the same message.

  • Hi!

    As a matter of fact, no, I didn't get the answer. The only thing I know for sure is that we managed to have everything working as intended even with those warnings, so my advice would be to simply ignore them. My guess is that it is some kind of debug message coming from the NameServer module, which complains when you first try to open a MessageQ in a while loop.
  • Hi Skinner, Chris:

    Do you have the xdc Diags_STATUS enabled in DSP .cfg file? if Diags_ENTRY and Diags_INFO are also enabled, we can see more traces from ex02_messageq:

    [      0.000] [t=0x003baa0e] ti.ipc.rpmsg.RPMessage: <-- RPMessage_init

    [      0.000] registering rpmsg-proto:rpmsg-proto service on 61 with HOST

    [      0.000] [t=0x003cc675] xdc.runtime.Main: NameMap_sendMessage: HOST 53, port=61

    [      0.000] [t=0x003d836d] ti.ipc.rpmsg.RPMessage: --> RPMessage_send: (dstProc=0, dstEndpt=53, srcEndpt=61, data=0x95171b5c, len=72

    [      0.000] [t=0x00000002:cda7e93f] ti.ipc.rpmsg.RPMessage: --> RPMessage_send: (dstProc=4, dstEndpt=53, srcEndpt=0, data=0xa0060010, len=72

    [      0.000] [t=0x00000002:cda91667] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 53

    Looking into the code ./packages/ti/ipc/rpmsg/RPMessage.c, the message is expected as the dstEndpt 53 is not in the local processor.

    ./packages/ti/ipc/rpmsg/RPMessage.c:

    Int RPMessage_send(UInt16 dstProc,

                         UInt32 dstEndpt,

                         UInt32 srcEndpt,

                         Ptr    data,

                         UInt16 len)

    {

    ….

       if (dstProc != MultiProc_self()) {

           /* Send to remote processor: */

    ...

       }

       else {

           /* Put on a Message queue on this processor: */

           /* Protect from RPMessage_delete */

           key = GateHwi_enter(module.gateH);

           obj = module.msgqObjects[dstEndpt];

           GateHwi_leave(module.gateH, key);

          if (obj == NULL) {

               Log_print1(Diags_STATUS, FXNN": no object for endpoint: %d",

                      (IArg)dstEndpt);

               status = RPMessage_E_NOENDPT;

               return status;

           }

    You should be able to turn off the message by Registry.common$.diags_STATUS = Diags.RUNTIME_OFF.

    Regards, Garrett

  • I don't think the question was how do we turn off the error message, the question is why is the source sending a message the destination is not setup to receive?  I was guessing something in Linux is sending the DSP an unexpected message, but since it is TI code on both sides, we were concerned if this is a problem or expected behavior.

  • Chris,

    It would be an issue and needs to be investigated if you continuously get the message "RPMessage_send: no object for endpoint: 0". The RPMessage_send occurs when sending a message from DSP to ARM. I would suggest you to modify the ex02_messageq example (decrease the number of messages) to identify if unexpected message received from ARM/Linux so DSP needs to respond. You can turn on IPC trace in Linux by setting IPC_DEBUG=2 environment variable before running application, and IPC trace in DSP cfg file via

    Registry.common$.diags_ENTRY = Diags.ALWAYS_ON;

    Registry.common$.diags_INFO  = Diags.ALWAYS_ON;

    Registry.common$.diags_LIFECYCLE = Diags.ALWAYS_ON;

    Registry.common$.diags_STATUS = Diags.ALWAYS_ON;

    Regards, Garrett

  • I see two of these errors before the data transfer and two after. I am guessing they correspond to the NameServer_attach and NameServer_destroy, but it is hard to match up the logs 100%.

    Here is the Linux side:

    LAD_connect: PID = 521, fifoName = /tmp/LAD/521

    LAD_putCommand: cmd = 0
    [ 197.502953] omap_rproc_kick: EnteredLAD_putCommand: status = 0

    LAD_connect: got response
    status == LAD_SUCCESS, assignedId=0

    LAD_putCommand: cmd = 2
    LAD_putCommand: status = 0
    LAD_getResponse: client = 0
    LAD_getResponse: got response

    LAD_putCommand: cmd = 24
    LAD_putCommand: status = 0
    LAD_getResponse: client = 0
    LAD_getResponse: got response
    MultiProc_getConfig: got LAD response for client 0, status=0

    LAD_putCommand: cmd = 3
    LAD_putCommand: status = 0
    LAD_getResponse: client = 0
    LAD_getResponse: got response
    NameServer_setup: got LAD response for client 0, status=0

    LAD_putCommand: cmd = 18
    LAD_putCommand: status = 0
    LAD_getResponse: client = 0
    LAD_getResponse: got response
    MessageQ_getConfig: got LAD response for client 0, status=0

    LAD_putCommand: cmd = 19
    LAD_putCommand: status = 0
    LAD_getResponse: client = 0
    LAD_getRespons 197.522347] omap_rproc_kick: Exite: got response
    MessageQ_setup: LAD response for client 0, status=0

    LAD_putCommand: cmd = 16
    LAD_putCommand: status = 0
    LAD_getResponse: client = 0
    LAD_getResponse: got response
    NameServer_attach: LAD response, status=-1

    LAD_putCommand: cmd = 16
    LAD_putCommand: status = 0
    LAD_getResponse: client = 0
    LAD_getResponse: got response
    NameServer_attach: LAD response, status=-1

    LAD_putCommand: cmd = 16
    LAD_putCommand: status = 0
    LAD_getResponse: client = 0
    LAD_getResponse: got response
    NameServer_attach: LAD response, status=-1

    LAD_putCommand: cmd = 16
    LAD_putCommand: status = 0
    LAD_getResponse: client = 0
    p_rproc_kick: EnteredLAD_getResponse: got response
    NameServer_attach: LAD response, status=0
    Connected over sock: 7
    dst vproc_id: 2, dst addr: 61
    src vproc_id: -1, src addr: 1024

    LAD_putCommand: cmd = 31
    LAD_putCommand: status = 0
    LAD_getResponse: client = 0
    LAD_getResponse: got response
    GateMP_isSetup: got LAD response for client 0, status=0

    LAD_putCommand: cmd = 31
    LAD_putCommand: status = 0
    LAD_getResponse: client = 0
    LAD_getResponse: got response
    GateMP_isSetup: got LAD response for client 0, status=0
    Information 521 (0) Initializing MessageQ.
    Information 521 (0) Initializing host/local queue.

    LAD_putCommand: cmd = 21
    LAD_putCommand: status = 0
    LAD_getResponse: client = 0
    LAD_getResponse: got response
    MessageQ_create: got LAD response for client 0, status=0
    MessageQ_create: creating endpoints for 'HOST:MsgQ:01' queueIndex 0
    197.656760] omap_rproc_kick: Exitsocket_bind_addr: bound sock: 8
    to dst vproc_id: 2, src addr: 128
    src vproc_id: -1, src addr: 128
    Information 521 (0) Initialized host/local queue.
    omap_rproc_kick: EnteredInformation 521 (0) Opening slave queue "DSP1:MsgQ:01" (ID=4).

    LAD_putCommand: cmd = 11

    LAD_putCommand: status = 0
    LAD_getResponse: client = 0
    [ 197.748892] omap_rproc_kick: ExitLAD_getResponse: got response
    NameServer_getUInt32: got LAD response for client 0
    Information 521 (0) Sending 1 MessageQ message(s).
    Information 521 (0) Allocating message for send.
    omap_rproc_kick: EnteredInformation 521 (0) Allocated message for send successfully.
    Information 521 (0) Sending message.

    [ 198.777399] omap_rproc_kick: ExitInformation 521 (0) Sent message.
    Information 521 (0) Receiving 1 MessageQ message(s).
    Information 521 (0) Attempting to receive message. Waiting for message.
    Information 521 (0) Receive status 0!
    Information 521 (0) Received message of payload size 0x0001
    omap_rproc_kick: EnteredInformation 521 (0) [0]: 0x0000
    Information 521 (0) Freeing received message.
    Information 521 (0) Unitializing MessageQ.

    LAD_putCommand: cmd = 22
    LAD_putCommand: status = 0
    LAD_getResponse: client = 0
    LAD_getResponse: got response
    MessageQ_delete: got LAD response for client 0, status=0

    [ 198.815894] omap_rproc_kick: ExitInformation 521 (0) Unitializing IPC.

    LAD_putCommand: cmd = 31
    LAD_putCommand: status = 0omap_rproc_kick: Entered
    LAD_getResponse: client = 0
    LAD_getResponse: got response
    GateMP_isSetup: got LAD response for client 0, status=0

    LAD_putCommand: cmd = 17
    LAD_putCommand: status = 0
    LAD_getResponse: client = 0

    [ 198.855853] omap_rproc_kick: ExitLAD_getResponse: got response
    NameServer_detach: LAD response, status=0

    LAD_putCommand: cmd = 20
    LAD_putCommand: status = 0
    LAD_getResponse: client = 0
    LAD_getResponse: got response
    MessageQ_destroy: got LAD response for client 0, status=0
    NameServer_destroy: entered

    LAD_putCommand: cmd = 4
    LAD_putCommand: status = 0
    LAD_getResponse: client = 0
    LAD_getResponse: got response
    NameServer_destroy: got LAD response for client 0, status=0

    LAD_putCommand: cmd = 1
    LAD_putCommand: status = 0

    And the DSP side:

    [ 46.996] [t=0x00000007:e18105d5] ti.ipc.family.vayu.VirtQueue: VirtQueue_isr received msg = 0x1
    [ 46.997]
    [ 46.997] [t=0x00000007:e18c9261] ti.ipc.rpmsg.RPMessage: callback_availBufReady: virtQueue_fromHost kicked
    [ 46.998] [t=0x00000007:e198fcae] ti.ipc.rpmsg.RPMessage: --> RPMessage_swiFxn
    [ 46.999] [t=0x00000007:e1a1aa5b] ti.ipc.family.vayu.VirtQueue: getAvailBuf vq: 0x80721b18 0 1 256 0x80721b28 0xa0005000
    [ 46.999]
    [ 47.000] [t=0x00000007:e1aff3f1] ti.ipc.rpmsg.RPMessage: RPMessage_swiFxn: Received msg from: 0x0, to: 0x35, dataLen: 72
    [ 47.001] [t=0x00000007:e1bd0701] ti.ipc.rpmsg.RPMessage: --> RPMessage_send: (dstProc=4, dstEndpt=53, srcEndpt=0, data=0xa0060010, len=72
    [ 47.002] [t=0x00000007:e1cc45b3] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 53
    [ 47.003] [t=0x00000007:e1d72772] ti.ipc.family.vayu.VirtQueue: getAvailBuf vq: 0x80721b18 1 1 256 0x80721b28 0xa0005000
    [ 47.003]
    [ 47.004] [t=0x00000007:e1e54e10] ti.ipc.family.vayu.VirtQueue: VirtQueue_kick: no kick because of VRING_AVAIL_F_NO_INTERRUPT
    [ 47.006]
    [ 47.006] [t=0x00000007:e1f3994d] ti.ipc.rpmsg.RPMessage: <-- RPMessage_swiFxn
    [ 47.135] [t=0x00000007:e7835412] ti.ipc.family.vayu.VirtQueue: VirtQueue_isr received msg = 0x1
    [ 47.136]
    [ 47.136] [t=0x00000007:e78ebf65] ti.ipc.rpmsg.RPMessage: callback_availBufReady: virtQueue_fromHost kicked
    [ 47.137] [t=0x00000007:e79b0d98] ti.ipc.rpmsg.RPMessage: --> RPMessage_swiFxn
    [ 47.138] [t=0x00000007:e7a3a11a] ti.ipc.family.vayu.VirtQueue: getAvailBuf vq: 0x80721b18 1 2 256 0x80721b28 0xa0005000
    [ 47.138]
    [ 47.139] [t=0x00000007:e7b1b9b1] ti.ipc.rpmsg.RPMessage: RPMessage_swiFxn: Received msg from: 0x80, to: 0x35, dataLen: 72
    [ 47.140] [t=0x00000007:e7bed735] ti.ipc.rpmsg.RPMessage: --> RPMessage_send: (dstProc=4, dstEndpt=53, srcEndpt=128, data=0xa0060210, len=72
    [ 47.141] [t=0x00000007:e7ce3e1f] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 53
    [ 47.142] [t=0x00000007:e7d92194] ti.ipc.family.vayu.VirtQueue: getAvailBuf vq: 0x80721b18 2 2 256 0x80721b28 0xa0005000
    [ 47.142]
    [ 47.143] [t=0x00000007:e7e73d9a] ti.ipc.family.vayu.VirtQueue: VirtQueue_kick: no kick because of VRING_AVAIL_F_NO_INTERRUPT
    [ 47.145]
    [ 47.145] [t=0x00000007:e7f58553] ti.ipc.rpmsg.RPMessage: <-- RPMessage_swiFxn
    [ 47.230] [t=0x00000007:eba1b711] ti.ipc.family.vayu.VirtQueue: VirtQueue_isr received msg = 0x1
    [ 47.231]
    [ 47.231] [t=0x00000007:ebad0e60] ti.ipc.rpmsg.RPMessage: callback_availBufReady: virtQueue_fromHost kicked
    [ 47.232] [t=0x00000007:ebb94685] ti.ipc.rpmsg.RPMessage: --> RPMessage_swiFxn
    [ 47.233] [t=0x00000007:ebc1dbff] ti.ipc.family.vayu.VirtQueue: getAvailBuf vq: 0x80721b18 2 3 256 0x80721b28 0xa0005000
    [ 47.233]
    [ 47.235] [t=0x00000007:ebd03378] ti.ipc.rpmsg.RPMessage: RPMessage_swiFxn: Received msg from: 0x400, to: 0x3d, dataLen: 484
    [ 47.236] [t=0x00000007:ebdd8b55] ti.ipc.rpmsg.RPMessage: --> RPMessage_send: (dstProc=4, dstEndpt=61, srcEndpt=1024, data=0xa0060410, len=484
    [ 47.237] [t=0x00000007:ebed2284] ti.ipc.rpmsg.RPMessage: RPMessage_send: calling callback with data len: 484, from: 1024
    [ 47.238] [t=0x00000007:ebfa27bd] ti.ipc.transports.TransportRpmsg: --> transportCallbackFxn
    [ 47.239] [t=0x00000007:ec04b7e3] ti.ipc.transports.TransportRpmsg: transportCallbackFxn: Received data: 0xa0060410 from: 1024, dataLen: 484
    [ 47.241] [t=0x00000007:ec14b020] ti.ipc.namesrv.NameServerRemoteRpmsg: NameServerRemote_processMessage: Request from procId 0.
    [ 47.242]
    [ 47.242] [t=0x00000007:ec23ee9f] ti.ipc.namesrv.NameServerRemoteRpmsg: NameServerRemote_processMessage: Replying with: MessageQ:DSP1:MsgQ:01, value: 040080
    [ 47.244]
    [ 47.244] [t=0x00000007:ec3686b5] ti.ipc.rpmsg.RPMessage: --> RPMessage_send: (dstProc=0, dstEndpt=0, srcEndpt=61, data=0xa0060410, len=484
    [ 47.245] [t=0x00000007:ec45d425] ti.ipc.family.vayu.VirtQueue: getAvailBuf vq: 0x80721ad8 1 257 256 0x80721ae8 0xa0001000
    [ 47.245]
    [ 47.246] [t=0x00000007:ec54749d] ti.ipc.family.vayu.VirtQueue: VirtQueue_kick: Sending interrupt to proc 0 with payload 0x0
    [ 47.247]
    [ 47.247] [t=0x00000007:ec62e79a] ti.ipc.rpmsg.RPMessage: <-- RPMessage_send: 0
    [ 47.248] [t=0x00000007:ec6bcb11] ti.ipc.transports.TransportRpmsg: <-- transportCallbackFxn
    [ 47.249] [t=0x00000007:ec765da4] ti.ipc.rpmsg.RPMessage: <-- RPMessage_send: 0
    [ 47.249] [t=0x00000007:ec7edf02] ti.ipc.family.vayu.VirtQueue: getAvailBuf vq: 0x80721b18 3 3 256 0x80721b28 0xa0005000
    [ 47.249]
    [ 47.250] [t=0x00000007:ec8cf1a8] ti.ipc.family.vayu.VirtQueue: VirtQueue_kick: no kick because of VRING_AVAIL_F_NO_INTERRUPT
    [ 47.251]
    [ 47.251] [t=0x00000007:ec9ae60f] ti.ipc.rpmsg.RPMessage: <-- RPMessage_swiFxn
    [ 48.300] [t=0x00000008:199c0c48] ti.ipc.family.vayu.VirtQueue: VirtQueue_isr received msg = 0x1
    [ 48.301]
    [ 48.301] [t=0x00000008:19a76d42] ti.ipc.rpmsg.RPMessage: callback_availBufReady: virtQueue_fromHost kicked
    [ 48.302] [t=0x00000008:19b3b218] ti.ipc.rpmsg.RPMessage: --> RPMessage_swiFxn
    [ 48.303] [t=0x00000008:19bc4795] ti.ipc.family.vayu.VirtQueue: getAvailBuf vq: 0x80721b18 3 4 256 0x80721b28 0xa0005000
    [ 48.303]
    [ 48.304] [t=0x00000008:19ca71ba] ti.ipc.rpmsg.RPMessage: RPMessage_swiFxn: Received msg from: 0x400, to: 0x3d, dataLen: 436
    [ 48.305] [t=0x00000008:19d7d906] ti.ipc.rpmsg.RPMessage: --> RPMessage_send: (dstProc=4, dstEndpt=61, srcEndpt=1024, data=0xa0060610, len=436
    [ 48.307] [t=0x00000008:19e7c6c0] ti.ipc.rpmsg.RPMessage: RPMessage_send: calling callback with data len: 436, from: 1024
    [ 48.308] [t=0x00000008:19f4dbe3] ti.ipc.transports.TransportRpmsg: --> transportCallbackFxn
    [ 48.309] [t=0x00000008:19ff770d] ti.ipc.transports.TransportRpmsg: transportCallbackFxn: Received data: 0xa0060610 from: 1024, dataLen: 436
    [ 48.310] [t=0x00000008:1a0f20de] ti.ipc.transports.TransportRpmsg: transportCallbackFxn:
    [ 48.311] msg->heapId: 0, msg->msgSize: 436, msg->dstId: 128, msg->msgId: 65535
    [ 48.312]
    [ 48.312] [t=0x00000008:1a235796] ti.ipc.transports.TransportRpmsg: <-- transportCallbackFxn
    [ 48.313] [t=0x00000008:1a2df9e0] ti.ipc.rpmsg.RPMessage: <-- RPMessage_send: 0
    [ 48.314] [t=0x00000008:1a36c653] ti.ipc.family.vayu.VirtQueue: getAvailBuf vq: 0x80721b18 4 4 256 0x80721b28 0xa0005000
    [ 48.314]
    [ 48.315] [t=0x00000008:1a44ea42] ti.ipc.family.vayu.VirtQueue: VirtQueue_kick: no kick because of VRING_AVAIL_F_NO_INTERRUPT
    [ 48.316]
    [ 48.316] [t=0x00000008:1a52e6ec] ti.ipc.rpmsg.RPMessage: <-- RPMessage_swiFxn
    [ 48.317] [t=0x00000008:1a5cde24] PhysicalMsgQ: MessageQ_get got a message, sending back to hostQueId 128(0x80)
    [ 48.318] [t=0x00000008:1a6a3d0f] ti.ipc.transports.TransportRpmsg: TransportRpmsg_put: sending msg from: 61, to: 128, dataLen: 436
    [ 48.320] [t=0x00000008:1a7b137f] ti.ipc.rpmsg.RPMessage: --> RPMessage_send: (dstProc=0, dstEndpt=128, srcEndpt=61, data=0x809304c8, len=436
    [ 48.321] [t=0x00000008:1a8b44e0] ti.ipc.family.vayu.VirtQueue: getAvailBuf vq: 0x80721ad8 2 258 256 0x80721ae8 0xa0001000
    [ 48.321]
    [ 48.322] [t=0x00000008:1a9b2c8e] ti.ipc.family.vayu.VirtQueue: VirtQueue_kick: Sending interrupt to proc 0 with payload 0x0
    [ 48.323]
    [ 48.323] [t=0x00000008:1aaa47c4] ti.ipc.rpmsg.RPMessage: <-- RPMessage_send: 0
    [ 48.339] [t=0x00000008:1b549913] ti.ipc.family.vayu.VirtQueue: VirtQueue_isr received msg = 0x1
    [ 48.340]
    [ 48.340] [t=0x00000008:1b600244] ti.ipc.rpmsg.RPMessage: callback_availBufReady: virtQueue_fromHost kicked
    [ 48.341] [t=0x00000008:1b6c4f27] ti.ipc.rpmsg.RPMessage: --> RPMessage_swiFxn
    [ 48.342] [t=0x00000008:1b74e978] ti.ipc.family.vayu.VirtQueue: getAvailBuf vq: 0x80721b18 4 5 256 0x80721b28 0xa0005000
    [ 48.342]
    [ 48.343] [t=0x00000008:1b8317c3] ti.ipc.rpmsg.RPMessage: RPMessage_swiFxn: Received msg from: 0x80, to: 0x35, dataLen: 72
    [ 48.344] [t=0x00000008:1b9039e5] ti.ipc.rpmsg.RPMessage: --> RPMessage_send: (dstProc=4, dstEndpt=53, srcEndpt=128, data=0xa0060810, len=72
    [ 48.346] [t=0x00000008:1b9ff1cc] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 53
    [ 48.347] [t=0x00000008:1baad6b5] ti.ipc.family.vayu.VirtQueue: getAvailBuf vq: 0x80721b18 5 5 256 0x80721b28 0xa0005000
    [ 48.347]
    [ 48.348] [t=0x00000008:1bb8efe5] ti.ipc.family.vayu.VirtQueue: VirtQueue_kick: no kick because of VRING_AVAIL_F_NO_INTERRUPT
    [ 48.349]
    [ 48.349] [t=0x00000008:1bc6f244] ti.ipc.rpmsg.RPMessage: <-- RPMessage_swiFxn
    [ 48.380] [t=0x00000008:1d1dd3d4] ti.ipc.family.vayu.VirtQueue: VirtQueue_isr received msg = 0x1
    [ 48.381]
    [ 48.381] [t=0x00000008:1d2930ba] ti.ipc.rpmsg.RPMessage: callback_availBufReady: virtQueue_fromHost kicked
    [ 48.383] [t=0x00000008:1d35b637] ti.ipc.rpmsg.RPMessage: --> RPMessage_swiFxn
    [ 48.383] [t=0x00000008:1d3e0b0a] ti.ipc.family.vayu.VirtQueue: getAvailBuf vq: 0x80721b18 5 6 256 0x80721b28 0xa0005000
    [ 48.383]
    [ 48.384] [t=0x00000008:1d4c3214] ti.ipc.rpmsg.RPMessage: RPMessage_swiFxn: Received msg from: 0x0, to: 0x35, dataLen: 72
    [ 48.385] [t=0x00000008:1d594245] ti.ipc.rpmsg.RPMessage: --> RPMessage_send: (dstProc=4, dstEndpt=53, srcEndpt=0, data=0xa0060a10, len=72
    [ 48.386] [t=0x00000008:1d688070] ti.ipc.rpmsg.RPMessage: RPMessage_send: no object for endpoint: 53
    [ 48.387] [t=0x00000008:1d736a45] ti.ipc.family.vayu.VirtQueue: getAvailBuf vq: 0x80721b18 6 6 256 0x80721b28 0xa0005000
    [ 48.387]
    [ 48.388] [t=0x00000008:1d818e76] ti.ipc.family.vayu.VirtQueue: VirtQueue_kick: no kick because of VRING_AVAIL_F_NO_INTERRUPT
    [ 48.390]
    [ 48.390] [t=0x00000008:1d8fd056] ti.ipc.rpmsg.RPMessage: <-- RPMessage_swiFxn