I have written a test application that opens a socket connection to a server, sends the server a static packet (via the socket send() function), has the server echo the packet back, and then waits for the echo on the client end (via the socket recv() function). My receive timeout is set to 10 seconds. This code works fine (no timeouts observed, the echoed data is received as expected) as long as I insert a [short] delay between the calls to send() and recv(). If no delay is inserted, the call to recv() times out every time. Is there any known issue related to this symptom? Here is a "representation" of the code:
int32_t GetConnectedSocket (void)
{
    long result;
    sockaddr tSocketAddr;
    unsigned short port = htons (80);
    unsigned long timeoutInMs = 10000U;

    int32_t hSocket = socket (AF_INET, SOCK_STREAM, IPPROTO_TCP);

    tSocketAddr.sa_family = AF_INET;
    memcpy (&tSocketAddr.sa_data[0], &port, sizeof (unsigned short));

    unsigned long hostIP = GetHostIP_NetworkByteOrder ("someurl.com");
    memcpy (&tSocketAddr.sa_data[2], &hostIP, 4);

    do
    {
        result = connect (hSocket, &tSocketAddr, sizeof (tSocketAddr));
    } while (result != ESUCCESS);

    //set recv timeout
    (void)setsockopt (hSocket, SOL_SOCKET, SOCKOPT_RECV_TIMEOUT, &timeoutInMs, sizeof (timeoutInMs));

    return hSocket;
}

/// ... called after network connection is established
void TestThread (void)
{
    int32_t hSocket = -1;
    uint8_t txBuffer[] = { ... some data ... };
    uint8_t rxBuffer[RX_BUFFER_SIZE];

    while (TRUE)
    {
        hSocket = GetConnectedSocket ();

        if (send (hSocket, txBuffer, sizeof (txBuffer), 0) <= 0)
        {
            //error case...
        }

        //without this delay, the following call to recv() times out every time
        //with this delay, the following call to recv() works every time
        HAL_DelayMs (1U);

        if (recv (hSocket, rxBuffer, RX_BUFFER_SIZE, 0) <= 0)
        {
            //error case...
        }

        if (closesocket (hSocket) < 0)
        {
            //error case...
        }
    }
}
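The server end is nothing more than an echo loop. A simplified sketch of its behavior (written against a POSIX-style sockets API purely for illustration; the actual server code differs in its details):

#include <stdint.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/socket.h>

/* Simplified sketch of the server-side echo behavior (not the real server code).
 * Whatever arrives on the connected socket is sent straight back. */
void EchoClient (int clientFd)
{
    uint8_t buffer[1024];
    ssize_t n;

    while ((n = recv (clientFd, buffer, sizeof (buffer), 0)) > 0)
    {
        (void)send (clientFd, buffer, (size_t)n, 0);
    }

    close (clientFd);
}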
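For reference, my stack's socket API is vendor-specific (SOCKOPT_RECV_TIMEOUT, ESUCCESS and closesocket above are not the standard names, and the timeout is given in milliseconds). On a standard BSD/POSIX stack I believe the equivalent connect-and-set-timeout sequence would look roughly like the sketch below; the address "192.0.2.1" is just a placeholder, and SO_RCVTIMEO takes a struct timeval rather than a millisecond count:

#include <string.h>
#include <unistd.h>
#include <arpa/inet.h>
#include <sys/socket.h>

/* Minimal sketch assuming a POSIX sockets API and a known IPv4 server address. */
int GetConnectedSocketPosix (void)
{
    struct sockaddr_in addr;
    struct timeval timeout;

    int fd = socket (AF_INET, SOCK_STREAM, IPPROTO_TCP);
    if (fd < 0)
    {
        return -1;
    }

    memset (&addr, 0, sizeof (addr));
    addr.sin_family = AF_INET;
    addr.sin_port   = htons (80);
    inet_pton (AF_INET, "192.0.2.1", &addr.sin_addr);   /* placeholder server IP */

    if (connect (fd, (struct sockaddr *)&addr, sizeof (addr)) != 0)
    {
        close (fd);
        return -1;
    }

    /* 10-second receive timeout, expressed as a struct timeval here. */
    timeout.tv_sec  = 10;
    timeout.tv_usec = 0;
    (void)setsockopt (fd, SOL_SOCKET, SO_RCVTIMEO, &timeout, sizeof (timeout));

    return fd;
}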