
IWR6843ISK: About the Azimuth heatmap data

Part Number: IWR6843ISK
Other Parts Discussed in Thread: IWR6843

Hello everyone,
Now that I manage to receive data from the IWR6843 radar, I have a specific matter to tackle, and I don't really have any idea how to go about it.
Specifically, it concerns the azimuth heatmap data. According to your documentation, it is represented by an array of 32-bit structs, each made of a 16-bit real integer and a 16-bit imaginary integer, with a width equal to the number of virtual antennas (8 in my case, from 4 receivers and 2 transmitters) and a height equal to the number of range FFT bins (which I deduced to be 256, since the range profile is numRangeBins x 2 bytes and I received 512 bytes of data).
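For reference, this is how I currently picture one sample and the whole payload in C. The names and field order here are my own interpretation of the documentation and of the parsing code further down, not something taken from TI headers:

#include <stdint.h>

/* My interpretation of one heatmap sample, based on how the JavaScript
 * below unpacks it: 16-bit real part in the first two bytes (little
 * endian), 16-bit imaginary part in the next two. */
typedef struct
{
    int16_t real;
    int16_t imag;
} azim_sample_t;

/* My case: 4 RX x 2 azimuth TX = 8 virtual antennas and 256 range bins,
 * so the payload should be 8 * 256 * 4 = 8192 bytes. */
#define NUM_VIRT_ANT   8
#define NUM_RANGE_BINS 256

typedef struct
{
    /* One row per range bin, one complex sample per virtual antenna. */
    azim_sample_t sample[NUM_RANGE_BINS][NUM_VIRT_ANT];
} azim_heatmap_payload_t;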
My question is: how do we interpret those real and imaginary numbers in order to form a heatmap like the one shown in the demo? In the demo the heatmap is a map with colours ranging from blue through green to red, but what determines that colour from the integers we have at hand?
Also, I know there is code for this in the JavaScript, but I don't quite understand the steps it goes through, i.e. what kind of mathematical calculations are needed to obtain the final data, and since I'm not that familiar with JavaScript and not that good at maths, I couldn't deduce what it does.
If anyone knows the mathematical equations behind what is happening, that would be great; it would be even better if somebody had a C version of the following code:
var processAzimuthHeatMap = function (bytevec, byteVecIdx, Params) {
    var elapsed_time = {}; // for profile this code only
    var subFrameNum = Params.currentSubFrameNumber;

    if (subFrameNum != Params.subFrameToPlot) return;

    if (Params.guiMonitor[subFrameNum].rangeAzimuthHeatMap == 1) {
        var start_time = new Date().getTime();
        // %Range complex bins at zero Doppler all virtual (azimuth) antennas
        var numBytes = Params.dataPath[subFrameNum].numTxAzimAnt *
            Params.dataPath[subFrameNum].numRxAnt *
            Params.dataPath[subFrameNum].numRangeBins * 4;
        var q = bytevec.slice(byteVecIdx, byteVecIdx + numBytes);
        // q = q(1:2:end)+q(2:2:end)*2^8;
        // q(q>32767) = q(q>32767) - 65536;
        // q = q(1:2:end)+1j*q(2:2:end);
        // ==>  q[4*idx+1]q[4*idx+0] is real, q[4*idx+3]q[4*idx+2] is imag,
        // q = reshape(q, Params.dataPath.numTxAzimAnt*Params.dataPath.numRxAnt, Params.dataPath.numRangeBins);
        // Q = fft(q, NUM_ANGLE_BINS);  % column based NUM_ANGLE_BINS-point fft, padded with zeros
        // QQ=fftshift(abs(Q),1);
        // QQ=QQ.';
        var qrows = Params.dataPath[subFrameNum].numTxAzimAnt * Params.dataPath[subFrameNum].numRxAnt, qcols = Params.dataPath[subFrameNum].numRangeBins;
        var qidx = 0;
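        // QQ will hold, for each range bin, the magnitudes of the zero-padded
        // NUM_ANGLE_BINS-point angle FFT (already reordered with fftshift);
        // it only comes into existence here, after the raw bytes have been parsed.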
        var QQ = [];
        for (var tmpc = 0; tmpc < qcols; tmpc++) {
            var real = math.zeros(NUM_ANGLE_BINS).valueOf();
            var imag = math.zeros(NUM_ANGLE_BINS).valueOf();
            for (var tmpr = 0; tmpr < qrows; tmpr++) {
                real[tmpr] = q[qidx + 1] * 256 + q[qidx];
                if (real[tmpr] > 32767) real[tmpr] = real[tmpr] - 65536;
                imag[tmpr] = q[qidx + 3] * 256 + q[qidx + 2];
                if (imag[tmpr] > 32767) imag[tmpr] = imag[tmpr] - 65536;
                qidx = qidx + 4;
            }
            fft.transform(real, imag);
            for (var ri = 0; ri < NUM_ANGLE_BINS; ri++) {
                real[ri] = Math.sqrt(real[ri] * real[ri] + imag[ri] * imag[ri]); // abs()
            }
            QQ.push(real.slice(NUM_ANGLE_BINS / 2).concat(real.slice(0, NUM_ANGLE_BINS / 2)));
        }
        // QQ=QQ(:,2:end);
        // fliplr(QQ)            
        var fliplrQQ = [];
        for (var tmpr = 0; tmpr < QQ.length; tmpr++) {
            fliplrQQ.push(QQ[tmpr].slice(1).reverse());
        }
        var start_time2 = new Date().getTime();
        if (Params.rangeAzimuthHeatMapGridInit == 0) {
            // theta = asind([-NUM_ANGLE_BINS/2+1 : NUM_ANGLE_BINS/2-1]'*(2/NUM_ANGLE_BINS));
            // range = [0:Params.dataPath.numRangeBins-1] * Params.dataPath.rangeIdxToMeters;
            var theta = math.asin(math.dotMultiply(math.range(-NUM_ANGLE_BINS / 2 + 1, NUM_ANGLE_BINS / 2 - 1, true), 2 / NUM_ANGLE_BINS)).valueOf(); // in radian
            var range = math.dotMultiply(math.range(0, Params.dataPath[subFrameNum].numRangeBins - 1, true), Params.dataPath[subFrameNum].rangeIdxToMeters).valueOf();
            range = math.subtract(range, Params.compRxChanCfg.rangeBias); //correct regardless of state (measurement or compensation)
            math.forEach(range, function (value, idx, ary) {
                ary[idx] = math.max(ary[idx], 0);
            });

            // posX = range' * sind(theta');
            // posY = range' * cosd(theta');
            var posX = MyUtil.tensor(range, math.sin(theta));
            var posY = MyUtil.tensor(range, math.cos(theta));
            Params.rangeAzimuthHeatMapGrid_xlin = math.range(-range_width, range_width, 2.0 * range_width / (Params.rangeAzimuthHeatMapGrid_points - 1), true).valueOf();
            if (Params.rangeAzimuthHeatMapGrid_xlin.length < Params.rangeAzimuthHeatMapGrid_points) Params.rangeAzimuthHeatMapGrid_xlin.push(range_width);
            Params.rangeAzimuthHeatMapGrid_ylin = math.range(0, range_depth, 1.0 * range_depth / (Params.rangeAzimuthHeatMapGrid_points - 1), true).valueOf();
            if (Params.rangeAzimuthHeatMapGrid_ylin.length < Params.rangeAzimuthHeatMapGrid_points) Params.rangeAzimuthHeatMapGrid_ylin.push(range_depth);
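            // Interpolate the magnitudes, known at the scattered polar points
            // (posX, posY), onto this regular Cartesian x/y grid so the plot
            // widget can draw a rectangular heatmap.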
            var xiyi = MyUtil.meshgrid(Params.rangeAzimuthHeatMapGrid_xlin, Params.rangeAzimuthHeatMapGrid_ylin);
            Params.rangeAzimuthHeatMapGrid = new math_griddata();
            Params.rangeAzimuthHeatMapGrid.init(math.flatten(posX), math.flatten(posY), xiyi[0], xiyi[1]);
            Params.rangeAzimuthHeatMapGridInit = 1;
        }
        var zi = Params.rangeAzimuthHeatMapGrid.griddata_from_cache(math.flatten(fliplrQQ));
        zi = MyUtil.reshape_rowbased(zi, Params.rangeAzimuthHeatMapGrid_ylin.length, Params.rangeAzimuthHeatMapGrid_xlin.length);
        var start_time3 = new Date().getTime();
       
        templateObj.$.ti_widget_plot4.data[0].x = Params.rangeAzimuthHeatMapGrid_xlin;
        templateObj.$.ti_widget_plot4.data[0].y = Params.rangeAzimuthHeatMapGrid_ylin;
        templateObj.$.ti_widget_plot4.data[0].z = zi;
        plotredraw(templateObj.$.ti_widget_plot4);
       
        elapsed_time.rangeAzimuthHeatMap = [start_time2 - start_time, start_time3 - start_time2, new Date().getTime() - start_time3];
    }
};
 
I couldn't understand where the code starts to initialise the QQ variable, because everything before that point was about capturing the data.
 
Best regards
Sebastien
  • To be even more specific, NUM_ANGLE_BINS is set to 64 earlier in the code.

    We therefore create 1D arrays of 64 real and 64 imaginary values because of that math.zeros(NUM_ANGLE_BINS).valueOf().

    Although I don't know what valueOf() does, I know zeros() returns an array.

    And there are two loops that go through the real and imaginary arrays:

    one that goes up to qrows, which is at most 8,

    and a second one that goes up to NUM_ANGLE_BINS, which is 64.

    How does fft.transform(real, imag) magically fill up the remaining 64 - 8 entries?

  • Hi, 

    I believe it is just allocating the maximum possible amount of space for the angle bins and zero-padding whatever is unused. As I'm sure you know, the number of bins that actually get filled depends on your chirp/antenna configuration, while NUM_ANGLE_BINS is a static value that does not take that configuration into account. Regarding the algorithm executed here, the majority of the code is used to convert the range-azimuth heatmap from bins to actual physical units.
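    To make the maths a little more concrete: for each range bin the demo takes the 8 virtual-antenna samples at zero Doppler, zero-pads them to NUM_ANGLE_BINS = 64, runs a 64-point FFT across the antenna dimension and takes the magnitude, so the heatmap value for range bin r and angle bin k is roughly |sum over n of x_r[n] * exp(-j*2*pi*k*n/64)|. After the fftshift, angle bin k maps to an arrival angle of about asin(2*(k - 32)/64), which is what the theta/posX/posY part of the code uses, and as far as I can tell from the posted code the colour you see is just this magnitude, interpolated onto the x/y grid and run through the plot widget's colour scale. A rough, untested C sketch of the per-range-bin step, written as a naive DFT purely for illustration (the visualizer uses an optimised FFT instead), could look like this:

    #include <math.h>
    #include <stdint.h>

    #define NUM_ANGLE_BINS 64

    /* For one range bin: antReal/antImag hold the numVirtAnt complex samples
     * parsed from the TLV; mag receives the NUM_ANGLE_BINS magnitudes in
     * natural FFT order (the fftshift/flip reordering comes afterwards).
     * Naive O(N^2) DFT, just to show the maths. */
    static void azimuthFftMagnitude(const int16_t *antReal, const int16_t *antImag,
                                    int numVirtAnt, float mag[NUM_ANGLE_BINS])
    {
        const float pi = 3.14159265358979f;

        for (int k = 0; k < NUM_ANGLE_BINS; k++)
        {
            float sumRe = 0.0f;
            float sumIm = 0.0f;

            /* Zero padding: samples beyond numVirtAnt are zero,
             * so the sum simply stops at numVirtAnt. */
            for (int n = 0; n < numVirtAnt; n++)
            {
                float phase = -2.0f * pi * (float)k * (float)n / NUM_ANGLE_BINS;
                float c = cosf(phase);
                float s = sinf(phase);
                sumRe += antReal[n] * c - antImag[n] * s;
                sumIm += antReal[n] * s + antImag[n] * c;
            }

            /* abs(): magnitude of the angle-FFT output for this angle bin */
            mag[k] = sqrtf(sumRe * sumRe + sumIm * sumIm);
        }
    }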

    Best Regards,
    Alec

  • Hi,

    Thanks for your reply, now the code makes a little more sense.

    But now I have another question about the process of converting the incoming data into an azimuth heatmap.

    Why is there so much flipping around?

    More specifically, here:

                QQ.push(real.slice(NUM_ANGLE_BINS / 2).concat(real.slice(0, NUM_ANGLE_BINS / 2)));
            }
            // QQ=QQ(:,2:end);
            // fliplr(QQ)
            var fliplrQQ = [];
            for (var tmpr = 0; tmpr < QQ.length; tmpr++) {
                fliplrQQ.push(QQ[tmpr].slice(1).reverse());
            }
    But there also seems to be a bit-reversal step in the fft.transform() function.
    Sorry for asking so much about this. I genuinely don't have many ideas about what it does and why it does it.
    Best regards,
    Sebastien
  • Hi,

    Similarly, I believe it is just adjusting the shapes and layouts of the arrays to organize the values in the way that is most readable in a graph. If you have more questions on this, you can add some debug prints to the visualizer code to see exactly what the values are at various points and use that to follow along with what is going on.
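    To spell out my reading of the three reorderings (take this as interpretation rather than an official description): the fftshift (the slice/concat push into QQ) puts the negative-angle bins in front of the positive-angle ones so the angle axis runs monotonically from one side to the other; QQ(:,2:end) (the .slice(1)) drops the one leftover bin at the edge, since theta is only computed for -NUM_ANGLE_BINS/2+1 ... NUM_ANGLE_BINS/2-1, i.e. 63 of the 64 bins; and fliplr (the .reverse()) mirrors the axis so the plotted left/right matches the sensor's convention. The bit reversal inside fft.transform() is unrelated to any of this; it is just an internal step of the radix-2 FFT algorithm. A small standalone C sketch of that reordering (names are mine, not from the demo) might look like:

    #define NUM_ANGLE_BINS 64

    /* Reorder one row of angle-FFT magnitudes the way the visualizer does:
     * fftshift, drop the first (leftover) bin, then mirror left/right.
     * fftMag has NUM_ANGLE_BINS values straight out of the FFT,
     * plotMag receives NUM_ANGLE_BINS - 1 values ready for plotting. */
    static void reorderAngleBins(const float fftMag[NUM_ANGLE_BINS],
                                 float plotMag[NUM_ANGLE_BINS - 1])
    {
        float shifted[NUM_ANGLE_BINS];

        /* fftshift: move the second half (negative angles) to the front */
        for (int k = 0; k < NUM_ANGLE_BINS; k++)
            shifted[k] = fftMag[(k + NUM_ANGLE_BINS / 2) % NUM_ANGLE_BINS];

        /* drop element 0, then reverse the remaining 63 values (fliplr) */
        for (int k = 0; k < NUM_ANGLE_BINS - 1; k++)
            plotMag[k] = shifted[NUM_ANGLE_BINS - 1 - k];
    }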

    Best Regards,
    Alec