Updated UDP packet fragmentation so that the number of samples no longer has to be a multiple of a fixed UDP packet size.

This commit is contained in:
2025-07-15 22:40:15 -05:00
parent 1ee9b4db20
commit 707e9f82a4
19 changed files with 12405 additions and 12283 deletions

View File

@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="ASCII"?>
<sdkproject:SdkProject xmi:version="2.0" xmlns:xmi="http://www.omg.org/XMI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:sdkproject="http://www.xilinx.com/sdkproject" name="radar" location="/home/bkiedinger/projects/castelion/radar_alinx_kintex/vitis/radar" platform="/home/bkiedinger/projects/castelion/radar_alinx_kintex/vitis/top/export/top/top.xpfm" platformUID="xilinx:::0.0(custom)" systemProject="radar_system" sysConfig="top" runtime="C/C++" cpu="freertos10_xilinx_microblaze_0" cpuInstance="microblaze_0" os="freertos10_xilinx" mssSignature="b81ac1744f29e93881cfaa8e8b019a98">
<sdkproject:SdkProject xmi:version="2.0" xmlns:xmi="http://www.omg.org/XMI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:sdkproject="http://www.xilinx.com/sdkproject" name="radar" location="/home/bkiedinger/projects/castelion/radar_alinx_kintex/vitis/radar" platform="/home/bkiedinger/projects/castelion/radar_alinx_kintex/vitis/top/export/top/top.xpfm" platformUID="xilinx:::0.0(custom)" systemProject="radar_system" sysConfig="top" runtime="C/C++" cpu="freertos10_xilinx_microblaze_0" cpuInstance="microblaze_0" os="freertos10_xilinx" mssSignature="31c4a066f121f9dfdf4b2a6e46d178c9">
<configuration name="Debug" id="xilinx.gnu.mb.exe.debug.245787499">
<configBuildOptions xsi:type="sdkproject:SdkOptions"/>
<lastBuildOptions xsi:type="sdkproject:SdkOptions"/>

View File

@@ -525,23 +525,23 @@ void setup_data_converter() {
#ifndef IBERT_TESTING
// Update FPGA TX Transceiver settings
// set_lane_cal(0, 0, 0, 11);
// set_lane_cal(1, 10, 5, 11);
// set_lane_cal(2, 5, 0, 11);
// set_lane_cal(3, 0, 0, 11);
// set_lane_cal(4, 0, 0, 11);
// set_lane_cal(5, 0, 0, 11);
// set_lane_cal(6, 12, 0, 11);
// set_lane_cal(7, 0, 0, 11);
set_lane_cal(0, 0, 0, 11);
set_lane_cal(1, 10, 5, 11);
set_lane_cal(2, 5, 0, 11);
set_lane_cal(3, 0, 0, 11);
set_lane_cal(4, 0, 0, 11);
set_lane_cal(5, 0, 0, 11);
set_lane_cal(6, 12, 0, 11);
set_lane_cal(7, 0, 0, 11);
set_lane_cal(0, 9, 0, 7);
set_lane_cal(1, 9, 0, 7);
set_lane_cal(2, 9, 0, 7);
set_lane_cal(3, 9, 0, 7);
set_lane_cal(4, 9, 0, 7);
set_lane_cal(5, 9, 0, 7);
set_lane_cal(6, 9, 0, 7);
set_lane_cal(7, 9, 0, 7);
// set_lane_cal(0, 9, 0, 7);
// set_lane_cal(1, 9, 0, 7);
// set_lane_cal(2, 9, 0, 7);
// set_lane_cal(3, 9, 0, 7);
// set_lane_cal(4, 9, 0, 7);
// set_lane_cal(5, 9, 0, 7);
// set_lane_cal(6, 9, 0, 7);
// set_lane_cal(7, 9, 0, 7);
vTaskDelay(100);
int subclass = jtx_param[uc][0].jesd_subclass;

View File

@@ -103,8 +103,12 @@ void status_task( void *pvParameters ) {
}
static void pps_irq_handler(u32 context) {
xil_printf("pps irq %lu\r\n", utc_time);
Xil_Out32(TIMING_ENGINE_ADDR + 0x14, utc_time + 1);
uint32_t current_seconds = Xil_In32(TIMING_ENGINE_ADDR + 0x14);
uint32_t new_time = utc_time + 1;
if (current_seconds != new_time) {
Xil_Out32(TIMING_ENGINE_ADDR + 0x14, new_time);
xil_printf("pps irq %lu, %lu\r\n", current_seconds, new_time);
}
}
void main_task( void *pvParameters ) {

View File

@@ -97,9 +97,9 @@ void novatel_task()
utc_time = utc_sec;
xil_printf("gps_week %lu\r\n", msg->hdr.week);
xil_printf("gps_ms %lu\r\n", msg->hdr.ms);
xil_printf("utc_time %lu\r\n", utc_sec);
// xil_printf("gps_week %lu\r\n", msg->hdr.week);
// xil_printf("gps_ms %lu\r\n", msg->hdr.ms);
// xil_printf("utc_time %lu\r\n", utc_sec);
}
break;