Change in osmo-ttcn3-hacks[master]: bts: Account for elapsed time in paging timer

Harald Welte gerrit-no-reply at lists.osmocom.org
Wed Sep 26 19:53:45 UTC 2018


Harald Welte has submitted this change and it was merged. ( https://gerrit.osmocom.org/11103 )

Change subject: bts: Account for elapsed time in paging timer
......................................................................

bts: Account for elapsed time in paging timer

The previous implementation always waited the full "interval" time before
sending the next paging command, and did not finish the test until all
expected paging commands had been sent. As a result, every iteration
accumulated some delay, adding up to anywhere between 2 and 12 seconds
depending on machine load.
Consequently, the number of paging commands expected to be sent within
20 seconds was actually sent over 22-32 seconds, changing the load on
osmo-bts and thereby the test results.

The lower thresholds need to be adapted, since the paging commands are
now sent within at most exactly 20 seconds and the load handled by
osmo-bts is therefore higher.

Fixes: OS#3025
Change-Id: I9651136d6810420e0a4d887bfb11c913a24f0457
---
M bts/BTS_Tests.ttcn
1 file changed, 44 insertions(+), 21 deletions(-)

Approvals:
  Jenkins Builder: Verified
  Harald Welte: Looks good to me, approved



diff --git a/bts/BTS_Tests.ttcn b/bts/BTS_Tests.ttcn
index aa402d1..809a403 100644
--- a/bts/BTS_Tests.ttcn
+++ b/bts/BTS_Tests.ttcn
@@ -1687,26 +1687,16 @@
 	}
 	var float pch_blocks_per_sec := max_pch_imsi_per_sec * cfg.load_factor;
 	var float interval := 1.0 / pch_blocks_per_sec;
-	log("pch_blocks_per_sec=", pch_blocks_per_sec, " interval=", interval);
+	var float time_total := 20.0;
+	var integer pkt_total := float2int(time_total * pch_blocks_per_sec);
+	log("pch_blocks_total=", pkt_total," pch_blocks_per_sec=", pch_blocks_per_sec, " interval=", interval);
 
-	for (var integer i := 0; i < float2int(20.0/interval); i := i+1) {
-		/* build mobile Identity */
-		var MobileL3_CommonIE_Types.MobileIdentityLV mi;
-		if (cfg.use_tmsi) {
-			mi := valueof(ts_MI_TMSI_LV(f_rnd_octstring(4)));
-		} else {
-			mi := valueof(ts_MI_IMSI_LV(f_gen_imsi(i)));
-		}
-		var octetstring mi_enc_lv := enc_MobileIdentityLV(mi);
-		var octetstring mi_enc := substr(mi_enc_lv, 1, lengthof(mi_enc_lv)-1);
+	timer T_total := 300.0; /* big value (far bigger than time_total), used to count elapsed time */
+	T_total.start;
 
-		/* Send RSL PAGING COMMAND */
-		RSL_CCHAN.send(ts_RSL_UD(ts_RSL_PAGING_CMD(mi_enc, i mod 4)));
-		st.num_paging_sent := st.num_paging_sent + 1;
-
-		/* Wait for interval to next PAGING COMMAND */
-		timer T_itv := interval;
-		T_itv.start;
+	timer T_itv := 0.0;
+	T_itv.start;
+	while (st.num_paging_sent < pkt_total) {
 		alt {
 		/* check for presence of CCCH LOAD IND (paging load) */
 		[cfg.exp_overload] RSL_CCHAN.receive(tr_RSL_UD(tr_RSL_PAGING_LOAD_IND(0))) {
@@ -1725,7 +1715,40 @@
 		/* check if paging requests arrive on Um side */
 		[] as_l1_count_paging(st.num_paging_rcv_msgs, st.num_paging_rcv_ids, cfg);
 		[] L1CTL.receive { repeat; }
-		[] T_itv.timeout { }
+		[] T_itv.timeout {
+			/* Send paging cmds based on elapsed time */
+			var integer new_sent := f_min(pkt_total, float2int(T_total.read * pch_blocks_per_sec) + 1);
+			while (st.num_paging_sent < new_sent) {
+				/* build mobile Identity */
+				var MobileL3_CommonIE_Types.MobileIdentityLV mi;
+				if (cfg.use_tmsi) {
+					mi := valueof(ts_MI_TMSI_LV(f_rnd_octstring(4)));
+				} else {
+					mi := valueof(ts_MI_IMSI_LV(f_gen_imsi(st.num_paging_sent)));
+				}
+				var octetstring mi_enc_lv := enc_MobileIdentityLV(mi);
+				var octetstring mi_enc := substr(mi_enc_lv, 1, lengthof(mi_enc_lv)-1);
+
+				/* Send RSL PAGING COMMAND */
+				RSL_CCHAN.send(ts_RSL_UD(ts_RSL_PAGING_CMD(mi_enc, st.num_paging_sent mod 4)));
+
+				st.num_paging_sent := st.num_paging_sent + 1;
+			}
+			if (st.num_paging_sent < pkt_total) {
+				/* Wait for interval to next PAGING COMMAND */
+				var float time_now := T_total.read;
+				var float next_sched := int2float(st.num_paging_sent)*interval;
+				if (next_sched > time_now) {
+					T_itv.start(next_sched - time_now);
+				} else {
+					T_itv.start(0.0);
+				}
+			} else {
+				/* We are done, no need to keep counting */
+				T_total.stop;
+			}
+		}
+		[] T_total.timeout { }
 		[] as_rsl_res_ind();
 		}
 	}
@@ -1816,7 +1839,7 @@
 	var PagingTestState st := f_TC_paging(cfg);
 	/* We expect about 80-85% to pass, given that we can fill the paging buffer of 200
 	 * slots and will fully drain that buffer before returning */
-	var template integer tpl := (st.num_paging_sent*80/100 .. st.num_paging_sent *85/100);
+	var template integer tpl := (st.num_paging_sent*78/100 .. st.num_paging_sent *85/100);
 	if (not match(st.num_paging_rcv_ids, tpl)) {
 		setverdict(fail, "Expected ", tpl, " pagings but have ", st.num_paging_rcv_ids);
 	} else {
@@ -1842,7 +1865,7 @@
 	var PagingTestState st := f_TC_paging(cfg);
 	/* We expect about 70% to pass, given that we can fill the paging buffer of 200
 	 * slots and will fully drain that buffer before returning */
-	var template integer tpl := (st.num_paging_sent*66/100 .. st.num_paging_sent *72/100);
+	var template integer tpl := (st.num_paging_sent*64/100 .. st.num_paging_sent *72/100);
 	if (not match(st.num_paging_rcv_ids, tpl)) {
 		setverdict(fail, "Expected ", tpl, " pagings but have ", st.num_paging_rcv_ids);
 	} else {

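For illustration only (not part of the patch): the pacing technique in the
hunk above derives, from the elapsed time, how many PAGING COMMANDs should
already have been sent, catches up in a burst, and then sleeps only until
the next scheduled send. A minimal sketch of the same idea in Python
(names such as paced_send and send_fn are made up for this example;
send_fn stands in for the RSL PAGING COMMAND transmission):

import time

def paced_send(total_pkts, rate_per_sec, send_fn):
    """Send total_pkts messages at rate_per_sec without accumulating drift."""
    start = time.monotonic()
    sent = 0
    while sent < total_pkts:
        elapsed = time.monotonic() - start
        # How many messages should have gone out by now; mirrors
        # f_min(pkt_total, float2int(T_total.read * pch_blocks_per_sec) + 1).
        target = min(total_pkts, int(elapsed * rate_per_sec) + 1)
        while sent < target:
            send_fn(sent)          # one paging command per call
            sent += 1
        if sent < total_pkts:
            # Sleep only until the next scheduled send, not a fixed interval,
            # so any delay in this iteration is absorbed by the next one.
            next_sched = start + sent / rate_per_sec
            time.sleep(max(0.0, next_sched - time.monotonic()))

With this pacing the run finishes in roughly pkt_total / rate_per_sec seconds
regardless of per-iteration load, which is why the lower bounds of the pass
thresholds above were loosened from 80% to 78% and from 66% to 64%.
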
-- 
To view, visit https://gerrit.osmocom.org/11103
To unsubscribe, or for help writing mail filters, visit https://gerrit.osmocom.org/settings

Gerrit-Project: osmo-ttcn3-hacks
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: I9651136d6810420e0a4d887bfb11c913a24f0457
Gerrit-Change-Number: 11103
Gerrit-PatchSet: 1
Gerrit-Owner: Pau Espin Pedrol <pespin at sysmocom.de>
Gerrit-Reviewer: Harald Welte <laforge at gnumonks.org>
Gerrit-Reviewer: Jenkins Builder (1000002)