Fix for global buffer overflow in scale factor processing

The number of envelopes is equal to 2^n, where n is obtained by
reading 2 bits from the bit stream, so the maximum number of
envelopes is 8. The time slot array table is indexed using the
number of envelopes; the resulting index ranges from a minimum of
0 to a maximum of 6, and the table is resized accordingly.
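
For illustration, a minimal standalone sketch of the out-of-bounds
read this change prevents. The helper name and the way the index is
derived here are assumptions for the example only, not the decoder's
exact expression in ixheaacd_env_extr.c:

/* Pre-fix table had only 4 entries. */
static const int ld_env_table_time_slot_old[4] = {8, 5, 0, 0};
/* Post-fix table holds 7 entries, covering indices 0..6. */
static const int ld_env_table_time_slot_new[7] = {8, 5, 0, 0, 0, 0, 0};

int time_slot_for_index(int env_index) {
  /* env_index is assumed to lie in 0..6 per the commit text.
   * Reading ld_env_table_time_slot_old[env_index] would overflow
   * for env_index >= 4; the widened table stays in bounds. */
  return ld_env_table_time_slot_new[env_index];
}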

Bug: 112765917
Test: re-run poc
Change-Id: I42a44fc2376536d5119a8290a14726c9c5badd19
(cherry picked from commit 61a09f1063237a23127874bf27d5a72f1d03d3fe)
diff --git a/decoder/ixheaacd_env_extr.c b/decoder/ixheaacd_env_extr.c
index 67f3499..4224d9f 100644
--- a/decoder/ixheaacd_env_extr.c
+++ b/decoder/ixheaacd_env_extr.c
@@ -1328,8 +1328,7 @@
   WORD16 time_border[MAX_ENVELOPES + 1];
   WORD16 time_border_noise[2 + 1];
   WORD16 f[MAX_ENVELOPES + 1];
-  int rel_bord_lead[3];
-  int rel_bord_trail[3] = {0};
+  int rel_bord_lead[7] ={0};
 
   ia_frame_info_struct *v_frame_info = &h_frame_data->str_frame_info_details;
 
@@ -1382,11 +1381,6 @@
         time_border[env] = abs_bord_lead;
         for (k = 0; k <= env - 1; k++) time_border[env] += rel_bord_lead[k];
       }
-      for (env = num_rel_lead + 1; env < bs_num_env; env++) {
-        time_border[env] = abs_bord_trail;
-        for (k = 0; k <= bs_num_env - env - 1; k++)
-          time_border[env] -= rel_bord_trail[k];
-      }
       break;
 
     case LD_TRAN:
diff --git a/decoder/ixheaacd_sbr_const.h b/decoder/ixheaacd_sbr_const.h
index 10b0959..c75fa0c 100644
--- a/decoder/ixheaacd_sbr_const.h
+++ b/decoder/ixheaacd_sbr_const.h
@@ -223,7 +223,7 @@
     {2, 12, -1, 1}, {2, 13, -1, 1}, {2, 14, -1, 1},
 };
 
-static const int ixheaacd_ld_env_table_time_slot[4] = {8, 5, 0, 0};
+static const int ixheaacd_ld_env_table_time_slot[7] = {8, 5, 0, 0, 0, 0, 0};
 
 #define SBR_CLA_BITS 2
 #define SBR_ABS_BITS 2