/*
 * Copyright (c) 2015-2016, Intel Corporation
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  * Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *  * Neither the name of Intel Corporation nor the names of its contributors
 *    may be used to endorse or promote products derived from this software
 *    without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/** \file
 * \brief LimEx NFA: runtime exception processing code.
 *
 * X-macro generic impl, included into the various LimEx model implementations.
 */

#if !defined(SIZE) || !defined(STATE_T) || !defined(LOAD_FROM_ENG)
# error Must define SIZE, STATE_T, LOAD_FROM_ENG in includer.
#endif
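
/*
 * A minimal sketch of how an includer provides these macros before pulling in
 * this header; the values below are illustrative only, the real definitions
 * come from the per-model implementation files:
 *
 *     #define SIZE          128
 *     #define STATE_T       m128
 *     #define LOAD_FROM_ENG load_m128
 *     #include "limex_exceptional.h"
 */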

#include "config.h"
#include "limex_ring.h"
#include "util/join.h"
#include "util/uniform_ops.h"

#define PE_FN                   JOIN(processExceptional, SIZE)
#define RUN_EXCEPTION_FN        JOIN(runException, SIZE)
#define ZERO_STATE              JOIN(zero_, STATE_T)
#define AND_STATE               JOIN(and_, STATE_T)
#define EQ_STATE(a, b)          (!JOIN(noteq_, STATE_T)((a), (b)))
#define OR_STATE                JOIN(or_, STATE_T)
#define TESTBIT_STATE           JOIN(testbit_, STATE_T)
#define EXCEPTION_T             JOIN(struct NFAException, SIZE)
#define CONTEXT_T               JOIN(NFAContext, SIZE)
#define IMPL_NFA_T              JOIN(LimExNFA, SIZE)
#define GET_NFA_REPEAT_INFO_FN  JOIN(getNfaRepeatInfo, SIZE)

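/*
 * Depending on the model, the exception state ("estate") and the full NFA
 * state are passed either by value on the stack or by pointer; the macros
 * below hide that difference from the code that follows.
 */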
#ifdef ESTATE_ON_STACK
#define ESTATE_ARG STATE_T estate
#else
#define ESTATE_ARG const STATE_T *estatep
#define estate (*estatep)
#endif

#ifdef STATE_ON_STACK
#define STATE_ARG_NAME s
#define STATE_ARG STATE_T STATE_ARG_NAME
#define STATE_ARG_P &s
#else
#define STATE_ARG_NAME sp
#define STATE_ARG const STATE_T *STATE_ARG_NAME
#define STATE_ARG_P sp
#endif

#ifndef STATE_ON_STACK
#define BIG_MODEL
#endif
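
/*
 * For the larger models (BIG_MODEL), the state is not passed on the stack and
 * the per-call successor accumulator (local_succ) lives in the context
 * structure rather than in a local variable.
 */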

#ifdef ARCH_64_BIT
#define CHUNK_T u64a
#define FIND_AND_CLEAR_FN findAndClearLSB_64
#define POPCOUNT_FN popcount64
#define RANK_IN_MASK_FN rank_in_mask64
#else
#define CHUNK_T u32
#define FIND_AND_CLEAR_FN findAndClearLSB_32
#define POPCOUNT_FN popcount32
#define RANK_IN_MASK_FN rank_in_mask32
#endif
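
/*
 * The exception state is scanned in GPR-sized chunks: find-and-clear-LSB
 * walks the set bits of each chunk, while popcount and rank-in-mask turn a
 * bit position into an index into the NFA's exception array.
 */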

/** \brief Process a single exception. Returns 1 if exception handling should
 * continue, 0 if an accept callback has instructed us to halt. */
static really_inline
int RUN_EXCEPTION_FN(const EXCEPTION_T *e, STATE_ARG,
                     STATE_T *succ,
#ifndef BIG_MODEL
                     STATE_T *local_succ,
#endif
                     const struct IMPL_NFA_T *limex,
                     u64a offset,
                     struct CONTEXT_T *ctx,
                     struct proto_cache *new_cache,
                     enum CacheResult *cacheable,
                     char in_rev,
                     const char flags) {
    assert(e);

#ifdef DEBUG_EXCEPTIONS
    printf("EXCEPTION e=%p reports=%u trigger=", e, e->reports);
    if (e->trigger == LIMEX_TRIGGER_NONE) {
        printf("none");
    } else if (e->trigger == LIMEX_TRIGGER_POS) {
        printf("pos");
    } else if (e->trigger == LIMEX_TRIGGER_TUG) {
        printf("tug");
    } else {
        printf("unknown!");
    }
    printf("\n");
#endif

    // Trigger exceptions, used in bounded repeats.
    assert(!in_rev || e->trigger == LIMEX_TRIGGER_NONE);
    if (!in_rev && e->trigger != LIMEX_TRIGGER_NONE) {
        assert(e->repeatOffset != MO_INVALID_IDX);
        const struct NFARepeatInfo *info =
            (const struct NFARepeatInfo *)((const char *)limex +
                                           e->repeatOffset);
        const struct RepeatInfo *repeat = getRepeatInfo(info);
        assert(ctx->repeat_ctrl && ctx->repeat_state);
        union RepeatControl *repeat_ctrl = ctx->repeat_ctrl + info->ctrlIndex;
        char *repeat_state = ctx->repeat_state + info->stateOffset;

        if (e->trigger == LIMEX_TRIGGER_POS) {
            char cyclic_on = TESTBIT_STATE(*STATE_ARG_P, info->cyclicState);
            processPosTrigger(repeat, repeat_ctrl, repeat_state, offset,
                              cyclic_on);
            *cacheable = DO_NOT_CACHE_RESULT_AND_FLUSH_BR_ENTRIES;
        } else {
            assert(e->trigger == LIMEX_TRIGGER_TUG);
            enum TriggerResult rv =
                processTugTrigger(repeat, repeat_ctrl, repeat_state, offset);
            if (rv == TRIGGER_FAIL) {
                *cacheable = DO_NOT_CACHE_RESULT_AND_FLUSH_BR_ENTRIES;
                DEBUG_PRINTF("tug found no valid matches in repeat state\n");
                return 1; // continue
            } else if (rv == TRIGGER_STALE) {
                *cacheable = DO_NOT_CACHE_RESULT_AND_FLUSH_BR_ENTRIES;
                DEBUG_PRINTF("stale history, squashing cyclic state\n");
                assert(e->hasSquash == LIMEX_SQUASH_TUG);
                *succ = AND_STATE(*succ, LOAD_FROM_ENG(&e->squash));
                return 1; // continue
            } else if (rv == TRIGGER_SUCCESS_CACHE) {
                new_cache->br = 1;
            } else {
                assert(rv == TRIGGER_SUCCESS);
                *cacheable = DO_NOT_CACHE_RESULT_AND_FLUSH_BR_ENTRIES;
            }
        }
    }

    // Some exceptions fire accepts.
    if (e->reports != MO_INVALID_IDX) {
        if (flags & CALLBACK_OUTPUT) {
            const ReportID *reports =
                (const ReportID *)((const char *)limex + e->reports);
            if (unlikely(limexRunReports(reports, ctx->callback,
                                         ctx->context, offset)
                         == MO_HALT_MATCHING)) {
                DEBUG_PRINTF("callback instructed us to stop\n");
                return 0; // halt
            }
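            // Remember the report list for the exception cache: the result of
            // this pass can only be cached if every accepting exception fired
            // the same report list.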
            if (*cacheable == CACHE_RESULT) {
                if (!new_cache->reports || new_cache->reports == reports) {
                    new_cache->reports = reports;
                } else {
                    *cacheable = DO_NOT_CACHE_RESULT;
                }
            }
        } else {
            if ((flags & FIRST_BYTE) && *cacheable == CACHE_RESULT) {
                *cacheable = DO_NOT_CACHE_RESULT;
            } /* otherwise we can cache as we never care about accepts */
        }
    }

    // Most exceptions have a set of successors to switch on. `local_succ' is
    // ORed into `succ' at the end of the caller's loop.
#ifndef BIG_MODEL
    *local_succ = OR_STATE(*local_succ, LOAD_FROM_ENG(&e->successors));
#else
    ctx->local_succ = OR_STATE(ctx->local_succ, LOAD_FROM_ENG(&e->successors));
#endif

    // Some exceptions squash states behind them. Note that we squash states in
    // 'succ', not local_succ.
    if (e->hasSquash == LIMEX_SQUASH_CYCLIC
        || e->hasSquash == LIMEX_SQUASH_REPORT) {
        *succ = AND_STATE(*succ, LOAD_FROM_ENG(&e->squash));
        if (*cacheable == CACHE_RESULT) {
            *cacheable = DO_NOT_CACHE_RESULT;
        }
    }

    return 1; // continue
}

#ifndef RUN_EXCEPTION_FN_ONLY

/** \brief Process all of the exceptions associated with the states in the \a
 * estate. */
static really_inline
int PE_FN(STATE_ARG, ESTATE_ARG, u32 diffmask, STATE_T *succ,
          const struct IMPL_NFA_T *limex, const EXCEPTION_T *exceptions,
          u64a offset, struct CONTEXT_T *ctx, char in_rev, char flags) {
    assert(diffmask > 0); // guaranteed by caller macro

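    // Fast path: if the exception state is identical to the one we processed
    // last time, reuse the cached successor set (and re-fire any cached
    // reports) instead of walking the exceptions again.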
    if (EQ_STATE(estate, ctx->cached_estate)) {
        DEBUG_PRINTF("using cached succ from previous state\n");
        *succ = OR_STATE(*succ, ctx->cached_esucc);
        if (ctx->cached_reports && (flags & CALLBACK_OUTPUT)) {
            DEBUG_PRINTF("firing cached reports from previous state\n");
            if (unlikely(limexRunReports(ctx->cached_reports, ctx->callback,
                                         ctx->context, offset)
                         == MO_HALT_MATCHING)) {
                return PE_RV_HALT; // halt
            }
        }
        return 0;
    }

#ifndef BIG_MODEL
    STATE_T local_succ = ZERO_STATE;
#else
    ctx->local_succ = ZERO_STATE;
#endif

    // A copy of the estate as an array of GPR-sized chunks.
    CHUNK_T chunks[sizeof(STATE_T) / sizeof(CHUNK_T)];
    CHUNK_T emask_chunks[sizeof(STATE_T) / sizeof(CHUNK_T)];
#ifdef ESTATE_ON_STACK
    memcpy(chunks, &estate, sizeof(STATE_T));
#else
    memcpy(chunks, estatep, sizeof(STATE_T));
#endif
    memcpy(emask_chunks, &limex->exceptionMask, sizeof(STATE_T));

    struct proto_cache new_cache = {0, NULL};
    enum CacheResult cacheable = CACHE_RESULT;

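    // base_index[i] is the number of exception bits in chunks 0..i-1; adding
    // the within-chunk rank of a bit gives its index into the exceptions
    // array.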
    u32 base_index[sizeof(STATE_T) / sizeof(CHUNK_T)];
    base_index[0] = 0;
    for (s32 i = 0; i < (s32)ARRAY_LENGTH(base_index) - 1; i++) {
        base_index[i + 1] = base_index[i] + POPCOUNT_FN(emask_chunks[i]);
    }

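    // diffmask has a bit set for each chunk of the exception state that
    // contains active exception states; walk those chunks, and within each
    // chunk walk the set bits, mapping each to its exception via rank-in-mask.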
    do {
        u32 t = findAndClearLSB_32(&diffmask);
#ifdef ARCH_64_BIT
        t >>= 1; // Due to diffmask64, which leaves holes in the bitmask.
#endif
        assert(t < ARRAY_LENGTH(chunks));
        CHUNK_T word = chunks[t];
        assert(word != 0);
        do {
            u32 bit = FIND_AND_CLEAR_FN(&word);
            u32 local_index = RANK_IN_MASK_FN(emask_chunks[t], bit);
            u32 idx = local_index + base_index[t];
            const EXCEPTION_T *e = &exceptions[idx];

            if (!RUN_EXCEPTION_FN(e, STATE_ARG_NAME, succ,
#ifndef BIG_MODEL
                                  &local_succ,
#endif
                                  limex, offset, ctx, &new_cache, &cacheable,
                                  in_rev, flags)) {
                return PE_RV_HALT;
            }
        } while (word);
    } while (diffmask);

#ifndef BIG_MODEL
    *succ = OR_STATE(*succ, local_succ);
#else
    *succ = OR_STATE(*succ, ctx->local_succ);
#endif

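    // If this pass was cacheable, stash the estate, successors and reports so
    // that an identical estate can take the fast path next time.
    // Bounded-repeat activity invalidates any cache entry that depends on it.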
    if (cacheable == CACHE_RESULT) {
        ctx->cached_estate = estate;
#ifndef BIG_MODEL
        ctx->cached_esucc = local_succ;
#else
        ctx->cached_esucc = ctx->local_succ;
#endif
        ctx->cached_reports = new_cache.reports;
        ctx->cached_br = new_cache.br;
    } else if (cacheable == DO_NOT_CACHE_RESULT_AND_FLUSH_BR_ENTRIES) {
        if (ctx->cached_br) {
            ctx->cached_estate = ZERO_STATE;
        }
    }

    return 0;
}

#endif

#undef ZERO_STATE
#undef AND_STATE
#undef EQ_STATE
#undef OR_STATE
#undef TESTBIT_STATE
#undef PE_FN
#undef RUN_EXCEPTION_FN
#undef CONTEXT_T
#undef EXCEPTION_T

#ifdef estate
#undef estate
#endif

#ifdef BIG_MODEL
#undef BIG_MODEL
#endif

#undef STATE_ARG
#undef STATE_ARG_NAME
#undef STATE_ARG_P

#undef IMPL_NFA_T

#undef CHUNK_T
#undef FIND_AND_CLEAR_FN
#undef POPCOUNT_FN
#undef RANK_IN_MASK_FN