#include <vppinfra/clib.h> /* for CLIB_LINUX_KERNEL */
#include <vppinfra/vector.h>
#include <vppinfra/error_bootstrap.h>
+#ifdef __SSE4_2__
#include <vppinfra/memcpy_x86_64.h>
+#endif
#ifdef CLIB_LINUX_KERNEL
#include <linux/string.h>
"behaviour");
#if defined(__COVERITY__)
return memcpy (dst, src, n);
-#elif defined(__x86_64__)
+#elif defined(__SSE4_2__)
clib_memcpy_x86_64 (dst, src, n);
return dst;
#else
#endif
}
+static_always_inline void *
+clib_memmove (void *dst, const void *src, size_t n)
+{
+ u8 *d = (u8 *) dst;
+ u8 *s = (u8 *) src;
+
+ if (s == d)
+ return d;
+
+ if (d > s)
+ for (uword i = n - 1; (i + 1) > 0; i--)
+ d[i] = s[i];
+ else
+ for (uword i = 0; i < n; i++)
+ d[i] = s[i];
+
+ return d;
+}
+
#include <vppinfra/memcpy.h>
/* C11 string manipulation variants */
if (count == 0)
return;
#else
+#if defined(CLIB_HAVE_VEC128)
+ u64x2 v = u64x2_splat (val);
+#endif
while (count >= 4)
{
+#if defined(CLIB_HAVE_VEC128)
+ u64x2_store_unaligned (v, ptr);
+ u64x2_store_unaligned (v, ptr + 2);
+#else
ptr[0] = ptr[1] = ptr[2] = ptr[3] = val;
+#endif
ptr += 4;
count -= 4;
}
return EOK;
}
+static_always_inline const char *
+clib_string_skip_prefix (const char *s, const char *prefix)
+{
+ uword len = __builtin_strlen (prefix);
+ return s + (__builtin_strncmp (s, prefix, len) ? 0 : len);
+}
+
#endif /* included_clib_string_h */
/*