Commit 313dd1b6 authored by Kees Cook

gcc-plugins: Add the randstruct plugin

This randstruct plugin is modified from Brad Spengler/PaX Team's code
in the last public patch of grsecurity/PaX based on my understanding
of the code. Changes or omissions from the original code are mine and
don't reflect the original grsecurity/PaX code.

The randstruct GCC plugin randomizes the layout of selected structures
at compile time, as a probabilistic defense against attacks that need to
know the layout of structures within the kernel. This is most useful for
"in-house" kernel builds where neither the randomization seed nor other
build artifacts are made available to an attacker. While less useful for
distribution kernels (where the randomization seed must be exposed for
third party kernel module builds), it still has some value there since now
all kernel builds would need to be tracked by an attacker.

In more performance sensitive scenarios, GCC_PLUGIN_RANDSTRUCT_PERFORMANCE
can be selected to make a best effort to restrict randomization to
cacheline-sized groups of elements, and will not randomize bitfields. This
comes at the cost of reduced randomization.

Two annotations are defined, __randomize_layout and __no_randomize_layout,
which respectively tell the plugin to either randomize or not to
randomize instances of the struct in question. Follow-on patches enable
the auto-detection logic for selecting structures for randomization
that contain only function pointers. It is disabled here to assist with
bisection.

Since any randomized structs must be initialized using designated
initializers, __randomize_layout includes the __designated_init annotation
even when the plugin is disabled so that all builds will require
the needed initialization. (With the plugin enabled, annotations for
automatically chosen structures are marked as well.)

The main differences between this implementation and grsecurity are:
- disable automatic struct selection (to be enabled in follow-up patch)
- add designated_init attribute at runtime and for manual marking
- clarify debugging output to differentiate bad cast warnings
- add whitelisting infrastructure
- support gcc 7's DECL_ALIGN and DECL_MODE changes (Laura Abbott)
- raise minimum required GCC version to 4.7

Earlier versions of this patch series were ported by Michael Leibowitz.
Signed-off-by: Kees Cook <keescook@chromium.org>
parent 0aa5e49c
......@@ -207,6 +207,8 @@ r200_reg_safe.h
r300_reg_safe.h
r420_reg_safe.h
r600_reg_safe.h
randomize_layout_hash.h
randomize_layout_seed.h
recordmcount
relocs
rlim_names.h
......
......@@ -443,6 +443,45 @@ config GCC_PLUGIN_STRUCTLEAK_VERBOSE
initialized. Since not all existing initializers are detected
by the plugin, this can produce false positive warnings.
config GCC_PLUGIN_RANDSTRUCT
bool "Randomize layout of sensitive kernel structures"
depends on GCC_PLUGINS
select MODVERSIONS if MODULES
help
If you say Y here, the layouts of structures explicitly
marked by __randomize_layout will be randomized at
compile-time. This can introduce the requirement of an
additional information exposure vulnerability for exploits
targeting these structure types.
Enabling this feature will introduce some performance impact,
slightly increase memory usage, and prevent the use of forensic
tools like Volatility against the system (unless the kernel
source tree isn't cleaned after kernel installation).
The seed used for compilation is located at
scripts/gcc-plugins/randomize_layout_seed.h. It remains after
a make clean to allow for external modules to be compiled with
the existing seed and will be removed by a make mrproper or
make distclean.
Note that the implementation requires gcc 4.7 or newer.
This plugin was ported from grsecurity/PaX. More information at:
* https://grsecurity.net/
* https://pax.grsecurity.net/
config GCC_PLUGIN_RANDSTRUCT_PERFORMANCE
bool "Use cacheline-aware structure randomization"
depends on GCC_PLUGIN_RANDSTRUCT
depends on !COMPILE_TEST
help
If you say Y here, the RANDSTRUCT randomization will make a
best effort at restricting randomization to cacheline-sized
groups of elements. It will further not randomize bitfields
in structures. This reduces the performance hit of RANDSTRUCT
at the cost of weakened randomization.
config HAVE_CC_STACKPROTECTOR
bool
help
......
......@@ -223,6 +223,11 @@
/* Mark a function definition as prohibited from being cloned. */
#define __noclone __attribute__((__noclone__, __optimize__("no-tracer")))
/*
 * Provided by the randstruct gcc plugin (scripts/gcc-plugins): mark a
 * struct for compile-time layout randomization, or exempt it from one.
 */
#ifdef RANDSTRUCT_PLUGIN
#define __randomize_layout __attribute__((randomize_layout))
#define __no_randomize_layout __attribute__((no_randomize_layout))
#endif
#endif /* GCC_VERSION >= 40500 */
#if GCC_VERSION >= 40600
......
......@@ -448,6 +448,14 @@ static __always_inline void __write_once_size(volatile void *p, void *res, int s
# define __latent_entropy
#endif
/*
 * With the plugin disabled, __randomize_layout still expands to
 * __designated_init so that structs marked for randomization must always
 * be initialized with designated initializers in every build.
 */
#ifndef __randomize_layout
# define __randomize_layout __designated_init
#endif
#ifndef __no_randomize_layout
# define __no_randomize_layout
#endif
/*
* Tell gcc if a function is cold. The compiler will assume any path
* directly leading to the call is unlikely.
......
......@@ -24,10 +24,17 @@
#ifndef MODULE_ARCH_VERMAGIC
#define MODULE_ARCH_VERMAGIC ""
#endif
/*
 * Fold a hash of the randomization seed into vermagic so that modules
 * built against a differently-randomized layout refuse to load.
 */
#ifdef RANDSTRUCT_PLUGIN
#include <generated/randomize_layout_hash.h>
#define MODULE_RANDSTRUCT_PLUGIN "RANDSTRUCT_PLUGIN_" RANDSTRUCT_HASHED_SEED
#else
#define MODULE_RANDSTRUCT_PLUGIN
#endif
#define VERMAGIC_STRING \
UTS_RELEASE " " \
MODULE_VERMAGIC_SMP MODULE_VERMAGIC_PREEMPT \
MODULE_VERMAGIC_MODULE_UNLOAD MODULE_VERMAGIC_MODVERSIONS \
MODULE_ARCH_VERMAGIC
MODULE_ARCH_VERMAGIC \
MODULE_RANDSTRUCT_PLUGIN
......@@ -29,6 +29,10 @@ ifdef CONFIG_GCC_PLUGINS
gcc-plugin-cflags-$(CONFIG_GCC_PLUGIN_STRUCTLEAK_VERBOSE) += -fplugin-arg-structleak_plugin-verbose
gcc-plugin-cflags-$(CONFIG_GCC_PLUGIN_STRUCTLEAK) += -DSTRUCTLEAK_PLUGIN
# Build and load the randstruct plugin when enabled.
gcc-plugin-$(CONFIG_GCC_PLUGIN_RANDSTRUCT) += randomize_layout_plugin.so
# Define RANDSTRUCT_PLUGIN so the compiler headers expose the attributes.
gcc-plugin-cflags-$(CONFIG_GCC_PLUGIN_RANDSTRUCT) += -DRANDSTRUCT_PLUGIN
# Ask the plugin for cacheline-aware (performance) randomization.
gcc-plugin-cflags-$(CONFIG_GCC_PLUGIN_RANDSTRUCT_PERFORMANCE) += -fplugin-arg-randomize_layout_plugin-performance-mode
GCC_PLUGINS_CFLAGS := $(strip $(addprefix -fplugin=$(objtree)/scripts/gcc-plugins/, $(gcc-plugin-y)) $(gcc-plugin-cflags-y))
export PLUGINCC GCC_PLUGINS_CFLAGS GCC_PLUGIN GCC_PLUGIN_SUBDIR
......
randomize_layout_seed.h
......@@ -18,6 +18,14 @@ endif
export HOSTLIBS
# Rebuild the plugin whenever the generated seed header changes.
$(obj)/randomize_layout_plugin.o: $(objtree)/$(obj)/randomize_layout_seed.h

# gen-random-seed.sh writes the seed header ($@) and the hashed-seed header
# consumed by vermagic for module version checks.
quiet_cmd_create_randomize_layout_seed = GENSEED $@
cmd_create_randomize_layout_seed = \
	$(CONFIG_SHELL) $(srctree)/$(src)/gen-random-seed.sh $@ $(objtree)/include/generated/randomize_layout_hash.h
$(objtree)/$(obj)/randomize_layout_seed.h: FORCE
	$(call if_changed,create_randomize_layout_seed)
# Track both generated headers for if_changed.
targets = randomize_layout_seed.h randomize_layout_hash.h
$(HOSTLIBS)-y := $(foreach p,$(GCC_PLUGIN),$(if $(findstring /,$(p)),,$(p)))
always := $($(HOSTLIBS)-y)
......
......@@ -953,4 +953,9 @@ static inline void debug_gimple_stmt(const_gimple s)
get_inner_reference(exp, pbitsize, pbitpos, poffset, pmode, punsignedp, preversep, pvolatilep)
#endif
/*
 * gcc 7 turned DECL_ALIGN/DECL_MODE into setter macros; emulate the
 * SET_DECL_* spellings for older compilers (Laura Abbott's compat shim).
 */
#if BUILDING_GCC_VERSION < 7000
#define SET_DECL_ALIGN(decl, align) DECL_ALIGN(decl) = (align)
#define SET_DECL_MODE(decl, mode) DECL_MODE(decl) = (mode)
#endif
#endif
#!/bin/sh
# Generate the randstruct seed header ($1) and a header carrying the seed's
# SHA-256 hash ($2, used in module vermagic).  The seed survives "make clean"
# so external modules can be built with a matching layout; only (re)create
# it when the seed file is missing.
if [ ! -f "$1" ]; then
	SEED=$(od -A n -t x8 -N 32 /dev/urandom | tr -d ' \n')
	echo "const char *randstruct_seed = \"$SEED\";" > "$1"
	# "echo -n" is unspecified in POSIX sh (some shells print "-n"
	# literally, which would corrupt the hash); use printf instead.
	HASH=$(printf '%s' "$SEED" | sha256sum | cut -d" " -f1 | tr -d ' \n')
	echo "#define RANDSTRUCT_HASHED_SEED \"$HASH\"" > "$2"
fi
/*
* Copyright 2014-2016 by Open Source Security, Inc., Brad Spengler <spender@grsecurity.net>
* and PaX Team <pageexec@freemail.hu>
* Licensed under the GPL v2
*
* Note: the choice of the license means that the compilation process is
* NOT 'eligible' as defined by gcc's library exception to the GPL v3,
* but for the kernel it doesn't matter since it doesn't link against
* any of the gcc libraries
*
* Usage:
* $ # for 4.5/4.6/C based 4.7
* $ gcc -I`gcc -print-file-name=plugin`/include -I`gcc -print-file-name=plugin`/include/c-family -fPIC -shared -O2 -o randomize_layout_plugin.so randomize_layout_plugin.c
* $ # for C++ based 4.7/4.8+
* $ g++ -I`g++ -print-file-name=plugin`/include -I`g++ -print-file-name=plugin`/include/c-family -fPIC -shared -O2 -o randomize_layout_plugin.so randomize_layout_plugin.c
* $ gcc -fplugin=./randomize_layout_plugin.so test.c -O2
*/
#include "gcc-common.h"
#include "randomize_layout_seed.h"
#if BUILDING_GCC_MAJOR < 4 || (BUILDING_GCC_MAJOR == 4 && BUILDING_GCC_MINOR < 7)
#error "The RANDSTRUCT plugin requires GCC 4.7 or newer."
#endif
/* Name of a struct's main variant, or "anonymous" for unnamed structs. */
#define ORIG_TYPE_NAME(node) \
	(TYPE_NAME(TYPE_MAIN_VARIANT(node)) != NULL_TREE ? ((const unsigned char *)IDENTIFIER_POINTER(TYPE_NAME(TYPE_MAIN_VARIANT(node)))) : (const unsigned char *)"anonymous")

/* Diagnostics prefixed with "randstruct: " to differentiate bad-cast output. */
#define INFORM(loc, msg, ...)	inform(loc, "randstruct: " msg, ##__VA_ARGS__)
#define MISMATCH(loc, how, ...)	INFORM(loc, "casting between randomized structure pointer types (" how "): %qT and %qT\n", __VA_ARGS__)

__visible int plugin_is_GPL_compatible;

/* Non-zero when cacheline-aware (performance) shuffling was requested. */
static int performance_mode;

static struct plugin_info randomize_layout_plugin_info = {
	.version	= "201402201816vanilla",
	.help		= "disable\t\t\tdo not activate plugin\n"
			  "performance-mode\tenable cacheline-aware layout randomization\n"
};

/* One whitelisted cast: a source path plus the two struct names involved. */
struct whitelist_entry {
	const char *pathname;
	const char *lhs;
	const char *rhs;
};

/* Empty for now; infrastructure for whitelisting known-safe casts. */
static const struct whitelist_entry whitelist[] = {
	{ }
};
/* String hash helpers lifted from the old Linux dcache.h. */
static inline unsigned long
partial_name_hash(unsigned long c, unsigned long prevhash)
{
	unsigned long mixed = prevhash + (c << 4) + (c >> 4);

	return mixed * 11;
}

/* Fold an entire NUL-terminated name into a 32-bit hash value. */
static inline unsigned int
name_hash(const unsigned char *name)
{
	unsigned long hash = 0;
	const unsigned char *p;

	for (p = name; *p != '\0'; p++)
		hash = partial_name_hash(*p, hash);

	return (unsigned int)hash;
}
/*
 * Attribute handler for randomize_layout/no_randomize_layout.  Rejects the
 * attribute on functions, parameters, variables and non-struct types, and
 * refuses duplicate application; otherwise lets gcc attach it.
 */
static tree handle_randomize_layout_attr(tree *node, tree name, tree args, int flags, bool *no_add_attrs)
{
	tree type;

	/* Assume rejection until every check below has passed. */
	*no_add_attrs = true;

	switch (TREE_CODE(*node)) {
	case FUNCTION_DECL:
		error("%qE attribute does not apply to functions (%qF)", name, *node);
		return NULL_TREE;
	case PARM_DECL:
		error("%qE attribute does not apply to function parameters (%qD)", name, *node);
		return NULL_TREE;
	case VAR_DECL:
		error("%qE attribute does not apply to variables (%qD)", name, *node);
		return NULL_TREE;
	default:
		break;
	}

	if (TYPE_P(*node)) {
		type = *node;
	} else {
		gcc_assert(TREE_CODE(*node) == TYPE_DECL);
		type = TREE_TYPE(*node);
	}

	if (TREE_CODE(type) != RECORD_TYPE) {
		error("%qE attribute used on %qT applies to struct types only", name, type);
		return NULL_TREE;
	}

	if (lookup_attribute(IDENTIFIER_POINTER(name), TYPE_ATTRIBUTES(type))) {
		error("%qE attribute is already applied to the type %qT", name, type);
		return NULL_TREE;
	}

	*no_add_attrs = false;
	return NULL_TREE;
}
/* set on complete types that we don't need to inspect further at all */
/*
 * Attribute handler for the internal "randomize_considered" marker.
 * Unconditionally accepts the attribute (*no_add_attrs = false) so gcc
 * attaches it; it carries no arguments and produces no diagnostics.
 */
static tree handle_randomize_considered_attr(tree *node, tree name, tree args, int flags, bool *no_add_attrs)
{
	*no_add_attrs = false;
	return NULL_TREE;
}
/*
 * set on types that we've performed a shuffle on, to prevent re-shuffling
 * this does not preclude us from inspecting its fields for potential shuffles
 */
/* Attribute handler: always accepts the internal "randomize_performed" marker. */
static tree handle_randomize_performed_attr(tree *node, tree name, tree args, int flags, bool *no_add_attrs)
{
	*no_add_attrs = false;
	return NULL_TREE;
}
/*
 * 64-bit variant of Bob Jenkins' public domain small-state PRNG,
 * carrying 256 bits of internal state.
 */
typedef unsigned long long u64;

typedef struct ranctx { u64 a; u64 b; u64 c; u64 d; } ranctx;

/* Rotate a 64-bit value left by k bits. */
#define rot(x,k) (((x)<<(k))|((x)>>(64-(k))))

/* Advance the generator one step and return the next 64-bit output. */
static u64 ranval(ranctx *x)
{
	u64 mix;

	mix = x->a - rot(x->b, 7);
	x->a = x->b ^ rot(x->c, 13);
	x->b = x->c + rot(x->d, 37);
	x->c = x->d + mix;
	x->d = mix + x->a;

	return x->d;
}

/* Load a 256-bit seed, then warm up the state with 30 discarded rounds. */
static void raninit(ranctx *x, u64 *seed)
{
	int round;

	x->a = seed[0];
	x->b = seed[1];
	x->c = seed[2];
	x->d = seed[3];

	for (round = 0; round < 30; round++)
		(void)ranval(x);
}
/* 256-bit PRNG seed material; presumably parsed from randstruct_seed at
 * plugin initialization (not visible in this file chunk) - TODO confirm. */
static u64 shuffle_seed[4];

/* One run of consecutive struct fields as built by partition_struct():
 * the first FIELD_DECL, its index in the field array, and member count. */
struct partition_group {
	tree tree_start;
	unsigned long start;
	unsigned long length;
};
/*
 * Split the @length fields in @fields into consecutive groups of at least
 * 64 bytes each (approximately cacheline-sized), so performance mode can
 * shuffle whole groups instead of individual fields.  On return,
 * @size_groups[0..*num_groups) describes each group.
 */
static void partition_struct(tree *fields, unsigned long length, struct partition_group *size_groups, unsigned long *num_groups)
{
	unsigned long i;
	unsigned long accum_size = 0;
	unsigned long accum_length = 0;
	unsigned long group_idx = 0;

	gcc_assert(length < INT_MAX);

	memset(size_groups, 0, sizeof(struct partition_group) * length);

	for (i = 0; i < length; i++) {
		/* A zeroed tree_start marks the start of a fresh group. */
		if (size_groups[group_idx].tree_start == NULL_TREE) {
			size_groups[group_idx].tree_start = fields[i];
			size_groups[group_idx].start = i;
			accum_length = 0;
			accum_size = 0;
		}
		accum_size += (unsigned long)int_size_in_bytes(TREE_TYPE(fields[i]));
		accum_length++;
		/* Close the group once it spans at least 64 bytes. */
		if (accum_size >= 64) {
			size_groups[group_idx].length = accum_length;
			accum_length = 0;
			group_idx++;
		}
	}

	/* Flush a trailing group that never reached the 64-byte threshold. */
	if (size_groups[group_idx].tree_start != NULL_TREE &&
	    !size_groups[group_idx].length) {
		size_groups[group_idx].length = accum_length;
		group_idx++;
	}

	*num_groups = group_idx;
}
/*
 * Performance-mode shuffle: Fisher-Yates over the cacheline-sized groups
 * produced by partition_struct(), then a second pass over the fields inside
 * each group that leaves bitfields in place so their storage units are not
 * broken apart.
 */
static void performance_shuffle(tree *newtree, unsigned long length, ranctx *prng_state)
{
	unsigned long i, x;
	/* NOTE(review): VLA sized by the struct's field count; assumes field
	 * counts stay small enough for the host stack - TODO confirm. */
	struct partition_group size_group[length];
	unsigned long num_groups = 0;
	unsigned long randnum;

	partition_struct(newtree, length, (struct partition_group *)&size_group, &num_groups);

	/* Shuffle the order of the groups themselves. */
	for (i = num_groups - 1; i > 0; i--) {
		struct partition_group tmp;
		randnum = ranval(prng_state) % (i + 1);
		tmp = size_group[i];
		size_group[i] = size_group[randnum];
		size_group[randnum] = tmp;
	}

	/* Shuffle the fields within each group, skipping bitfields. */
	for (x = 0; x < num_groups; x++) {
		for (i = size_group[x].start + size_group[x].length - 1; i > size_group[x].start; i--) {
			tree tmp;

			if (DECL_BIT_FIELD_TYPE(newtree[i]))
				continue;
			/* NOTE(review): randnum ranges over [0, i], which can
			 * fall below this group's start index - looks odd for
			 * a per-group shuffle; confirm against the grsecurity
			 * original. */
			randnum = ranval(prng_state) % (i + 1);
			// we could handle this case differently if desired
			if (DECL_BIT_FIELD_TYPE(newtree[randnum]))
				continue;
			tmp = newtree[i];
			newtree[i] = newtree[randnum];
			newtree[randnum] = tmp;
		}
	}
}
/* Unrestricted Fisher-Yates shuffle over the whole field array. */
static void full_shuffle(tree *newtree, unsigned long length, ranctx *prng_state)
{
	unsigned long idx;

	for (idx = length - 1; idx > 0; idx--) {
		unsigned long pick = ranval(prng_state) % (idx + 1);
		tree saved = newtree[idx];

		newtree[idx] = newtree[pick];
		newtree[pick] = saved;
	}
}
/* modern in-place Fisher-Yates shuffle */
/*
 * Shuffle the first @length entries of @newtree (the FIELD_DECLs of @type).
 * The PRNG is seeded from the global shuffle_seed with a hash of the
 * struct's name XORed into every seed word, so each struct gets its own
 * deterministic permutation for a given build seed.
 */
static void shuffle(const_tree type, tree *newtree, unsigned long length)
{
	unsigned long i;
	u64 seed[4];
	ranctx prng_state;
	const unsigned char *structname;

	if (length == 0)
		return;

	gcc_assert(TREE_CODE(type) == RECORD_TYPE);

	structname = ORIG_TYPE_NAME(type);

#ifdef __DEBUG_PLUGIN
	fprintf(stderr, "Shuffling struct %s %p\n", (const char *)structname, type);
#ifdef __DEBUG_VERBOSE
	debug_tree((tree)type);
#endif
#endif

	/* Mix the per-struct name hash into each word of the global seed. */
	for (i = 0; i < 4; i++) {
		seed[i] = shuffle_seed[i];
		seed[i] ^= name_hash(structname);
	}

	raninit(&prng_state, (u64 *)&seed);

	if (performance_mode)
		performance_shuffle(newtree, length, &prng_state);
	else
		full_shuffle(newtree, length, &prng_state);
}
/*
 * Return true when @field is a flexible-array-like member: a true flexible
 * array (no total size and no domain upper bound) or the old 0/1-element
 * array idiom (total size is zero, or exactly one element's worth of bits).
 */
static bool is_flexible_array(const_tree field)
{
	const_tree fieldtype;
	const_tree typesize;
	const_tree elemtype;
	const_tree elemsize;

	fieldtype = TREE_TYPE(field);
	typesize = TYPE_SIZE(fieldtype);

	if (TREE_CODE(fieldtype) != ARRAY_TYPE)
		return false;

	elemtype = TREE_TYPE(fieldtype);
	elemsize = TYPE_SIZE(elemtype);

	/* size of type is represented in bits */

	/* True flexible array member: no total size, no domain maximum. */
	if (typesize == NULL_TREE && TYPE_DOMAIN(fieldtype) != NULL_TREE &&
	    TYPE_MAX_VALUE(TYPE_DOMAIN(fieldtype)) == NULL_TREE)
		return true;

	/*
	 * Guard against an incomplete or variably-sized element type: the
	 * original called tree_to_uhwi(elemsize) without checking elemsize,
	 * which would crash on a NULL or non-constant element size.
	 */
	if (elemsize == NULL_TREE || !TREE_CONSTANT(elemsize))
		return false;

	/* 0-length array, or total size equal to exactly one element. */
	if (typesize != NULL_TREE &&
	    (TREE_CONSTANT(typesize) && (!tree_to_uhwi(typesize) ||
	     tree_to_uhwi(typesize) == tree_to_uhwi(elemsize))))
		return true;

	return false;
}
/*
 * Randomize the field order of @type in place.  Returns 1 when the layout
 * was changed, 0 when the struct was skipped (no fields, fewer than two
 * fields, already randomized, explicitly opted out, or whitelisted).
 */
static int relayout_struct(tree type)
{
	unsigned long num_fields = (unsigned long)list_length(TYPE_FIELDS(type));
	unsigned long shuffle_length = num_fields;
	tree field;
	tree newtree[num_fields];
	unsigned long i;
	tree list;
	tree variant;
	tree main_variant;
	expanded_location xloc;
	bool has_flexarray = false;

	if (TYPE_FIELDS(type) == NULL_TREE)
		return 0;

	if (num_fields < 2)
		return 0;

	gcc_assert(TREE_CODE(type) == RECORD_TYPE);

	gcc_assert(num_fields < INT_MAX);

	if (lookup_attribute("randomize_performed", TYPE_ATTRIBUTES(type)) ||
	    lookup_attribute("no_randomize_layout", TYPE_ATTRIBUTES(TYPE_MAIN_VARIANT(type))))
		return 0;

	/* Workaround for 3rd-party VirtualBox source that we can't modify ourselves */
	if (!strcmp((const char *)ORIG_TYPE_NAME(type), "INTNETTRUNKFACTORY") ||
	    !strcmp((const char *)ORIG_TYPE_NAME(type), "RAWPCIFACTORY"))
		return 0;

	/* throw out any structs in uapi */
	xloc = expand_location(DECL_SOURCE_LOCATION(TYPE_FIELDS(type)));

	if (strstr(xloc.file, "/uapi/"))
		error(G_("attempted to randomize userland API struct %s"), ORIG_TYPE_NAME(type));

	/* Collect the FIELD_DECL chain into an array we can permute. */
	for (field = TYPE_FIELDS(type), i = 0; field; field = TREE_CHAIN(field), i++) {
		gcc_assert(TREE_CODE(field) == FIELD_DECL);
		newtree[i] = field;
	}

	/*
	 * enforce that we don't randomize the layout of the last
	 * element of a struct if it's a 0 or 1-length array
	 * or a proper flexible array
	 */
	if (is_flexible_array(newtree[num_fields - 1])) {
		has_flexarray = true;
		shuffle_length--;
	}

	shuffle(type, (tree *)newtree, shuffle_length);

	/*
	 * set up a bogus anonymous struct field designed to error out on unnamed struct initializers
	 * as gcc provides no other way to detect such code
	 */
	list = make_node(FIELD_DECL);
	TREE_CHAIN(list) = newtree[0];
	TREE_TYPE(list) = void_type_node;
	DECL_SIZE(list) = bitsize_zero_node;
	DECL_NONADDRESSABLE_P(list) = 1;
	DECL_FIELD_BIT_OFFSET(list) = bitsize_zero_node;
	DECL_SIZE_UNIT(list) = size_zero_node;
	DECL_FIELD_OFFSET(list) = size_zero_node;
	DECL_CONTEXT(list) = type;
	// to satisfy the constify plugin
	TREE_READONLY(list) = 1;

	/* Re-chain the shuffled fields behind the sentinel field. */
	for (i = 0; i < num_fields - 1; i++)
		TREE_CHAIN(newtree[i]) = newtree[i+1];
	TREE_CHAIN(newtree[num_fields - 1]) = NULL_TREE;

	main_variant = TYPE_MAIN_VARIANT(type);
	for (variant = main_variant; variant; variant = TYPE_NEXT_VARIANT(variant)) {
		TYPE_FIELDS(variant) = list;
		TYPE_ATTRIBUTES(variant) = copy_list(TYPE_ATTRIBUTES(variant));
		TYPE_ATTRIBUTES(variant) = tree_cons(get_identifier("randomize_performed"), NULL_TREE, TYPE_ATTRIBUTES(variant));
		TYPE_ATTRIBUTES(variant) = tree_cons(get_identifier("designated_init"), NULL_TREE, TYPE_ATTRIBUTES(variant));
		/*
		 * NOTE(review): "has_flexarray" is added to @type on every
		 * iteration rather than to @variant, duplicating the attribute
		 * on @type and never tagging the other variants - confirm
		 * intent against the grsecurity original.
		 */
		if (has_flexarray)
			TYPE_ATTRIBUTES(type) = tree_cons(get_identifier("has_flexarray"), NULL_TREE, TYPE_ATTRIBUTES(type));
	}

	/*
	 * force a re-layout of the main variant
	 * the TYPE_SIZE for all variants will be recomputed
	 * by finalize_type_size()
	 */
	TYPE_SIZE(main_variant) = NULL_TREE;
	layout_type(main_variant);
	gcc_assert(TYPE_SIZE(main_variant) != NULL_TREE);

	return 1;
}
/* from constify plugin: peel all array layers off a field's type */
static const_tree get_field_type(const_tree field)
{
	const_tree base = TREE_TYPE(field);

	return strip_array_types(base);
}
/* from constify plugin: does @fieldtype point at a function? */
static bool is_fptr(const_tree fieldtype)
{
	return TREE_CODE(fieldtype) == POINTER_TYPE &&
	       TREE_CODE(TREE_TYPE(fieldtype)) == FUNCTION_TYPE;
}
/* derived from constify plugin */
/*
 * Would return 1 when @node is a struct/union composed purely of function
 * pointers (an automatic randomization candidate).  Currently hard-disabled
 * by the early return, per the commit's plan to enable auto-selection in a
 * follow-up; everything after "return 0" is dead code for now.
 */
static int is_pure_ops_struct(const_tree node)
{
	const_tree field;

	gcc_assert(TREE_CODE(node) == RECORD_TYPE || TREE_CODE(node) == UNION_TYPE);

	/* XXX: Do not apply randomization to all-ftpr structs yet. */
	return 0;

	for (field = TYPE_FIELDS(node); field; field = TREE_CHAIN(field)) {
		const_tree fieldtype = get_field_type(field);
		enum tree_code code = TREE_CODE(fieldtype);

		/* NOTE(review): only skips direct self-references; indirect
		 * cycles through another struct would still recurse. */
		if (node == fieldtype)
			continue;

		/* NOTE(review): !is_fptr() rejects RECORD/UNION fields before
		 * the code check below, so the recursion looks unreachable -
		 * confirm ordering before enabling this path. */
		if (!is_fptr(fieldtype))
			return 0;

		if (code != RECORD_TYPE && code != UNION_TYPE)
			continue;

		if (!is_pure_ops_struct(fieldtype))
			return 0;
	}

	return 1;
}
/*
 * Consider @type for randomization exactly once: structs whose main variant
 * is marked randomize_layout (or detected as pure-ops structs - currently
 * disabled) are relaid out, then the type is tagged "randomize_considered"
 * so it is never re-examined.
 */
static void randomize_type(tree type)
{
	tree variant;

	gcc_assert(TREE_CODE(type) == RECORD_TYPE);

	if (lookup_attribute("randomize_considered", TYPE_ATTRIBUTES(type)))
		return;

	if (lookup_attribute("randomize_layout", TYPE_ATTRIBUTES(TYPE_MAIN_VARIANT(type))) || is_pure_ops_struct(type))
		relayout_struct(type);

	/*
	 * NOTE(review): the loop walks every variant but modifies @type on
	 * each iteration, so the marker is added to @type once per variant
	 * while the other variants stay untagged - confirm whether @variant
	 * was intended here.
	 */
	for (variant = TYPE_MAIN_VARIANT(type); variant; variant = TYPE_NEXT_VARIANT(variant)) {
		TYPE_ATTRIBUTES(type) = copy_list(TYPE_ATTRIBUTES(type));
		TYPE_ATTRIBUTES(type) = tree_cons(get_identifier("randomize_considered"), NULL_TREE, TYPE_ATTRIBUTES(type));
	}
#ifdef __DEBUG_PLUGIN
	fprintf(stderr, "Marking randomize_considered on struct %s\n", ORIG_TYPE_NAME(type));
#ifdef __DEBUG_VERBOSE
	debug_tree(type);
#endif
#endif
}
/*
 * For a variable whose randomized struct type ends in a flexible array
 * (tagged "has_flexarray"), grow DECL_SIZE to include the bits contributed
 * by a string-constant initializer of that trailing array.  Any other kind
 * of initializer for the flexible member is rejected with an error.
 */
static void update_decl_size(tree decl)
{
	tree lastval, lastidx, field, init, type, flexsize;
	unsigned HOST_WIDE_INT len;

	type = TREE_TYPE(decl);
	if (!lookup_attribute("has_flexarray", TYPE_ATTRIBUTES(type)))
		return;

	init = DECL_INITIAL(decl);
	if (init == NULL_TREE || init == error_mark_node)
		return;

	if (TREE_CODE(init) != CONSTRUCTOR)
		return;

	len = CONSTRUCTOR_NELTS(init);
	if (!len)
		return;

	lastval = CONSTRUCTOR_ELT(init, CONSTRUCTOR_NELTS(init) - 1)->value;
	lastidx = CONSTRUCTOR_ELT(init, CONSTRUCTOR_NELTS(init) - 1)->index;

	/* Walk to the final field of the struct (the flexible array). */
	for (field = TYPE_FIELDS(TREE_TYPE(decl)); TREE_CHAIN(field); field = TREE_CHAIN(field))
		;

	/* Only act when the constructor's last element initializes it. */
	if (lastidx != field)
		return;

	if (TREE_CODE(lastval) != STRING_CST) {
		error("Only string constants are supported as initializers "
		      "for randomized structures with flexible arrays");
		return;
	}

	/* String length times element size, in bits. */
	flexsize = bitsize_int(TREE_STRING_LENGTH(lastval) *
		tree_to_uhwi(TYPE_SIZE(TREE_TYPE(TREE_TYPE(lastval)))));

	DECL_SIZE(decl) = size_binop(PLUS_EXPR, TYPE_SIZE(type), flexsize);

	return;
}
/*
 * Finish-decl callback (presumably registered for PLUGIN_FINISH_DECL -
 * registration is not visible in this chunk): when a variable of a
 * randomized struct type is declared, discard the size/alignment/mode/RTL
 * gcc computed from the pre-randomization layout and lay it out again.
 */
static void randomize_layout_finish_decl(void *event_data, void *data)
{
	tree decl = (tree)event_data;
	tree type;

	if (decl == NULL_TREE || decl == error_mark_node)
		return;

	type = TREE_TYPE(decl);

	if (TREE_CODE(decl) != VAR_DECL)
		return;
	if (TREE_CODE(type) != RECORD_TYPE && TREE_CODE(type) != UNION_TYPE)
		return;
	if (!lookup_attribute("randomize_performed", TYPE_ATTRIBUTES(type)))
		return;

	/* Invalidate the stale layout so layout_decl() recomputes it. */
	DECL_SIZE(decl) = 0;
	DECL_SIZE_UNIT(decl) = 0;
	SET_DECL_ALIGN(decl, 0);
	SET_DECL_MODE (decl, VOIDmode);
	SET_DECL_RTL(decl, 0);
	/* Account for a string initializer of a trailing flexible array. */
	update_decl_size(decl);
	layout_decl(decl, 0);
}
static void finish_type(void *event_data, void *data)
{
tree type = (tree)event_data;
if (type == NULL_TREE || type == error_mark_node)
return;
if (TREE_CODE(type) != RECORD_TYPE)
return;
if (TYPE_FIELDS(type) == NULL_TREE)
return;
if (lookup_attribute("randomize_considered", TYPE_ATTRIBUTES(type)))
return;