Randstruct.cpp
//===--- Randstruct.cpp ---------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains the implementation for Clang's structure field layout
// randomization.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/Randstruct.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/Attr.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclCXX.h" // For StaticAssertDecl
#include "clang/Basic/Diagnostic.h"
#include "llvm/ADT/SmallVector.h"

#include <algorithm>
#include <random>
#include <set>
#include <sstream>
#include <string>

using clang::ASTContext;
using clang::FieldDecl;
using llvm::SmallVector;

namespace {

// FIXME: Replace this with some discovery once that mechanism exists.
enum { CACHE_LINE = 64 };

// The Bucket class holds the struct fields we're trying to fill to a
// cache-line.
class Bucket {
  SmallVector<FieldDecl *, 64> Fields;
  int Size = 0;

public:
  virtual ~Bucket() = default;

  SmallVector<FieldDecl *, 64> &fields() { return Fields; }
  void addField(FieldDecl *Field, int FieldSize);
  virtual bool canFit(int FieldSize) const {
    return Size + FieldSize <= CACHE_LINE;
  }
  virtual bool isBitfieldRun() const { return false; }
  bool full() const { return Size >= CACHE_LINE; }
};

void Bucket::addField(FieldDecl *Field, int FieldSize) {
  Size += FieldSize;
  Fields.push_back(Field);
}

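// A BitfieldRunBucket holds a run of adjacent, non-zero-length bitfields. It
// accepts any number of fields (canFit always succeeds) so the run is never
// split across buckets and the bitfields remain adjacent after randomization.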
struct BitfieldRunBucket : public Bucket {
  bool canFit(int FieldSize) const override { return true; }
  bool isBitfieldRun() const override { return true; }
};

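// Performs the actual layout randomization: greedily pack the fields in
// FieldsOut into cache-line-sized Buckets (keeping runs of adjacent bitfields
// together and giving over-sized fields a bucket of their own), shuffle the
// buckets, shuffle the fields within every bucket that is not a bitfield run,
// and write the resulting order back into FieldsOut.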
void randomizeStructureLayoutImpl(const ASTContext &Context,
                                  llvm::SmallVectorImpl<FieldDecl *> &FieldsOut,
                                  std::mt19937 &RNG) {
  // All of the Buckets produced by the best-effort cache-line algorithm.
  SmallVector<std::unique_ptr<Bucket>, 16> Buckets;

  // The current bucket of fields that we are trying to fill to a cache-line.
  std::unique_ptr<Bucket> CurrentBucket;

  // The current bucket containing the run of adjacent bitfields to ensure they
  // remain adjacent.
  std::unique_ptr<BitfieldRunBucket> CurrentBitfieldRun;

  // Tracks the number of fields that we failed to fit to the current bucket,
  // and thus still need to be added later.
  size_t Skipped = 0;

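  // Each iteration either places the front field of FieldsOut into a bucket or
  // rotates it to the back of the vector. Skipped counts consecutive
  // rotations, so once every remaining field has been tried against the
  // current bucket without fitting, the bucket is tied off.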
  while (!FieldsOut.empty()) {
    // If we've Skipped more fields than we have remaining to place, that means
    // that they can't fit in our current bucket, and we need to start a new
    // one.
    if (Skipped >= FieldsOut.size()) {
      Skipped = 0;
      Buckets.push_back(std::move(CurrentBucket));
    }

    // Take the first field that needs to be put in a bucket.
    auto FieldIter = FieldsOut.begin();
    FieldDecl *FD = *FieldIter;

    if (FD->isBitField() && !FD->isZeroLengthBitField(Context)) {
      // Start a bitfield run if this is the first bitfield we have found.
      if (!CurrentBitfieldRun)
        CurrentBitfieldRun = std::make_unique<BitfieldRunBucket>();

      // We've placed the field and can remove it from the vector of fields
      // still awaiting a bucket (FieldsOut).
      CurrentBitfieldRun->addField(FD, /*FieldSize is irrelevant here*/ 1);
      FieldsOut.erase(FieldIter);
      continue;
    }

    // Else, the current field is not a bitfield. If we were previously in a
    // bitfield run, end it.
    if (CurrentBitfieldRun)
      Buckets.push_back(std::move(CurrentBitfieldRun));

    // If we don't have a bucket, make one.
    if (!CurrentBucket)
      CurrentBucket = std::make_unique<Bucket>();

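    // Compute the field's width in bits. A field at least as wide as a cache
    // line can never share a bucket, so it is placed in a bucket of its own.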
    uint64_t Width = Context.getTypeInfo(FD->getType()).Width;
    if (Width >= CACHE_LINE) {
      std::unique_ptr<Bucket> OverSized = std::make_unique<Bucket>();
      OverSized->addField(FD, Width);
      FieldsOut.erase(FieldIter);
      Buckets.push_back(std::move(OverSized));
      continue;
    }

    // If it fits, add it.
    if (CurrentBucket->canFit(Width)) {
      CurrentBucket->addField(FD, Width);
      FieldsOut.erase(FieldIter);

      // If it's now full, tie off the bucket.
      if (CurrentBucket->full()) {
        Skipped = 0;
        Buckets.push_back(std::move(CurrentBucket));
      }
    } else {
      // We can't fit it in our current bucket. Move to the end for processing
      // later.
      ++Skipped; // Mark it skipped.
      FieldsOut.push_back(FD);
      FieldsOut.erase(FieldIter);
    }
  }

  // Done processing the fields awaiting a bucket.

  // If we were filling a bucket, tie it off.
  if (CurrentBucket)
    Buckets.push_back(std::move(CurrentBucket));

  // If we were processing a bitfield run bucket, tie it off.
  if (CurrentBitfieldRun)
    Buckets.push_back(std::move(CurrentBitfieldRun));

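  // Randomize the order of the cache-line buckets themselves; the fields
  // inside each non-bitfield-run bucket are shuffled below.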
  std::shuffle(std::begin(Buckets), std::end(Buckets), RNG);

  // Produce the new ordering of the elements from the Buckets.
  SmallVector<FieldDecl *, 16> FinalOrder;
  for (const std::unique_ptr<Bucket> &B : Buckets) {
    llvm::SmallVectorImpl<FieldDecl *> &RandFields = B->fields();
    if (!B->isBitfieldRun())
      std::shuffle(std::begin(RandFields), std::end(RandFields), RNG);

    FinalOrder.insert(FinalOrder.end(), RandFields.begin(), RandFields.end());
  }

  FieldsOut = FinalOrder;
}

} // anonymous namespace

namespace clang {
namespace randstruct {

bool randomizeStructureLayout(const ASTContext &Context, RecordDecl *RD,
                              SmallVectorImpl<Decl *> &FinalOrdering) {
  SmallVector<FieldDecl *, 64> RandomizedFields;
  SmallVector<Decl *, 8> PostRandomizedFields;

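  // Partition the record's declarations: fields are candidates for
  // randomization, static_asserts and indirect fields are re-emitted after the
  // randomized fields, and every other declaration keeps its position at the
  // front of the final ordering.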
  unsigned TotalNumFields = 0;
  for (Decl *D : RD->decls()) {
    ++TotalNumFields;
    if (auto *FD = dyn_cast<FieldDecl>(D))
      RandomizedFields.push_back(FD);
    else if (isa<StaticAssertDecl>(D) || isa<IndirectFieldDecl>(D))
      PostRandomizedFields.push_back(D);
    else
      FinalOrdering.push_back(D);
  }

  if (RandomizedFields.empty())
    return false;

  // The struct might end with a flexible array member or a trailing array of
  // size 2 or smaller, in which case we don't want to randomize it.
  FieldDecl *FlexibleArray =
      RD->hasFlexibleArrayMember() ? RandomizedFields.pop_back_val() : nullptr;
  if (!FlexibleArray) {
    if (const auto *CA =
            dyn_cast<ConstantArrayType>(RandomizedFields.back()->getType()))
      if (CA->getSize().sle(2))
        FlexibleArray = RandomizedFields.pop_back_val();
  }

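  // Seed the RNG deterministically from the user-supplied Randstruct seed
  // combined with the record's name, so that a given struct receives the same
  // layout on every compilation with the same seed.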
  std::string Seed =
      Context.getLangOpts().RandstructSeed + RD->getNameAsString();
  std::seed_seq SeedSeq(Seed.begin(), Seed.end());
  std::mt19937 RNG(SeedSeq);

  randomizeStructureLayoutImpl(Context, RandomizedFields, RNG);

  // Insert the randomized fields into the final ordering.
  FinalOrdering.insert(FinalOrdering.end(), RandomizedFields.begin(),
                       RandomizedFields.end());

  // Add fields that belong towards the end of the RecordDecl.
  FinalOrdering.insert(FinalOrdering.end(), PostRandomizedFields.begin(),
                       PostRandomizedFields.end());

  // Add back the flexible array.
  if (FlexibleArray)
    FinalOrdering.push_back(FlexibleArray);

  assert(TotalNumFields == FinalOrdering.size() &&
         "Decl count has been altered after Randstruct randomization!");
  (void)TotalNumFields;
  return true;
}

} // end namespace randstruct
} // end namespace clang