1//===- llvm/IR/Metadata.h - Metadata definitions ----------------*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9/// @file
10/// This file contains the declarations for metadata subclasses.
11/// They represent the different flavors of metadata that live in LLVM.
12//
13//===----------------------------------------------------------------------===//
14
15#ifndef LLVM_IR_METADATA_H
16#define LLVM_IR_METADATA_H
17
18#include "llvm/ADT/ArrayRef.h"
19#include "llvm/ADT/DenseMap.h"
20#include "llvm/ADT/DenseMapInfo.h"
21#include "llvm/ADT/PointerUnion.h"
22#include "llvm/ADT/SmallVector.h"
23#include "llvm/ADT/StringRef.h"
24#include "llvm/ADT/ilist_node.h"
25#include "llvm/ADT/iterator_range.h"
26#include "llvm/IR/Constant.h"
27#include "llvm/IR/LLVMContext.h"
28#include "llvm/IR/Value.h"
29#include "llvm/Support/CBindingWrapping.h"
30#include "llvm/Support/Casting.h"
31#include "llvm/Support/ErrorHandling.h"
#include <array>
#include <cassert>
#include <climits>
33#include <cstddef>
34#include <cstdint>
35#include <iterator>
36#include <memory>
37#include <string>
38#include <type_traits>
39#include <utility>
40
41namespace llvm {
42
class DataLayout;
class Instruction;
class Module;
44class ModuleSlotTracker;
45class raw_ostream;
46class DbgVariableRecord;
47template <typename T> class StringMapEntry;
48template <typename ValueTy> class StringMapEntryStorage;
49class Type;
50
51enum LLVMConstants : uint32_t {
52 DEBUG_METADATA_VERSION = 3 // Current debug info version number.
53};
54
55/// Magic number in the value profile metadata showing a target has been
56/// promoted for the instruction and shouldn't be promoted again.
57const uint64_t NOMORE_ICP_MAGICNUM = -1;
58
59/// Root of the metadata hierarchy.
60///
61/// This is a root class for typeless data in the IR.
62class Metadata {
63 friend class ReplaceableMetadataImpl;
64
65 /// RTTI.
66 const unsigned char SubclassID;
67
68protected:
69 /// Active type of storage.
70 enum StorageType { Uniqued, Distinct, Temporary };
71
72 /// Storage flag for non-uniqued, otherwise unowned, metadata.
73 unsigned char Storage : 7;
74
75 unsigned char SubclassData1 : 1;
76 unsigned short SubclassData16 = 0;
77 unsigned SubclassData32 = 0;
78
79public:
80 enum MetadataKind {
81#define HANDLE_METADATA_LEAF(CLASS) CLASS##Kind,
82#include "llvm/IR/Metadata.def"
83 };
84
85protected:
86 Metadata(unsigned ID, StorageType Storage)
87 : SubclassID(ID), Storage(Storage), SubclassData1(false) {
88 static_assert(sizeof(*this) == 8, "Metadata fields poorly packed");
89 }
90
91 ~Metadata() = default;
92
93 /// Default handling of a changed operand, which asserts.
94 ///
95 /// If subclasses pass themselves in as owners to a tracking node reference,
96 /// they must provide an implementation of this method.
97 void handleChangedOperand(void *, Metadata *) {
98 llvm_unreachable("Unimplemented in Metadata subclass");
99 }
100
101public:
102 unsigned getMetadataID() const { return SubclassID; }
103
104 /// User-friendly dump.
105 ///
106 /// If \c M is provided, metadata nodes will be numbered canonically;
107 /// otherwise, pointer addresses are substituted.
108 ///
109 /// Note: this uses an explicit overload instead of default arguments so that
110 /// the nullptr version is easy to call from a debugger.
111 ///
112 /// @{
113 void dump() const;
114 void dump(const Module *M) const;
115 /// @}
116
117 /// Print.
118 ///
119 /// Prints definition of \c this.
120 ///
121 /// If \c M is provided, metadata nodes will be numbered canonically;
122 /// otherwise, pointer addresses are substituted.
123 /// @{
124 void print(raw_ostream &OS, const Module *M = nullptr,
125 bool IsForDebug = false) const;
126 void print(raw_ostream &OS, ModuleSlotTracker &MST, const Module *M = nullptr,
127 bool IsForDebug = false) const;
128 /// @}
129
130 /// Print as operand.
131 ///
132 /// Prints reference of \c this.
133 ///
134 /// If \c M is provided, metadata nodes will be numbered canonically;
135 /// otherwise, pointer addresses are substituted.
136 /// @{
137 void printAsOperand(raw_ostream &OS, const Module *M = nullptr) const;
138 void printAsOperand(raw_ostream &OS, ModuleSlotTracker &MST,
139 const Module *M = nullptr) const;
140 /// @}
141};
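// For illustration only, a sketch of the printing helpers declared above,
// assuming a hypothetical Metadata *MD and Module *M are in scope (errs() is
// llvm::errs() from raw_ostream.h):
//
//   MD->dump(M);                   // Debugger-friendly, numbered relative to M.
//   MD->print(errs(), M);          // Prints the full definition.
//   MD->printAsOperand(errs(), M); // Prints a short reference such as !42.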
142
143// Create wrappers for C Binding types (see CBindingWrapping.h).
144DEFINE_ISA_CONVERSION_FUNCTIONS(Metadata, LLVMMetadataRef)
145
146// Specialized opaque metadata conversions.
147inline Metadata **unwrap(LLVMMetadataRef *MDs) {
148 return reinterpret_cast<Metadata**>(MDs);
149}
150
151#define HANDLE_METADATA(CLASS) class CLASS;
152#include "llvm/IR/Metadata.def"
153
154// Provide specializations of isa so that we don't need definitions of
155// subclasses to see if the metadata is a subclass.
156#define HANDLE_METADATA_LEAF(CLASS) \
157 template <> struct isa_impl<CLASS, Metadata> { \
158 static inline bool doit(const Metadata &MD) { \
159 return MD.getMetadataID() == Metadata::CLASS##Kind; \
160 } \
161 };
162#include "llvm/IR/Metadata.def"
163
164inline raw_ostream &operator<<(raw_ostream &OS, const Metadata &MD) {
165 MD.print(OS);
166 return OS;
167}
168
169/// Metadata wrapper in the Value hierarchy.
170///
171/// A member of the \a Value hierarchy to represent a reference to metadata.
172/// This allows, e.g., intrinsics to have metadata as operands.
173///
174/// Notably, this is the only thing in either hierarchy that is allowed to
175/// reference \a LocalAsMetadata.
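///
/// For illustration only, a sketch of bridging a node into the \a Value
/// hierarchy, assuming a hypothetical context \c Ctx and node \c N:
///
/// \code
///     MetadataAsValue *MAV = MetadataAsValue::get(Ctx, N);
///     Metadata *RoundTrip = MAV->getMetadata(); // RoundTrip == N.
/// \endcode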
176class MetadataAsValue : public Value {
177 friend class ReplaceableMetadataImpl;
178 friend class LLVMContextImpl;
179
180 Metadata *MD;
181
182 MetadataAsValue(Type *Ty, Metadata *MD);
183
184 /// Drop use of metadata (during teardown).
185 void dropUse() { MD = nullptr; }
186
187public:
188 ~MetadataAsValue();
189
190 static MetadataAsValue *get(LLVMContext &Context, Metadata *MD);
191 static MetadataAsValue *getIfExists(LLVMContext &Context, Metadata *MD);
192
193 Metadata *getMetadata() const { return MD; }
194
195 static bool classof(const Value *V) {
196 return V->getValueID() == MetadataAsValueVal;
197 }
198
199private:
200 void handleChangedMetadata(Metadata *MD);
201 void track();
202 void untrack();
203};
204
205/// Base class for tracking ValueAsMetadata/DIArgLists with user lookups and
206/// Owner callbacks outside of ValueAsMetadata.
207///
208/// Currently only inherited by DbgVariableRecord; if other classes need to use
209/// it, then a SubclassID will need to be added (either as a new field or by
210/// making DebugValue into a PointerIntUnion) to discriminate between the
211/// subclasses in lookup and callback handling.
212class DebugValueUser {
213protected:
214 // Capacity to store 3 debug values.
215 // TODO: Not all DebugValueUser instances need all 3 elements, if we
216 // restructure the DbgVariableRecord class then we can template parameterize
217 // this array size.
218 std::array<Metadata *, 3> DebugValues;
219
220 ArrayRef<Metadata *> getDebugValues() const { return DebugValues; }
221
222public:
223 DbgVariableRecord *getUser();
224 const DbgVariableRecord *getUser() const;
225 /// To be called by ReplaceableMetadataImpl::replaceAllUsesWith, where `Old`
226 /// is a pointer to one of the pointers in `DebugValues` (so should be type
227 /// Metadata**), and `NewDebugValue` is the new Metadata* that is replacing
228 /// *Old.
229 /// For manually replacing elements of DebugValues,
230 /// `resetDebugValue(Idx, NewDebugValue)` should be used instead.
231 void handleChangedValue(void *Old, Metadata *NewDebugValue);
232 DebugValueUser() = default;
233 explicit DebugValueUser(std::array<Metadata *, 3> DebugValues)
234 : DebugValues(DebugValues) {
235 trackDebugValues();
236 }
237 DebugValueUser(DebugValueUser &&X) {
238 DebugValues = X.DebugValues;
239 retrackDebugValues(X);
240 }
241 DebugValueUser(const DebugValueUser &X) {
242 DebugValues = X.DebugValues;
243 trackDebugValues();
244 }
245
246 DebugValueUser &operator=(DebugValueUser &&X) {
247 if (&X == this)
248 return *this;
249
250 untrackDebugValues();
251 DebugValues = X.DebugValues;
252 retrackDebugValues(X);
253 return *this;
254 }
255
256 DebugValueUser &operator=(const DebugValueUser &X) {
257 if (&X == this)
258 return *this;
259
260 untrackDebugValues();
261 DebugValues = X.DebugValues;
262 trackDebugValues();
263 return *this;
264 }
265
266 ~DebugValueUser() { untrackDebugValues(); }
267
268 void resetDebugValues() {
269 untrackDebugValues();
    DebugValues.fill(nullptr);
271 }
272
273 void resetDebugValue(size_t Idx, Metadata *DebugValue) {
274 assert(Idx < 3 && "Invalid debug value index.");
275 untrackDebugValue(Idx);
276 DebugValues[Idx] = DebugValue;
277 trackDebugValue(Idx);
278 }
279
280 bool operator==(const DebugValueUser &X) const {
281 return DebugValues == X.DebugValues;
282 }
283 bool operator!=(const DebugValueUser &X) const {
284 return DebugValues != X.DebugValues;
285 }
286
287private:
288 void trackDebugValue(size_t Idx);
289 void trackDebugValues();
290
291 void untrackDebugValue(size_t Idx);
292 void untrackDebugValues();
293
294 void retrackDebugValues(DebugValueUser &X);
295};
296
297/// API for tracking metadata references through RAUW and deletion.
298///
299/// Shared API for updating \a Metadata pointers in subclasses that support
300/// RAUW.
301///
302/// This API is not meant to be used directly. See \a TrackingMDRef for a
303/// user-friendly tracking reference.
304class MetadataTracking {
305public:
306 /// Track the reference to metadata.
307 ///
308 /// Register \c MD with \c *MD, if the subclass supports tracking. If \c *MD
309 /// gets RAUW'ed, \c MD will be updated to the new address. If \c *MD gets
310 /// deleted, \c MD will be set to \c nullptr.
311 ///
312 /// If tracking isn't supported, \c *MD will not change.
313 ///
314 /// \return true iff tracking is supported by \c MD.
315 static bool track(Metadata *&MD) {
    return track(&MD, *MD, static_cast<Metadata *>(nullptr));
317 }
318
319 /// Track the reference to metadata for \a Metadata.
320 ///
321 /// As \a track(Metadata*&), but with support for calling back to \c Owner to
322 /// tell it that its operand changed. This could trigger \c Owner being
323 /// re-uniqued.
324 static bool track(void *Ref, Metadata &MD, Metadata &Owner) {
    return track(Ref, MD, &Owner);
326 }
327
328 /// Track the reference to metadata for \a MetadataAsValue.
329 ///
330 /// As \a track(Metadata*&), but with support for calling back to \c Owner to
331 /// tell it that its operand changed. This could trigger \c Owner being
332 /// re-uniqued.
333 static bool track(void *Ref, Metadata &MD, MetadataAsValue &Owner) {
    return track(Ref, MD, &Owner);
335 }
336
337 /// Track the reference to metadata for \a DebugValueUser.
338 ///
339 /// As \a track(Metadata*&), but with support for calling back to \c Owner to
340 /// tell it that its operand changed. This could trigger \c Owner being
341 /// re-uniqued.
342 static bool track(void *Ref, Metadata &MD, DebugValueUser &Owner) {
    return track(Ref, MD, &Owner);
344 }
345
346 /// Stop tracking a reference to metadata.
347 ///
348 /// Stops \c *MD from tracking \c MD.
  static void untrack(Metadata *&MD) { untrack(&MD, *MD); }
350 static void untrack(void *Ref, Metadata &MD);
351
352 /// Move tracking from one reference to another.
353 ///
354 /// Semantically equivalent to \c untrack(MD) followed by \c track(New),
355 /// except that ownership callbacks are maintained.
356 ///
357 /// Note: it is an error if \c *MD does not equal \c New.
358 ///
359 /// \return true iff tracking is supported by \c MD.
360 static bool retrack(Metadata *&MD, Metadata *&New) {
    return retrack(&MD, *MD, &New);
362 }
363 static bool retrack(void *Ref, Metadata &MD, void *New);
364
365 /// Check whether metadata is replaceable.
366 static bool isReplaceable(const Metadata &MD);
367
368 using OwnerTy = PointerUnion<MetadataAsValue *, Metadata *, DebugValueUser *>;
369
370private:
371 /// Track a reference to metadata for an owner.
372 ///
373 /// Generalized version of tracking.
374 static bool track(void *Ref, Metadata &MD, OwnerTy Owner);
375};
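// For illustration only, a sketch of the tracking semantics documented above
// (normally used indirectly via TrackingMDRef). Temp is a hypothetical raw
// pointer to a replaceable node, e.g. one obtained from
// MDNode::getTemporary(...).get():
//
//   Metadata *Ref = Temp;
//   MetadataTracking::track(Ref);   // Ref now follows RAUW of *Temp.
//   // If *Temp is RAUW'ed, Ref is updated; if it is deleted, Ref becomes null.
//   MetadataTracking::untrack(Ref); // Stop following updates.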
376
377/// Shared implementation of use-lists for replaceable metadata.
378///
379/// Most metadata cannot be RAUW'ed. This is a shared implementation of
380/// use-lists and associated API for the three that support it (
381/// \a ValueAsMetadata, \a TempMDNode, and \a DIArgList).
382class ReplaceableMetadataImpl {
383 friend class MetadataTracking;
384
385public:
386 using OwnerTy = MetadataTracking::OwnerTy;
387
388private:
389 LLVMContext &Context;
390 uint64_t NextIndex = 0;
391 SmallDenseMap<void *, std::pair<OwnerTy, uint64_t>, 4> UseMap;
392
393public:
394 ReplaceableMetadataImpl(LLVMContext &Context) : Context(Context) {}
395
396 ~ReplaceableMetadataImpl() {
397 assert(UseMap.empty() && "Cannot destroy in-use replaceable metadata");
398 }
399
400 LLVMContext &getContext() const { return Context; }
401
402 /// Replace all uses of this with MD.
403 ///
404 /// Replace all uses of this with \c MD, which is allowed to be null.
405 void replaceAllUsesWith(Metadata *MD);
406 /// Replace all uses of the constant with Undef in debug info metadata
407 static void SalvageDebugInfo(const Constant &C);
408 /// Returns the list of all DIArgList users of this.
409 SmallVector<Metadata *> getAllArgListUsers();
410 /// Returns the list of all DbgVariableRecord users of this.
411 SmallVector<DbgVariableRecord *> getAllDbgVariableRecordUsers();
412
413 /// Resolve all uses of this.
414 ///
415 /// Resolve all uses of this, turning off RAUW permanently. If \c
416 /// ResolveUsers, call \a MDNode::resolve() on any users whose last operand
417 /// is resolved.
418 void resolveAllUses(bool ResolveUsers = true);
419
420 unsigned getNumUses() const { return UseMap.size(); }
421
422private:
423 void addRef(void *Ref, OwnerTy Owner);
424 void dropRef(void *Ref);
425 void moveRef(void *Ref, void *New, const Metadata &MD);
426
427 /// Lazily construct RAUW support on MD.
428 ///
429 /// If this is an unresolved MDNode, RAUW support will be created on-demand.
430 /// ValueAsMetadata always has RAUW support.
431 static ReplaceableMetadataImpl *getOrCreate(Metadata &MD);
432
433 /// Get RAUW support on MD, if it exists.
434 static ReplaceableMetadataImpl *getIfExists(Metadata &MD);
435
436 /// Check whether this node will support RAUW.
437 ///
438 /// Returns \c true unless getOrCreate() would return null.
439 static bool isReplaceable(const Metadata &MD);
440};
441
442/// Value wrapper in the Metadata hierarchy.
443///
444/// This is a custom value handle that allows other metadata to refer to
445/// classes in the Value hierarchy.
446///
447/// Because of full uniquing support, each value is only wrapped by a single \a
448/// ValueAsMetadata object, so the lookup maps are far more efficient than
449/// those using ValueHandleBase.
450class ValueAsMetadata : public Metadata, ReplaceableMetadataImpl {
451 friend class ReplaceableMetadataImpl;
452 friend class LLVMContextImpl;
453
454 Value *V;
455
456 /// Drop users without RAUW (during teardown).
457 void dropUsers() {
    ReplaceableMetadataImpl::resolveAllUses(/* ResolveUsers */ false);
459 }
460
461protected:
462 ValueAsMetadata(unsigned ID, Value *V)
463 : Metadata(ID, Uniqued), ReplaceableMetadataImpl(V->getContext()), V(V) {
464 assert(V && "Expected valid value");
465 }
466
467 ~ValueAsMetadata() = default;
468
469public:
470 static ValueAsMetadata *get(Value *V);
471
472 static ConstantAsMetadata *getConstant(Value *C) {
    return cast<ConstantAsMetadata>(get(C));
474 }
475
476 static LocalAsMetadata *getLocal(Value *Local) {
    return cast<LocalAsMetadata>(get(Local));
478 }
479
480 static ValueAsMetadata *getIfExists(Value *V);
481
482 static ConstantAsMetadata *getConstantIfExists(Value *C) {
    return cast_or_null<ConstantAsMetadata>(getIfExists(C));
484 }
485
486 static LocalAsMetadata *getLocalIfExists(Value *Local) {
    return cast_or_null<LocalAsMetadata>(getIfExists(Local));
488 }
489
490 Value *getValue() const { return V; }
491 Type *getType() const { return V->getType(); }
492 LLVMContext &getContext() const { return V->getContext(); }
493
494 SmallVector<Metadata *> getAllArgListUsers() {
495 return ReplaceableMetadataImpl::getAllArgListUsers();
496 }
497 SmallVector<DbgVariableRecord *> getAllDbgVariableRecordUsers() {
498 return ReplaceableMetadataImpl::getAllDbgVariableRecordUsers();
499 }
500
501 static void handleDeletion(Value *V);
502 static void handleRAUW(Value *From, Value *To);
503
504protected:
505 /// Handle collisions after \a Value::replaceAllUsesWith().
506 ///
507 /// RAUW isn't supported directly for \a ValueAsMetadata, but if the wrapped
508 /// \a Value gets RAUW'ed and the target already exists, this is used to
509 /// merge the two metadata nodes.
510 void replaceAllUsesWith(Metadata *MD) {
511 ReplaceableMetadataImpl::replaceAllUsesWith(MD);
512 }
513
514public:
515 static bool classof(const Metadata *MD) {
516 return MD->getMetadataID() == LocalAsMetadataKind ||
517 MD->getMetadataID() == ConstantAsMetadataKind;
518 }
519};
520
521class ConstantAsMetadata : public ValueAsMetadata {
522 friend class ValueAsMetadata;
523
524 ConstantAsMetadata(Constant *C)
525 : ValueAsMetadata(ConstantAsMetadataKind, C) {}
526
527public:
528 static ConstantAsMetadata *get(Constant *C) {
529 return ValueAsMetadata::getConstant(C);
530 }
531
532 static ConstantAsMetadata *getIfExists(Constant *C) {
533 return ValueAsMetadata::getConstantIfExists(C);
534 }
535
536 Constant *getValue() const {
    return cast<Constant>(ValueAsMetadata::getValue());
538 }
539
540 static bool classof(const Metadata *MD) {
541 return MD->getMetadataID() == ConstantAsMetadataKind;
542 }
543};
544
545class LocalAsMetadata : public ValueAsMetadata {
546 friend class ValueAsMetadata;
547
548 LocalAsMetadata(Value *Local)
549 : ValueAsMetadata(LocalAsMetadataKind, Local) {
550 assert(!isa<Constant>(Local) && "Expected local value");
551 }
552
553public:
554 static LocalAsMetadata *get(Value *Local) {
555 return ValueAsMetadata::getLocal(Local);
556 }
557
558 static LocalAsMetadata *getIfExists(Value *Local) {
559 return ValueAsMetadata::getLocalIfExists(Local);
560 }
561
562 static bool classof(const Metadata *MD) {
563 return MD->getMetadataID() == LocalAsMetadataKind;
564 }
565};
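// For illustration only, a sketch of the value wrappers above, assuming a
// hypothetical Constant *C and a non-constant Value *Local:
//
//   ConstantAsMetadata *CM = ConstantAsMetadata::get(C);
//   LocalAsMetadata *LM = LocalAsMetadata::get(Local);
//   Value *Unwrapped = CM->getValue(); // Unwrapped == C.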
566
567/// Transitional API for extracting constants from Metadata.
568///
569/// This namespace contains transitional functions for metadata that points to
570/// \a Constants.
571///
572/// In prehistory -- when metadata was a subclass of \a Value -- \a MDNode
/// operands could refer to any \a Value. There was a lot of code like this:
574///
575/// \code
576/// MDNode *N = ...;
577/// auto *CI = dyn_cast<ConstantInt>(N->getOperand(2));
578/// \endcode
579///
580/// Now that \a Value and \a Metadata are in separate hierarchies, maintaining
581/// the semantics for \a isa(), \a cast(), \a dyn_cast() (etc.) requires three
582/// steps: cast in the \a Metadata hierarchy, extraction of the \a Value, and
583/// cast in the \a Value hierarchy. Besides creating boiler-plate, this
584/// requires subtle control flow changes.
585///
586/// The end-goal is to create a new type of metadata, called (e.g.) \a MDInt,
587/// so that metadata can refer to numbers without traversing a bridge to the \a
588/// Value hierarchy. In this final state, the code above would look like this:
589///
590/// \code
591/// MDNode *N = ...;
592/// auto *MI = dyn_cast<MDInt>(N->getOperand(2));
593/// \endcode
594///
595/// The API in this namespace supports the transition. \a MDInt doesn't exist
596/// yet, and even once it does, changing each metadata schema to use it is its
597/// own mini-project. In the meantime this API prevents us from introducing
598/// complex and bug-prone control flow that will disappear in the end. In
599/// particular, the above code looks like this:
600///
601/// \code
602/// MDNode *N = ...;
603/// auto *CI = mdconst::dyn_extract<ConstantInt>(N->getOperand(2));
604/// \endcode
605///
606/// The full set of provided functions includes:
607///
608/// mdconst::hasa <=> isa
609/// mdconst::extract <=> cast
610/// mdconst::extract_or_null <=> cast_or_null
611/// mdconst::dyn_extract <=> dyn_cast
612/// mdconst::dyn_extract_or_null <=> dyn_cast_or_null
613///
614/// The target of the cast must be a subclass of \a Constant.
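///
/// For illustration only, a sketch of the mapping above, assuming operand 2 of
/// \c N wraps a \a ConstantInt:
///
/// \code
///     MDNode *N = ...;
///     if (mdconst::hasa<ConstantInt>(N->getOperand(2))) {
///       ConstantInt *CI = mdconst::extract<ConstantInt>(N->getOperand(2));
///       // ...
///     }
///     auto *OrNull = mdconst::dyn_extract_or_null<ConstantInt>(N->getOperand(2));
/// \endcode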
615namespace mdconst {
616
617namespace detail {
618
619template <class T> T &make();
620template <class T, class Result> struct HasDereference {
621 using Yes = char[1];
622 using No = char[2];
623 template <size_t N> struct SFINAE {};
624
625 template <class U, class V>
626 static Yes &hasDereference(SFINAE<sizeof(static_cast<V>(*make<U>()))> * = 0);
627 template <class U, class V> static No &hasDereference(...);
628
629 static const bool value =
630 sizeof(hasDereference<T, Result>(nullptr)) == sizeof(Yes);
631};
632template <class V, class M> struct IsValidPointer {
633 static const bool value = std::is_base_of<Constant, V>::value &&
634 HasDereference<M, const Metadata &>::value;
635};
636template <class V, class M> struct IsValidReference {
637 static const bool value = std::is_base_of<Constant, V>::value &&
638 std::is_convertible<M, const Metadata &>::value;
639};
640
641} // end namespace detail
642
643/// Check whether Metadata has a Value.
644///
/// As an analogue to \a isa(), check whether \c MD contains a \a Value of
/// type \c X.
647template <class X, class Y>
648inline std::enable_if_t<detail::IsValidPointer<X, Y>::value, bool>
649hasa(Y &&MD) {
650 assert(MD && "Null pointer sent into hasa");
651 if (auto *V = dyn_cast<ConstantAsMetadata>(MD))
652 return isa<X>(V->getValue());
653 return false;
654}
655template <class X, class Y>
656inline std::enable_if_t<detail::IsValidReference<X, Y &>::value, bool>
657hasa(Y &MD) {
658 return hasa(&MD);
659}
660
661/// Extract a Value from Metadata.
662///
663/// As an analogue to \a cast(), extract the \a Value subclass \c X from \c MD.
664template <class X, class Y>
665inline std::enable_if_t<detail::IsValidPointer<X, Y>::value, X *>
666extract(Y &&MD) {
667 return cast<X>(cast<ConstantAsMetadata>(MD)->getValue());
668}
669template <class X, class Y>
670inline std::enable_if_t<detail::IsValidReference<X, Y &>::value, X *>
671extract(Y &MD) {
672 return extract(&MD);
673}
674
675/// Extract a Value from Metadata, allowing null.
676///
677/// As an analogue to \a cast_or_null(), extract the \a Value subclass \c X
678/// from \c MD, allowing \c MD to be null.
679template <class X, class Y>
680inline std::enable_if_t<detail::IsValidPointer<X, Y>::value, X *>
681extract_or_null(Y &&MD) {
682 if (auto *V = cast_or_null<ConstantAsMetadata>(MD))
683 return cast<X>(V->getValue());
684 return nullptr;
685}
686
687/// Extract a Value from Metadata, if any.
688///
689/// As an analogue to \a dyn_cast_or_null(), extract the \a Value subclass \c X
690/// from \c MD, return null if \c MD doesn't contain a \a Value or if the \a
691/// Value it does contain is of the wrong subclass.
692template <class X, class Y>
693inline std::enable_if_t<detail::IsValidPointer<X, Y>::value, X *>
694dyn_extract(Y &&MD) {
695 if (auto *V = dyn_cast<ConstantAsMetadata>(MD))
696 return dyn_cast<X>(V->getValue());
697 return nullptr;
698}
699
700/// Extract a Value from Metadata, if any, allowing null.
701///
702/// As an analogue to \a dyn_cast_or_null(), extract the \a Value subclass \c X
703/// from \c MD, return null if \c MD doesn't contain a \a Value or if the \a
704/// Value it does contain is of the wrong subclass, allowing \c MD to be null.
705template <class X, class Y>
706inline std::enable_if_t<detail::IsValidPointer<X, Y>::value, X *>
707dyn_extract_or_null(Y &&MD) {
708 if (auto *V = dyn_cast_or_null<ConstantAsMetadata>(MD))
709 return dyn_cast<X>(V->getValue());
710 return nullptr;
711}
712
713} // end namespace mdconst
714
715//===----------------------------------------------------------------------===//
716/// A single uniqued string.
717///
718/// These are used to efficiently contain a byte sequence for metadata.
719/// MDString is always unnamed.
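///
/// For illustration only, a sketch assuming a hypothetical context \c Ctx:
///
/// \code
///     MDString *S = MDString::get(Ctx, "key");
///     StringRef Str = S->getString(); // "key"
///     assert(S == MDString::get(Ctx, "key") && "MDStrings are uniqued");
/// \endcode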
720class MDString : public Metadata {
721 friend class StringMapEntryStorage<MDString>;
722
723 StringMapEntry<MDString> *Entry = nullptr;
724
725 MDString() : Metadata(MDStringKind, Uniqued) {}
726
727public:
728 MDString(const MDString &) = delete;
729 MDString &operator=(MDString &&) = delete;
730 MDString &operator=(const MDString &) = delete;
731
732 static MDString *get(LLVMContext &Context, StringRef Str);
733 static MDString *get(LLVMContext &Context, const char *Str) {
    return get(Context, Str ? StringRef(Str) : StringRef());
735 }
736
737 StringRef getString() const;
738
739 unsigned getLength() const { return (unsigned)getString().size(); }
740
741 using iterator = StringRef::iterator;
742
743 /// Pointer to the first byte of the string.
744 iterator begin() const { return getString().begin(); }
745
746 /// Pointer to one byte past the end of the string.
747 iterator end() const { return getString().end(); }
748
749 const unsigned char *bytes_begin() const { return getString().bytes_begin(); }
750 const unsigned char *bytes_end() const { return getString().bytes_end(); }
751
752 /// Methods for support type inquiry through isa, cast, and dyn_cast.
753 static bool classof(const Metadata *MD) {
754 return MD->getMetadataID() == MDStringKind;
755 }
756};
757
758/// A collection of metadata nodes that might be associated with a
759/// memory access used by the alias-analysis infrastructure.
760struct AAMDNodes {
761 explicit AAMDNodes() = default;
762 explicit AAMDNodes(MDNode *T, MDNode *TS, MDNode *S, MDNode *N)
763 : TBAA(T), TBAAStruct(TS), Scope(S), NoAlias(N) {}
764
765 bool operator==(const AAMDNodes &A) const {
766 return TBAA == A.TBAA && TBAAStruct == A.TBAAStruct && Scope == A.Scope &&
767 NoAlias == A.NoAlias;
768 }
769
770 bool operator!=(const AAMDNodes &A) const { return !(*this == A); }
771
772 explicit operator bool() const {
773 return TBAA || TBAAStruct || Scope || NoAlias;
774 }
775
776 /// The tag for type-based alias analysis.
777 MDNode *TBAA = nullptr;
778
779 /// The tag for type-based alias analysis (tbaa struct).
780 MDNode *TBAAStruct = nullptr;
781
782 /// The tag for alias scope specification (used with noalias).
783 MDNode *Scope = nullptr;
784
785 /// The tag specifying the noalias scope.
786 MDNode *NoAlias = nullptr;
787
788 // Shift tbaa Metadata node to start off bytes later
789 static MDNode *shiftTBAA(MDNode *M, size_t off);
790
791 // Shift tbaa.struct Metadata node to start off bytes later
792 static MDNode *shiftTBAAStruct(MDNode *M, size_t off);
793
794 // Extend tbaa Metadata node to apply to a series of bytes of length len.
795 // A size of -1 denotes an unknown size.
796 static MDNode *extendToTBAA(MDNode *TBAA, ssize_t len);
797
798 /// Given two sets of AAMDNodes that apply to the same pointer,
799 /// give the best AAMDNodes that are compatible with both (i.e. a set of
800 /// nodes whose allowable aliasing conclusions are a subset of those
801 /// allowable by both of the inputs). However, for efficiency
802 /// reasons, do not create any new MDNodes.
803 AAMDNodes intersect(const AAMDNodes &Other) const {
804 AAMDNodes Result;
805 Result.TBAA = Other.TBAA == TBAA ? TBAA : nullptr;
806 Result.TBAAStruct = Other.TBAAStruct == TBAAStruct ? TBAAStruct : nullptr;
807 Result.Scope = Other.Scope == Scope ? Scope : nullptr;
808 Result.NoAlias = Other.NoAlias == NoAlias ? NoAlias : nullptr;
809 return Result;
810 }
811
812 /// Create a new AAMDNode that describes this AAMDNode after applying a
813 /// constant offset to the start of the pointer.
814 AAMDNodes shift(size_t Offset) const {
815 AAMDNodes Result;
    Result.TBAA = TBAA ? shiftTBAA(TBAA, Offset) : nullptr;
    Result.TBAAStruct =
        TBAAStruct ? shiftTBAAStruct(TBAAStruct, Offset) : nullptr;
819 Result.Scope = Scope;
820 Result.NoAlias = NoAlias;
821 return Result;
822 }
823
824 /// Create a new AAMDNode that describes this AAMDNode after extending it to
825 /// apply to a series of bytes of length Len. A size of -1 denotes an unknown
826 /// size.
827 AAMDNodes extendTo(ssize_t Len) const {
828 AAMDNodes Result;
    Result.TBAA = TBAA ? extendToTBAA(TBAA, Len) : nullptr;
830 // tbaa.struct contains (offset, size, type) triples. Extending the length
831 // of the tbaa.struct doesn't require changing this (though more information
832 // could be provided by adding more triples at subsequent lengths).
833 Result.TBAAStruct = TBAAStruct;
834 Result.Scope = Scope;
835 Result.NoAlias = NoAlias;
836 return Result;
837 }
838
839 /// Given two sets of AAMDNodes applying to potentially different locations,
840 /// determine the best AAMDNodes that apply to both.
841 AAMDNodes merge(const AAMDNodes &Other) const;
842
843 /// Determine the best AAMDNodes after concatenating two different locations
844 /// together. Different from `merge`, where different locations should
845 /// overlap each other, `concat` puts non-overlapping locations together.
846 AAMDNodes concat(const AAMDNodes &Other) const;
847
848 /// Create a new AAMDNode for accessing \p AccessSize bytes of this AAMDNode.
  /// If this AAMDNode has !tbaa.struct and \p AccessSize matches the size of
  /// the field at offset 0, get the TBAA tag describing the accessed field.
850 /// field at offset 0, get the TBAA tag describing the accessed field.
851 AAMDNodes adjustForAccess(unsigned AccessSize);
852 AAMDNodes adjustForAccess(size_t Offset, Type *AccessTy,
853 const DataLayout &DL);
854 AAMDNodes adjustForAccess(size_t Offset, unsigned AccessSize);
855};
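// For illustration only, a sketch of combining the alias-analysis tags above,
// assuming hypothetical AAMDNodes AA1 and AA2 that describe the same pointer:
//
//   AAMDNodes Common = AA1.intersect(AA2); // Tags both sets agree on.
//   AAMDNodes Shifted = AA1.shift(4);      // Same tags, pointer offset by 4.
//   AAMDNodes Extended = AA1.extendTo(-1); // Access of unknown length.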
856
857// Specialize DenseMapInfo for AAMDNodes.
858template<>
859struct DenseMapInfo<AAMDNodes> {
860 static inline AAMDNodes getEmptyKey() {
861 return AAMDNodes(DenseMapInfo<MDNode *>::getEmptyKey(),
862 nullptr, nullptr, nullptr);
863 }
864
865 static inline AAMDNodes getTombstoneKey() {
866 return AAMDNodes(DenseMapInfo<MDNode *>::getTombstoneKey(),
867 nullptr, nullptr, nullptr);
868 }
869
870 static unsigned getHashValue(const AAMDNodes &Val) {
    return DenseMapInfo<MDNode *>::getHashValue(Val.TBAA) ^
           DenseMapInfo<MDNode *>::getHashValue(Val.TBAAStruct) ^
           DenseMapInfo<MDNode *>::getHashValue(Val.Scope) ^
           DenseMapInfo<MDNode *>::getHashValue(Val.NoAlias);
875 }
876
877 static bool isEqual(const AAMDNodes &LHS, const AAMDNodes &RHS) {
878 return LHS == RHS;
879 }
880};
881
882/// Tracking metadata reference owned by Metadata.
883///
884/// Similar to \a TrackingMDRef, but it's expected to be owned by an instance
885/// of \a Metadata, which has the option of registering itself for callbacks to
886/// re-unique itself.
887///
888/// In particular, this is used by \a MDNode.
889class MDOperand {
890 Metadata *MD = nullptr;
891
892public:
893 MDOperand() = default;
894 MDOperand(const MDOperand &) = delete;
895 MDOperand(MDOperand &&Op) {
896 MD = Op.MD;
897 if (MD)
      (void)MetadataTracking::retrack(Op.MD, MD);
899 Op.MD = nullptr;
900 }
901 MDOperand &operator=(const MDOperand &) = delete;
902 MDOperand &operator=(MDOperand &&Op) {
903 MD = Op.MD;
904 if (MD)
      (void)MetadataTracking::retrack(Op.MD, MD);
906 Op.MD = nullptr;
907 return *this;
908 }
909
910 // Check if MDOperand is of type MDString and equals `Str`.
911 bool equalsStr(StringRef Str) const {
    return isa<MDString>(this->get()) &&
           cast<MDString>(this->get())->getString() == Str;
914 }
915
916 ~MDOperand() { untrack(); }
917
918 Metadata *get() const { return MD; }
919 operator Metadata *() const { return get(); }
920 Metadata *operator->() const { return get(); }
921 Metadata &operator*() const { return *get(); }
922
923 void reset() {
924 untrack();
925 MD = nullptr;
926 }
927 void reset(Metadata *MD, Metadata *Owner) {
928 untrack();
929 this->MD = MD;
930 track(Owner);
931 }
932
933private:
934 void track(Metadata *Owner) {
935 if (MD) {
936 if (Owner)
        MetadataTracking::track(this, *MD, *Owner);
938 else
939 MetadataTracking::track(MD);
940 }
941 }
942
943 void untrack() {
944 assert(static_cast<void *>(this) == &MD && "Expected same address");
945 if (MD)
946 MetadataTracking::untrack(MD);
947 }
948};
949
950template <> struct simplify_type<MDOperand> {
951 using SimpleType = Metadata *;
952
953 static SimpleType getSimplifiedValue(MDOperand &MD) { return MD.get(); }
954};
955
956template <> struct simplify_type<const MDOperand> {
957 using SimpleType = Metadata *;
958
959 static SimpleType getSimplifiedValue(const MDOperand &MD) { return MD.get(); }
960};
961
962/// Pointer to the context, with optional RAUW support.
963///
964/// Either a raw (non-null) pointer to the \a LLVMContext, or an owned pointer
965/// to \a ReplaceableMetadataImpl (which has a reference to \a LLVMContext).
966class ContextAndReplaceableUses {
967 PointerUnion<LLVMContext *, ReplaceableMetadataImpl *> Ptr;
968
969public:
970 ContextAndReplaceableUses(LLVMContext &Context) : Ptr(&Context) {}
971 ContextAndReplaceableUses(
972 std::unique_ptr<ReplaceableMetadataImpl> ReplaceableUses)
973 : Ptr(ReplaceableUses.release()) {
974 assert(getReplaceableUses() && "Expected non-null replaceable uses");
975 }
976 ContextAndReplaceableUses() = delete;
977 ContextAndReplaceableUses(ContextAndReplaceableUses &&) = delete;
978 ContextAndReplaceableUses(const ContextAndReplaceableUses &) = delete;
979 ContextAndReplaceableUses &operator=(ContextAndReplaceableUses &&) = delete;
980 ContextAndReplaceableUses &
981 operator=(const ContextAndReplaceableUses &) = delete;
982 ~ContextAndReplaceableUses() { delete getReplaceableUses(); }
983
984 operator LLVMContext &() { return getContext(); }
985
986 /// Whether this contains RAUW support.
987 bool hasReplaceableUses() const {
    return isa<ReplaceableMetadataImpl *>(Ptr);
989 }
990
991 LLVMContext &getContext() const {
992 if (hasReplaceableUses())
993 return getReplaceableUses()->getContext();
    return *cast<LLVMContext *>(Ptr);
995 }
996
997 ReplaceableMetadataImpl *getReplaceableUses() const {
998 if (hasReplaceableUses())
      return cast<ReplaceableMetadataImpl *>(Ptr);
1000 return nullptr;
1001 }
1002
1003 /// Ensure that this has RAUW support, and then return it.
1004 ReplaceableMetadataImpl *getOrCreateReplaceableUses() {
1005 if (!hasReplaceableUses())
      makeReplaceable(std::make_unique<ReplaceableMetadataImpl>(getContext()));
1007 return getReplaceableUses();
1008 }
1009
1010 /// Assign RAUW support to this.
1011 ///
1012 /// Make this replaceable, taking ownership of \c ReplaceableUses (which must
1013 /// not be null).
1014 void
1015 makeReplaceable(std::unique_ptr<ReplaceableMetadataImpl> ReplaceableUses) {
1016 assert(ReplaceableUses && "Expected non-null replaceable uses");
1017 assert(&ReplaceableUses->getContext() == &getContext() &&
1018 "Expected same context");
1019 delete getReplaceableUses();
1020 Ptr = ReplaceableUses.release();
1021 }
1022
1023 /// Drop RAUW support.
1024 ///
1025 /// Cede ownership of RAUW support, returning it.
1026 std::unique_ptr<ReplaceableMetadataImpl> takeReplaceableUses() {
1027 assert(hasReplaceableUses() && "Expected to own replaceable uses");
1028 std::unique_ptr<ReplaceableMetadataImpl> ReplaceableUses(
1029 getReplaceableUses());
1030 Ptr = &ReplaceableUses->getContext();
1031 return ReplaceableUses;
1032 }
1033};
1034
1035struct TempMDNodeDeleter {
1036 inline void operator()(MDNode *Node) const;
1037};
1038
1039#define HANDLE_MDNODE_LEAF(CLASS) \
1040 using Temp##CLASS = std::unique_ptr<CLASS, TempMDNodeDeleter>;
1041#define HANDLE_MDNODE_BRANCH(CLASS) HANDLE_MDNODE_LEAF(CLASS)
1042#include "llvm/IR/Metadata.def"
1043
1044/// Metadata node.
1045///
1046/// Metadata nodes can be uniqued, like constants, or distinct. Temporary
1047/// metadata nodes (with full support for RAUW) can be used to delay uniquing
1048/// until forward references are known. The basic metadata node is an \a
1049/// MDTuple.
1050///
1051/// There is limited support for RAUW at construction time. At construction
1052/// time, if any operand is a temporary node (or an unresolved uniqued node,
1053/// which indicates a transitive temporary operand), the node itself will be
1054/// unresolved. As soon as all operands become resolved, it will drop RAUW
1055/// support permanently.
1056///
1057/// If an unresolved node is part of a cycle, \a resolveCycles() needs
1058/// to be called on some member of the cycle once all temporary nodes have been
1059/// replaced.
1060///
1061/// MDNodes can be large or small, as well as resizable or non-resizable.
1062/// Large MDNodes' operands are allocated in a separate storage vector,
1063/// whereas small MDNodes' operands are co-allocated. Distinct and temporary
/// MDNodes are resizable, but only MDTuples support this capability.
1065///
1066/// Clients can add operands to resizable MDNodes using push_back().
1067class MDNode : public Metadata {
1068 friend class ReplaceableMetadataImpl;
1069 friend class LLVMContextImpl;
1070 friend class DIAssignID;
1071
  /// The header that is co-allocated with an MDNode along with its "small"
1073 /// operands. It is located immediately before the main body of the node.
1074 /// The operands are in turn located immediately before the header.
1075 /// For resizable MDNodes, the space for the storage vector is also allocated
1076 /// immediately before the header, overlapping with the operands.
  /// Explicitly set alignment because bitfields by default have an
1078 /// alignment of 1 on z/OS.
1079 struct alignas(alignof(size_t)) Header {
1080 bool IsResizable : 1;
1081 bool IsLarge : 1;
1082 size_t SmallSize : 4;
1083 size_t SmallNumOps : 4;
1084 size_t : sizeof(size_t) * CHAR_BIT - 10;
1085
1086 unsigned NumUnresolved = 0;
1087 using LargeStorageVector = SmallVector<MDOperand, 0>;
1088
1089 static constexpr size_t NumOpsFitInVector =
1090 sizeof(LargeStorageVector) / sizeof(MDOperand);
1091 static_assert(
1092 NumOpsFitInVector * sizeof(MDOperand) == sizeof(LargeStorageVector),
1093 "sizeof(LargeStorageVector) must be a multiple of sizeof(MDOperand)");
1094
1095 static constexpr size_t MaxSmallSize = 15;
1096
1097 static constexpr size_t getOpSize(unsigned NumOps) {
1098 return sizeof(MDOperand) * NumOps;
1099 }
1100 /// Returns the number of operands the node has space for based on its
1101 /// allocation characteristics.
1102 static size_t getSmallSize(size_t NumOps, bool IsResizable, bool IsLarge) {
1103 return IsLarge ? NumOpsFitInVector
                     : std::max(NumOps, NumOpsFitInVector * IsResizable);
1105 }
1106 /// Returns the number of bytes allocated for operands and header.
1107 static size_t getAllocSize(StorageType Storage, size_t NumOps) {
      return getOpSize(
                 getSmallSize(NumOps, isResizable(Storage), isLarge(NumOps))) +
             sizeof(Header);
1111 }
1112
1113 /// Only temporary and distinct nodes are resizable.
1114 static bool isResizable(StorageType Storage) { return Storage != Uniqued; }
1115 static bool isLarge(size_t NumOps) { return NumOps > MaxSmallSize; }
1116
1117 size_t getAllocSize() const {
      return getOpSize(SmallSize) + sizeof(Header);
1119 }
1120 void *getAllocation() {
1121 return reinterpret_cast<char *>(this + 1) -
             alignTo(getAllocSize(), alignof(uint64_t));
1123 }
1124
1125 void *getLargePtr() const {
1126 static_assert(alignof(LargeStorageVector) <= alignof(Header),
1127 "LargeStorageVector too strongly aligned");
1128 return reinterpret_cast<char *>(const_cast<Header *>(this)) -
1129 sizeof(LargeStorageVector);
1130 }
1131
1132 void *getSmallPtr();
1133
1134 LargeStorageVector &getLarge() {
1135 assert(IsLarge);
1136 return *reinterpret_cast<LargeStorageVector *>(getLargePtr());
1137 }
1138
1139 const LargeStorageVector &getLarge() const {
1140 assert(IsLarge);
1141 return *reinterpret_cast<const LargeStorageVector *>(getLargePtr());
1142 }
1143
1144 void resizeSmall(size_t NumOps);
1145 void resizeSmallToLarge(size_t NumOps);
1146 void resize(size_t NumOps);
1147
1148 explicit Header(size_t NumOps, StorageType Storage);
1149 ~Header();
1150
1151 MutableArrayRef<MDOperand> operands() {
1152 if (IsLarge)
1153 return getLarge();
1154 return MutableArrayRef(
1155 reinterpret_cast<MDOperand *>(this) - SmallSize, SmallNumOps);
1156 }
1157
1158 ArrayRef<MDOperand> operands() const {
1159 if (IsLarge)
1160 return getLarge();
1161 return ArrayRef(reinterpret_cast<const MDOperand *>(this) - SmallSize,
1162 SmallNumOps);
1163 }
1164
1165 unsigned getNumOperands() const {
1166 if (!IsLarge)
1167 return SmallNumOps;
1168 return getLarge().size();
1169 }
1170 };
1171
1172 Header &getHeader() { return *(reinterpret_cast<Header *>(this) - 1); }
1173
1174 const Header &getHeader() const {
1175 return *(reinterpret_cast<const Header *>(this) - 1);
1176 }
1177
1178 ContextAndReplaceableUses Context;
1179
1180protected:
1181 MDNode(LLVMContext &Context, unsigned ID, StorageType Storage,
1182 ArrayRef<Metadata *> Ops1, ArrayRef<Metadata *> Ops2 = std::nullopt);
1183 ~MDNode() = default;
1184
1185 void *operator new(size_t Size, size_t NumOps, StorageType Storage);
1186 void operator delete(void *Mem);
1187
1188 /// Required by std, but never called.
1189 void operator delete(void *, unsigned) {
1190 llvm_unreachable("Constructor throws?");
1191 }
1192
1193 /// Required by std, but never called.
1194 void operator delete(void *, unsigned, bool) {
1195 llvm_unreachable("Constructor throws?");
1196 }
1197
1198 void dropAllReferences();
1199
1200 MDOperand *mutable_begin() { return getHeader().operands().begin(); }
1201 MDOperand *mutable_end() { return getHeader().operands().end(); }
1202
1203 using mutable_op_range = iterator_range<MDOperand *>;
1204
1205 mutable_op_range mutable_operands() {
1206 return mutable_op_range(mutable_begin(), mutable_end());
1207 }
1208
1209public:
1210 MDNode(const MDNode &) = delete;
1211 void operator=(const MDNode &) = delete;
1212 void *operator new(size_t) = delete;
1213
1214 static inline MDTuple *get(LLVMContext &Context, ArrayRef<Metadata *> MDs);
1215 static inline MDTuple *getIfExists(LLVMContext &Context,
1216 ArrayRef<Metadata *> MDs);
1217 static inline MDTuple *getDistinct(LLVMContext &Context,
1218 ArrayRef<Metadata *> MDs);
1219 static inline TempMDTuple getTemporary(LLVMContext &Context,
1220 ArrayRef<Metadata *> MDs);
1221
1222 /// Create a (temporary) clone of this.
1223 TempMDNode clone() const;
1224
1225 /// Deallocate a node created by getTemporary.
1226 ///
1227 /// Calls \c replaceAllUsesWith(nullptr) before deleting, so any remaining
1228 /// references will be reset.
1229 static void deleteTemporary(MDNode *N);
1230
1231 LLVMContext &getContext() const { return Context.getContext(); }
1232
1233 /// Replace a specific operand.
1234 void replaceOperandWith(unsigned I, Metadata *New);
1235
1236 /// Check if node is fully resolved.
1237 ///
1238 /// If \a isTemporary(), this always returns \c false; if \a isDistinct(),
1239 /// this always returns \c true.
1240 ///
1241 /// If \a isUniqued(), returns \c true if this has already dropped RAUW
1242 /// support (because all operands are resolved).
1243 ///
1244 /// As forward declarations are resolved, their containers should get
1245 /// resolved automatically. However, if this (or one of its operands) is
1246 /// involved in a cycle, \a resolveCycles() needs to be called explicitly.
1247 bool isResolved() const { return !isTemporary() && !getNumUnresolved(); }
1248
1249 bool isUniqued() const { return Storage == Uniqued; }
1250 bool isDistinct() const { return Storage == Distinct; }
1251 bool isTemporary() const { return Storage == Temporary; }
1252
1253 bool isReplaceable() const { return isTemporary() || isAlwaysReplaceable(); }
1254 bool isAlwaysReplaceable() const { return getMetadataID() == DIAssignIDKind; }
1255
1256 unsigned getNumTemporaryUses() const {
1257 assert(isTemporary() && "Only for temporaries");
1258 return Context.getReplaceableUses()->getNumUses();
1259 }
1260
1261 /// RAUW a temporary.
1262 ///
1263 /// \pre \a isTemporary() must be \c true.
1264 void replaceAllUsesWith(Metadata *MD) {
1265 assert(isReplaceable() && "Expected temporary/replaceable node");
1266 if (Context.hasReplaceableUses())
1267 Context.getReplaceableUses()->replaceAllUsesWith(MD);
1268 }
1269
1270 /// Resolve cycles.
1271 ///
1272 /// Once all forward declarations have been resolved, force cycles to be
1273 /// resolved.
1274 ///
1275 /// \pre No operands (or operands' operands, etc.) have \a isTemporary().
1276 void resolveCycles();
1277
1278 /// Resolve a unique, unresolved node.
1279 void resolve();
1280
1281 /// Replace a temporary node with a permanent one.
1282 ///
1283 /// Try to create a uniqued version of \c N -- in place, if possible -- and
1284 /// return it. If \c N cannot be uniqued, return a distinct node instead.
1285 template <class T>
1286 static std::enable_if_t<std::is_base_of<MDNode, T>::value, T *>
1287 replaceWithPermanent(std::unique_ptr<T, TempMDNodeDeleter> N) {
1288 return cast<T>(N.release()->replaceWithPermanentImpl());
1289 }
1290
1291 /// Replace a temporary node with a uniqued one.
1292 ///
1293 /// Create a uniqued version of \c N -- in place, if possible -- and return
1294 /// it. Takes ownership of the temporary node.
1295 ///
1296 /// \pre N does not self-reference.
1297 template <class T>
1298 static std::enable_if_t<std::is_base_of<MDNode, T>::value, T *>
1299 replaceWithUniqued(std::unique_ptr<T, TempMDNodeDeleter> N) {
1300 return cast<T>(N.release()->replaceWithUniquedImpl());
1301 }
1302
1303 /// Replace a temporary node with a distinct one.
1304 ///
1305 /// Create a distinct version of \c N -- in place, if possible -- and return
1306 /// it. Takes ownership of the temporary node.
1307 template <class T>
1308 static std::enable_if_t<std::is_base_of<MDNode, T>::value, T *>
1309 replaceWithDistinct(std::unique_ptr<T, TempMDNodeDeleter> N) {
1310 return cast<T>(N.release()->replaceWithDistinctImpl());
1311 }
1312
1313 /// Print in tree shape.
1314 ///
1315 /// Prints definition of \c this in tree shape.
1316 ///
1317 /// If \c M is provided, metadata nodes will be numbered canonically;
1318 /// otherwise, pointer addresses are substituted.
1319 /// @{
1320 void printTree(raw_ostream &OS, const Module *M = nullptr) const;
1321 void printTree(raw_ostream &OS, ModuleSlotTracker &MST,
1322 const Module *M = nullptr) const;
1323 /// @}
1324
1325 /// User-friendly dump in tree shape.
1326 ///
1327 /// If \c M is provided, metadata nodes will be numbered canonically;
1328 /// otherwise, pointer addresses are substituted.
1329 ///
1330 /// Note: this uses an explicit overload instead of default arguments so that
1331 /// the nullptr version is easy to call from a debugger.
1332 ///
1333 /// @{
1334 void dumpTree() const;
1335 void dumpTree(const Module *M) const;
1336 /// @}
1337
1338private:
1339 MDNode *replaceWithPermanentImpl();
1340 MDNode *replaceWithUniquedImpl();
1341 MDNode *replaceWithDistinctImpl();
1342
1343protected:
1344 /// Set an operand.
1345 ///
1346 /// Sets the operand directly, without worrying about uniquing.
1347 void setOperand(unsigned I, Metadata *New);
1348
1349 unsigned getNumUnresolved() const { return getHeader().NumUnresolved; }
1350
1351 void setNumUnresolved(unsigned N) { getHeader().NumUnresolved = N; }
1352 void storeDistinctInContext();
1353 template <class T, class StoreT>
1354 static T *storeImpl(T *N, StorageType Storage, StoreT &Store);
1355 template <class T> static T *storeImpl(T *N, StorageType Storage);
1356
1357 /// Resize the node to hold \a NumOps operands.
1358 ///
1359 /// \pre \a isTemporary() or \a isDistinct()
1360 /// \pre MetadataID == MDTupleKind
1361 void resize(size_t NumOps) {
1362 assert(!isUniqued() && "Resizing is not supported for uniqued nodes");
1363 assert(getMetadataID() == MDTupleKind &&
1364 "Resizing is not supported for this node kind");
1365 getHeader().resize(NumOps);
1366 }
1367
1368private:
1369 void handleChangedOperand(void *Ref, Metadata *New);
1370
1371 /// Drop RAUW support, if any.
1372 void dropReplaceableUses();
1373
1374 void resolveAfterOperandChange(Metadata *Old, Metadata *New);
1375 void decrementUnresolvedOperandCount();
1376 void countUnresolvedOperands();
1377
1378 /// Mutate this to be "uniqued".
1379 ///
1380 /// Mutate this so that \a isUniqued().
1381 /// \pre \a isTemporary().
1382 /// \pre already added to uniquing set.
1383 void makeUniqued();
1384
1385 /// Mutate this to be "distinct".
1386 ///
1387 /// Mutate this so that \a isDistinct().
1388 /// \pre \a isTemporary().
1389 void makeDistinct();
1390
1391 void deleteAsSubclass();
1392 MDNode *uniquify();
1393 void eraseFromStore();
1394
1395 template <class NodeTy> struct HasCachedHash;
1396 template <class NodeTy>
1397 static void dispatchRecalculateHash(NodeTy *N, std::true_type) {
1398 N->recalculateHash();
1399 }
1400 template <class NodeTy>
1401 static void dispatchRecalculateHash(NodeTy *, std::false_type) {}
1402 template <class NodeTy>
1403 static void dispatchResetHash(NodeTy *N, std::true_type) {
1404 N->setHash(0);
1405 }
1406 template <class NodeTy>
1407 static void dispatchResetHash(NodeTy *, std::false_type) {}
1408
1409 /// Merge branch weights from two direct callsites.
1410 static MDNode *mergeDirectCallProfMetadata(MDNode *A, MDNode *B,
1411 const Instruction *AInstr,
1412 const Instruction *BInstr);
1413
1414public:
1415 using op_iterator = const MDOperand *;
1416 using op_range = iterator_range<op_iterator>;
1417
1418 op_iterator op_begin() const {
1419 return const_cast<MDNode *>(this)->mutable_begin();
1420 }
1421
1422 op_iterator op_end() const {
1423 return const_cast<MDNode *>(this)->mutable_end();
1424 }
1425
1426 ArrayRef<MDOperand> operands() const { return getHeader().operands(); }
1427
1428 const MDOperand &getOperand(unsigned I) const {
1429 assert(I < getNumOperands() && "Out of range");
1430 return getHeader().operands()[I];
1431 }
1432
1433 /// Return number of MDNode operands.
1434 unsigned getNumOperands() const { return getHeader().getNumOperands(); }
1435
1436 /// Methods for support type inquiry through isa, cast, and dyn_cast:
1437 static bool classof(const Metadata *MD) {
1438 switch (MD->getMetadataID()) {
1439 default:
1440 return false;
1441#define HANDLE_MDNODE_LEAF(CLASS) \
1442 case CLASS##Kind: \
1443 return true;
1444#include "llvm/IR/Metadata.def"
1445 }
1446 }
1447
1448 /// Check whether MDNode is a vtable access.
1449 bool isTBAAVtableAccess() const;
1450
1451 /// Methods for metadata merging.
1452 static MDNode *concatenate(MDNode *A, MDNode *B);
1453 static MDNode *intersect(MDNode *A, MDNode *B);
1454 static MDNode *getMostGenericTBAA(MDNode *A, MDNode *B);
1455 static MDNode *getMostGenericFPMath(MDNode *A, MDNode *B);
1456 static MDNode *getMostGenericRange(MDNode *A, MDNode *B);
1457 static MDNode *getMostGenericAliasScope(MDNode *A, MDNode *B);
1458 static MDNode *getMostGenericAlignmentOrDereferenceable(MDNode *A, MDNode *B);
1459 /// Merge !prof metadata from two instructions.
1460 /// Currently only implemented with direct callsites with branch weights.
1461 static MDNode *getMergedProfMetadata(MDNode *A, MDNode *B,
1462 const Instruction *AInstr,
1463 const Instruction *BInstr);
1464};
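// For illustration only, a sketch of the forward-reference workflow described
// in the class comment above, assuming a hypothetical context Ctx and operands
// A and B (both Metadata*):
//
//   TempMDTuple Temp = MDNode::getTemporary(Ctx, std::nullopt);
//   MDNode *N = MDNode::get(Ctx, {A, Temp.get()}); // N is unresolved for now.
//   MDNode *Fwd = MDNode::get(Ctx, {B});
//   Temp->replaceAllUsesWith(Fwd); // N re-uniques to {A, Fwd} and resolves.
//   // If the forward reference was part of a cycle, call N->resolveCycles()
//   // once every temporary node has been replaced.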
1465
1466/// Tuple of metadata.
1467///
1468/// This is the simple \a MDNode arbitrary tuple. Nodes are uniqued by
1469/// default based on their operands.
1470class MDTuple : public MDNode {
1471 friend class LLVMContextImpl;
1472 friend class MDNode;
1473
1474 MDTuple(LLVMContext &C, StorageType Storage, unsigned Hash,
1475 ArrayRef<Metadata *> Vals)
1476 : MDNode(C, MDTupleKind, Storage, Vals) {
1477 setHash(Hash);
1478 }
1479
1480 ~MDTuple() { dropAllReferences(); }
1481
1482 void setHash(unsigned Hash) { SubclassData32 = Hash; }
1483 void recalculateHash();
1484
1485 static MDTuple *getImpl(LLVMContext &Context, ArrayRef<Metadata *> MDs,
1486 StorageType Storage, bool ShouldCreate = true);
1487
1488 TempMDTuple cloneImpl() const {
1489 ArrayRef<MDOperand> Operands = operands();
    return getTemporary(getContext(), SmallVector<Metadata *, 4>(
                                          Operands.begin(), Operands.end()));
1492 }
1493
1494public:
1495 /// Get the hash, if any.
1496 unsigned getHash() const { return SubclassData32; }
1497
1498 static MDTuple *get(LLVMContext &Context, ArrayRef<Metadata *> MDs) {
    return getImpl(Context, MDs, Uniqued);
1500 }
1501
1502 static MDTuple *getIfExists(LLVMContext &Context, ArrayRef<Metadata *> MDs) {
    return getImpl(Context, MDs, Uniqued, /* ShouldCreate */ false);
1504 }
1505
1506 /// Return a distinct node.
1507 ///
1508 /// Return a distinct node -- i.e., a node that is not uniqued.
1509 static MDTuple *getDistinct(LLVMContext &Context, ArrayRef<Metadata *> MDs) {
    return getImpl(Context, MDs, Distinct);
1511 }
1512
1513 /// Return a temporary node.
1514 ///
1515 /// For use in constructing cyclic MDNode structures. A temporary MDNode is
1516 /// not uniqued, may be RAUW'd, and must be manually deleted with
1517 /// deleteTemporary.
1518 static TempMDTuple getTemporary(LLVMContext &Context,
1519 ArrayRef<Metadata *> MDs) {
    return TempMDTuple(getImpl(Context, MDs, Temporary));
1521 }
1522
1523 /// Return a (temporary) clone of this.
1524 TempMDTuple clone() const { return cloneImpl(); }
1525
1526 /// Append an element to the tuple. This will resize the node.
1527 void push_back(Metadata *MD) {
1528 size_t NumOps = getNumOperands();
    resize(NumOps + 1);
    setOperand(NumOps, MD);
1531 }
1532
1533 /// Shrink the operands by 1.
  void pop_back() { resize(getNumOperands() - 1); }
1535
1536 static bool classof(const Metadata *MD) {
1537 return MD->getMetadataID() == MDTupleKind;
1538 }
1539};
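// For illustration only, a sketch of growing a resizable tuple with the API
// above, assuming a hypothetical context Ctx and operand MD:
//
//   MDTuple *T = MDTuple::getDistinct(Ctx, std::nullopt);
//   T->push_back(MDString::get(Ctx, "key"));
//   T->push_back(MD);
//   T->pop_back();
//   // Uniqued tuples are not resizable; only distinct and temporary ones are.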
1540
1541MDTuple *MDNode::get(LLVMContext &Context, ArrayRef<Metadata *> MDs) {
1542 return MDTuple::get(Context, MDs);
1543}
1544
1545MDTuple *MDNode::getIfExists(LLVMContext &Context, ArrayRef<Metadata *> MDs) {
1546 return MDTuple::getIfExists(Context, MDs);
1547}
1548
1549MDTuple *MDNode::getDistinct(LLVMContext &Context, ArrayRef<Metadata *> MDs) {
1550 return MDTuple::getDistinct(Context, MDs);
1551}
1552
1553TempMDTuple MDNode::getTemporary(LLVMContext &Context,
1554 ArrayRef<Metadata *> MDs) {
1555 return MDTuple::getTemporary(Context, MDs);
1556}
1557
1558void TempMDNodeDeleter::operator()(MDNode *Node) const {
  MDNode::deleteTemporary(Node);
1560}
1561
1562/// This is a simple wrapper around an MDNode which provides a higher-level
1563/// interface by hiding the details of how alias analysis information is encoded
1564/// in its operands.
1565class AliasScopeNode {
1566 const MDNode *Node = nullptr;
1567
1568public:
1569 AliasScopeNode() = default;
1570 explicit AliasScopeNode(const MDNode *N) : Node(N) {}
1571
1572 /// Get the MDNode for this AliasScopeNode.
1573 const MDNode *getNode() const { return Node; }
1574
1575 /// Get the MDNode for this AliasScopeNode's domain.
1576 const MDNode *getDomain() const {
1577 if (Node->getNumOperands() < 2)
1578 return nullptr;
    return dyn_cast_or_null<MDNode>(Node->getOperand(1));
1580 }
1581 StringRef getName() const {
1582 if (Node->getNumOperands() > 2)
    if (MDString *N = dyn_cast_or_null<MDString>(Node->getOperand(2)))
1584 return N->getString();
1585 return StringRef();
1586 }
1587};
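// For illustration only, a sketch of reading scopes from a hypothetical
// !alias.scope list node ScopeList:
//
//   for (const MDOperand &Op : ScopeList->operands()) {
//     AliasScopeNode Scope(cast<MDNode>(Op));
//     const MDNode *Domain = Scope.getDomain(); // Null if the node is malformed.
//     StringRef Name = Scope.getName();         // Empty if no name is present.
//   }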
1588
1589/// Typed iterator through MDNode operands.
1590///
/// An iterator that transforms an \a MDNode::op_iterator into an iterator
/// over a particular Metadata subclass.
1593template <class T> class TypedMDOperandIterator {
1594 MDNode::op_iterator I = nullptr;
1595
1596public:
1597 using iterator_category = std::input_iterator_tag;
1598 using value_type = T *;
1599 using difference_type = std::ptrdiff_t;
1600 using pointer = void;
1601 using reference = T *;
1602
1603 TypedMDOperandIterator() = default;
1604 explicit TypedMDOperandIterator(MDNode::op_iterator I) : I(I) {}
1605
1606 T *operator*() const { return cast_or_null<T>(*I); }
1607
1608 TypedMDOperandIterator &operator++() {
1609 ++I;
1610 return *this;
1611 }
1612
1613 TypedMDOperandIterator operator++(int) {
1614 TypedMDOperandIterator Temp(*this);
1615 ++I;
1616 return Temp;
1617 }
1618
1619 bool operator==(const TypedMDOperandIterator &X) const { return I == X.I; }
1620 bool operator!=(const TypedMDOperandIterator &X) const { return I != X.I; }
1621};
1622
1623/// Typed, array-like tuple of metadata.
1624///
1625/// This is a wrapper for \a MDTuple that makes it act like an array holding a
1626/// particular type of metadata.
1627template <class T> class MDTupleTypedArrayWrapper {
1628 const MDTuple *N = nullptr;
1629
1630public:
1631 MDTupleTypedArrayWrapper() = default;
1632 MDTupleTypedArrayWrapper(const MDTuple *N) : N(N) {}
1633
1634 template <class U>
1635 MDTupleTypedArrayWrapper(
1636 const MDTupleTypedArrayWrapper<U> &Other,
1637 std::enable_if_t<std::is_convertible<U *, T *>::value> * = nullptr)
1638 : N(Other.get()) {}
1639
1640 template <class U>
1641 explicit MDTupleTypedArrayWrapper(
1642 const MDTupleTypedArrayWrapper<U> &Other,
1643 std::enable_if_t<!std::is_convertible<U *, T *>::value> * = nullptr)
1644 : N(Other.get()) {}
1645
1646 explicit operator bool() const { return get(); }
1647 explicit operator MDTuple *() const { return get(); }
1648
1649 MDTuple *get() const { return const_cast<MDTuple *>(N); }
1650 MDTuple *operator->() const { return get(); }
1651 MDTuple &operator*() const { return *get(); }
1652
1653 // FIXME: Fix callers and remove condition on N.
1654 unsigned size() const { return N ? N->getNumOperands() : 0u; }
1655 bool empty() const { return N ? N->getNumOperands() == 0 : true; }
1656 T *operator[](unsigned I) const { return cast_or_null<T>(N->getOperand(I)); }
1657
1658 // FIXME: Fix callers and remove condition on N.
1659 using iterator = TypedMDOperandIterator<T>;
1660
1661 iterator begin() const { return N ? iterator(N->op_begin()) : iterator(); }
1662 iterator end() const { return N ? iterator(N->op_end()) : iterator(); }
1663};
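
// Example (not part of this header): a sketch of viewing a tuple of MDStrings
// through the typed wrapper; Ctx is a hypothetical LLVMContext. The
// CLASS##Array aliases defined just below are this wrapper instantiated for
// each metadata class.
//
//   MDTuple *Tuple = MDTuple::get(
//       Ctx, {MDString::get(Ctx, "a"), MDString::get(Ctx, "b")});
//   MDTupleTypedArrayWrapper<MDString> Strings(Tuple);
//   for (MDString *S : Strings) {
//     StringRef Text = S->getString(); // each element comes back typed
//     (void)Text;
//   }
//   unsigned Count = Strings.size();   // 2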

#define HANDLE_METADATA(CLASS)                                                 \
  using CLASS##Array = MDTupleTypedArrayWrapper<CLASS>;
#include "llvm/IR/Metadata.def"

/// Placeholder metadata for operands of distinct MDNodes.
///
/// This is a lightweight placeholder for an operand of a distinct node. Its
/// purpose is to help track forward references when creating a distinct node.
/// This allows distinct nodes involved in a cycle to be constructed before
/// their operands without requiring a heavyweight temporary node with
/// full-blown RAUW support.
///
/// Each placeholder supports only a single MDNode user. Clients should pass
/// an ID, retrieved via \a getID(), to indicate the "real" operand that this
/// should be replaced with.
///
/// While it would be possible to implement move operators, they would be
/// fairly expensive. Leave them unimplemented to discourage their use
/// (clients can use std::deque, std::list, BumpPtrAllocator, etc.).
class DistinctMDOperandPlaceholder : public Metadata {
  friend class MetadataTracking;

  Metadata **Use = nullptr;

public:
  explicit DistinctMDOperandPlaceholder(unsigned ID)
      : Metadata(DistinctMDOperandPlaceholderKind, Distinct) {
    SubclassData32 = ID;
  }

  DistinctMDOperandPlaceholder() = delete;
  DistinctMDOperandPlaceholder(DistinctMDOperandPlaceholder &&) = delete;
  DistinctMDOperandPlaceholder(const DistinctMDOperandPlaceholder &) = delete;

  ~DistinctMDOperandPlaceholder() {
    if (Use)
      *Use = nullptr;
  }

  unsigned getID() const { return SubclassData32; }

  /// Replace the use of this with MD.
  void replaceUseWith(Metadata *MD) {
    if (!Use)
      return;
    *Use = MD;

    if (*Use)
      MetadataTracking::track(*Use);

    Metadata *T = cast<Metadata>(this);
    MetadataTracking::untrack(T);
    assert(!Use && "Use is still being tracked despite being untracked!");
  }
};
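
// Example (not part of this header): a sketch of forward-referencing an
// operand of a distinct node. Ctx is a hypothetical LLVMContext, and the ID
// (7 here) is whatever key the client uses to look the real operand up later.
// The placeholder must stay alive until it has been replaced.
//
//   DistinctMDOperandPlaceholder PH(/*ID=*/7);
//   Metadata *Ops[] = {&PH};
//   MDNode *Node = MDNode::getDistinct(Ctx, Ops); // operand 0 is PH
//   // ... later, once the real operand keyed by PH.getID() exists:
//   Metadata *Real = MDString::get(Ctx, "resolved");
//   PH.replaceUseWith(Real);                      // Node now refers to Real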

//===----------------------------------------------------------------------===//
/// A tuple of MDNodes.
///
/// Despite its name, a NamedMDNode isn't itself an MDNode.
///
/// NamedMDNodes are named module-level entities that contain lists of
/// MDNodes.
///
/// It is illegal for a NamedMDNode to appear as an operand of an MDNode.
class NamedMDNode : public ilist_node<NamedMDNode> {
  friend class LLVMContextImpl;
  friend class Module;

  std::string Name;
  Module *Parent = nullptr;
  void *Operands; // SmallVector<TrackingMDRef, 4>

  void setParent(Module *M) { Parent = M; }

  explicit NamedMDNode(const Twine &N);

  template <class T1> class op_iterator_impl {
    friend class NamedMDNode;

    const NamedMDNode *Node = nullptr;
    unsigned Idx = 0;

    op_iterator_impl(const NamedMDNode *N, unsigned i) : Node(N), Idx(i) {}

  public:
    using iterator_category = std::bidirectional_iterator_tag;
    using value_type = T1;
    using difference_type = std::ptrdiff_t;
    using pointer = value_type *;
    using reference = value_type;

    op_iterator_impl() = default;

    bool operator==(const op_iterator_impl &o) const { return Idx == o.Idx; }
    bool operator!=(const op_iterator_impl &o) const { return Idx != o.Idx; }

    op_iterator_impl &operator++() {
      ++Idx;
      return *this;
    }

    op_iterator_impl operator++(int) {
      op_iterator_impl tmp(*this);
      operator++();
      return tmp;
    }

    op_iterator_impl &operator--() {
      --Idx;
      return *this;
    }

    op_iterator_impl operator--(int) {
      op_iterator_impl tmp(*this);
      operator--();
      return tmp;
    }

    T1 operator*() const { return Node->getOperand(Idx); }
  };
1785
1786public:
1787 NamedMDNode(const NamedMDNode &) = delete;
1788 ~NamedMDNode();
1789
1790 /// Drop all references and remove the node from parent module.
1791 void eraseFromParent();
1792
1793 /// Remove all uses and clear node vector.
1794 void dropAllReferences() { clearOperands(); }
1795 /// Drop all references to this node's operands.
1796 void clearOperands();
1797
1798 /// Get the module that holds this named metadata collection.
1799 inline Module *getParent() { return Parent; }
1800 inline const Module *getParent() const { return Parent; }
1801
1802 MDNode *getOperand(unsigned i) const;
1803 unsigned getNumOperands() const;
1804 void addOperand(MDNode *M);
1805 void setOperand(unsigned I, MDNode *New);
1806 StringRef getName() const;
1807 void print(raw_ostream &ROS, bool IsForDebug = false) const;
1808 void print(raw_ostream &ROS, ModuleSlotTracker &MST,
1809 bool IsForDebug = false) const;
1810 void dump() const;
1811
1812 // ---------------------------------------------------------------------------
1813 // Operand Iterator interface...
1814 //
1815 using op_iterator = op_iterator_impl<MDNode *>;
1816
1817 op_iterator op_begin() { return op_iterator(this, 0); }
1818 op_iterator op_end() { return op_iterator(this, getNumOperands()); }
1819
1820 using const_op_iterator = op_iterator_impl<const MDNode *>;
1821
1822 const_op_iterator op_begin() const { return const_op_iterator(this, 0); }
1823 const_op_iterator op_end() const { return const_op_iterator(this, getNumOperands()); }
1824
  inline iterator_range<op_iterator> operands() {
    return make_range(op_begin(), op_end());
  }
  inline iterator_range<const_op_iterator> operands() const {
    return make_range(op_begin(), op_end());
  }
};
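
// Example (not part of this header): a sketch of appending to and walking a
// module-level list, where M is assumed to be an llvm::Module (declared in
// llvm/IR/Module.h) and Ctx its LLVMContext. The list name is arbitrary.
//
//   NamedMDNode *NMD = M.getOrInsertNamedMetadata("my.custom.list");
//   Metadata *Payload[] = {MDString::get(Ctx, "payload")};
//   NMD->addOperand(MDNode::get(Ctx, Payload));
//   for (MDNode *Op : NMD->operands())
//     (void)Op->getNumOperands(); // elements come back in insertion order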

// Create wrappers for C Binding types (see CBindingWrapping.h).
DEFINE_ISA_CONVERSION_FUNCTIONS(NamedMDNode, LLVMNamedMDNodeRef)

} // end namespace llvm

#endif // LLVM_IR_METADATA_H
