/*-------------------------------------------------------------------------
 *
 * llvmjit_deform.c
 *    Generate code for deforming a heap tuple.
 *
 * This gains performance benefits over unJITed deforming from compile-time
 * knowledge of the tuple descriptor.  Fixed column widths, NOT NULLness,
 * etc. can be taken advantage of.
 *
 * Portions Copyright (c) 1996-2025, PostgreSQL Global Development Group
 * Portions Copyright (c) 1994, Regents of the University of California
 *
 * IDENTIFICATION
 *    src/backend/jit/llvm/llvmjit_deform.c
 *
 *-------------------------------------------------------------------------
 */
#include "postgres.h"

#include <llvm-c/Core.h>

#include "access/htup_details.h"
#include "executor/tuptable.h"
#include "jit/llvmjit.h"
#include "jit/llvmjit_emit.h"

/*
 * Create a function that deforms a tuple of type desc up to natts columns.
 */
LLVMValueRef
slot_compile_deform(LLVMJitContext *context, TupleDesc desc,
                    const TupleTableSlotOps *ops, int natts)
{
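    /*
     * For example, for a table of (a int4 NOT NULL, b int4 NOT NULL) the
     * code generated here can load both columns from the constant offsets 0
     * and 4, with no alignment computation and no NULL-bitmap checks;
     * generic deforming has to recompute all of that for every tuple.
     */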
    char       *funcname;

    LLVMModuleRef mod;
    LLVMContextRef lc;
    LLVMBuilderRef b;

    LLVMTypeRef deform_sig;
    LLVMValueRef v_deform_fn;

    LLVMBasicBlockRef b_entry;
    LLVMBasicBlockRef b_adjust_unavail_cols;
    LLVMBasicBlockRef b_find_start;

    LLVMBasicBlockRef b_out;
    LLVMBasicBlockRef b_dead;
    LLVMBasicBlockRef *attcheckattnoblocks;
    LLVMBasicBlockRef *attstartblocks;
    LLVMBasicBlockRef *attisnullblocks;
    LLVMBasicBlockRef *attcheckalignblocks;
    LLVMBasicBlockRef *attalignblocks;
    LLVMBasicBlockRef *attstoreblocks;

    LLVMValueRef v_offp;

    LLVMValueRef v_tupdata_base;
    LLVMValueRef v_tts_values;
    LLVMValueRef v_tts_nulls;
    LLVMValueRef v_slotoffp;
    LLVMValueRef v_flagsp;
    LLVMValueRef v_nvalidp;
    LLVMValueRef v_nvalid;
    LLVMValueRef v_maxatt;

    LLVMValueRef v_slot;

    LLVMValueRef v_tupleheaderp;
    LLVMValueRef v_tuplep;
    LLVMValueRef v_infomask1;
    LLVMValueRef v_infomask2;
    LLVMValueRef v_bits;

    LLVMValueRef v_t_hoff;

    LLVMValueRef v_hasnulls;

    /* last column (0 indexed) guaranteed to exist */
    int         guaranteed_column_number = -1;

    /* current known alignment */
    int         known_alignment = 0;

    /* if true, known_alignment describes definite offset of column */
    bool        attguaranteedalign = true;

    /* virtual tuples never need deforming, so don't generate code */
    Assert(ops != &TTSOpsVirtual);

    /* decline to JIT for slot types we don't know to handle */
    if (ops != &TTSOpsHeapTuple && ops != &TTSOpsBufferHeapTuple &&
        ops != &TTSOpsMinimalTuple)
        return NULL;

    mod = llvm_mutable_module(context);
    lc = LLVMGetModuleContext(mod);

    funcname = llvm_expand_funcname(context, "deform");

    /*
     * Check which columns have to exist, so we don't have to check the row's
     * natts unnecessarily.
     */
    for (int attnum = 0; attnum < natts; attnum++)
    {
        CompactAttribute *att = TupleDescCompactAttr(desc, attnum);

        /*
         * If the column is declared NOT NULL then it must be present in
         * every tuple, unless there's a "missing" entry that could provide a
         * non-NULL value for it.  That in turn guarantees that the NULL
         * bitmap - if there are any NULLable columns - is at least long
         * enough to cover columns up to attnum.
         *
         * Be paranoid and also check !attisdropped, even though the
         * attisdropped && attnotnull combination shouldn't exist.
         */
        if (att->attnullability == ATTNULLABLE_VALID &&
            !att->atthasmissing &&
            !att->attisdropped)
            guaranteed_column_number = attnum;
    }
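    /*
     * For example, with four to-be-deformed columns (a int4 NOT NULL,
     * b text, c int4 NOT NULL, d text), the loop above leaves
     * guaranteed_column_number = 2: every tuple must physically contain at
     * least columns 0..2, so only column d's presence needs to be checked.
     */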

    /* Create the signature and function */
    {
        LLVMTypeRef param_types[1];

        param_types[0] = l_ptr(StructTupleTableSlot);

        deform_sig = LLVMFunctionType(LLVMVoidTypeInContext(lc),
                                      param_types, lengthof(param_types), 0);
    }
    v_deform_fn = LLVMAddFunction(mod, funcname, deform_sig);
    LLVMSetLinkage(v_deform_fn, LLVMInternalLinkage);
    LLVMSetParamAlignment(LLVMGetParam(v_deform_fn, 0), MAXIMUM_ALIGNOF);
    llvm_copy_attributes(AttributeTemplate, v_deform_fn);

    b_entry =
        LLVMAppendBasicBlockInContext(lc, v_deform_fn, "entry");
    b_adjust_unavail_cols =
        LLVMAppendBasicBlockInContext(lc, v_deform_fn, "adjust_unavail_cols");
    b_find_start =
        LLVMAppendBasicBlockInContext(lc, v_deform_fn, "find_startblock");
    b_out =
        LLVMAppendBasicBlockInContext(lc, v_deform_fn, "outblock");
    b_dead =
        LLVMAppendBasicBlockInContext(lc, v_deform_fn, "deadblock");

    b = LLVMCreateBuilderInContext(lc);

    attcheckattnoblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
    attstartblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
    attisnullblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
    attcheckalignblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
    attalignblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
    attstoreblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);

    LLVMPositionBuilderAtEnd(b, b_entry);

    /* perform allocas first; LLVM only promotes those to registers */
    v_offp = LLVMBuildAlloca(b, TypeSizeT, "v_offp");

    v_slot = LLVMGetParam(v_deform_fn, 0);

    /*
     * Fetch the offset of already-deformed data and the tuple header
     * pointer; how to get at them depends on the slot type.
     */
    if (ops == &TTSOpsHeapTuple || ops == &TTSOpsBufferHeapTuple)
    {
        LLVMValueRef v_heapslot;

        v_heapslot =
            LLVMBuildBitCast(b, v_slot, l_ptr(StructHeapTupleTableSlot),
                             "heapslot");
        v_slotoffp = l_struct_gep(b,
                                  StructHeapTupleTableSlot, v_heapslot,
                                  FIELDNO_HEAPTUPLETABLESLOT_OFF, "");
        v_tupleheaderp =
            l_load_struct_gep(b, StructHeapTupleTableSlot, v_heapslot,
                              FIELDNO_HEAPTUPLETABLESLOT_TUPLE,
                              "tupleheader");
    }
    else if (ops == &TTSOpsMinimalTuple)
    {
        LLVMValueRef v_minimalslot;

        v_minimalslot =
            LLVMBuildBitCast(b, v_slot, l_ptr(StructMinimalTupleTableSlot),
                             "minimalslot");
        v_slotoffp = l_struct_gep(b,
                                  StructMinimalTupleTableSlot, v_minimalslot,
                                  FIELDNO_MINIMALTUPLETABLESLOT_OFF, "");
        v_tupleheaderp =
            l_load_struct_gep(b, StructMinimalTupleTableSlot, v_minimalslot,
                              FIELDNO_MINIMALTUPLETABLESLOT_TUPLE,
                              "tupleheader");
    }
    else
    {
        /* should've returned at the start of the function */
        pg_unreachable();
    }

    v_tts_values =
        l_load_struct_gep(b, StructTupleTableSlot, v_slot,
                          FIELDNO_TUPLETABLESLOT_VALUES, "tts_values");
    v_tts_nulls =
        l_load_struct_gep(b, StructTupleTableSlot, v_slot,
                          FIELDNO_TUPLETABLESLOT_ISNULL, "tts_ISNULL");
    v_flagsp = l_struct_gep(b, StructTupleTableSlot, v_slot,
                            FIELDNO_TUPLETABLESLOT_FLAGS, "");
    v_nvalidp = l_struct_gep(b, StructTupleTableSlot, v_slot,
                             FIELDNO_TUPLETABLESLOT_NVALID, "");

    v_tuplep = l_load_struct_gep(b, StructHeapTupleData, v_tupleheaderp,
                                 FIELDNO_HEAPTUPLEDATA_DATA, "tuple");
    v_bits = LLVMBuildBitCast(b,
                              l_struct_gep(b,
                                           StructHeapTupleHeaderData,
                                           v_tuplep,
                                           FIELDNO_HEAPTUPLEHEADERDATA_BITS,
                                           ""),
                              l_ptr(LLVMInt8TypeInContext(lc)),
                              "t_bits");
    v_infomask1 =
        l_load_struct_gep(b, StructHeapTupleHeaderData, v_tuplep,
                          FIELDNO_HEAPTUPLEHEADERDATA_INFOMASK, "infomask1");
    v_infomask2 =
        l_load_struct_gep(b, StructHeapTupleHeaderData, v_tuplep,
                          FIELDNO_HEAPTUPLEHEADERDATA_INFOMASK2, "infomask2");

    /* t_infomask & HEAP_HASNULL */
    v_hasnulls =
        LLVMBuildICmp(b, LLVMIntNE,
                      LLVMBuildAnd(b,
                                   l_int16_const(lc, HEAP_HASNULL),
                                   v_infomask1, ""),
                      l_int16_const(lc, 0),
                      "hasnulls");

    /* t_infomask2 & HEAP_NATTS_MASK */
    v_maxatt = LLVMBuildAnd(b,
                            l_int16_const(lc, HEAP_NATTS_MASK),
                            v_infomask2,
                            "maxatt");

    /*
     * Need to zext, as getelementptr otherwise treats hoff as a signed 8bit
     * integer, which'd yield a negative offset for t_hoff > 127.
     */
    v_t_hoff =
        LLVMBuildZExt(b,
                      l_load_struct_gep(b,
                                        StructHeapTupleHeaderData, v_tuplep,
                                        FIELDNO_HEAPTUPLEHEADERDATA_HOFF,
                                        ""),
                      LLVMInt32TypeInContext(lc),
                      "t_hoff");

    v_tupdata_base = l_gep(b,
                           LLVMInt8TypeInContext(lc),
                           LLVMBuildBitCast(b, v_tuplep,
                                            l_ptr(LLVMInt8TypeInContext(lc)),
                                            ""),
                           &v_t_hoff, 1,
                           "v_tupdata_base");
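    /*
     * For example, a wide tuple with a null bitmap can have t_hoff = 136;
     * read as a signed 8-bit value that would be -120, and the data pointer
     * would end up far before the tuple. After zero-extension the offset is
     * the intended 136 bytes.
     */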

    /*
     * Load tuple start offset from slot. Will be reset below in case there
     * are no already-deformed columns in the slot.
     */
    {
        LLVMValueRef v_off_start;

        v_off_start = l_load(b, LLVMInt32TypeInContext(lc), v_slotoffp,
                             "v_slot_off");
        v_off_start = LLVMBuildZExt(b, v_off_start, TypeSizeT, "");
        LLVMBuildStore(b, v_off_start, v_offp);
    }

    /* build the basic blocks for each attribute; we need them as jump targets */
    for (int attnum = 0; attnum < natts; attnum++)
    {
        attcheckattnoblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.attcheckattno", attnum);
        attstartblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.start", attnum);
        attisnullblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.attisnull", attnum);
        attcheckalignblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.attcheckalign", attnum);
        attalignblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.align", attnum);
        attstoreblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.store", attnum);
    }

    /*
     * Check whether it is guaranteed that all the desired attributes are
     * available in the tuple (though possibly NULL), by dint of either the
     * last to-be-deformed column being NOT NULL, or a subsequent column not
     * accessed here being NOT NULL. If that's not guaranteed, the tuple
     * header's natts has to be checked, and missing attributes potentially
     * have to be fetched (using slot_getmissingattrs()).
     */
    if ((natts - 1) <= guaranteed_column_number)
    {
        /* just skip through unnecessary blocks */
        LLVMBuildBr(b, b_adjust_unavail_cols);
        LLVMPositionBuilderAtEnd(b, b_adjust_unavail_cols);
        LLVMBuildBr(b, b_find_start);
    }
    else
    {
        LLVMValueRef v_params[3];
        LLVMValueRef f;

        /* branch if not all columns available */
        LLVMBuildCondBr(b,
                        LLVMBuildICmp(b, LLVMIntULT,
                                      v_maxatt,
                                      l_int16_const(lc, natts),
                                      ""),
                        b_adjust_unavail_cols,
                        b_find_start);

        /* if not, memset tts_isnull of relevant cols to true */
        LLVMPositionBuilderAtEnd(b, b_adjust_unavail_cols);

        v_params[0] = v_slot;
        v_params[1] = LLVMBuildZExt(b, v_maxatt,
                                    LLVMInt32TypeInContext(lc), "");
        v_params[2] = l_int32_const(lc, natts);
        f = llvm_pg_func(mod, "slot_getmissingattrs");
        l_call(b, LLVMGetFunctionType(f), f,
               v_params, lengthof(v_params), "");
        LLVMBuildBr(b, b_find_start);
    }
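    /*
     * For example, with natts = 4 and guaranteed_column_number = 1, a tuple
     * written before columns 2 and 3 were added (with defaults) stores only
     * two attributes, so maxatt = 2 < 4 and the branch above ends up calling
     * slot_getmissingattrs(slot, 2, 4) to fill columns 2 and 3 from the
     * missing-attribute defaults before deforming starts.
     */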

    LLVMPositionBuilderAtEnd(b, b_find_start);

    v_nvalid = l_load(b, LLVMInt16TypeInContext(lc), v_nvalidp, "");

    /*
     * Build a switch to go from nvalid to the right startblock. Callers
     * currently don't have the knowledge, but it'd be good for performance
     * to avoid this check when it's known that the slot is empty (e.g. in
     * scan nodes).
     */
    if (true)
    {
        LLVMValueRef v_switch = LLVMBuildSwitch(b, v_nvalid,
                                                b_dead, natts);

        for (int attnum = 0; attnum < natts; attnum++)
        {
            LLVMValueRef v_attno = l_int16_const(lc, attnum);

            LLVMAddCase(v_switch, v_attno, attcheckattnoblocks[attnum]);
        }
    }
    else
    {
        /* jump from entry block to first block */
        LLVMBuildBr(b, attcheckattnoblocks[0]);
    }

    LLVMPositionBuilderAtEnd(b, b_dead);
    LLVMBuildUnreachable(b);
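    /*
     * E.g. if slot->tts_nvalid is 3 when the generated function is entered,
     * the switch jumps straight to block.attr.3.attcheckattno and deforming
     * resumes at column 3, reusing the tuple offset previously saved in the
     * slot.
     */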

    /*
     * Iterate over each attribute that needs to be deformed, build code to
     * deform it.
     */
    for (int attnum = 0; attnum < natts; attnum++)
    {
        CompactAttribute *att = TupleDescCompactAttr(desc, attnum);
        LLVMValueRef v_incby;
        int         alignto;
        LLVMValueRef l_attno = l_int16_const(lc, attnum);
        LLVMValueRef v_attdatap;
        LLVMValueRef v_resultp;

        /* build block checking whether we did all the necessary attributes */
        LLVMPositionBuilderAtEnd(b, attcheckattnoblocks[attnum]);

        /*
         * If this is the first attribute, slot->tts_nvalid was 0. Therefore
         * also reset the offset to 0; it may be left over from a previous
         * execution.
         */
        if (attnum == 0)
            LLVMBuildStore(b, l_sizet_const(0), v_offp);

        /*
         * Build check whether the column is available (i.e. whether the
         * tuple has that many columns stored). We can avoid the branch if we
         * know there's a subsequent NOT NULL column.
         */
        if (attnum <= guaranteed_column_number)
        {
            LLVMBuildBr(b, attstartblocks[attnum]);
        }
        else
        {
            LLVMValueRef v_islast;

            v_islast = LLVMBuildICmp(b, LLVMIntUGE,
                                     l_attno,
                                     v_maxatt,
                                     "heap_natts");
            LLVMBuildCondBr(b, v_islast, b_out, attstartblocks[attnum]);
        }
        LLVMPositionBuilderAtEnd(b, attstartblocks[attnum]);

        /*
         * Check for NULLs if necessary. No need to take missing attributes
         * into account, because if they're present the heap tuple's natts
         * would have indicated that a slot_getmissingattrs() is needed.
         */
        if (att->attnullability != ATTNULLABLE_VALID)
        {
            LLVMBasicBlockRef b_ifnotnull;
            LLVMBasicBlockRef b_ifnull;
            LLVMBasicBlockRef b_next;
            LLVMValueRef v_attisnull;
            LLVMValueRef v_nullbyteno;
            LLVMValueRef v_nullbytemask;
            LLVMValueRef v_nullbyte;
            LLVMValueRef v_nullbit;

            b_ifnotnull = attcheckalignblocks[attnum];
            b_ifnull = attisnullblocks[attnum];

            if (attnum + 1 == natts)
                b_next = b_out;
            else
                b_next = attcheckattnoblocks[attnum + 1];

            v_nullbyteno = l_int32_const(lc, attnum >> 3);
            v_nullbytemask = l_int8_const(lc, 1 << ((attnum) & 0x07));
            v_nullbyte = l_load_gep1(b, LLVMInt8TypeInContext(lc),
                                     v_bits, v_nullbyteno, "attnullbyte");

            v_nullbit = LLVMBuildICmp(b,
                                      LLVMIntEQ,
                                      LLVMBuildAnd(b, v_nullbyte,
                                                   v_nullbytemask, ""),
                                      l_int8_const(lc, 0),
                                      "attisnull");

            v_attisnull = LLVMBuildAnd(b, v_hasnulls, v_nullbit, "");

            LLVMBuildCondBr(b, v_attisnull, b_ifnull, b_ifnotnull);

            LLVMPositionBuilderAtEnd(b, b_ifnull);

            /* store null-byte */
            LLVMBuildStore(b, l_int8_const(lc, 1),
                           l_gep(b, LLVMInt8TypeInContext(lc),
                                 v_tts_nulls, &l_attno, 1, ""));
            /* store zero datum */
            LLVMBuildStore(b, l_sizet_const(0),
                           l_gep(b, TypeDatum,
                                 v_tts_values, &l_attno, 1, ""));

            LLVMBuildBr(b, b_next);
            attguaranteedalign = false;
        }
        else
        {
            /* nothing to do */
            LLVMBuildBr(b, attcheckalignblocks[attnum]);
            LLVMPositionBuilderAtEnd(b, attisnullblocks[attnum]);
            LLVMBuildBr(b, attcheckalignblocks[attnum]);
        }
        LLVMPositionBuilderAtEnd(b, attcheckalignblocks[attnum]);
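        /*
         * To illustrate the null-bitmap test above with attnum = 10:
         * nullbyteno = 10 >> 3 = 1 and nullbytemask = 1 << (10 & 0x07) =
         * 0x04, so the column is NULL iff the tuple has nulls at all and
         * bit 2 of t_bits[1] is clear (a set bit means "not null").
         */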

        /* determine required alignment */
        alignto = att->attalignby;

        /*
         * Even if alignment is required, we can skip doing it if it's
         * provably unnecessary:
         * - the first column is guaranteed to be aligned
         * - columns following a NOT NULL fixed-width datum have known
         *   alignment, and the alignment computation can be skipped if that
         *   known alignment is compatible with the current column.
         */
        if (alignto > 1 &&
            (known_alignment < 0 ||
             known_alignment != TYPEALIGN(alignto, known_alignment)))
        {
            /*
             * When accessing a varlena field, we have to "peek" to see if we
             * are looking at a pad byte or the first byte of a 1-byte-header
             * datum. A zero byte must be either a pad byte, or the first
             * byte of a correctly aligned 4-byte length word; in either
             * case, we can align safely. A non-zero byte must be either a
             * 1-byte length word, or the first byte of a correctly aligned
             * 4-byte length word; in either case, we need not align.
             */
            if (att->attlen == -1)
            {
                LLVMValueRef v_possible_padbyte;
                LLVMValueRef v_ispad;
                LLVMValueRef v_off;

                /* don't know if it's a short varlena or not */
                attguaranteedalign = false;

                v_off = l_load(b, TypeSizeT, v_offp, "");

                v_possible_padbyte =
                    l_load_gep1(b, LLVMInt8TypeInContext(lc),
                                v_tupdata_base, v_off, "padbyte");
                v_ispad =
                    LLVMBuildICmp(b, LLVMIntEQ,
                                  v_possible_padbyte, l_int8_const(lc, 0),
                                  "ispadbyte");
                LLVMBuildCondBr(b, v_ispad,
                                attalignblocks[attnum],
                                attstoreblocks[attnum]);
            }
            else
            {
                LLVMBuildBr(b, attalignblocks[attnum]);
            }
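            /*
             * The peek is safe because a 1-byte varlena header byte is never
             * zero. E.g. at offset 5 with alignto = 4, a zero byte can only
             * be padding before a 4-byte length word at offset 8, while a
             * non-zero byte must be the start of the datum itself.
             */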

            LLVMPositionBuilderAtEnd(b, attalignblocks[attnum]);

            /* translation of alignment code (cf TYPEALIGN()) */
            {
                LLVMValueRef v_off_aligned;
                LLVMValueRef v_off = l_load(b, TypeSizeT, v_offp, "");

                /* ((ALIGNVAL) - 1) */
                LLVMValueRef v_alignval = l_sizet_const(alignto - 1);

                /* ((uintptr_t) (LEN) + ((ALIGNVAL) - 1)) */
                LLVMValueRef v_lh = LLVMBuildAdd(b, v_off, v_alignval, "");

                /* ~((uintptr_t) ((ALIGNVAL) - 1)) */
                LLVMValueRef v_rh = l_sizet_const(~(alignto - 1));

                v_off_aligned = LLVMBuildAnd(b, v_lh, v_rh, "aligned_offset");

                LLVMBuildStore(b, v_off_aligned, v_offp);
            }
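            /*
             * Worked example: with alignto = 4, off = 5 becomes
             * (5 + 3) & ~3 = 8, while an already-aligned off = 8 stays at
             * (8 + 3) & ~3 = 8.
             */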

            /*
             * As the alignment either was unnecessary or has been performed,
             * we now know the current alignment. This is only safe because
             * this value isn't used for varlena and nullable columns.
             */
            if (known_alignment >= 0)
            {
                Assert(known_alignment != 0);
                known_alignment = TYPEALIGN(alignto, known_alignment);
            }

            LLVMBuildBr(b, attstoreblocks[attnum]);
            LLVMPositionBuilderAtEnd(b, attstoreblocks[attnum]);
        }
        else
        {
            LLVMPositionBuilderAtEnd(b, attcheckalignblocks[attnum]);
            LLVMBuildBr(b, attalignblocks[attnum]);
            LLVMPositionBuilderAtEnd(b, attalignblocks[attnum]);
            LLVMBuildBr(b, attstoreblocks[attnum]);
        }
        LLVMPositionBuilderAtEnd(b, attstoreblocks[attnum]);

        /*
         * Store the current offset if it's known to be constant. That allows
         * LLVM to generate better code. Without it, LLVM can't figure out
         * that the offset might be constant, due to the jumps for previously
         * decoded columns.
         */
        if (attguaranteedalign)
        {
            Assert(known_alignment >= 0);
            LLVMBuildStore(b, l_sizet_const(known_alignment), v_offp);
        }

        /* compute what following columns are aligned to */
        if (att->attlen < 0)
        {
            /* can't guarantee any alignment after a variable-length field */
            known_alignment = -1;
            attguaranteedalign = false;
        }
        else if (att->attnullability == ATTNULLABLE_VALID &&
                 attguaranteedalign && known_alignment >= 0)
        {
            /*
             * If the offset to the column was previously known, a NOT NULL &
             * fixed-width column guarantees that the alignment is just the
             * previous alignment plus the column width.
             */
            Assert(att->attlen > 0);
            known_alignment += att->attlen;
        }
        else if (att->attnullability == ATTNULLABLE_VALID &&
                 (att->attlen % alignto) == 0)
        {
            /*
             * After a NOT NULL fixed-width column with a length that is a
             * multiple of its alignment requirement, we know the following
             * column is aligned to at least the current column's alignment.
             */
            known_alignment = alignto;
            Assert(known_alignment > 0);
            attguaranteedalign = false;
        }
        else
        {
            known_alignment = -1;
            attguaranteedalign = false;
        }

        /* compute address to load data from */
        {
            LLVMValueRef v_off = l_load(b, TypeSizeT, v_offp, "");

            v_attdatap =
                l_gep(b, LLVMInt8TypeInContext(lc),
                      v_tupdata_base, &v_off, 1, "");
        }

        /* compute address to store value at */
        v_resultp = l_gep(b, TypeDatum, v_tts_values, &l_attno, 1, "");

        /* store null-byte (false) */
        LLVMBuildStore(b, l_int8_const(lc, 0),
                       l_gep(b, TypeStorageBool, v_tts_nulls, &l_attno, 1,
                             ""));

        /*
         * Store the datum. For byval datums, copy the value, extend it to
         * Datum's width, and store it. For byref types, store a pointer to
         * the data.
         */
        if (att->attbyval)
        {
            LLVMValueRef v_tmp_loaddata;
            LLVMTypeRef vartype = LLVMIntTypeInContext(lc, att->attlen * 8);
            LLVMTypeRef vartypep = LLVMPointerType(vartype, 0);

            v_tmp_loaddata =
                LLVMBuildPointerCast(b, v_attdatap, vartypep, "");
            v_tmp_loaddata = l_load(b, vartype, v_tmp_loaddata, "attr_byval");
            v_tmp_loaddata = LLVMBuildZExt(b, v_tmp_loaddata, TypeDatum, "");

            LLVMBuildStore(b, v_tmp_loaddata, v_resultp);
        }
        else
        {
            LLVMValueRef v_tmp_loaddata;

            /* store pointer */
            v_tmp_loaddata =
                LLVMBuildPtrToInt(b, v_attdatap, TypeDatum, "attr_ptr");
            LLVMBuildStore(b, v_tmp_loaddata, v_resultp);
        }
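        /*
         * E.g. for an int2 column, vartype is i16: two bytes are loaded
         * from the tuple and zero-extended to the full Datum width before
         * being stored into tts_values[attnum].
         */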

        /* increment data pointer */
        if (att->attlen > 0)
        {
            v_incby = l_sizet_const(att->attlen);
        }
        else if (att->attlen == -1)
        {
            v_incby = l_call(b,
                             llvm_pg_var_func_type("varsize_any"),
                             llvm_pg_func(mod, "varsize_any"),
                             &v_attdatap, 1,
                             "varsize_any");
            l_callsite_ro(v_incby);
            l_callsite_alwaysinline(v_incby);
        }
        else if (att->attlen == -2)
        {
            v_incby = l_call(b,
                             llvm_pg_var_func_type("strlen"),
                             llvm_pg_func(mod, "strlen"),
                             &v_attdatap, 1,
                             "strlen");

            l_callsite_ro(v_incby);

            /* add 1 for the NUL byte */
            v_incby = LLVMBuildAdd(b, v_incby, l_sizet_const(1), "");
        }
        else
        {
            Assert(false);
            v_incby = NULL;     /* silence compiler */
        }
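        /*
         * E.g. attlen = 4 increments the offset by a constant 4; a varlena
         * with a 4-byte header and 100 bytes of payload makes varsize_any()
         * return 104; a cstring's increment is strlen() + 1 for the
         * terminator.
         */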

        if (attguaranteedalign)
        {
            Assert(known_alignment >= 0);
            LLVMBuildStore(b, l_sizet_const(known_alignment), v_offp);
        }
        else
        {
            LLVMValueRef v_off = l_load(b, TypeSizeT, v_offp, "");

            v_off = LLVMBuildAdd(b, v_off, v_incby, "increment_offset");
            LLVMBuildStore(b, v_off, v_offp);
        }

        /*
         * Jump to the next block, unless this is the last possible column,
         * or all desired (available) attributes have been fetched.
         */
        if (attnum + 1 == natts)
            LLVMBuildBr(b, b_out);
        else
            LLVMBuildBr(b, attcheckattnoblocks[attnum + 1]);
    }

    /* build block that returns */
    LLVMPositionBuilderAtEnd(b, b_out);

    {
        LLVMValueRef v_off = l_load(b, TypeSizeT, v_offp, "");
        LLVMValueRef v_flags;

        LLVMBuildStore(b, l_int16_const(lc, natts), v_nvalidp);
        v_off = LLVMBuildTrunc(b, v_off, LLVMInt32TypeInContext(lc), "");
        LLVMBuildStore(b, v_off, v_slotoffp);
        v_flags = l_load(b, LLVMInt16TypeInContext(lc), v_flagsp,
                         "tts_flags");
        v_flags = LLVMBuildOr(b, v_flags,
                              l_int16_const(lc, TTS_FLAG_SLOW), "");
        LLVMBuildStore(b, v_flags, v_flagsp);
        LLVMBuildRetVoid(b);
    }

    LLVMDisposeBuilder(b);

    return v_deform_fn;
}