Commit 4240004

Implement asm_const_ptr for global_asm and naked_asm
1 parent 825f97e commit 4240004
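
For context: this commit teaches the global_asm!/naked_asm! codegen paths of the cranelift, GCC, and LLVM backends to handle the new GlobalAsmOperandRef::ConstPointer operand, lowering a pointer-valued const operand to a symbol reference (plus byte offset) in the emitted assembly. A rough usage sketch follows; the feature name asm_const_ptr is taken from the commit title, but the exact surface syntax of the pointer operand below is an assumption, not something shown in this diff.

// Hedged sketch only: the `const &TABLE` operand syntax is assumed.
#![feature(asm_const_ptr)]

use core::arch::global_asm;

const TABLE: [u32; 4] = [1, 2, 3, 4];

global_asm!(
    ".globl table_ptr",
    "table_ptr:",
    // The backend replaces `{0}` with a symbol reference; for an anonymous
    // constant allocation like this one, it emits an internal global named
    // `<asm symbol>.<operand index>` and appends `+offset` for non-zero offsets.
    ".quad {0}",
    const &TABLE,
);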

File tree

14 files changed: +344, -95 lines

compiler/rustc_codegen_cranelift/src/global_asm.rs

+8 -1

@@ -8,10 +8,10 @@ use std::sync::Arc;
 use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece};
 use rustc_codegen_ssa::traits::{AsmCodegenMethods, GlobalAsmOperandRef};
-use rustc_middle::ty::TyCtxt;
 use rustc_middle::ty::layout::{
     FnAbiError, FnAbiOfHelpers, FnAbiRequest, HasTyCtxt, HasTypingEnv, LayoutError, LayoutOfHelpers,
 };
+use rustc_middle::ty::{Instance, TyCtxt};
 use rustc_session::config::{OutputFilenames, OutputType};
 use rustc_target::asm::InlineAsmArch;

@@ -29,6 +29,7 @@ impl<'tcx> AsmCodegenMethods<'tcx> for GlobalAsmContext<'_, 'tcx> {
         operands: &[GlobalAsmOperandRef<'tcx>],
         options: InlineAsmOptions,
         _line_spans: &[Span],
+        _instance: Instance<'_>,
     ) {
         codegen_global_asm_inner(self.tcx, self.global_asm, template, operands, options);
     }

@@ -132,6 +133,12 @@ fn codegen_global_asm_inner<'tcx>(
                         let symbol = tcx.symbol_name(instance);
                         global_asm.push_str(symbol.name);
                     }
+                    GlobalAsmOperandRef::ConstPointer { value: _ } => {
+                        tcx.dcx().span_err(
+                            span,
+                            "asm! and global_asm! const pointer operands are not yet supported",
+                        );
+                    }
                 }
             }
         }

compiler/rustc_codegen_gcc/src/asm.rs

+96 -29

@@ -1,15 +1,15 @@
 use std::borrow::Cow;

-use gccjit::{LValue, RValue, ToRValue, Type};
+use gccjit::{GlobalKind, LValue, RValue, ToRValue, Type};
 use rustc_ast::ast::{InlineAsmOptions, InlineAsmTemplatePiece};
 use rustc_codegen_ssa::mir::operand::OperandValue;
 use rustc_codegen_ssa::mir::place::PlaceRef;
 use rustc_codegen_ssa::traits::{
     AsmBuilderMethods, AsmCodegenMethods, BaseTypeCodegenMethods, BuilderMethods,
     GlobalAsmOperandRef, InlineAsmOperandRef,
 };
-use rustc_middle::bug;
 use rustc_middle::ty::Instance;
+use rustc_middle::{bug, mir};
 use rustc_span::Span;
 use rustc_target::asm::*;

@@ -855,13 +855,106 @@ impl<'gcc, 'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
         operands: &[GlobalAsmOperandRef<'tcx>],
         options: InlineAsmOptions,
         _line_spans: &[Span],
+        instance: Instance<'tcx>,
     ) {
         let asm_arch = self.tcx.sess.asm_arch.unwrap();

         // Default to Intel syntax on x86
         let att_dialect = matches!(asm_arch, InlineAsmArch::X86 | InlineAsmArch::X86_64)
             && options.contains(InlineAsmOptions::ATT_SYNTAX);

+        // Convert all operands to string interpolations
+        let converted_operands = operands
+            .iter()
+            .enumerate()
+            .map(|(operand_idx, operand)| {
+                match *operand {
+                    GlobalAsmOperandRef::Interpolate { ref string } => {
+                        // Const operands get injected directly into the
+                        // template. Note that we don't need to escape $
+                        // here unlike normal inline assembly.
+                        string.to_owned()
+                    }
+                    GlobalAsmOperandRef::ConstPointer { value } => {
+                        let (prov, offset) = value.into_parts();
+                        let global_alloc = self.tcx.global_alloc(prov.alloc_id());
+                        let symbol = 'sym: {
+                            let alloc = match global_alloc {
+                                mir::interpret::GlobalAlloc::Function { instance } => {
+                                    let function = get_fn(self, instance);
+                                    self.add_used_function(function);
+                                    // TODO(@Amanieu): Additional mangling is needed on
+                                    // some targets to add a leading underscore (Mach-O)
+                                    // or byte count suffixes (x86 Windows).
+                                    break 'sym self.tcx.symbol_name(instance).name.to_owned();
+                                }
+                                mir::interpret::GlobalAlloc::VTable(ty, dyn_ty) => self
+                                    .tcx
+                                    .global_alloc(self.tcx.vtable_allocation((
+                                        ty,
+                                        dyn_ty.principal().map(|principal| {
+                                            self.tcx
+                                                .instantiate_bound_regions_with_erased(principal)
+                                        }),
+                                    )))
+                                    .unwrap_memory(),
+                                mir::interpret::GlobalAlloc::Static(def_id) => {
+                                    // TODO(antoyo): set the global variable as used.
+                                    // TODO(@Amanieu): Additional mangling is needed on
+                                    // some targets to add a leading underscore (Mach-O).
+                                    let instance = Instance::mono(self.tcx, def_id);
+                                    break 'sym self.tcx.symbol_name(instance).name.to_owned();
+                                }
+                                mir::interpret::GlobalAlloc::Memory(alloc) => alloc,
+                            };
+
+                            // For ZSTs directly codegen an aligned pointer.
+                            if alloc.inner().len() == 0 {
+                                assert_eq!(offset.bytes(), 0);
+                                return format!("{}", alloc.inner().align.bytes());
+                            }
+
+                            let asm_name = self.tcx.symbol_name(instance);
+                            let sym_name = format!("{asm_name}.{operand_idx}");
+
+                            let init = crate::consts::const_alloc_to_gcc(self, alloc);
+                            let alloc = alloc.inner();
+                            let typ = self.val_ty(init).get_aligned(alloc.align.bytes());
+
+                            let global = self.declare_global_with_linkage(
+                                &sym_name,
+                                typ,
+                                GlobalKind::Exported,
+                            );
+                            global.global_set_initializer_rvalue(init);
+                            // TODO(nbdd0121): set unnamed address.
+                            // TODO(nbdd0121): set the global variable as used.
+
+                            sym_name
+                        };
+
+                        let offset = offset.bytes();
+                        if offset != 0 { format!("{symbol}+{offset}") } else { symbol }
+                    }
+                    GlobalAsmOperandRef::SymFn { instance } => {
+                        let function = get_fn(self, instance);
+                        self.add_used_function(function);
+                        // TODO(@Amanieu): Additional mangling is needed on
+                        // some targets to add a leading underscore (Mach-O)
+                        // or byte count suffixes (x86 Windows).
+                        self.tcx.symbol_name(instance).name.to_owned()
+                    }
+                    GlobalAsmOperandRef::SymStatic { def_id } => {
+                        // TODO(antoyo): set the global variable as used.
+                        // TODO(@Amanieu): Additional mangling is needed on
+                        // some targets to add a leading underscore (Mach-O).
+                        let instance = Instance::mono(self.tcx, def_id);
+                        self.tcx.symbol_name(instance).name.to_owned()
+                    }
+                }
+            })
+            .collect::<Vec<_>>();
+
         // Build the template string
         let mut template_str = ".pushsection .text\n".to_owned();
         if att_dialect {

@@ -885,33 +978,7 @@ impl<'gcc, 'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
                     }
                 }
                 InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span: _ } => {
-                    match operands[operand_idx] {
-                        GlobalAsmOperandRef::Interpolate { ref string } => {
-                            // Const operands get injected directly into the
-                            // template. Note that we don't need to escape %
-                            // here unlike normal inline assembly.
-                            template_str.push_str(string);
-                        }
-
-                        GlobalAsmOperandRef::SymFn { instance } => {
-                            let function = get_fn(self, instance);
-                            self.add_used_function(function);
-                            // TODO(@Amanieu): Additional mangling is needed on
-                            // some targets to add a leading underscore (Mach-O)
-                            // or byte count suffixes (x86 Windows).
-                            let name = self.tcx.symbol_name(instance).name;
-                            template_str.push_str(name);
-                        }
-
-                        GlobalAsmOperandRef::SymStatic { def_id } => {
-                            // TODO(antoyo): set the global variable as used.
-                            // TODO(@Amanieu): Additional mangling is needed on
-                            // some targets to add a leading underscore (Mach-O).
-                            let instance = Instance::mono(self.tcx, def_id);
-                            let name = self.tcx.symbol_name(instance).name;
-                            template_str.push_str(name);
-                        }
-                    }
+                    template_str.push_str(&converted_operands[operand_idx]);
                 }
             }
         }

compiler/rustc_codegen_llvm/src/asm.rs

+108 -32

@@ -7,7 +7,7 @@ use rustc_codegen_ssa::traits::*;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_middle::ty::Instance;
 use rustc_middle::ty::layout::TyAndLayout;
-use rustc_middle::{bug, span_bug};
+use rustc_middle::{bug, mir, span_bug};
 use rustc_span::{Pos, Span, Symbol, sym};
 use rustc_target::asm::*;
 use smallvec::SmallVec;

@@ -389,13 +389,119 @@ impl<'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'_, 'tcx> {
         operands: &[GlobalAsmOperandRef<'tcx>],
         options: InlineAsmOptions,
         _line_spans: &[Span],
+        instance: Instance<'tcx>,
     ) {
         let asm_arch = self.tcx.sess.asm_arch.unwrap();

         // Default to Intel syntax on x86
         let intel_syntax = matches!(asm_arch, InlineAsmArch::X86 | InlineAsmArch::X86_64)
             && !options.contains(InlineAsmOptions::ATT_SYNTAX);

+        // Convert all operands to string interpolations
+        let converted_operands = operands
+            .iter()
+            .enumerate()
+            .map(|(operand_idx, operand)| {
+                match *operand {
+                    GlobalAsmOperandRef::Interpolate { ref string } => {
+                        // Const operands get injected directly into the
+                        // template. Note that we don't need to escape $
+                        // here unlike normal inline assembly.
+                        string.to_owned()
+                    }
+                    GlobalAsmOperandRef::ConstPointer { value } => {
+                        let (prov, offset) = value.into_parts();
+                        let global_alloc = self.tcx.global_alloc(prov.alloc_id());
+                        let llval = 'llval: {
+                            let alloc = match global_alloc {
+                                mir::interpret::GlobalAlloc::Function { instance } => {
+                                    break 'llval self.get_fn(instance);
+                                }
+                                mir::interpret::GlobalAlloc::VTable(ty, dyn_ty) => self
+                                    .tcx
+                                    .global_alloc(self.tcx.vtable_allocation((
+                                        ty,
+                                        dyn_ty.principal().map(|principal| {
+                                            self.tcx
+                                                .instantiate_bound_regions_with_erased(principal)
+                                        }),
+                                    )))
+                                    .unwrap_memory(),
+                                mir::interpret::GlobalAlloc::Static(def_id) => {
+                                    break 'llval self
+                                        .renamed_statics
+                                        .borrow()
+                                        .get(&def_id)
+                                        .copied()
+                                        .unwrap_or_else(|| self.get_static(def_id));
+                                }
+                                mir::interpret::GlobalAlloc::Memory(alloc) => alloc,
+                            };
+
+                            // For ZSTs directly codegen an aligned pointer.
+                            if alloc.inner().len() == 0 {
+                                assert_eq!(offset.bytes(), 0);
+                                return format!("{}", alloc.inner().align.bytes());
+                            }
+
+                            let asm_name = self.tcx.symbol_name(instance);
+                            let sym_name = format!("{asm_name}.{operand_idx}");
+
+                            let init = crate::consts::const_alloc_to_llvm(
+                                self, alloc, /*static*/ false,
+                            );
+                            let alloc = alloc.inner();
+                            let g = self.static_addr_of_mut(init, alloc.align, None);
+                            if alloc.mutability.is_not() {
+                                // NB: we can't use `static_addr_of_impl` here to avoid sharing
+                                // the global, as we need to set name and linkage.
+                                unsafe { llvm::LLVMSetGlobalConstant(g, llvm::True) };
+                            }
+
+                            llvm::set_value_name(g, sym_name.as_bytes());
+
+                            // `static_addr_of_mut` gives us a private global which can't be
+                            // used by global asm. Update it to a hidden internal global instead.
+                            llvm::set_linkage(g, llvm::Linkage::InternalLinkage);
+                            llvm::set_visibility(g, llvm::Visibility::Hidden);
+                            g
+                        };
+                        self.add_compiler_used_global(llval);
+                        let symbol = llvm::build_string(|s| unsafe {
+                            llvm::LLVMRustGetMangledName(llval, s);
+                        })
+                        .expect("symbol is not valid UTF-8");
+
+                        let offset = offset.bytes();
+                        if offset != 0 { format!("{symbol}+{offset}") } else { symbol }
+                    }
+                    GlobalAsmOperandRef::SymFn { instance } => {
+                        let llval = self.get_fn(instance);
+                        self.add_compiler_used_global(llval);
+                        let symbol = llvm::build_string(|s| unsafe {
+                            llvm::LLVMRustGetMangledName(llval, s);
+                        })
+                        .expect("symbol is not valid UTF-8");
+                        symbol
+                    }
+                    GlobalAsmOperandRef::SymStatic { def_id } => {
+                        let llval = self
+                            .renamed_statics
+                            .borrow()
+                            .get(&def_id)
+                            .copied()
+                            .unwrap_or_else(|| self.get_static(def_id));
+                        self.add_compiler_used_global(llval);
+                        let symbol = llvm::build_string(|s| unsafe {
+                            llvm::LLVMRustGetMangledName(llval, s);
+                        })
+                        .expect("symbol is not valid UTF-8");
+                        symbol
+                    }
+                }
+            })
+            .collect::<Vec<_>>();
+
         // Build the template string
         let mut template_str = String::new();
         if intel_syntax {

@@ -405,37 +511,7 @@ impl<'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'_, 'tcx> {
             match *piece {
                 InlineAsmTemplatePiece::String(ref s) => template_str.push_str(s),
                 InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span: _ } => {
-                    match operands[operand_idx] {
-                        GlobalAsmOperandRef::Interpolate { ref string } => {
-                            // Const operands get injected directly into the
-                            // template. Note that we don't need to escape $
-                            // here unlike normal inline assembly.
-                            template_str.push_str(string);
-                        }
-                        GlobalAsmOperandRef::SymFn { instance } => {
-                            let llval = self.get_fn(instance);
-                            self.add_compiler_used_global(llval);
-                            let symbol = llvm::build_string(|s| unsafe {
-                                llvm::LLVMRustGetMangledName(llval, s);
-                            })
-                            .expect("symbol is not valid UTF-8");
-                            template_str.push_str(&symbol);
-                        }
-                        GlobalAsmOperandRef::SymStatic { def_id } => {
-                            let llval = self
-                                .renamed_statics
-                                .borrow()
-                                .get(&def_id)
-                                .copied()
-                                .unwrap_or_else(|| self.get_static(def_id));
-                            self.add_compiler_used_global(llval);
-                            let symbol = llvm::build_string(|s| unsafe {
-                                llvm::LLVMRustGetMangledName(llval, s);
-                            })
-                            .expect("symbol is not valid UTF-8");
-                            template_str.push_str(&symbol);
-                        }
-                    }
+                    template_str.push_str(&converted_operands[operand_idx])
                 }
             }
         }
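
Both the GCC and LLVM changes follow the same shape: every operand is rendered to a string once, up front, and the template walk then splices the pre-rendered strings by index. A minimal standalone sketch of that rendering rule, with hypothetical type and function names (the real logic is the converted_operands code in the diffs above):

// Standalone sketch of the rendering rule shared by the GCC and LLVM changes.
// `Piece`, `render_const_pointer`, and `expand` are illustrative names only.
enum Piece {
    Text(String),
    Placeholder { operand_idx: usize },
}

// A ConstPointer operand becomes either a bare alignment value (ZST case) or
// `symbol` / `symbol+offset`, mirroring the backend code above.
fn render_const_pointer(symbol: &str, offset: u64, zst_align: Option<u64>) -> String {
    match zst_align {
        Some(align) => align.to_string(),
        None if offset != 0 => format!("{symbol}+{offset}"),
        None => symbol.to_owned(),
    }
}

// The template walk no longer matches on operand kinds; it just indexes into
// the pre-rendered strings, as both backends now do with `converted_operands`.
fn expand(pieces: &[Piece], converted_operands: &[String]) -> String {
    let mut out = String::new();
    for piece in pieces {
        match piece {
            Piece::Text(s) => out.push_str(s),
            Piece::Placeholder { operand_idx } => out.push_str(&converted_operands[*operand_idx]),
        }
    }
    out
}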
