@@ -124,9 +124,85 @@ impl Evaluator<'_> {
             destination.write_from_bytes(self, &result)?;
             return Ok(true);
         }
+        if let ItemContainerId::TraitId(t) = def.lookup(self.db.upcast()).container {
+            if self.db.lang_attr(t.into()) == Some(LangItem::Clone) {
+                let [self_ty] = generic_args.as_slice(Interner) else {
+                    not_supported!("wrong generic arg count for clone");
+                };
+                let Some(self_ty) = self_ty.ty(Interner) else {
+                    not_supported!("wrong generic arg kind for clone");
+                };
+                // Clone has special impls for tuples and function pointers
+                if matches!(self_ty.kind(Interner), TyKind::Function(_) | TyKind::Tuple(..)) {
+                    self.exec_clone(def, args, self_ty.clone(), locals, destination, span)?;
+                    return Ok(true);
+                }
+            }
+        }
         Ok(false)
     }
 
+    /// Clone has special impls for tuples and function pointers
+    fn exec_clone(
+        &mut self,
+        def: FunctionId,
+        args: &[IntervalAndTy],
+        self_ty: Ty,
+        locals: &Locals,
+        destination: Interval,
+        span: MirSpan,
+    ) -> Result<()> {
+        match self_ty.kind(Interner) {
+            TyKind::Function(_) => {
+                let [arg] = args else {
+                    not_supported!("wrong arg count for clone");
+                };
+                let addr = Address::from_bytes(arg.get(self)?)?;
+                return destination
+                    .write_from_interval(self, Interval { addr, size: destination.size });
+            }
+            TyKind::Tuple(_, subst) => {
+                let [arg] = args else {
+                    not_supported!("wrong arg count for clone");
+                };
+                let addr = Address::from_bytes(arg.get(self)?)?;
+                let layout = self.layout(&self_ty)?;
+                for (i, ty) in subst.iter(Interner).enumerate() {
+                    let ty = ty.assert_ty_ref(Interner);
+                    let size = self.layout(ty)?.size.bytes_usize();
+                    let tmp = self.heap_allocate(self.ptr_size(), self.ptr_size())?;
+                    let arg = IntervalAndTy {
+                        interval: Interval { addr: tmp, size: self.ptr_size() },
+                        ty: TyKind::Ref(Mutability::Not, static_lifetime(), ty.clone())
+                            .intern(Interner),
+                    };
+                    let offset = layout.fields.offset(i).bytes_usize();
+                    self.write_memory(tmp, &addr.offset(offset).to_bytes())?;
+                    self.exec_clone(
+                        def,
+                        &[arg],
+                        ty.clone(),
+                        locals,
+                        destination.slice(offset..offset + size),
+                        span,
+                    )?;
+                }
+            }
+            _ => {
+                self.exec_fn_with_args(
+                    def,
+                    args,
+                    Substitution::from1(Interner, self_ty),
+                    locals,
+                    destination,
+                    None,
+                    span,
+                )?;
+            }
+        }
+        Ok(())
+    }
+
     fn exec_alloc_fn(
         &mut self,
         alloc_fn: &str,
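For context, here is the kind of source this special case lets the evaluator handle. `Clone` for tuples and for `fn` pointers comes from compiler-supplied impls with no MIR body to call into, so `exec_clone` copies the pointer bytes directly in the `Function` case and, in the `Tuple` case, recursively clones each field at its layout offset, building a temporary `&field` reference to pass as the argument. A minimal sketch, not taken from the commit's test suite:

```rust
// Hypothetical example: code like this previously had no body for the
// evaluator to execute, since these `Clone` impls are built into the compiler.
fn main() {
    let f: fn() -> i32 = || 1;
    let g = f.clone(); // fn-pointer clone: a plain bitwise copy

    let pair = (1u8, (2u16, 3u32));
    // Tuple clone is conceptually `(pair.0.clone(), pair.1.clone())`,
    // which is what the recursive `exec_clone` performs field by field.
    let copy = pair.clone();

    assert_eq!(g(), 1);
    assert_eq!(copy, pair);
}
```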
@@ -1057,7 +1133,14 @@ impl Evaluator<'_> {
         _span: MirSpan,
     ) -> Result<()> {
         // We are a single threaded runtime with no UB checking and no optimization, so
-        // we can implement these as normal functions.
+        // we can implement atomic intrinsics as normal functions.
+
+        if name.starts_with("singlethreadfence_") || name.starts_with("fence_") {
+            return Ok(());
+        }
+
+        // The rest of the atomic intrinsics have exactly one generic arg.
+
         let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else {
             return Err(MirEvalError::TypeError("atomic intrinsic generic arg is not provided"));
         };
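The early return for fences is sound because this interpreter runs a single thread and does no reordering: memory fences only constrain cross-thread visibility, so treating them as no-ops cannot change observable behavior. A hedged sketch of code that reaches this path, assuming `std::sync::atomic::fence` and `compiler_fence` lower to the `atomic_fence_*` and `atomic_singlethreadfence_*` intrinsics respectively, with the `atomic_` prefix stripped by the caller before the name check above:

```rust
// Hypothetical example: both fences become no-ops under this evaluator.
use std::sync::atomic::{compiler_fence, fence, Ordering};

fn sync_point() {
    fence(Ordering::SeqCst);          // e.g. the `atomic_fence_seqcst` intrinsic
    compiler_fence(Ordering::SeqCst); // e.g. `atomic_singlethreadfence_seqcst`
}

fn main() {
    sync_point();
}
```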