From a3be9655a6155972d1b6673dfc80aeea5c0177c7 Mon Sep 17 00:00:00 2001 From: Connor Horman Date: Sat, 27 Jan 2024 02:51:11 -0500 Subject: [PATCH 01/74] feat(xlang-ir)!: Overhaul IR to a structured basic block form --- Cargo.lock | 2 +- c/src/lib.rs | 1 + codegen-clever/src/lib.rs | 1164 +---------------- codegen-x86/src/lib.rs | 1 + lccc/src/main.rs | 29 +- rust/src/irgen/xir_visitor.rs | 2141 ++++++++------------------------ rust/src/lib.rs | 1 + rust/src/sema/mir.rs | 1 + xir/src/lib.rs | 87 +- xlang/src/plugin.rs | 2 + xlang/xlang_abi/src/alloc.rs | 4 +- xlang/xlang_abi/src/vec.rs | 48 + xlang/xlang_backend/src/lib.rs | 1108 +---------------- xlang/xlang_backend/src/mc.rs | 108 +- xlang/xlang_backend/src/ssa.rs | 30 + xlang/xlang_struct/Cargo.toml | 2 +- xlang/xlang_struct/src/fmt.rs | 69 +- xlang/xlang_struct/src/lib.rs | 174 ++- 18 files changed, 828 insertions(+), 4144 deletions(-) create mode 100644 xlang/xlang_backend/src/ssa.rs diff --git a/Cargo.lock b/Cargo.lock index 557b6564..1879da9e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -713,7 +713,7 @@ dependencies = [ [[package]] name = "xlang_struct" -version = "0.1.0" +version = "0.2.0" dependencies = [ "bitflags", "fake-enum", diff --git a/c/src/lib.rs b/c/src/lib.rs index 2832dce2..2a9f2fef 100644 --- a/c/src/lib.rs +++ b/c/src/lib.rs @@ -57,6 +57,7 @@ impl XLangPlugin for CFrontend { xlang::host::rustcall! { #[allow(clippy::missing_const_for_fn)] #[no_mangle] + #[allow(improper_ctypes_definitions)] pub extern "rustcall" fn xlang_frontend_main() -> DynBox { DynBox::unsize_box(Box::new(CFrontend::new())) } diff --git a/codegen-clever/src/lib.rs b/codegen-clever/src/lib.rs index 24318068..153443b4 100644 --- a/codegen-clever/src/lib.rs +++ b/codegen-clever/src/lib.rs @@ -1,1167 +1,11 @@ -use std::{ - cell::RefCell, collections::HashSet, convert::TryFrom, ops::Deref, rc::Rc, str::FromStr, -}; - -use arch_ops::{ - clever::{ - CleverEncoder, CleverExtension, CleverImmediate, CleverIndex, CleverInstruction, - CleverOpcode, CleverOperand, CleverRegister, - }, - traits::{Address, InsnWrite}, -}; -use binfmt::{ - fmt::{FileType, Section, SectionType}, - sym::{SymbolKind, SymbolType}, -}; -use target_tuples::{Architecture, Target}; -use xlang::{ - abi::{ - option::Option::{None as XLangNone, Some as XLangSome}, - span::Span, - string::StringView, - }, - plugin::{OutputMode, XLangCodegen, XLangPlugin}, - prelude::v1::{DynBox, HashMap, Pair}, - targets::properties::{MachineProperties, TargetProperties}, -}; -use xlang_backend::{ - callconv::CallingConvention, expr::ValLocation, mangle::mangle_itanium, str::StringMap, - ty::TypeInformation, FunctionCodegen, FunctionRawCodegen, -}; -use xlang_struct::{ - AccessClass, AggregateDefinition, BinaryOp, FloatFormat, FnType, FunctionDeclaration, - PathComponent, ScalarType, ScalarTypeHeader, ScalarTypeKind, Type, -}; - -#[derive(Clone, Debug, Hash, PartialEq, Eq)] -pub enum CleverValLocation { - Register { - size: u16, - reg: CleverRegister, - }, - Indirect { - size: u16, - reg: CleverRegister, - offset: i64, - }, - Regs { - size: u64, - regs: Vec, - }, - Null, - Unassigned(usize), -} - -fn gcd(a: i64, b: i64) -> i64 { - if a == 0 { - b - } else if b == 0 { - a - } else { - gcd(b, a % b) - } -} - -impl ValLocation for CleverValLocation { - fn addressible(&self) -> bool { - matches!(self, Self::Indirect { .. 
}) - } - - fn unassigned(n: usize) -> Self { - Self::Unassigned(n) - } -} - -#[derive(Clone, Debug, Hash, PartialEq, Eq)] -pub enum CleverInstructionOrLabel { - Insn(CleverInstruction), - Label(String), - FunctionEpilogue, -} - -impl From for CleverInstructionOrLabel { - fn from(insn: CleverInstruction) -> Self { - Self::Insn(insn) - } -} - -#[derive(Debug, Clone)] -pub struct TempSymbol( - String, - Option<&'static str>, - Option, - SymbolType, - SymbolKind, -); - -#[allow(dead_code)] -#[derive(Clone, Debug, Hash, PartialEq, Eq)] -enum RegisterStatus { - Free, - ToClobber, - MustSave, - InUse, - Saved { - loc: CleverValLocation, - next: Box, - }, -} - -pub struct CleverCallConv { - features: HashSet, - tys: Rc, -} - -#[allow(dead_code)] -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] -pub enum TypeClass { - Float, - Vec, - Integer, - Memory, - Zero, -} - -#[allow(clippy::missing_panics_doc)] // TODO: remove todo!() -#[must_use] -pub fn classify_type(ty: &Type) -> Option { - match ty { - Type::Scalar(ScalarType { - header: - ScalarTypeHeader { - vectorsize: XLangSome(1..=65535), - .. - }, - .. - }) => Some(TypeClass::Vec), - Type::Scalar(ScalarType { - kind: ScalarTypeKind::Float { .. } | ScalarTypeKind::Posit, - .. - }) => Some(TypeClass::Float), - Type::Scalar(_) | Type::Pointer(_) => Some(TypeClass::Integer), - Type::Void | Type::FnType(_) | Type::Null => None, - Type::Array(ty) => classify_type(&ty.ty), - Type::TaggedType(_, ty) => classify_type(ty), - Type::Product(tys) => { - let mut infected = TypeClass::Zero; - for ty in tys { - infected = match (classify_type(ty)?, infected) { - (a, TypeClass::Zero) => a, - (_, TypeClass::Memory) => TypeClass::Memory, - (a, b) if a == b => a, - _ => TypeClass::Memory, - }; - } - Some(infected) - } - Type::Aligned(_, _) => todo!(), - Type::Aggregate(AggregateDefinition { fields, .. }) => { - let mut infected = TypeClass::Zero; - for ty in fields.iter().map(|field| &field.ty) { - infected = match (classify_type(ty)?, infected) { - (a, TypeClass::Zero) => a, - (_, TypeClass::Memory) => TypeClass::Memory, - (a, b) if a == b => a, - _ => TypeClass::Memory, - }; - } - Some(infected) - } - Type::Named(path) => todo!("named type {:?}", path), - } -} - -impl CallingConvention for CleverCallConv { - type Loc = CleverValLocation; - - fn pass_return_place(&self, ty: &xlang_struct::Type) -> Option { - match (classify_type(ty)?, self.tys.type_size(ty)?) 
{ - (TypeClass::Memory, _) => todo!("memory"), - (_, 0..=8) => None, - _ => todo!("Oversized values"), - } - } - - fn find_param( - &self, - fnty: &xlang_struct::FnType, - real: &xlang_struct::FnType, - param: u32, - infn: bool, - ) -> Self::Loc { - let mut int_regs = &[ - CleverRegister::r2, - CleverRegister::r1, - CleverRegister::r3, - CleverRegister::r4, - CleverRegister::r5, - CleverRegister::r9, - CleverRegister::r10, - CleverRegister::r11, - ][..]; - let mut float_regs = &[ - CleverRegister::f0, - CleverRegister::f1, - CleverRegister::f2, - CleverRegister::f3, - ][..]; - - let mut last_loc = CleverValLocation::Unassigned(0); - if self.pass_return_place(&fnty.ret).is_some() { - int_regs = &int_regs[1..]; - } - eprintln!("Finding param {} of {}", param, fnty); - for (i, ty) in fnty - .params - .iter() - .chain(core::iter::repeat(&Type::Void)) - .take((param as usize) + 1) - .enumerate() - { - eprintln!("Classifying type {}", ty); - match (classify_type(ty).unwrap(), self.tys.type_size(ty).unwrap()) { - (TypeClass::Zero, _) => last_loc = CleverValLocation::Null, - (TypeClass::Memory, _) => panic!("Memory value"), - (TypeClass::Float, size @ (2 | 4 | 8)) => { - let reg = float_regs[0]; - float_regs = &float_regs[1..]; - last_loc = CleverValLocation::Register { - size: (size * 8) as u16, - reg, - }; - } - (TypeClass::Integer, size @ 1..=8) | (TypeClass::Float, size @ 1) => { - let reg = int_regs[0]; - int_regs = &int_regs[1..]; - let size = size.next_power_of_two() * 8; - last_loc = CleverValLocation::Register { - size: size as u16, - reg, - }; - } - (TypeClass::Integer | TypeClass::Float, size @ 9..=16) => { - let regs = int_regs[1..2].to_vec(); - int_regs = &int_regs[2..]; - last_loc = CleverValLocation::Regs { size, regs }; - } - - (_, _) => todo!("Memory value"), - } - - if i == (param as usize) { - break; - } - } - - last_loc - } - - fn find_return_val(&self, fnty: &xlang_struct::FnType) -> Self::Loc { - match ( - classify_type(&fnty.ret).unwrap(), - self.tys.type_size(&fnty.ret).unwrap(), - ) { - (TypeClass::Memory, _) => todo!("memory value"), - (_, 0) => CleverValLocation::Null, - (TypeClass::Integer, size @ 1..=8) => { - let size = size.next_power_of_two() * 8; - CleverValLocation::Register { - size: size as u16, - reg: CleverRegister::r0, - } - } - (TypeClass::Float, size @ (2 | 4 | 8)) => { - let size = size * 8; - CleverValLocation::Register { - size: size as u16, - reg: CleverRegister::f0, - } - } - _ => todo!("memory value"), - } - } -} - -pub struct CleverFunctionCodegen { - insns: Vec, - symbols: Vec, - name: String, - strings: Rc>, - frame_size: i32, - properties: &'static TargetProperties<'static>, - scratch_reg: Option, - callconv: CleverCallConv, - ptrreg: Option, - gpr_status: HashMap, - trap_unreachable: bool, - features: HashSet, - tys: Rc, -} - -impl FunctionRawCodegen for CleverFunctionCodegen { - type Loc = CleverValLocation; - - type CallConv = CleverCallConv; - - fn write_trap(&mut self, trap: xlang_backend::expr::Trap) { - match trap { - xlang_backend::expr::Trap::Unreachable if !self.trap_unreachable => {} - xlang_backend::expr::Trap::Breakpoint => { - self.insns - .push(CleverInstructionOrLabel::Insn(CleverInstruction::new( - CleverOpcode::Int { i: 0 }, - vec![], - ))) - } - _ => self - .insns - .push(CleverInstructionOrLabel::Insn(CleverInstruction::new( - CleverOpcode::Und0, - vec![], - ))), - } - } - - fn write_barrier(&mut self, acc: xlang_struct::AccessClass) { - match acc & AccessClass::ATOMIC_MASK { - AccessClass::AtomicRelaxed => {} - 
AccessClass::AtomicAcquire => { - let scratch = self.get_or_allocate_scratch_reg(); - self.insns - .push(CleverInstructionOrLabel::Insn(CleverInstruction::new( - CleverOpcode::MovRS { r: scratch }, - vec![CleverOperand::Indirect { - size: 8, - base: CleverRegister::r7, - scale: 1, - index: CleverIndex::Abs(0), - }], - ))); - } - AccessClass::AtomicRelease | AccessClass::AtomicAcqRel => { - let scratch = self.get_or_allocate_scratch_reg(); - self.insns - .push(CleverInstructionOrLabel::Insn(CleverInstruction::new( - CleverOpcode::Or { - lock: true, - flags: false, - }, - vec![ - CleverOperand::Indirect { - size: 8, - base: CleverRegister::r7, - scale: 1, - index: CleverIndex::Abs(0), - }, - CleverOperand::Register { - size: 8, - reg: scratch, - }, - ], - ))); - } - _ => self - .insns - .push(CleverInstruction::new(CleverOpcode::Fence, vec![]).into()), - } - } - - fn write_int_binary_imm( - &mut self, - dest: Self::Loc, - a: Self::Loc, - b: u128, - ty: &xlang_struct::Type, - op: xlang_struct::BinaryOp, - ) { - todo!() - } - - fn move_val(&mut self, src: Self::Loc, dest: Self::Loc) { - match (src, dest) { - (CleverValLocation::Unassigned(_), _) | (_, CleverValLocation::Unassigned(_)) => { - panic!("unassigned memory location") - } - (CleverValLocation::Regs { .. }, _) | (_, CleverValLocation::Regs { .. }) => { - todo!("regs") - } - (CleverValLocation::Null, _) | (_, CleverValLocation::Null) => {} - ( - a, - CleverValLocation::Register { - size: _, - reg: r @ CleverRegister(0..=15), - }, - ) => { - let ptrreg = self.get_or_allocate_pointer_reg(); - let op1 = self.loc_to_operand(a, ptrreg).unwrap(); - - self.insns - .push(CleverInstruction::new(CleverOpcode::MovRD { r }, vec![op1]).into()); - } - ( - CleverValLocation::Register { - size: _, - reg: r @ CleverRegister(0..=15), - }, - a, - ) => { - let ptrreg = self.get_or_allocate_pointer_reg(); - let op1 = self.loc_to_operand(a, ptrreg).unwrap(); - - self.insns - .push(CleverInstruction::new(CleverOpcode::MovRS { r }, vec![op1]).into()); - } - (a, b) => { - let ptrreg = self.get_or_allocate_pointer_reg(); - let scratch = self.get_or_allocate_scratch_reg(); - - let op1 = self.loc_to_operand(a, ptrreg).unwrap(); - - self.insns.push( - CleverInstruction::new(CleverOpcode::MovRD { r: scratch }, vec![op1]).into(), - ); - - let op2 = self.loc_to_operand(b, ptrreg).unwrap(); - - self.insns.push( - CleverInstruction::new(CleverOpcode::MovRS { r: scratch }, vec![op2]).into(), - ); - } - } - } - - fn move_imm(&mut self, src: u128, dest: Self::Loc, ty: &xlang_struct::Type) { - match (dest, self.tys.type_size(ty).unwrap()) { - (CleverValLocation::Register { size, reg }, tysize @ 1..=8) => { - let imm_size = (128 - (src.leading_zeros())).min((tysize * 8) as u32); - let imm_val = if imm_size <= 12 { - CleverImmediate::Short(src as u16) - } else { - CleverImmediate::Long((((imm_size + 15) / 16) * 16) as u16, src as u64) - }; - - if reg.0 < 16 { - self.insns.push( - CleverInstruction::new( - CleverOpcode::MovRD { r: reg }, - vec![CleverOperand::Immediate(imm_val)], - ) - .into(), - ); - } else { - self.insns.push( - CleverInstruction::new( - CleverOpcode::Mov, - vec![ - CleverOperand::Register { size, reg }, - CleverOperand::Immediate(imm_val), - ], - ) - .into(), - ); - } - } - (loc, size) => todo!("move of size {} into {:?}", size, loc), - } - } - - fn store_indirect_imm(&mut self, src: xlang_struct::Value, ptr: Self::Loc) { - todo!() - } - - fn load_val(&mut self, lvalue: Self::Loc, loc: Self::Loc) { - todo!() - } - - fn store_indirect(&mut self, lvalue: 
Self::Loc, loc: Self::Loc, ty: &xlang_struct::Type) { - todo!() - } - - fn get_callconv(&self) -> &Self::CallConv { - &self.callconv - } - - fn native_int_size(&self) -> u16 { - 64 - } - - fn native_float_size(&self) -> xlang::prelude::v1::Option { - if self.features.contains(&CleverExtension::Float) { - XLangSome(64) - } else { - XLangNone - } - } - - fn native_vec_size(&self) -> xlang::prelude::v1::Option { - if self.features.contains(&CleverExtension::Vec) { - XLangSome(16) - } else { - XLangNone - } - } - - fn preferred_vec_size(&self) -> xlang::abi::option::Option { - self.native_vec_size() - } - - fn write_intrinsic( - &mut self, - name: xlang::abi::string::StringView, - params: xlang::vec::Vec>, - ) -> xlang_backend::expr::VStackValue { - todo!() - } - - fn write_target(&mut self, target: u32) { - self.insns.push(CleverInstructionOrLabel::Label(format!( - "{}._T{}", - self.name, target - ))) - } - - fn call_direct(&mut self, path: &xlang_struct::Path, _realty: &FnType) { - let sym = match &*path.components { - [PathComponent::Text(n)] | [PathComponent::Root, PathComponent::Text(n)] => { - n.to_string() - } - [PathComponent::Root, rest @ ..] | [rest @ ..] => mangle_itanium(rest), - }; - - let addr = Address::PltSym { name: sym }; - - self.insns.push( - CleverInstruction::new( - CleverOpcode::CallR { ss: 2 }, - vec![CleverOperand::Immediate(CleverImmediate::LongAddrRel( - 64, addr, - ))], - ) - .into(), - ); - } - - fn call_indirect(&mut self, value: Self::Loc) { - todo!() - } - - fn tailcall_direct(&mut self, path: &xlang_struct::Path, realty: &xlang_struct::FnType) { - todo!() - } - - fn tailcall_indirect(&mut self, value: Self::Loc, realty: &xlang_struct::FnType) { - todo!() - } - - fn leave_function(&mut self) { - self.insns.push(CleverInstructionOrLabel::FunctionEpilogue); - } - - fn branch(&mut self, target: u32, condition: xlang_struct::BranchCondition, val: Self::Loc) { - todo!() - } - - fn branch_compare( - &mut self, - target: u32, - condition: xlang_struct::BranchCondition, - v1: xlang_backend::expr::VStackValue, - v2: xlang_backend::expr::VStackValue, - ) { - todo!() - } - - fn branch_unconditional(&mut self, target: u32) { - todo!() - } - - fn branch_indirect(&mut self, target: Self::Loc) { - todo!() - } - - fn compute_global_address(&mut self, path: &xlang_struct::Path, loc: Self::Loc) { - todo!() - } - - fn compute_label_address(&mut self, target: u32, loc: Self::Loc) { - todo!() - } - - fn compute_parameter_address(&mut self, param: u32, loc: Self::Loc) { - todo!() - } - - fn compute_local_address(&mut self, inloc: Self::Loc, loc: Self::Loc) { - todo!() - } - - fn compute_string_address( - &mut self, - enc: xlang_backend::str::Encoding, - bytes: xlang::vec::Vec, - loc: Self::Loc, - ) { - let addr = self.strings.borrow_mut().get_string_symbol(bytes, enc); - let addr = Address::Symbol { - name: addr.to_string(), - disp: 0, - }; - match loc { - CleverValLocation::Register { size, reg } => match (size, reg) { - (64, CleverRegister(0..=15)) => self.insns.push( - CleverInstruction::new( - CleverOpcode::LeaRD { r: reg }, - vec![CleverOperand::Immediate(CleverImmediate::LongMemRel( - 64, addr, 64, - ))], - ) - .into(), - ), - (size, reg) => self.insns.push( - CleverInstruction::new( - CleverOpcode::Lea, - vec![ - CleverOperand::Register { size, reg }, - CleverOperand::Immediate(CleverImmediate::LongMemRel(64, addr, 64)), - ], - ) - .into(), - ), - }, - CleverValLocation::Indirect { size, reg, offset } => todo!("indirect"), - CleverValLocation::Regs { size, regs } => 
todo!("registers"), - CleverValLocation::Null => {} - CleverValLocation::Unassigned(_) => panic!("Unassigned"), - } - } - - fn free(&mut self, loc: Self::Loc) { - todo!() - } - - fn clobber(&mut self, loc: Self::Loc) { - todo!() - } - - fn allocate(&mut self, ty: &xlang_struct::Type, needs_addr: bool) -> Self::Loc { - todo!() - } - - fn allocate_lvalue(&mut self, needs_addr: bool) -> Self::Loc { - todo!() - } - - fn prepare_call_frame(&mut self, _: &xlang_struct::FnType, _: &xlang_struct::FnType) { - if self.frame_size & 15 != 0 { - self.frame_size = ((self.frame_size + 15) / 16) * 16; - } - /* TODO */ - } - - fn lockfree_use_libatomic(&mut self, _: u64) -> bool { - false - } - - fn lockfree_cmpxchg_use_libatomic(&mut self, _: u64) -> bool { - false - } - - fn has_wait_free_compound(&mut self, op: BinaryOp, size: u64) -> bool { - match op { - BinaryOp::Add - | BinaryOp::Sub - | BinaryOp::BitAnd - | BinaryOp::BitOr - | BinaryOp::BitXor - | BinaryOp::Lsh - | BinaryOp::Rsh => size < 128, - _ => false, - } - } - - fn has_wait_free_compound_fetch(&mut self, op: BinaryOp, size: u64) -> bool { - false - } - - fn compare_exchange( - &mut self, - dest: Self::Loc, - ctrl: Self::Loc, - val: Self::Loc, - ty: &Type, - ord: AccessClass, - ) { - todo!() - } - - fn weak_compare_exchange( - &mut self, - dest: Self::Loc, - ctrl: Self::Loc, - val: Self::Loc, - ty: &Type, - ord: AccessClass, - ) { - todo!() - } - - fn call_absolute(&mut self, addr: u128, _: &FnType) { - let addr = Address::Abs(addr); - - self.insns.push( - CleverInstruction::new( - CleverOpcode::CallR { ss: 2 }, - vec![CleverOperand::Immediate(CleverImmediate::LongAddrRel( - 64, addr, - ))], - ) - .into(), - ); - } - - fn write_int_binary( - &mut self, - _dest: Self::Loc, - _a: Self::Loc, - _b: Self::Loc, - _ty: &Type, - _op: BinaryOp, - ) { - todo!() - } - - fn write_unary(&mut self, _val: Self::Loc, _ty: &Type, _op: xlang_struct::UnaryOp) { - todo!() - } - - fn write_asm( - &mut self, - asm: &xlang_struct::AsmExpr, - inputs: xlang::vec::Vec>, - ) -> xlang::vec::Vec { - todo!() - } - - fn write_scalar_convert( - &mut self, - target_ty: ScalarType, - incoming_ty: ScalarType, - new_loc: Self::Loc, - old_loc: Self::Loc, - ) { - todo!() - } -} - -impl CleverFunctionCodegen { - fn get_or_allocate_scratch_reg(&mut self) -> CleverRegister { - let Self { - scratch_reg, - gpr_status, - .. - } = self; - *scratch_reg.get_or_insert_with(|| { - for i in 0..16 { - match gpr_status.get_or_insert_mut(CleverRegister(i), RegisterStatus::Free) { - RegisterStatus::Free | RegisterStatus::ToClobber => { - gpr_status.insert(CleverRegister(i), RegisterStatus::InUse); - return CleverRegister(i); - } - RegisterStatus::MustSave => {} - RegisterStatus::InUse => {} - RegisterStatus::Saved { .. } => {} - } - } - - todo!("Save registers") - }) - } - - fn get_or_allocate_pointer_reg(&mut self) -> CleverRegister { - let Self { - ptrreg, gpr_status, .. - } = self; - *ptrreg.get_or_insert_with(|| { - for i in 0..16 { - match gpr_status.get_or_insert_mut(CleverRegister(i), RegisterStatus::Free) { - RegisterStatus::Free | RegisterStatus::ToClobber => { - gpr_status.insert(CleverRegister(i), RegisterStatus::InUse); - return CleverRegister(i); - } - RegisterStatus::MustSave => {} - RegisterStatus::InUse => {} - RegisterStatus::Saved { .. 
} => {} - } - } - - todo!("Save registers") - }) - } - - fn loc_to_operand( - &mut self, - loc: CleverValLocation, - sreg: CleverRegister, - ) -> Option { - match loc { - CleverValLocation::Register { size, reg } => { - Some(CleverOperand::Register { size, reg }) - } - CleverValLocation::Indirect { size, reg, offset } => { - let scale = gcd(offset.abs(), 128); - let offset = offset / scale; - let idx = if (-8..8).contains(&offset) { - CleverIndex::Abs(offset as i16) - } else { - let size = ((((64 - (offset.leading_ones() + offset.leading_zeros())) + 15) - / 16) - * 16) as u16; - - self.insns.push( - CleverInstruction::new( - CleverOpcode::Movsx { flags: true }, - vec![ - CleverOperand::Register { - size: 64, - reg: sreg, - }, - CleverOperand::Immediate(CleverImmediate::Long( - size, - offset as u64, - )), - ], - ) - .into(), - ); - CleverIndex::Register(sreg) - }; - Some(CleverOperand::Indirect { - size, - base: reg, - scale: scale as u8, - index: idx, - }) - } - CleverValLocation::Regs { .. } => None, - CleverValLocation::Null => None, - CleverValLocation::Unassigned(_) => panic!("unassigned location"), - } - } - - #[allow(clippy::missing_errors_doc)] - #[allow(clippy::missing_panics_doc)] - pub fn write_output( - self, - text: &mut Section, - symbols: &mut Vec, - ) -> std::io::Result<()> { - let mut encoder = CleverEncoder::new(text); - if self.frame_size > 0 { - let size = 64 - self.frame_size.leading_zeros(); - encoder.write_instruction(CleverInstruction::new( - CleverOpcode::PushR { - r: CleverRegister::fbase, - }, - vec![], - ))?; - encoder.write_instruction(CleverInstruction::new( - CleverOpcode::MovRD { - r: CleverRegister::fbase, - }, - vec![CleverOperand::Register { - size: 64, - reg: CleverRegister::sptr, - }], - ))?; - let val = if size <= 12 { - CleverImmediate::Short(self.frame_size as u16) - } else { - CleverImmediate::Long((((size + 15) / 16) * 16) as u16, self.frame_size as u64) - }; - encoder.write_instruction(CleverInstruction::new( - CleverOpcode::SubRD { - r: CleverRegister::sptr, - }, - vec![CleverOperand::Immediate(val)], - ))?; - } - for item in self.insns { - match item { - CleverInstructionOrLabel::Label(num) => { - symbols.push(TempSymbol( - num.to_string(), - Some(".text"), - Some(encoder.offset()), - SymbolType::Function, - SymbolKind::Local, - )); - } - CleverInstructionOrLabel::Insn(insn) => encoder.write_instruction(insn)?, - CleverInstructionOrLabel::FunctionEpilogue => { - if self.frame_size > 0 { - encoder.write_instruction(CleverInstruction::new( - CleverOpcode::MovRD { - r: CleverRegister::sptr, - }, - vec![CleverOperand::Register { - size: 64, - reg: CleverRegister::fbase, - }], - ))?; - encoder.write_instruction(CleverInstruction::new( - CleverOpcode::PopR { - r: CleverRegister::sptr, - }, - Vec::new(), - ))?; - } - encoder.write_instruction(CleverInstruction::new(CleverOpcode::Ret, vec![]))?; - } - } - } - - Ok(()) - } -} - -pub struct CleverCodegenPlugin { - fns: Option>>, - strings: Rc>, - properties: Option<&'static TargetProperties<'static>>, - features: HashSet, -} - -impl CleverCodegenPlugin { - fn write_output_impl(&mut self, mut x: W) -> std::io::Result<()> { - let fmt = binfmt::format_by_name(&self.properties.unwrap().link.obj_binfmt).unwrap(); - let mut file = fmt.create_file(FileType::Relocatable); - let mut text = Section { - name: String::from(".text"), - align: 1024, - ty: SectionType::ProgBits, - content: Vec::new(), - relocs: Vec::new(), - ..Section::default() - }; - - let mut rodata = Section { - name: String::from(".rodata"), - 
align: 1024, - ty: SectionType::ProgBits, - content: Vec::new(), - relocs: Vec::new(), - ..Section::default() - }; - - let mut syms = Vec::with_capacity(16); - - syms.push(TempSymbol( - "_GLOBAL_OFFSET_TABLE_".into(), - None, - None, - SymbolType::Null, - SymbolKind::Global, - )); - - for (enc, sym, str) in self.strings.borrow().symbols() { - let sym = TempSymbol( - sym.to_string(), - Some(".rodata"), - Some(rodata.content.len()), - SymbolType::Object, - SymbolKind::Local, - ); - rodata.content.extend_from_slice(&enc.encode_utf8(str)); - syms.push(sym); - } - - for (name, mut output) in self.fns.take().unwrap() { - let sym = TempSymbol( - name.clone(), - Some(".text"), - Some(text.content.len()), - SymbolType::Function, - SymbolKind::Global, - ); // TODO: internal linkage is a thing - syms.push(sym); - - syms.extend_from_slice(&output.raw_inner().symbols); - output.into_inner().write_output(&mut text, &mut syms)?; - } - file.add_section(text).unwrap(); - file.add_section(rodata).unwrap(); - for sym in syms { - let secno = sym - .1 - .and_then(|v| file.sections().enumerate().find(|(_, s)| &*s.name == v)) - .map(|(s, _)| u32::try_from(s).unwrap()); - let fsym = file.get_or_create_symbol(&sym.0).unwrap(); - *fsym.kind_mut() = sym.4; - if secno.is_some() { - *fsym.section_mut() = secno; - *fsym.value_mut() = sym.2.map(|v| v as u128); - *fsym.symbol_type_mut() = sym.3; - } - } - - fmt.write_file(&mut x, &file)?; - Ok(()) - } -} - -impl XLangPlugin for CleverCodegenPlugin { - fn accept_ir( - &mut self, - ir: &mut xlang_struct::File, - ) -> xlang::abi::result::Result<(), xlang::plugin::Error> { - self.fns = Some(std::collections::HashMap::new()); - let properties = self.properties.unwrap(); - - let mut tys = TypeInformation::from_properties(properties); - - for Pair(path, member) in &ir.root.members { - match &member.member_decl { - xlang_struct::MemberDeclaration::AggregateDefinition(defn) => { - tys.add_aggregate(path.clone(), defn.clone()); - } - xlang_struct::MemberDeclaration::OpaqueAggregate(_) => { - tys.add_opaque_aggregate(path.clone()); - } - _ => {} - } - } - - let tys = Rc::new(tys); - - for Pair(path, member) in &ir.root.members { - let name = &*path.components; - let name = match name { - [xlang_struct::PathComponent::Root, xlang_struct::PathComponent::Text(t)] - | [xlang_struct::PathComponent::Text(t)] => t.to_string(), - [xlang_struct::PathComponent::Root, v @ ..] | [v @ ..] => { - xlang_backend::mangle::mangle_itanium(v) - } - }; - - match &member.member_decl { - xlang_struct::MemberDeclaration::Function(FunctionDeclaration { - ty, - body: xlang::abi::option::Some(body), - .. - }) => { - let features = self.features.clone(); - let mut state = FunctionCodegen::new( - CleverFunctionCodegen { - insns: Vec::new(), - symbols: Vec::new(), - name: name.clone(), - strings: self.strings.clone(), - properties, - gpr_status: HashMap::new(), - frame_size: 0, - scratch_reg: None, - ptrreg: None, - trap_unreachable: true, - features: features.clone(), - tys: tys.clone(), - callconv: CleverCallConv { - features, - tys: tys.clone(), - }, - }, - path.clone(), - ty.clone(), - properties, - tys.clone(), - ); - state.write_function_body(body); - self.fns.as_mut().unwrap().insert(name.clone(), state); - } - xlang_struct::MemberDeclaration::Function(FunctionDeclaration { - body: xlang::abi::option::None, - .. 
- }) - | xlang_struct::MemberDeclaration::Scope(_) - | xlang_struct::MemberDeclaration::Empty - | xlang_struct::MemberDeclaration::OpaqueAggregate(_) - | xlang_struct::MemberDeclaration::AggregateDefinition(_) => {} - xlang_struct::MemberDeclaration::Static(_) => todo!(), - } - } - - xlang::abi::result::Ok(()) - } - - #[allow(clippy::needless_borrow)] // Incorrect lint - fn set_target(&mut self, targ: &'static TargetProperties<'static>) { - self.properties = Some(targ); - self.features = get_features_from_properties(targ, targ.arch.default_machine); - } -} - -fn get_features_from_properties( - properties: &'static TargetProperties, - machine: &'static MachineProperties, -) -> HashSet { - let mut names = HashSet::new(); - for &f in machine.default_features { - names.insert(f); - } - for &Pair(name, val) in properties.enabled_features { - if val { - names.insert(name); - } else { - names.remove(&name); - } - } - - names - .into_iter() - .map(xlang::abi::string::StringView::into_str) - .map(CleverExtension::from_str) - .collect::>() - .unwrap() -} - -impl XLangCodegen for CleverCodegenPlugin { - fn target_matches(&self, x: StringView) -> bool { - let target: target_tuples::Target = x.parse().unwrap(); - - matches!(target.arch(), Architecture::Clever) - } - - fn write_output( - &mut self, - x: xlang::prelude::v1::DynMut, - mode: OutputMode, - ) -> xlang::abi::io::Result<()> { - let wrapper = xlang::abi::io::WriteAdapter::new(x); - if mode != OutputMode::Obj { - todo!("asm output") - } - self.write_output_impl(wrapper).map_err(Into::into).into() - } - - fn set_features(&mut self, features: Span) { - self.features = features - .iter() - .map(Deref::deref) - .map(CleverExtension::from_str) - .collect::>() - .unwrap(); - } -} +use xlang::plugin::XLangCodegen; +use xlang::prelude::v1::*; xlang::host::rustcall! { #[no_mangle] +#[allow(improper_ctypes_definitions)] pub extern "rustcall" fn xlang_backend_main() -> DynBox { - DynBox::unsize_box(xlang::prelude::v1::Box::new(CleverCodegenPlugin { - fns: Some(std::collections::HashMap::new()), - strings: Rc::new(RefCell::new(StringMap::new())), - properties: None, - features: HashSet::new() - })) + todo!() }} xlang::plugin_abi_version!("0.1"); diff --git a/codegen-x86/src/lib.rs b/codegen-x86/src/lib.rs index 24df1ad3..cc216189 100644 --- a/codegen-x86/src/lib.rs +++ b/codegen-x86/src/lib.rs @@ -9,6 +9,7 @@ use xlang::{ xlang::host::rustcall! 
{ #[no_mangle] +#[allow(improper_ctypes_definitions)] pub extern "rustcall" fn xlang_backend_main() -> DynBox { DynBox::unsize_box(Box::new(xlang_backend::mc::MCBackend::new(mc::new_writer()))) }} diff --git a/lccc/src/main.rs b/lccc/src/main.rs index 82aa3621..d6eb0e74 100644 --- a/lccc/src/main.rs +++ b/lccc/src/main.rs @@ -141,6 +141,13 @@ fn main() { TakesArg::Never, true ), + ArgSpec::new( + "printxir", + xlang::vec!["print-xir"], + Vec::new(), + TakesArg::Never, + true + ), ArgSpec::new( "shared", xlang::vec!["shared"], @@ -216,6 +223,7 @@ fn main() { "output" => output = arg.value.clone(), "compile" => mode = Mode::CompileOnly, "typeck" => mode = Mode::TypeCheck, + "printxir" => mode = Mode::Xir, "pluginpath" => { let arg = arg.value.as_ref().unwrap(); let (name, path) = arg.split_once('=').unwrap(); @@ -456,9 +464,14 @@ fn main() { ); let mut codegen_handles = Vec::new(); - for codegen_path in &codegen_paths { - codegen_handles.push(Handle::open(codegen_path).expect("couldn't load frontend library")); + + if mode > Mode::Xir { + for codegen_path in &codegen_paths { + codegen_handles + .push(Handle::open(codegen_path).expect("couldn't load frontend library")); + } } + let mut codegens = Vec::new(); for codegen_handle in &codegen_handles { let initializer: lccc::CodegenInit = @@ -478,12 +491,6 @@ fn main() { } } - let codegen = if let Some(cg) = codegen { - cg - } else { - panic!("couldn't find a backend for target {}", xtarget) - }; - for file in &files { let mut file_path = Path::new(file).canonicalize().unwrap(); file_path.pop(); @@ -585,6 +592,11 @@ fn main() { } if mode >= Mode::Asm { + let codegen = if let Some(cg) = &mut codegen { + cg + } else { + panic!("couldn't find a backend for target {}", xtarget) + }; codegen.set_target(properties); codegen.set_features(Span::new(&features)); codegen.accept_ir(&mut file).unwrap(); @@ -600,7 +612,6 @@ fn main() { .unwrap(); // TODO: Handle `-S` and write assembly instead of an object } else if mode == Mode::Xir { - todo!() } } else { file_pairs.push((file.clone(), file.clone())); diff --git a/rust/src/irgen/xir_visitor.rs b/rust/src/irgen/xir_visitor.rs index 7936b45c..3d5e3d28 100644 --- a/rust/src/irgen/xir_visitor.rs +++ b/rust/src/irgen/xir_visitor.rs @@ -7,7 +7,7 @@ use xlang::ir::{ }; use xlang::prelude::v1::{HashMap, Pair}; use xlang::targets::properties::TargetProperties; -use xlang::{abi::string::String as XLangString, abi::vec::Vec, vec}; +use xlang::{abi::boxed::Box as XLangBox, abi::string::String as XLangString, abi::vec::Vec, vec}; use xlang::{ abi::{ self, @@ -16,6 +16,7 @@ use xlang::{ ir::PathComponent, }; +use crate::sema::mir; use crate::sema::{cx, hir::BinaryOp, mir::SsaVarId}; use crate::sema::{generics, ty, UserTypeKind}; use crate::sema::{mir::UnaryOp, ty::Mutability}; @@ -568,7 +569,6 @@ impl<'a> FunctionDefVisitor for XirFunctionDefVisitor<'a> { self.names, self.properties, self.deftys, - self.file, &mut self.fndef.ty, self.fndef.body.insert(ir::FunctionBody::default()), ))) @@ -951,64 +951,37 @@ pub struct XirFunctionBodyVisitor<'a> { names: &'a NameMap, properties: &'a TargetProperties<'a>, deftys: &'a HashMap, - file: &'a mut ir::File, cur_fnty: &'a mut ir::FnType, fndecl: &'a mut ir::FunctionBody, - targs: HashMap>, - var_heights: HashMap, ssa_tys: HashMap, - stack_height: u32, } impl<'a> XirFunctionBodyVisitor<'a> { - fn new( + pub fn new( defs: &'a Definitions, names: &'a NameMap, properties: &'a TargetProperties<'a>, deftys: &'a HashMap, - file: &'a mut ir::File, cur_fnty: &'a mut ir::FnType, fndecl: &'a 
mut ir::FunctionBody, ) -> Self { - let mut result = Self { - stack_height: cur_fnty.params.len() as u32, + Self { defs, names, properties, deftys, - file, - fndecl, cur_fnty, - targs: HashMap::new(), - var_heights: HashMap::new(), + fndecl, ssa_tys: HashMap::new(), - }; - - for (idx, ty) in result.cur_fnty.params.iter().enumerate() { - let id = SsaVarId(idx as u32); - result.var_heights.insert(id, idx as u32); - result.ssa_tys.insert(id, ty.clone()); } - - result } } -impl<'a> Drop for XirFunctionBodyVisitor<'a> { - fn drop(&mut self) { - for stat in self.fndecl.block.items.iter_mut() { - if let ir::BlockItem::Target { num, stack } = stat { - *stack = core::mem::take( - self.targs - .get_mut(num) - .expect("must have visited the basic block first"), - ); - } - } +impl<'a> FunctionBodyVisitor for XirFunctionBodyVisitor<'a> { + fn visit_inner_value(&mut self) -> Option> { + todo!() } -} -impl<'a> FunctionBodyVisitor for XirFunctionBodyVisitor<'a> { fn visit_basic_block(&mut self) -> Option> { Some(Box::new(XirBasicBlockVisitor::new( self.defs, @@ -1016,21 +989,9 @@ impl<'a> FunctionBodyVisitor for XirFunctionBodyVisitor<'a> { self.properties, self.deftys, self.cur_fnty, - self.fndecl, - &mut self.targs, - &mut self.var_heights, + self.fndecl.blocks.push_mut(ir::Block::default()), + &mut self.fndecl.locals, &mut self.ssa_tys, - self.stack_height, - ))) - } - - fn visit_inner_value(&mut self) -> Option> { - Some(Box::new(XirValueDefVisitor::new( - self.defs, - self.names, - self.deftys, - self.file, - self.properties, ))) } } @@ -1041,27 +1002,24 @@ pub struct XirBasicBlockVisitor<'a> { properties: &'a TargetProperties<'a>, deftys: &'a HashMap, cur_fnty: &'a mut ir::FnType, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, + block: &'a mut ir::Block, + locals: &'a mut Vec, ssa_tys: &'a mut HashMap, stack_height: u32, - bb_id: BasicBlockId, - incoming_vars: Vec, + var_heights: HashMap, + var_stack: Vec, } impl<'a> XirBasicBlockVisitor<'a> { - fn new( + pub fn new( defs: &'a Definitions, names: &'a NameMap, properties: &'a TargetProperties<'a>, deftys: &'a HashMap, cur_fnty: &'a mut ir::FnType, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, + block: &'a mut ir::Block, + locals: &'a mut Vec, ssa_tys: &'a mut HashMap, - stack_height: u32, ) -> Self { Self { defs, @@ -1069,39 +1027,39 @@ impl<'a> XirBasicBlockVisitor<'a> { properties, deftys, cur_fnty, - body, - targs, - var_heights, + block, + locals, ssa_tys, - stack_height, - bb_id: BasicBlockId(u32::MAX), - incoming_vars: vec![], + stack_height: 0, + var_heights: HashMap::new(), + var_stack: Vec::new(), } } } impl<'a> BasicBlockVisitor for XirBasicBlockVisitor<'a> { - fn visit_id(&mut self, bb_id: BasicBlockId) { - self.targs.get_or_insert_mut(bb_id, Vec::new()); - self.body.block.items.push(ir::BlockItem::Target { - num: bb_id.id(), - stack: vec![], - }); - self.bb_id = bb_id; + fn visit_id(&mut self, id: mir::BasicBlockId) { + self.block.target = id.id(); } fn visit_incoming_var(&mut self, incoming: SsaVarId) -> Option> { - self.stack_height += 1; let height = self.stack_height; + self.stack_height += 1; self.var_heights.insert(incoming, height); - let ty = self.ssa_tys.get_or_insert_mut(incoming, ir::Type::Null); - self.incoming_vars.push(incoming); + self.var_stack.push(incoming); Some(Box::new(XirTypeVisitor::new( self.defs, self.names, - ty, + &mut self + .block + .incoming_stack + .push_mut(ir::StackItem { + ty: ir::Type::Null, + kind: 
ir::StackValueKind::RValue, + }) + .ty, self.properties, ))) } @@ -1113,11 +1071,12 @@ impl<'a> BasicBlockVisitor for XirBasicBlockVisitor<'a> { self.properties, self.deftys, self.cur_fnty, - self.body, - self.targs, - &mut self.var_heights, - &mut self.ssa_tys, + &mut self.block.expr, + self.locals, + self.ssa_tys, &mut self.stack_height, + &mut self.var_heights, + &mut self.var_stack, ))) } @@ -1128,39 +1087,28 @@ impl<'a> BasicBlockVisitor for XirBasicBlockVisitor<'a> { self.properties, self.deftys, self.cur_fnty, - self.body, - self.targs, - &mut self.var_heights, - &mut self.ssa_tys, + self.block, + self.locals, + self.ssa_tys, &mut self.stack_height, + &mut self.var_heights, + &mut self.var_stack, ))) } } -impl<'a> Drop for XirBasicBlockVisitor<'a> { - fn drop(&mut self) { - let targs = self.targs.get_mut(&self.bb_id).unwrap(); - for incoming in &self.incoming_vars { - let ty = self.ssa_tys[incoming].clone(); - targs.push(ir::StackItem { - ty, - kind: ir::StackValueKind::RValue, - }); - } - } -} - pub struct XirTerminatorVisitor<'a> { defs: &'a Definitions, names: &'a NameMap, properties: &'a TargetProperties<'a>, deftys: &'a HashMap, cur_fnty: &'a mut ir::FnType, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, + block: &'a mut ir::Block, + locals: &'a mut Vec, ssa_tys: &'a mut HashMap, - var_heights: &'a mut HashMap, stack_height: &'a mut u32, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, } impl<'a> XirTerminatorVisitor<'a> { @@ -1170,11 +1118,12 @@ impl<'a> XirTerminatorVisitor<'a> { properties: &'a TargetProperties<'a>, deftys: &'a HashMap, cur_fnty: &'a mut ir::FnType, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, + block: &'a mut ir::Block, + locals: &'a mut Vec, ssa_tys: &'a mut HashMap, stack_height: &'a mut u32, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, ) -> Self { Self { defs, @@ -1182,130 +1131,158 @@ impl<'a> XirTerminatorVisitor<'a> { properties, deftys, cur_fnty, - body, - targs, + block, + locals, ssa_tys, - var_heights, stack_height, + var_heights, + var_stack, } } } impl<'a> TerminatorVisitor for XirTerminatorVisitor<'a> { + fn visit_branch(&mut self) -> Option> { + todo!() + } + fn visit_call(&mut self) -> Option> { Some(Box::new(XirCallVisitor::new( self.defs, self.names, self.properties, self.deftys, - self.body, - self.targs, - self.var_heights, + self.cur_fnty, + self.block, + self.locals, self.ssa_tys, self.stack_height, + self.var_heights, + self.var_stack, ))) } fn visit_jump(&mut self) -> Option> { + let targ = { + self.block.term = ir::Terminator::Jump(ir::JumpTarget { + target: !0, + flags: ir::JumpTargetFlags::empty(), + }); + + match &mut self.block.term { + ir::Terminator::Jump(targ) => targ, + _ => unsafe { core::hint::unreachable_unchecked() }, + } + }; + Some(Box::new(XirJumpVisitor::new( + self.defs, self.names, self.properties, - self.body, - self.targs, - self.var_heights, + self.deftys, + self.cur_fnty, + &mut self.block.expr, + targ, + self.locals, self.ssa_tys, self.stack_height, - ir::BranchCondition::Always, + self.var_heights, + self.var_stack, ))) } fn visit_return(&mut self) -> Option> { - Some(Box::new(XirReturnVisitor(XirExprVisitor::new( - self.defs, - self.names, - self.properties, - self.deftys, - self.body, - self.targs, - self.var_heights, - self.stack_height, - )))) - } - - fn visit_branch(&mut self) -> Option> { - Some(Box::new(XirBranchVisitor::new( + self.block.term = ir::Terminator::Exit(1); + Some(Box::new(XirExprVisitor::new( self.defs, 
self.names, self.properties, self.deftys, - self.body, - self.targs, - self.var_heights, + self.cur_fnty, + &mut self.block.expr, + self.locals, self.ssa_tys, self.stack_height, + self.var_heights, + self.var_stack, ))) } } -pub struct XirReturnVisitor<'a>(XirExprVisitor<'a>); - -impl<'a> ExprVisitor for XirReturnVisitor<'a> { - fn visit_unreachable(&mut self) { - self.0.visit_unreachable() - } - - fn visit_const_int(&mut self) -> Option> { - self.0.visit_const_int() - } - - fn visit_const(&mut self, defid: DefId) { - self.0.visit_const(defid) - } - - fn visit_cast(&mut self) -> Option> { - self.0.visit_cast() - } - - fn visit_const_string(&mut self) -> Option> { - self.0.visit_const_string() - } - - fn visit_var(&mut self, var: SsaVarId) { - self.0.visit_var(var) - } +pub struct XirJumpVisitor<'a> { + defs: &'a Definitions, + names: &'a NameMap, + properties: &'a TargetProperties<'a>, + deftys: &'a HashMap, + cur_fnty: &'a mut ir::FnType, + exprs: &'a mut Vec, + targ: &'a mut ir::JumpTarget, + locals: &'a mut Vec, + ssa_tys: &'a mut HashMap, + stack_height: &'a mut u32, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, +} - fn visit_tuple(&mut self) -> Option> { - self.0.visit_tuple() +impl<'a> XirJumpVisitor<'a> { + pub fn new( + defs: &'a Definitions, + names: &'a NameMap, + properties: &'a TargetProperties<'a>, + deftys: &'a HashMap, + cur_fnty: &'a mut ir::FnType, + exprs: &'a mut Vec, + targ: &'a mut ir::JumpTarget, + locals: &'a mut Vec, + ssa_tys: &'a mut HashMap, + stack_height: &'a mut u32, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, + ) -> Self { + Self { + defs, + names, + properties, + deftys, + cur_fnty, + exprs, + targ, + locals, + ssa_tys, + stack_height, + var_heights, + var_stack, + } } +} - fn visit_ctor(&mut self) -> Option> { - self.0.visit_ctor() +impl<'a> JumpVisitor for XirJumpVisitor<'a> { + fn visit_target_bb(&mut self, targbb: mir::BasicBlockId) { + self.targ.target = targbb.id(); } - fn visit_field_subobject(&mut self) -> Option> { - self.0.visit_field_subobject() - } + fn visit_remap(&mut self, src: mir::SsaVarId, _: mir::SsaVarId) { + let height = self.var_heights.remove(&src).unwrap().1; + let i = self + .var_stack + .iter() + .enumerate() + .skip_while(|(_, v)| *v != &src) + .map(|(off, _)| off) + .next() + .unwrap(); - fn visit_field_project(&mut self) -> Option> { - self.0.visit_field_project() - } + self.var_stack.remove(i); - fn visit_binary_expr(&mut self) -> Option> { - self.0.visit_binary_expr() - } + for var in &self.var_stack[i..] 
{ + self.var_heights[var] -= 1; + } - fn visit_unary_expr(&mut self) -> Option> { - self.0.visit_unary_expr() - } -} + let depth = (*self.stack_height) - (height + 1); -impl<'a> Drop for XirReturnVisitor<'a> { - fn drop(&mut self) { - self.0 - .body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Exit { values: 1 })) + if depth != 0 { + self.exprs.push(ir::Expr::Pivot(depth, 1)); + } } } @@ -1314,14 +1291,16 @@ pub struct XirCallVisitor<'a> { names: &'a NameMap, properties: &'a TargetProperties<'a>, deftys: &'a HashMap, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, + cur_fnty: &'a mut ir::FnType, + block: &'a mut ir::Block, + locals: &'a mut Vec, ssa_tys: &'a mut HashMap, stack_height: &'a mut u32, - retplace: Option, - fnty: Option, - late_invoke_intrin: Option<(IntrinsicDef, generics::GenericArgs)>, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, + late_bound_intrinsic: Option, + fnty: ir::FnType, + targ: Option, } impl<'a> XirCallVisitor<'a> { @@ -1330,33 +1309,35 @@ impl<'a> XirCallVisitor<'a> { names: &'a NameMap, properties: &'a TargetProperties<'a>, deftys: &'a HashMap, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, + cur_fnty: &'a mut ir::FnType, + block: &'a mut ir::Block, + locals: &'a mut Vec, ssa_tys: &'a mut HashMap, stack_height: &'a mut u32, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, ) -> Self { Self { defs, names, - body, + properties, deftys, - targs, - var_heights, + cur_fnty, + block, + locals, ssa_tys, stack_height, - retplace: None, - fnty: None, - properties, - late_invoke_intrin: None, + var_heights, + var_stack, + late_bound_intrinsic: None, + fnty: ir::FnType::default(), + targ: None, } } } impl<'a> CallVisitor for XirCallVisitor<'a> { - fn visit_retplace(&mut self, retplace: SsaVarId) { - self.retplace = Some(retplace); - } + fn visit_retplace(&mut self, _: mir::SsaVarId) {} fn visit_target(&mut self) -> Option> { Some(Box::new(XirExprVisitor::new( @@ -1364,10 +1345,13 @@ impl<'a> CallVisitor for XirCallVisitor<'a> { self.names, self.properties, self.deftys, - self.body, - self.targs, - self.var_heights, + self.cur_fnty, + &mut self.block.expr, + self.locals, + self.ssa_tys, self.stack_height, + self.var_heights, + self.var_stack, ))) } @@ -1375,7 +1359,7 @@ impl<'a> CallVisitor for XirCallVisitor<'a> { Some(Box::new(XirFunctionTyVisitor::new( self.defs, self.names, - self.fnty.insert(ir::FnType::default()), + &mut self.fnty, self.properties, ))) } @@ -1386,603 +1370,203 @@ impl<'a> CallVisitor for XirCallVisitor<'a> { self.names, self.properties, self.deftys, - self.body, - self.targs, - self.var_heights, + self.cur_fnty, + &mut self.block.expr, + self.locals, + self.ssa_tys, self.stack_height, + self.var_heights, + self.var_stack, ))) } - fn visit_tailcall(&mut self) { - let fnty = self.fnty.take().expect("visit_fnty must have been called"); - - let is_never = fnty.ret == NEVER; - - if let Some((intrin, generics)) = self.late_invoke_intrin.take() { - // Handle late bound intrinsics here - match intrin { - IntrinsicDef::__builtin_unreachable => { - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Const(ir::Value::Invalid( - fnty.ret, - )))); - } - IntrinsicDef::impl_id => todo!("impl_id"), - IntrinsicDef::type_id => todo!("type_id"), - IntrinsicDef::type_name => todo!("type_name"), - IntrinsicDef::destroy_at => todo!("destroy_at"), - IntrinsicDef::discriminant => todo!("discriminant"), - - IntrinsicDef::construct_in_place 
=> todo!("construct_in_place"), - IntrinsicDef::__builtin_read => { - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Indirect)); - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::AsRValue( - ir::AccessClass::Normal, - ))); - } - IntrinsicDef::__builtin_read_freeze => { - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Indirect)); - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::AsRValue( - ir::AccessClass::Freeze, - ))); - } - IntrinsicDef::__builtin_read_volatile => { - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Indirect)); - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::AsRValue( - ir::AccessClass::Volatile, - ))); - } - IntrinsicDef::__builtin_write => { - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Pivot(1, 1))); - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Indirect)); - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Assign( - ir::AccessClass::Normal, - ))); - } - IntrinsicDef::__builtin_write_volatile => { - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Pivot(1, 1))); - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Indirect)); - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Assign( - ir::AccessClass::Normal, - ))); - } - IntrinsicDef::__builtin_size_of => todo!(), - IntrinsicDef::__builtin_align_of => todo!(), - IntrinsicDef::__builtin_size_of_val => todo!(), - IntrinsicDef::__builtin_align_of_val => todo!(), - - IntrinsicDef::__builtin_abort - | IntrinsicDef::__builtin_allocate - | IntrinsicDef::__builtin_deallocate - | IntrinsicDef::transmute - | IntrinsicDef::black_box - | IntrinsicDef::__builtin_likely - | IntrinsicDef::__builtin_unlikely => { - unreachable!("These are handled like regular functions") - } - } - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Exit { values: 1 })); - } else { - // Either we're compatible or we're `!` and don't return - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Tailcall(fnty))); - } - } - fn visit_next(&mut self) -> Option> { - let fnty = self.fnty.take().expect("visit_fnty must be called first"); - let retty = fnty.ret.clone(); - *self.stack_height -= fnty.params.len() as u32; - - if let Some((intrin, generics)) = self.late_invoke_intrin.take() { - // Handle late bound intrinsics here - // Assume that params are correct - yes, this will bork irgen if the params are wrong - match intrin { - IntrinsicDef::__builtin_unreachable => { - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Const(ir::Value::Invalid( - retty.clone(), - )))); - } - IntrinsicDef::impl_id => todo!("impl_id"), - IntrinsicDef::type_id => todo!("type_id"), - IntrinsicDef::type_name => todo!("type_name"), - IntrinsicDef::destroy_at => todo!("destroy_at"), - IntrinsicDef::discriminant => todo!("discriminant"), - - IntrinsicDef::construct_in_place => todo!("construct_in_place"), - IntrinsicDef::__builtin_read => { - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Indirect)); - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::AsRValue( - ir::AccessClass::Normal, - ))); - } - IntrinsicDef::__builtin_read_freeze => { - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Indirect)); - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::AsRValue( - ir::AccessClass::Freeze, - ))); - } - IntrinsicDef::__builtin_read_volatile => { - 
self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Indirect)); - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::AsRValue( - ir::AccessClass::Volatile, - ))); - } - IntrinsicDef::__builtin_write => { - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Pivot(1, 1))); - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Indirect)); - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Assign( - ir::AccessClass::Normal, - ))); - } - IntrinsicDef::__builtin_write_volatile => { - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Pivot(1, 1))); - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Indirect)); - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Assign( - ir::AccessClass::Normal, - ))); - } - IntrinsicDef::__builtin_size_of => { - let ty = match generics.params.into_iter().next().unwrap() { - generics::GenericArg::Type(ty) => ty, - _ => unreachable!(), - }; - - let layout = self.defs.layout_of(&ty, DefId::ROOT, DefId::ROOT); - - let size = layout - .size - .expect("__builtin_size_of requires a Sized type"); - - let intty = match &retty { - ir::Type::Scalar(sty) => *sty, - _ => unreachable!("__builtin_size_of returns `usize`"), - }; - - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Const(ir::Value::Integer { - ty: intty, - val: size as u128, - }))); - } - IntrinsicDef::__builtin_align_of => todo!(), - IntrinsicDef::__builtin_size_of_val => todo!(), - IntrinsicDef::__builtin_align_of_val => todo!(), - - IntrinsicDef::__builtin_abort - | IntrinsicDef::__builtin_allocate - | IntrinsicDef::__builtin_deallocate - | IntrinsicDef::transmute - | IntrinsicDef::black_box - | IntrinsicDef::__builtin_likely - | IntrinsicDef::__builtin_unlikely => unreachable!(), - } - } else { - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::CallFunction(fnty))); - } - - Some(Box::new(XirNextBlockVisitor { - inner: XirJumpVisitor::new( - self.names, - self.properties, - self.body, - self.targs, - self.var_heights, - self.ssa_tys, - self.stack_height, - ir::BranchCondition::Always, - ), - retty, - targ: None, - })) + Some(Box::new(XirJumpVisitor::new( + self.defs, + self.names, + self.properties, + self.deftys, + self.cur_fnty, + &mut self.block.expr, + self.targ.insert(ir::JumpTarget { + flags: ir::JumpTargetFlags::empty(), + target: !0, + }), + self.locals, + self.ssa_tys, + self.stack_height, + self.var_heights, + self.var_stack, + ))) } fn visit_intrinsic(&mut self, intrin: IntrinsicDef, generics: &generics::GenericArgs) { - let (item, fnty) = match intrin { - // Rust sym calls - func @ (IntrinsicDef::__builtin_allocate | IntrinsicDef::__builtin_deallocate) => { - let layout_ty = self - .defs - .get_lang_item(LangItem::LayoutTy) - .expect("lang item `layout` is required to use `__builtin_alloc`"); - let u8_ptr = ty::Type::Pointer( - span::synthetic(Mutability::Mut), - Box::new(span::synthetic(ty::Type::Int(ty::IntType::u8))), - ); - let (sym, sig) = match func { - IntrinsicDef::__builtin_allocate => { - let alloc_sym = self.defs.get_lang_item(LangItem::AllocSym).expect( - "lang item `alloc_symbol` is required to use `__builtin_alloc`", - ); - - let fnty = ty::FnType { - safety: span::synthetic(ty::Safety::Unsafe), - constness: span::synthetic(ty::Mutability::Mut), - asyncness: span::synthetic(ty::AsyncType::Normal), - tag: span::synthetic(ty::AbiTag::LCRust(None)), - retty: Box::new(span::synthetic(u8_ptr)), - paramtys: 
std::vec![span::synthetic(ty::Type::UserType( - layout_ty, - Default::default() - ))], - iscvarargs: span::synthetic(false), - }; - - (alloc_sym, fnty) - } - IntrinsicDef::__builtin_deallocate => { - let dealloc_sym = self.defs.get_lang_item(LangItem::DeallocSym).expect( - "lang item `deaalloc_symbol` is required to use `__builtin_alloc`", - ); - - let fnty = ty::FnType { - safety: span::synthetic(ty::Safety::Unsafe), - constness: span::synthetic(ty::Mutability::Mut), - asyncness: span::synthetic(ty::AsyncType::Normal), - tag: span::synthetic(ty::AbiTag::LCRust(None)), - retty: Box::new(span::synthetic(ty::Type::UNIT)), - paramtys: std::vec![ - span::synthetic(ty::Type::UserType(layout_ty, Default::default())), - span::synthetic(u8_ptr), - ], - iscvarargs: span::synthetic(false), - }; - - (dealloc_sym, fnty) - } - _ => unreachable!(), - }; - - let name = self.names[&sym]; - let mut ir_fnty = ir::FnType::default(); - let fnty_vis = - XirFunctionTyVisitor::new(self.defs, self.names, &mut ir_fnty, self.properties); - - super::visitor::visit_fnty(fnty_vis, &sig, &self.defs); - - let path = ir::Path { - components: vec![ir::PathComponent::Text((&name).into())], - }; - - (path, ir_fnty) - } - - // xlang intrinsics - xlang_intrin @ (IntrinsicDef::__builtin_abort - | IntrinsicDef::transmute - | IntrinsicDef::black_box - | IntrinsicDef::__builtin_likely - | IntrinsicDef::__builtin_unlikely) => { - let path = match xlang_intrin { - IntrinsicDef::__builtin_abort => { - ir::simple_path!(__lccc::intrinsics::C::__builtin_trap) - } - IntrinsicDef::transmute => { - ir::simple_path!(__lccc::intrinsics::Rust::__builtin_transmute) - } - IntrinsicDef::black_box => ir::simple_path!(__lccc::xlang::deoptimize), - IntrinsicDef::__builtin_likely => ir::simple_path!(__lccc::xlang::likely), - IntrinsicDef::__builtin_unlikely => ir::simple_path!(__lccc::xlang::unlikely), - _ => unreachable!(), - }; - - ( - path, - self.fnty - .as_ref() - .cloned() - .expect("visit_fnty must have been called first"), - ) - } - - // late bound intrinsics (Few of these should survive to irgen?) 
- intrin => return self.late_invoke_intrin = Some((intrin, generics.clone())), - }; - - let ty = ir::Type::FnType((xlang::abi::boxed::Box::new(fnty))); - - let value = ir::Value::GlobalAddress { ty, item }; - - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Const(value))); + todo!("{}{}", intrin.name(), generics) } -} - -pub struct XirNextBlockVisitor<'a> { - inner: XirJumpVisitor<'a>, - retty: ir::Type, - targ: Option, -} -impl<'a> JumpVisitor for XirNextBlockVisitor<'a> { - fn visit_target_bb(&mut self, targbb: BasicBlockId) { - self.targ = Some(targbb); - self.inner.visit_target_bb(targbb) + fn visit_tailcall(&mut self) { + self.block.term = ir::Terminator::Tailcall( + ir::CallFlags::empty(), + XLangBox::new(core::mem::take(&mut self.fnty)), + ); } +} - fn visit_remap(&mut self, src: SsaVarId, targ: SsaVarId) { - self.inner.visit_remap(src, targ) +impl<'a> Drop for XirCallVisitor<'a> { + fn drop(&mut self) { + if let Some(targ) = self.targ.take() { + self.block.term = ir::Terminator::Call( + ir::CallFlags::empty(), + XLangBox::new(core::mem::take(&mut self.fnty)), + targ, + ); + } } } -pub struct XirJumpVisitor<'a> { +pub struct XirStatementVisitor<'a> { + defs: &'a Definitions, names: &'a NameMap, properties: &'a TargetProperties<'a>, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, + deftys: &'a HashMap, + cur_fnty: &'a mut ir::FnType, + exprs: &'a mut Vec, + locals: &'a mut Vec, ssa_tys: &'a mut HashMap, stack_height: &'a mut u32, - targ: Option, - cond: ir::BranchCondition, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, } -impl<'a> XirJumpVisitor<'a> { +impl<'a> XirStatementVisitor<'a> { pub fn new( + defs: &'a Definitions, names: &'a NameMap, properties: &'a TargetProperties<'a>, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, + deftys: &'a HashMap, + cur_fnty: &'a mut ir::FnType, + exprs: &'a mut Vec, + locals: &'a mut Vec, ssa_tys: &'a mut HashMap, stack_height: &'a mut u32, - cond: ir::BranchCondition, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, ) -> Self { Self { + defs, names, properties, - body, - targs, - var_heights, - stack_height, + deftys, + cur_fnty, + exprs, + locals, ssa_tys, - targ: None, - cond, + stack_height, + var_heights, + var_stack, } } } -impl<'a> JumpVisitor for XirJumpVisitor<'a> { - fn visit_target_bb(&mut self, targbb: BasicBlockId) { - self.targ = Some(targbb); - } - - fn visit_remap(&mut self, src: SsaVarId, dest: SsaVarId) {} -} - -impl<'a> Drop for XirJumpVisitor<'a> { - fn drop(&mut self) { - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Branch { - cond: self.cond, - target: self.targ.unwrap().id(), - })); - if self.cond != ir::BranchCondition::Always && self.cond != ir::BranchCondition::Never { - *self.stack_height -= 1; - } - } -} - -pub struct XirBranchVisitor<'a> { - defs: &'a Definitions, - names: &'a NameMap, - properties: &'a TargetProperties<'a>, - deftys: &'a HashMap, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, - ssa_tys: &'a mut HashMap, - stack_height: &'a mut u32, -} - -impl<'a> XirBranchVisitor<'a> { - fn new( - defs: &'a Definitions, - names: &'a NameMap, - properties: &'a TargetProperties<'a>, - deftys: &'a HashMap, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, - ssa_tys: &'a mut HashMap, - stack_height: &'a mut u32, - ) -> Self { - Self { - defs, - names, - properties, - deftys, - body, - 
targs, - var_heights, - ssa_tys, - stack_height, - } - } -} - -impl<'a> BranchVisitor for XirBranchVisitor<'a> { - fn visit_branch_arm(&mut self) -> Option> { - Some(Box::new(XirBranchArmVisitor::new( +impl<'a> StatementVisitor for XirStatementVisitor<'a> { + fn visit_let(&mut self) -> Option> { + Some(Box::new(XirLetStatementVisitor::new( self.defs, self.names, self.properties, self.deftys, - self.body, - self.targs, - self.var_heights, + self.cur_fnty, + self.exprs, + self.locals, self.ssa_tys, self.stack_height, + self.var_heights, + self.var_stack, ))) } - fn visit_else(&mut self) -> Option> { - Some(Box::new(XirJumpVisitor::new( - self.names, - self.properties, - self.body, - self.targs, - self.var_heights, - self.ssa_tys, - self.stack_height, - ir::BranchCondition::Always, - ))) + fn visit_store_dead(&mut self, _: mir::SsaVarId) {} + + fn visit_discard(&mut self) -> Option> { + None } } -pub struct XirBranchArmVisitor<'a> { +pub struct XirLetStatementVisitor<'a> { defs: &'a Definitions, names: &'a NameMap, properties: &'a TargetProperties<'a>, deftys: &'a HashMap, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, + cur_fnty: &'a mut ir::FnType, + exprs: &'a mut Vec, + locals: &'a mut Vec, ssa_tys: &'a mut HashMap, stack_height: &'a mut u32, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, + var: SsaVarId, } -impl<'a> XirBranchArmVisitor<'a> { - fn new( +impl<'a> XirLetStatementVisitor<'a> { + pub fn new( defs: &'a Definitions, names: &'a NameMap, properties: &'a TargetProperties<'a>, deftys: &'a HashMap, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, + cur_fnty: &'a mut ir::FnType, + exprs: &'a mut Vec, + locals: &'a mut Vec, ssa_tys: &'a mut HashMap, stack_height: &'a mut u32, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, ) -> Self { Self { defs, names, properties, deftys, - body, - targs, - var_heights, + cur_fnty, + exprs, + locals, ssa_tys, stack_height, + var_heights, + var_stack, + var: SsaVarId::INVALID, } } } -impl<'a> BranchArmVisitor for XirBranchArmVisitor<'a> { - fn visit_cond(&mut self) -> Option> { - Some(Box::new(XirExprVisitor::new( +impl<'a> LetStatementVisitor for XirLetStatementVisitor<'a> { + fn visit_var(&mut self, var: mir::SsaVarId) { + self.var = var; + self.var_heights.insert(var, *self.stack_height); + self.var_stack.push(var); + } + + fn visit_var_ty(&mut self) -> Option> { + Some(Box::new(XirTypeVisitor::new( self.defs, self.names, + self.ssa_tys.get_or_insert_mut(self.var, ir::Type::Null), self.properties, - self.deftys, - self.body, - self.targs, - self.var_heights, - self.stack_height, ))) } - fn visit_jump(&mut self) -> Option> { - Some(Box::new(XirJumpVisitor::new( + fn visit_init(&mut self) -> Option> { + Some(Box::new(XirExprVisitor::new( + self.defs, self.names, self.properties, - self.body, - self.targs, - self.var_heights, + self.deftys, + self.cur_fnty, + self.exprs, + self.locals, self.ssa_tys, self.stack_height, - ir::BranchCondition::NotEqual, + self.var_heights, + self.var_stack, ))) } } @@ -1992,16 +1576,13 @@ pub struct XirExprVisitor<'a> { names: &'a NameMap, properties: &'a TargetProperties<'a>, deftys: &'a HashMap, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, + cur_fnty: &'a mut ir::FnType, + exprs: &'a mut Vec, + locals: &'a mut Vec, + ssa_tys: &'a mut HashMap, stack_height: &'a mut u32, -} - -impl<'a> Drop for XirExprVisitor<'a> { - fn drop(&mut self) { - *self.stack_height += 1; 
- } + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, } impl<'a> XirExprVisitor<'a> { @@ -2010,75 +1591,61 @@ impl<'a> XirExprVisitor<'a> { names: &'a NameMap, properties: &'a TargetProperties<'a>, deftys: &'a HashMap, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, + cur_fnty: &'a mut ir::FnType, + exprs: &'a mut Vec, + locals: &'a mut Vec, + ssa_tys: &'a mut HashMap, stack_height: &'a mut u32, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, ) -> Self { Self { defs, names, properties, deftys, - body, - targs, - var_heights, + cur_fnty, + exprs, + locals, + ssa_tys, stack_height, + var_heights, + var_stack, } } } impl<'a> ExprVisitor for XirExprVisitor<'a> { fn visit_unreachable(&mut self) { - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Const(ir::Value::Invalid( - NEVER, - )))); + todo!() } fn visit_const_int(&mut self) -> Option> { - match self - .body - .block - .items - .push_mut(ir::BlockItem::Expr(ir::Expr::Const(ir::Value::Integer { - ty: ir::ScalarType::default(), - val: 0, - }))) { - ir::BlockItem::Expr(ir::Expr::Const(ir::Value::Integer { ty, val })) => { - Some(Box::new(XirConstIntVisitor::new( - self.names, - self.properties, - self.deftys, - self.targs, - self.var_heights, - self.stack_height, - ty, - val, - ))) - } - _ => unreachable!(), - } + let (intty, val) = match self.exprs.push_mut(ir::Expr::Const(ir::Value::Integer { + ty: ir::ScalarType::default(), + val: 0, + })) { + ir::Expr::Const(ir::Value::Integer { ty, val }) => (ty, val), + _ => unsafe { core::hint::unreachable_unchecked() }, + }; + Some(Box::new(XirConstIntVisitor::new( + self.properties, + val, + intty, + ))) } fn visit_const(&mut self, defid: DefId) { - let sym = self.names[&defid]; + let name = self.names[&defid]; let path = ir::Path { - components: vec![ir::PathComponent::Text((&sym).into())], + components: vec![ir::PathComponent::Text((&name).into())], }; - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Const( - ir::Value::GlobalAddress { - ty: self.deftys[&defid].clone(), - item: path, - }, - ))) + let ty = self.deftys[&defid].clone(); + + self.exprs + .push(ir::Expr::Const(ir::Value::GlobalAddress { ty, item: path })); } fn visit_cast(&mut self) -> Option> { @@ -2087,1012 +1654,328 @@ impl<'a> ExprVisitor for XirExprVisitor<'a> { self.names, self.properties, self.deftys, - self.body, - self.targs, - self.var_heights, + self.cur_fnty, + self.exprs, + self.locals, + self.ssa_tys, self.stack_height, + self.var_heights, + self.var_stack, ))) } fn visit_const_string(&mut self) -> Option> { - match self - .body - .block - .items - .push_mut(ir::BlockItem::Expr(ir::Expr::Const(ir::Value::String { - encoding: ir::StringEncoding::Utf8, - utf8: XLangString::new(), - ty: ir::Type::Pointer(ir::PointerType { - inner: xlang::abi::boxed::Box::new(ir::Type::Scalar(ir::ScalarType { - header: ir::ScalarTypeHeader { - bitsize: 8, - vectorsize: XLangNone, - validity: ScalarValidity::empty(), - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: XLangNone, - max: XLangNone, - }, - })), - ..Default::default() - }), - }))) { - ir::BlockItem::Expr(ir::Expr::Const(ir::Value::String { utf8, .. 
})) => { - Some(Box::new(XirConstStringVisitor::new( - self.names, - self.properties, - self.deftys, - self.targs, - self.var_heights, - self.stack_height, - utf8, - ))) - } - _ => unreachable!(), - } + let (ty, val) = match self.exprs.push_mut(ir::Expr::Const(ir::Value::String { + encoding: ir::StringEncoding::Utf8, + ty: ir::Type::default(), + utf8: XLangString::new(), + })) { + ir::Expr::Const(ir::Value::String { ty, utf8, .. }) => (ty, utf8), + _ => unsafe { core::hint::unreachable_unchecked() }, + }; + Some(Box::new(XirConstStringVisitor::new( + self.properties, + val, + ty, + ))) } - fn visit_var(&mut self, var: SsaVarId) { - let depth = *self.stack_height - self.var_heights[&var]; + fn visit_var(&mut self, var: mir::SsaVarId) { + let height = self.var_heights[&var]; + + let depth = (*self.stack_height) - (height + 1); if depth != 0 { - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Pivot(1, depth))); + self.exprs.push(ir::Expr::Pivot(depth, 1)); } - - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Dup(1))); + self.exprs.push(ir::Expr::Dup(1)); if depth != 0 { - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Pivot(depth + 1, 1))); + self.exprs.push(ir::Expr::Pivot(1, depth + 1)); } } fn visit_tuple(&mut self) -> Option> { - Some(Box::new(XirTupleExprVisitor::new( + Some(Box::new(XirTupleVisitor::new( + self.defs, self.names, self.properties, self.deftys, - self.body, - self.targs, - self.var_heights, + self.cur_fnty, + self.exprs, + self.locals, + self.ssa_tys, self.stack_height, + self.var_heights, + self.var_stack, ))) } fn visit_ctor(&mut self) -> Option> { - Some(Box::new(XirConstructorExprVisitor::new( - self.defs, - self.names, - self.properties, - self.deftys, - self.body, - self.targs, - self.var_heights, - self.stack_height, - ))) + todo!() } fn visit_field_subobject(&mut self) -> Option> { - Some(Box::new(XirFieldAccessVisitor::new( - self.defs, - self.names, - self.properties, - self.deftys, - self.body, - self.targs, - self.var_heights, - self.stack_height, - ir::Expr::Member, - ))) + todo!() } fn visit_field_project(&mut self) -> Option> { - Some(Box::new(XirFieldAccessVisitor::new( - self.defs, - self.names, - self.properties, - self.deftys, - self.body, - self.targs, - self.var_heights, - self.stack_height, - ir::Expr::MemberIndirect, - ))) + todo!() } fn visit_binary_expr(&mut self) -> Option> { - Some(Box::new(XirBinaryExprVisitor::new( - self.defs, - self.names, - self.properties, - self.deftys, - self.body, - self.targs, - self.var_heights, - self.stack_height, - ))) + todo!() } fn visit_unary_expr(&mut self) -> Option> { - Some(Box::new(XirUnaryExprVisitor::new( - self.defs, - self.names, - self.properties, - self.deftys, - self.body, - self.targs, - self.var_heights, - self.stack_height, - ))) + todo!() + } +} + +impl<'a> Drop for XirExprVisitor<'a> { + fn drop(&mut self) { + *self.stack_height += 1; } } -pub struct XirConstructorExprVisitor<'a> { +pub struct XirTupleVisitor<'a> { defs: &'a Definitions, names: &'a NameMap, properties: &'a TargetProperties<'a>, deftys: &'a HashMap, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, + cur_fnty: &'a mut ir::FnType, + exprs: &'a mut Vec, + locals: &'a mut Vec, + ssa_tys: &'a mut HashMap, stack_height: &'a mut u32, - fields: Vec, - ctor_ty: DefId, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, + tys: Vec, } -impl<'a> XirConstructorExprVisitor<'a> { +impl<'a> XirTupleVisitor<'a> { pub fn new( defs: &'a 
Definitions, names: &'a NameMap, properties: &'a TargetProperties<'a>, deftys: &'a HashMap, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, + cur_fnty: &'a mut ir::FnType, + exprs: &'a mut Vec, + locals: &'a mut Vec, + ssa_tys: &'a mut HashMap, stack_height: &'a mut u32, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, ) -> Self { Self { defs, names, properties, deftys, - body, - targs, - var_heights, + cur_fnty, + exprs, + locals, + ssa_tys, stack_height, - fields: Vec::new(), - ctor_ty: DefId::ROOT, + var_heights, + var_stack, + tys: vec![], } } } -impl<'a> Drop for XirConstructorExprVisitor<'a> { - fn drop(&mut self) { - *self.stack_height -= self.fields.len() as u32; - let ty = ir::Type::Named(into_path(self.names[&self.ctor_ty])); - - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Aggregate( - ir::AggregateCtor { - ty, - fields: core::mem::take(&mut self.fields), - }, - ))); - } -} - -impl<'a> ConstructorVisitor for XirConstructorExprVisitor<'a> { - fn visit_ctor_def(&mut self, defid: DefId) { - self.ctor_ty = defid; - // TODO: Handle Enum constructors, which need to be nested. - } - - fn visit_field(&mut self) -> Option> { - let field = self.fields.push_mut(XLangString::new()); - Some(Box::new(XirConstructorFieldVisitor::new( +impl<'a> TupleExprVisitor for XirTupleVisitor<'a> { + fn visit_elem(&mut self) -> Option> { + Some(Box::new(XirExprVisitor::new( self.defs, self.names, self.properties, self.deftys, - self.body, - self.targs, - self.var_heights, + self.cur_fnty, + self.exprs, + self.locals, + self.ssa_tys, self.stack_height, - field, + self.var_heights, + self.var_stack, ))) } +} - fn visit_init(&mut self) -> Option> { - todo!() +impl<'a> Drop for XirTupleVisitor<'a> { + fn drop(&mut self) { + let fields = self + .tys + .iter() + .zip(0..) 
+ .map(|(_, x)| xlang::abi::format!("{}", x)) + .collect::>(); + + *self.stack_height -= fields.len() as u32; + + self.exprs.push(ir::Expr::Aggregate(ir::AggregateCtor { + ty: ir::Type::Product(core::mem::take(&mut self.tys)), + fields, + })); } } -pub struct XirConstructorFieldVisitor<'a> { +pub struct XirCastVisitor<'a> { defs: &'a Definitions, names: &'a NameMap, properties: &'a TargetProperties<'a>, deftys: &'a HashMap, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, + cur_fnty: &'a mut ir::FnType, + exprs: &'a mut Vec, + locals: &'a mut Vec, + ssa_tys: &'a mut HashMap, stack_height: &'a mut u32, - field: &'a mut XLangString, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, + ty: ir::Type, } -impl<'a> XirConstructorFieldVisitor<'a> { +impl<'a> XirCastVisitor<'a> { pub fn new( defs: &'a Definitions, names: &'a NameMap, properties: &'a TargetProperties<'a>, deftys: &'a HashMap, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, + cur_fnty: &'a mut ir::FnType, + exprs: &'a mut Vec, + locals: &'a mut Vec, + ssa_tys: &'a mut HashMap, stack_height: &'a mut u32, - field: &'a mut XLangString, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, ) -> Self { Self { defs, names, properties, deftys, - body, - targs, - var_heights, + cur_fnty, + exprs, + locals, + ssa_tys, stack_height, - field, + var_heights, + var_stack, + ty: ir::Type::Null, } } } -impl<'a> FieldInitVisitor for XirConstructorFieldVisitor<'a> { - fn visit_field(&mut self, field_name: &ty::FieldName) { - let _ = write!(self.field, "{}", field_name); - } - - fn visit_value(&mut self) -> Option> { +impl<'a> CastVisitor for XirCastVisitor<'a> { + fn visit_inner(&mut self) -> Option> { Some(Box::new(XirExprVisitor::new( self.defs, self.names, self.properties, self.deftys, - self.body, - self.targs, - self.var_heights, + self.cur_fnty, + self.exprs, + self.locals, + self.ssa_tys, self.stack_height, + self.var_heights, + self.var_stack, + ))) + } + + fn visit_cast_type(&mut self) -> Option> { + Some(Box::new(XirTypeVisitor::new( + self.defs, + self.names, + &mut self.ty, + self.properties, ))) } } -pub struct XirFieldAccessVisitor<'a> { - defs: &'a Definitions, - names: &'a NameMap, - properties: &'a TargetProperties<'a>, - deftys: &'a HashMap, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, - stack_height: &'a mut u32, - ctor: fn(XLangString) -> ir::Expr, - field: XLangString, +impl<'a> Drop for XirCastVisitor<'a> { + fn drop(&mut self) { + self.exprs.push(ir::Expr::Convert( + ir::ConversionStrength::Reinterpret, + core::mem::take(&mut self.ty), + )); + *self.stack_height -= 1; + } } -impl<'a> XirFieldAccessVisitor<'a> { - pub fn new( - defs: &'a Definitions, - names: &'a NameMap, - properties: &'a TargetProperties<'a>, - deftys: &'a HashMap, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, - stack_height: &'a mut u32, - ctor: fn(XLangString) -> ir::Expr, - ) -> Self { - Self { - defs, - names, - properties, - deftys, - body, - targs, - var_heights, - stack_height, - ctor, - field: XLangString::new(), - } - } -} - -impl<'a> Drop for XirFieldAccessVisitor<'a> { - fn drop(&mut self) { - *self.stack_height -= 1; - self.body - .block - .items - .push(ir::BlockItem::Expr((self.ctor)(core::mem::take( - &mut self.field, - )))) - } -} - -impl<'a> FieldAccessVisitor for XirFieldAccessVisitor<'a> { - fn visit_base(&mut self) -> Option> { - 
Some(Box::new(XirExprVisitor::new( - self.defs, - self.names, - self.properties, - self.deftys, - self.body, - self.targs, - self.var_heights, - self.stack_height, - ))) - } - fn visit_field(&mut self, field_name: &ty::FieldName) { - // TODO: variant fields - let _ = write!(self.field, "{}", field_name); - } -} - -pub struct XirBinaryExprVisitor<'a> { - defs: &'a Definitions, - names: &'a NameMap, - properties: &'a TargetProperties<'a>, - deftys: &'a HashMap, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, - stack_height: &'a mut u32, - op: Option, -} - -impl<'a> XirBinaryExprVisitor<'a> { - fn new( - defs: &'a Definitions, - names: &'a NameMap, - properties: &'a TargetProperties<'a>, - deftys: &'a HashMap, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, - stack_height: &'a mut u32, - ) -> Self { - Self { - defs, - names, - properties, - deftys, - body, - targs, - var_heights, - stack_height, - op: None, - } - } -} - -impl<'a> BinaryExprVisitor for XirBinaryExprVisitor<'a> { - fn visit_op(&mut self, op: BinaryOp) { - self.op = Some(op); - } - - fn visit_lhs(&mut self) -> Option> { - Some(Box::new(XirExprVisitor::new( - self.defs, - self.names, - self.properties, - self.deftys, - self.body, - self.targs, - self.var_heights, - self.stack_height, - ))) - } - - fn visit_rhs(&mut self) -> Option> { - Some(Box::new(XirExprVisitor::new( - self.defs, - self.names, - self.properties, - self.deftys, - self.body, - self.targs, - self.var_heights, - self.stack_height, - ))) - } -} - -impl<'a> Drop for XirBinaryExprVisitor<'a> { - fn drop(&mut self) { - *self.stack_height -= 2; // `XirExprVisitor`s drop will increase stack height by 1 - let op = match self.op.expect("visit_op must be called first") { - BinaryOp::Add => ir::BinaryOp::Add, - BinaryOp::Sub => ir::BinaryOp::Sub, - BinaryOp::Mul => ir::BinaryOp::Mul, - BinaryOp::Div => ir::BinaryOp::Div, - BinaryOp::Rem => ir::BinaryOp::Mod, - BinaryOp::BitAnd => ir::BinaryOp::BitAnd, - BinaryOp::BitOr => ir::BinaryOp::BitOr, - BinaryOp::BitXor => ir::BinaryOp::BitXor, - - BinaryOp::Less => ir::BinaryOp::CmpLt, - BinaryOp::Greater => ir::BinaryOp::CmpGt, - BinaryOp::Equal => ir::BinaryOp::CmpEq, - BinaryOp::NotEqual => ir::BinaryOp::CmpNe, - BinaryOp::LessEqual => ir::BinaryOp::CmpLe, - BinaryOp::GreaterEqual => ir::BinaryOp::CmpGe, - - BinaryOp::LeftShift => ir::BinaryOp::Lsh, - BinaryOp::RightShift => ir::BinaryOp::Rsh, - - BinaryOp::BoolAnd - | BinaryOp::BoolOr - | BinaryOp::Range - | BinaryOp::RangeInclusive - | BinaryOp::Assign - | BinaryOp::AddAssign - | BinaryOp::SubAssign - | BinaryOp::MulAssign - | BinaryOp::DivAssign - | BinaryOp::RemAssign - | BinaryOp::BitAndAssign - | BinaryOp::BitOrAssign - | BinaryOp::BitXorAssign - | BinaryOp::BoolAndAssign - | BinaryOp::BoolOrAssign - | BinaryOp::LeftShiftAssign - | BinaryOp::RightShiftAssign => unreachable!("handled before MIR"), - }; - - let overflow_behaviour = ir::OverflowBehaviour::Wrap; - - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::BinaryOp( - op, - overflow_behaviour, - ))); - } -} - -pub struct XirUnaryExprVisitor<'a> { - defs: &'a Definitions, - names: &'a NameMap, - properties: &'a TargetProperties<'a>, - deftys: &'a HashMap, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, - stack_height: &'a mut u32, - op: Option, -} - -impl<'a> XirUnaryExprVisitor<'a> { - fn new( - defs: &'a Definitions, - names: &'a NameMap, - properties: &'a 
TargetProperties<'a>, - deftys: &'a HashMap, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, - stack_height: &'a mut u32, - ) -> Self { - Self { - defs, - names, - properties, - deftys, - body, - targs, - var_heights, - stack_height, - op: None, - } - } -} - -impl<'a> UnaryExprVisitor for XirUnaryExprVisitor<'a> { - fn visit_op(&mut self, op: UnaryOp) { - self.op = Some(op); - } - - fn visit_lhs(&mut self) -> Option> { - Some(Box::new(XirExprVisitor::new( - self.defs, - self.names, - self.properties, - self.deftys, - self.body, - self.targs, - self.var_heights, - self.stack_height, - ))) - } -} - -impl<'a> Drop for XirUnaryExprVisitor<'a> { - fn drop(&mut self) { - *self.stack_height -= 1; // drop for XirExprVisitor will increase stack_height - - let op = match self.op.expect("visit_op must be called first") { - UnaryOp::Neg => ir::UnaryOp::Minus, - UnaryOp::Not => todo!(), - UnaryOp::RangeFrom - | UnaryOp::RangeTo - | UnaryOp::RangeToInclusive - | UnaryOp::RawAddrOf(_) - | UnaryOp::AddrOf(_) - | UnaryOp::Deref => unreachable!("handled before of MIR"), - }; - - let overflow_behaviour = ir::OverflowBehaviour::Wrap; - - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::UnaryOp( - op, - overflow_behaviour, - ))); - } -} - -pub struct XirTupleExprVisitor<'a> { - names: &'a NameMap, +pub struct XirConstStringVisitor<'a> { properties: &'a TargetProperties<'a>, - deftys: &'a HashMap, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, - stack_height: &'a mut u32, - next_field: u32, - fields: Vec, - elem_tys: Vec, + val: &'a mut XLangString, + ty: &'a mut ir::Type, } -impl<'a> XirTupleExprVisitor<'a> { +impl<'a> XirConstStringVisitor<'a> { pub fn new( - names: &'a NameMap, properties: &'a TargetProperties<'a>, - deftys: &'a HashMap, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, - stack_height: &'a mut u32, + val: &'a mut XLangString, + ty: &'a mut ir::Type, ) -> Self { Self { - names, properties, - deftys, - body, - targs, - var_heights, - stack_height, - next_field: 0, - fields: Vec::new(), - elem_tys: Vec::new(), + val, + ty, } } } -impl<'a> TupleExprVisitor for XirTupleExprVisitor<'a> { - fn visit_elem(&mut self) -> Option> { - todo!() +impl<'a> ConstStringVisitor for XirConstStringVisitor<'a> { + fn visit_string_type(&mut self, st: StringType) { + match st { + StringType::Default | StringType::Raw(_) => todo!(), + StringType::Byte | StringType::RawByte(_) => { + let mut ptr = ir::PointerType::default(); + *ptr.inner = ir::Type::Scalar(ir::ScalarType { + header: ir::ScalarTypeHeader { + bitsize: 0, + vectorsize: XLangNone, + validity: ir::ScalarValidity::empty(), + }, + kind: ir::ScalarTypeKind::Integer { + signed: false, + min: XLangNone, + max: XLangNone, + }, + }); + *self.ty = ir::Type::Pointer(ptr); + } + } } -} -impl<'a> Drop for XirTupleExprVisitor<'a> { - fn drop(&mut self) { - *self.stack_height -= self.next_field; - let ty = ir::Type::Product(core::mem::take(&mut self.elem_tys)); - - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Aggregate( - ir::AggregateCtor { - ty, - fields: core::mem::take(&mut self.fields), - }, - ))) + fn visit_value(&mut self, val: Symbol) { + *self.val = (&val).into(); } } pub struct XirConstIntVisitor<'a> { - names: &'a NameMap, properties: &'a TargetProperties<'a>, - deftys: &'a HashMap, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, - stack_height: &'a mut u32, - ty: &'a mut 
ir::ScalarType, val: &'a mut u128, + intty: &'a mut ir::ScalarType, } impl<'a> XirConstIntVisitor<'a> { pub fn new( - names: &'a NameMap, properties: &'a TargetProperties<'a>, - deftys: &'a HashMap, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, - stack_height: &'a mut u32, - ty: &'a mut ir::ScalarType, val: &'a mut u128, + intty: &'a mut ir::ScalarType, ) -> Self { Self { - names, properties, - deftys, - targs, - var_heights, - stack_height, - ty, val, + intty, } } } impl<'a> ConstIntVisitor for XirConstIntVisitor<'a> { fn visit_intty(&mut self) -> Option> { - Some(Box::new(XirIntTyVisitor::new(self.ty, self.properties))) + Some(Box::new(XirIntTyVisitor::new(self.intty, self.properties))) } fn visit_value(&mut self, val: u128) { *self.val = val; } } - -pub struct XirConstStringVisitor<'a> { - names: &'a NameMap, - properties: &'a TargetProperties<'a>, - deftys: &'a HashMap, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, - stack_height: &'a mut u32, - string: &'a mut XLangString, -} - -impl<'a> XirConstStringVisitor<'a> { - pub fn new( - names: &'a NameMap, - properties: &'a TargetProperties<'a>, - deftys: &'a HashMap, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, - stack_height: &'a mut u32, - string: &'a mut XLangString, - ) -> Self { - Self { - names, - properties, - deftys, - targs, - var_heights, - stack_height, - string, - } - } -} - -impl<'a> ConstStringVisitor for XirConstStringVisitor<'a> { - fn visit_string_type(&mut self, st: StringType) {} - - fn visit_value(&mut self, val: Symbol) { - *self.string = (&*val).into(); - } -} - -pub struct XirCastVisitor<'a> { - defs: &'a Definitions, - names: &'a NameMap, - properties: &'a TargetProperties<'a>, - deftys: &'a HashMap, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, - stack_height: &'a mut u32, - as_ty: Option, -} - -impl<'a> XirCastVisitor<'a> { - pub fn new( - defs: &'a Definitions, - names: &'a NameMap, - properties: &'a TargetProperties<'a>, - deftys: &'a HashMap, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, - stack_height: &'a mut u32, - ) -> Self { - Self { - defs, - names, - properties, - deftys, - body, - targs, - var_heights, - stack_height, - as_ty: None, - } - } -} - -impl<'a> CastVisitor for XirCastVisitor<'a> { - fn visit_inner(&mut self) -> Option> { - Some(Box::new(XirExprVisitor::new( - self.defs, - self.names, - self.properties, - self.deftys, - self.body, - self.targs, - self.var_heights, - self.stack_height, - ))) - } - - fn visit_cast_type(&mut self) -> Option> { - Some(Box::new(XirTypeVisitor::new( - self.defs, - self.names, - self.as_ty.insert(ir::Type::Null), - self.properties, - ))) - } -} - -impl<'a> Drop for XirCastVisitor<'a> { - fn drop(&mut self) { - *self.stack_height -= 1; - - self.body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Convert( - ir::ConversionStrength::Reinterpret, - self.as_ty - .take() - .expect("visit_cast_type must have been called"), - ))); - } -} - -pub struct XirStatementVisitor<'a> { - defs: &'a Definitions, - names: &'a NameMap, - properties: &'a TargetProperties<'a>, - deftys: &'a HashMap, - cur_fnty: &'a mut ir::FnType, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, - ssa_tys: &'a mut HashMap, - stack_height: &'a mut u32, -} - -impl<'a> XirStatementVisitor<'a> { - pub fn new( - defs: &'a Definitions, - names: &'a NameMap, - properties: &'a TargetProperties<'a>, - deftys: &'a HashMap, - 
cur_fnty: &'a mut ir::FnType, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, - ssa_tys: &'a mut HashMap, - stack_height: &'a mut u32, - ) -> Self { - Self { - defs, - names, - properties, - deftys, - cur_fnty, - body, - targs, - var_heights, - ssa_tys, - stack_height, - } - } -} - -impl<'a> StatementVisitor for XirStatementVisitor<'a> { - fn visit_let(&mut self) -> Option> { - Some(Box::new(XirLetVisitor::new( - self.defs, - self.names, - self.properties, - self.deftys, - self.cur_fnty, - self.body, - self.targs, - self.var_heights, - self.ssa_tys, - self.stack_height, - ))) - } - - fn visit_store_dead(&mut self, _: SsaVarId) { - // Be more intelligent in the future - } - - fn visit_discard(&mut self) -> Option> { - Some(Box::new(XirDiscardVisitor( - XirExprVisitor::new( - self.defs, - self.names, - self.properties, - self.deftys, - self.body, - self.targs, - self.var_heights, - self.stack_height, - ), - true, - ))) - } -} - -pub struct XirDiscardVisitor<'a>(XirExprVisitor<'a>, bool); - -impl<'a> ExprVisitor for XirDiscardVisitor<'a> { - fn visit_unreachable(&mut self) { - self.0.visit_unreachable() - } - - fn visit_const_int(&mut self) -> Option> { - self.0.visit_const_int() - } - - fn visit_const(&mut self, defid: DefId) { - self.0.visit_const(defid) - } - - fn visit_cast(&mut self) -> Option> { - self.0.visit_cast() - } - - fn visit_const_string(&mut self) -> Option> { - self.0.visit_const_string() - } - - fn visit_var(&mut self, var: crate::sema::mir::SsaVarId) { - // We can honestly no-op, but destructor currently pops *something* - self.1 = false; - } - - fn visit_tuple(&mut self) -> Option> { - self.0.visit_tuple() - } - - fn visit_ctor(&mut self) -> Option> { - self.0.visit_ctor() - } - - fn visit_field_subobject(&mut self) -> Option> { - self.0.visit_field_subobject() - } - - fn visit_field_project(&mut self) -> Option> { - self.0.visit_field_project() - } - - fn visit_binary_expr(&mut self) -> Option> { - self.0.visit_binary_expr() - } - - fn visit_unary_expr(&mut self) -> Option> { - self.0.visit_unary_expr() - } -} - -impl<'a> Drop for XirDiscardVisitor<'a> { - fn drop(&mut self) { - if self.1 { - *self.0.stack_height -= 1; - self.0 - .body - .block - .items - .push(ir::BlockItem::Expr(ir::Expr::Pop(1))); - } - } -} - -pub struct XirLetVisitor<'a> { - defs: &'a Definitions, - names: &'a NameMap, - properties: &'a TargetProperties<'a>, - deftys: &'a HashMap, - cur_fnty: &'a mut ir::FnType, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, - ssa_tys: &'a mut HashMap, - stack_height: &'a mut u32, - varid: Option, -} - -impl<'a> XirLetVisitor<'a> { - pub fn new( - defs: &'a Definitions, - names: &'a NameMap, - properties: &'a TargetProperties<'a>, - deftys: &'a HashMap, - cur_fnty: &'a mut ir::FnType, - body: &'a mut ir::FunctionBody, - targs: &'a mut HashMap>, - var_heights: &'a mut HashMap, - ssa_tys: &'a mut HashMap, - stack_height: &'a mut u32, - ) -> Self { - Self { - defs, - names, - properties, - deftys, - cur_fnty, - body, - targs, - var_heights, - ssa_tys, - stack_height, - varid: None, - } - } -} - -impl<'a> LetStatementVisitor for XirLetVisitor<'a> { - fn visit_var(&mut self, var: SsaVarId) { - self.varid = Some(var); - } - - fn visit_var_ty(&mut self) -> Option> { - Some(Box::new(XirTypeVisitor::new( - self.defs, - self.names, - self.ssa_tys - .get_or_insert_mut(self.varid.unwrap(), ir::Type::default()), - self.properties, - ))) - } - - fn visit_init(&mut self) -> Option> { - 
Some(Box::new(XirExprVisitor::new( - self.defs, - self.names, - self.properties, - self.deftys, - self.body, - self.targs, - self.var_heights, - self.stack_height, - ))) - } -} - -impl<'a> Drop for XirLetVisitor<'a> { - fn drop(&mut self) { - let varid = self.varid.take().expect("visit_var must have been called"); - self.var_heights.insert(varid, *self.stack_height); - } -} diff --git a/rust/src/lib.rs b/rust/src/lib.rs index 62567964..e3c84bd0 100644 --- a/rust/src/lib.rs +++ b/rust/src/lib.rs @@ -116,6 +116,7 @@ impl XLangPlugin for RustFrontend { xlang::host::rustcall! { #[allow(clippy::missing_const_for_fn)] #[no_mangle] + #[allow(improper_ctypes_definitions)] pub extern "rustcall" fn xlang_frontend_main() -> DynBox { DynBox::unsize_box(Box::new(RustFrontend::new())) } diff --git a/rust/src/sema/mir.rs b/rust/src/sema/mir.rs index 19b053e5..7741c539 100644 --- a/rust/src/sema/mir.rs +++ b/rust/src/sema/mir.rs @@ -60,6 +60,7 @@ impl core::borrow::Borrow for BasicBlockId { } impl SsaVarId { + pub const INVALID: SsaVarId = Self(!0); pub const fn id(self) -> u32 { self.0 } diff --git a/xir/src/lib.rs b/xir/src/lib.rs index 8c2b9a12..251cb662 100644 --- a/xir/src/lib.rs +++ b/xir/src/lib.rs @@ -1,27 +1,16 @@ #![deny(warnings, clippy::all, clippy::pedantic, clippy::nursery)] -use ir::File; -use xlang::abi::io::{self, IntoChars, Read}; + +use xlang::abi::io::{self, Read}; use xlang::abi::prelude::v1::*; use xlang::abi::result::Result; -use xlang::abi::span::SpanMut; use xlang::abi::string::StringView; use xlang::ir; use xlang::plugin::{Error, XLangFrontend, XLangPlugin}; - -mod binary_reader; -mod lexer; -mod parser; -mod validate; - -use lexer::lex; use xlang::targets::properties::TargetProperties; -use crate::parser::parse_file; - struct XirFrontend { filename: Option, target: Option<&'static TargetProperties<'static>>, - file: Option, } impl XirFrontend { @@ -30,7 +19,6 @@ impl XirFrontend { Self { filename: None, target: None, - file: None, } } } @@ -45,77 +33,15 @@ impl XLangFrontend for XirFrontend { } #[allow(clippy::cast_lossless)] - fn read_source(&mut self, mut file: DynMut) -> io::Result<()> { - let mut b = 0u8; - - xlang::abi::try_!(file.read_exact(SpanMut::from_mut(&mut b))); - - if b == 0xFF { - let mut mag = [0u8; 3]; - xlang::abi::try_!(file.read_exact(SpanMut::new(&mut mag))); - - if mag != *b"XIR" { - return xlang::abi::result::Err(io::Error::Message( - "Invalid Binary XIR file".into(), - )); - } - } else { - let mut stream = file.into_chars(); - let c = match b { - 0x00..=0x7f => b as char, - 0xC0..=0xDF => { - let c2 = stream.next_byte().expect("Invalid UTF-8"); - if !(0x80..=0xBF).contains(&c2) { - panic!("Invalid UTF-8"); - } - char::from_u32(((b as u32) << 5) | (c2 as u32)).expect("Invalid UTF-8") - } - 0xE0..=0xEF => { - let c2 = stream.next_byte().expect("Invalid UTF-8"); - if !(0x80..=0xBF).contains(&c2) { - panic!("Invalid UTF-8"); - } - let c3 = stream.next_byte().expect("Invalid UTF-8"); - if !(0x80..=0xBF).contains(&c3) { - panic!("Invalid UTF-8"); - } - char::from_u32(((b as u32) << 10) | ((c2 as u32) << 5) | (c3 as u32)) - .expect("Invalid UTF-8") - } - 0xF0..=0xF7 => { - let c2 = stream.next_byte().expect("Invalid UTF-8"); - if !(0x80..=0xBF).contains(&c2) { - panic!("Invalid UTF-8"); - } - let c3 = stream.next_byte().expect("Invalid UTF-8"); - if !(0x80..=0xBF).contains(&c3) { - panic!("Invalid UTF-8"); - } - let c4 = stream.next_byte().expect("Invalid UTF-8"); - if !(0x80..=0xBF).contains(&c4) { - panic!("Invalid UTF-8"); - } - char::from_u32( - ((b as u32) 
<< 15) | ((c2 as u32) << 10) | ((c3 as u32) << 5) | (c4 as u32), - ) - .expect("Invalid UTF-8") - } - _ => panic!("Invalid UTF-8"), - }; - let lexed = lex(core::iter::once(c).chain(stream)).collect::>(); - self.file = Some(parse_file(lexed.into_iter())); - } - - Result::Ok(()) + fn read_source(&mut self, _: DynMut) -> io::Result<()> { + todo!() } } impl XLangPlugin for XirFrontend { #[allow(clippy::too_many_lines)] - fn accept_ir(&mut self, file: &mut ir::File) -> Result<(), Error> { - *file = self.file.take().unwrap(); - validate::tycheck(file, self.target.unwrap()); - Result::Ok(()) + fn accept_ir(&mut self, _: &mut ir::File) -> Result<(), Error> { + todo!() } fn set_target(&mut self, targ: &'static TargetProperties<'static>) { @@ -125,6 +51,7 @@ impl XLangPlugin for XirFrontend { xlang::host::rustcall! { #[allow(clippy::missing_const_for_fn)] + #[allow(improper_ctypes_definitions)] #[no_mangle] pub extern "rustcall" fn xlang_frontend_main() -> DynBox { DynBox::unsize_box(Box::new(XirFrontend::new())) diff --git a/xlang/src/plugin.rs b/xlang/src/plugin.rs index 622c1c76..a71fca29 100644 --- a/xlang/src/plugin.rs +++ b/xlang/src/plugin.rs @@ -5,6 +5,8 @@ //! xlang plugins should be installed to the xlang plugin directory //! +#![allow(improper_ctypes_definitions)] + pub mod v1 { use xlang_abi::{ io::{self, Read, ReadSeek, Write}, diff --git a/xlang/xlang_abi/src/alloc.rs b/xlang/xlang_abi/src/alloc.rs index 1cc4ac7a..b40546db 100644 --- a/xlang/xlang_abi/src/alloc.rs +++ b/xlang/xlang_abi/src/alloc.rs @@ -593,7 +593,7 @@ unsafe impl Allocator for &mut A { // TODO: Should this be changed? No. XLangAlloc is the correct name. This isn't the `Global` or `System` allocator, it's the xlang allocator #[derive(Copy, Clone)] #[repr(transparent)] -pub struct XLangAlloc(core::mem::MaybeUninit); +pub struct XLangAlloc(()); impl XLangAlloc { /// @@ -602,7 +602,7 @@ impl XLangAlloc { /// All values of type [`XLangAlloc`] are identical, and may be used interchangeably #[must_use] pub const fn new() -> Self { - Self(core::mem::MaybeUninit::uninit()) + Self(()) } } diff --git a/xlang/xlang_abi/src/vec.rs b/xlang/xlang_abi/src/vec.rs index b5e06b80..f2075c10 100644 --- a/xlang/xlang_abi/src/vec.rs +++ b/xlang/xlang_abi/src/vec.rs @@ -451,6 +451,54 @@ impl Vec { self.len += 1; } + /// Removes and returns the `i`th element of the vector, shifting all subsequent elements down. + /// + /// The relative order of remaining elements is preserved by this function + /// + /// ## Panics + /// + /// Panics if `i` is greater than or equal to `self.len()` + pub fn remove(&mut self, i: usize) -> T { + if i >= self.len { + panic!( + "Index {} is out of range for a Vec of length {}", + i, self.len + ); + } + let rest = self.len - i; + let loc = unsafe { self.ptr.as_ptr().add(i) }; + let val = unsafe { core::ptr::read(loc) }; + unsafe { + core::ptr::copy(loc.offset(1), loc, rest); + } + self.len -= 1; + val + } + + /// Removes and returns the `i`th element of the vector, replacing it with the last element of the vector. 
+ /// + /// This operation does not preserve the relative order of the remaining elements + /// + /// ## Panics + /// + /// Panics if `i` is greater than or equal to `self.len()` + pub fn swap_remove(&mut self, i: usize) -> T { + if i >= self.len { + panic!( + "Index {} is out of range for a Vec of length {}", + i, self.len + ); + } + self.len -= 1; + let loc = unsafe { self.ptr.as_ptr().add(i) }; + let end_loc = unsafe { self.ptr.as_ptr().add(self.len) }; + if loc != end_loc { + unsafe { core::ptr::swap_nonoverlapping(loc, end_loc, 1) } + } + + unsafe { core::ptr::read(end_loc) } + } + /// Inserts `val` into the `Vec` at a position such that the list is sorted in ascending order and returns that position. /// /// If an element that compares equal to `val` is already present in the list, the position relative to any such element is unspecified diff --git a/xlang/xlang_backend/src/lib.rs b/xlang/xlang_backend/src/lib.rs index 8014199b..f39a0888 100644 --- a/xlang/xlang_backend/src/lib.rs +++ b/xlang/xlang_backend/src/lib.rs @@ -1,9 +1,10 @@ #![deny(missing_docs, warnings)] // No clippy::nursery +#![allow(dead_code)] // I'm not deleting a bunch of randomly placed shit //! A helper crate for implementing [`xlang::plugin::XLangCodegen`]s without duplicating code (also can be used to evaluate constant expressions) //! the `xlang_backend` crate provides a general interface for writing expressions to an output. use std::{ collections::{HashSet, VecDeque}, - convert::{TryFrom, TryInto}, + convert::TryInto, fmt::Debug, io::Write, mem::MaybeUninit, @@ -19,9 +20,8 @@ use xlang::{ abi::string::StringView, ir::{ AccessClass, AsmExpr, BinaryOp, Block, BranchCondition, CharFlags, Expr, FnType, - FunctionBody, HashSwitch, LinearSwitch, OverflowBehaviour, Path, PointerType, ScalarType, - ScalarTypeHeader, ScalarTypeKind, ScalarValidity, StackItem, StackValueKind, Switch, Type, - UnaryOp, Value, + OverflowBehaviour, Path, PointerType, ScalarType, ScalarTypeHeader, ScalarTypeKind, + ScalarValidity, Type, UnaryOp, Value, }, prelude::v1::*, targets::properties::TargetProperties, @@ -48,6 +48,10 @@ pub mod mangle; /// Module for generic Machine Code pub mod mc; +/// Module for building SSA from XIR that can be readily lowered to machine code +/// Does not use FunctionCodegen +pub mod ssa; + /// /// Basic Trait for creating the code generator pub trait FunctionRawCodegen { @@ -1781,1101 +1785,17 @@ impl FunctionCodegen { } /// Writes an expression in linear order into the codegen - pub fn write_expr(&mut self, expr: &Expr) { - if self.diverged { - return; - } - eprintln!("{:?}", expr); - self.print_vstack(); - match expr { - Expr::Const(v) => self.push_value(VStackValue::Constant(v.clone())), - Expr::Exit { values } => self.write_exit(*values), - Expr::BinaryOp(op, v) => self.write_binary_op(*op, *v), - Expr::UnaryOp(op, v) => self.write_unary_op(*op, *v), - Expr::CallFunction(fnty) => { - let vals = self.pop_values(fnty.params.len()).unwrap(); - let target = self.pop_value().unwrap(); - match target { - VStackValue::Constant(Value::GlobalAddress { ty, item }) => { - let realty = match &ty { - Type::FnType(ty) => &**ty, - _ => fnty, - }; - self.call_fn(fnty, realty, &item, vals, false); - } - VStackValue::Constant(Value::Invalid(_)) - | VStackValue::Constant(Value::Uninitialized(_)) - | VStackValue::Constant(Value::LabelAddress(_)) => { - self.inner.write_trap(Trap::Unreachable); - self.push_value(VStackValue::Trapped); - } - VStackValue::Pointer(pty, lvalue) => { - let ty = &*pty.inner; - let realty = match &ty 
{ - Type::FnType(ty) => &**ty, - _ => fnty, - }; - match lvalue { - LValue::OpaquePointer(loc) => { - self.call_indirect(fnty, realty, loc, vals, false) - } - LValue::GlobalAddress(path) => { - self.call_fn(fnty, realty, &path, vals, false) - } - LValue::TransparentAddr(_) => todo!("call abs"), - _ => { - self.inner.write_trap(Trap::Unreachable); - self.push_value(VStackValue::Trapped); - } - } - } - val => panic!("Invalid value {}", val), - } - } - Expr::Branch { cond, target } => self.write_branch(*cond, *target), - Expr::BranchIndirect => { - let val = self.pop_value().unwrap(); - - match val { - VStackValue::Constant(Value::LabelAddress(n)) => { - self.branch_to(n); - } - VStackValue::Pointer(_, LValue::Label(n)) => self.branch_to(n), - VStackValue::Pointer(_, LValue::OpaquePointer(loc)) => { - self.inner.branch_indirect(loc); - } - VStackValue::Pointer(_, _) => { - self.inner.write_trap(Trap::Unreachable); - self.diverged = true; - } - VStackValue::Trapped => self.diverged = true, - val => panic!("Invalid value {}", val), - } - } - - Expr::Convert(_, Type::Pointer(pty)) => match self.pop_value().unwrap() { - VStackValue::Constant(Value::LabelAddress(n)) => { - self.push_value(VStackValue::Pointer(pty.clone(), LValue::Label(n))) - } - VStackValue::Constant(Value::String { encoding, utf8, .. }) => { - self.push_value(VStackValue::Pointer( - pty.clone(), - LValue::StringLiteral(Encoding::XLang(encoding), utf8.into_bytes()), - )) - } - VStackValue::Constant(Value::ByteString { content }) => { - self.push_value(VStackValue::Pointer( - pty.clone(), - LValue::StringLiteral(Encoding::Byte, content), - )) - } - VStackValue::Constant(Value::GlobalAddress { item, .. }) => self.push_value( - VStackValue::Pointer(pty.clone(), LValue::GlobalAddress(item)), - ), - VStackValue::Constant(Value::Uninitialized(Type::Pointer(_))) => self.push_value( - VStackValue::Constant(Value::Uninitialized(Type::Pointer(pty.clone()))), - ), - VStackValue::Constant(Value::Invalid(Type::Pointer(_))) => { - self.push_value(VStackValue::Trapped); - self.inner.write_trap(Trap::Unreachable) - } - VStackValue::Pointer(_, lval) => { - self.push_value(VStackValue::Pointer(pty.clone(), lval)) - } - VStackValue::Trapped => { - self.push_value(VStackValue::Trapped); - } - VStackValue::OpaqueScalar(sty, _) => todo!("{:?} as {:?}", sty, pty), - val => panic!("Invalid value for convert _ {:?}: {:?}", pty, val), - }, - Expr::Convert(_, Type::Scalar(sty)) => match self.pop_value().unwrap() { - VStackValue::Constant(Value::Uninitialized(Type::Scalar(_))) => self.push_value( - VStackValue::Constant(Value::Uninitialized(Type::Scalar(*sty))), - ), - VStackValue::Constant(Value::Invalid(Type::Scalar(_))) => { - self.push_value(VStackValue::Trapped); - self.inner.write_trap(Trap::Unreachable) - } - VStackValue::Constant(Value::Integer { ty, val }) => { - let mut newval = val & (!0 >> (128 - sty.header.bitsize.min(128))); - - if matches!( - sty, - ScalarType { - kind: ScalarTypeKind::Integer { signed: true, .. }, - .. - } - ) { - let bits = match ty { - ScalarType { - kind: ScalarTypeKind::Integer { signed: true, .. }, - header: ScalarTypeHeader { bitsize, .. 
}, - } => bitsize, - _ => sty.header.bitsize.min(128), - }; - - let bit = newval >> bits.saturating_sub(1); - - newval |= !(bit.wrapping_sub(1)) << bits.saturating_sub(1); - } - - self.push_value(VStackValue::Constant(Value::Integer { - ty: *sty, - val: newval, - })); - } - VStackValue::OpaqueScalar(psty, loc) => { - let ty = Type::Scalar(*sty); - - let newloc = self.inner.allocate(&ty, false); - - self.inner - .write_scalar_convert(*sty, psty, newloc.clone(), loc); - - self.push_value(VStackValue::OpaqueScalar(*sty, newloc)); - } - val => panic!("Invalind input for convert {}: {:?}", sty, val), - }, - Expr::Convert(str, ty) => todo!("convert {:?} {:?}", str, ty), - Expr::Derive(_, expr) => { - self.write_expr(expr); - } - Expr::Local(n) => { - let param_cnt = u32::try_from(self.fnty.params.len()).unwrap(); - let ty = if *n < param_cnt { - self.fnty.params[*n as usize].clone() - } else { - self.locals[((*n) - param_cnt) as usize].1.clone() - }; - self.push_value(VStackValue::LValue(ty, LValue::Local(*n))) - } - Expr::Pop(n) => { - for val in self.pop_values((*n).try_into().unwrap()).unwrap() { - match val { - VStackValue::Constant(Value::Invalid(_)) => { - self.inner.write_trap(Trap::Unreachable); - self.diverged = true; - break; - } - VStackValue::Trapped => self.diverged = true, - _ => {} - } - } - } - Expr::Dup(n) => { - let values = self.pop_values((*n).try_into().unwrap()).unwrap(); - self.push_values(values.clone()); - self.push_values(values); - } - Expr::Pivot(n, m) => { - eprintln!("pivot {} {}", n, m); - let vals1 = self.pop_values((*m).try_into().unwrap()).unwrap(); - let vals2 = self.pop_values((*n).try_into().unwrap()).unwrap(); - self.push_values(vals1); - self.push_values(vals2); - } - Expr::Aggregate(ctor) => { - let vals = self.pop_values(ctor.fields.len()).unwrap(); - self.push_value(VStackValue::AggregatePieced( - ctor.ty.clone(), - ctor.fields.iter().cloned().zip(vals).collect(), - )); - } - Expr::Member(m) => { - let val = self.pop_value().unwrap(); - - match val { - VStackValue::LValue(ty, lval) => { - let layout = self.tys.aggregate_layout(&ty).unwrap(); - let inner_ty = layout.fields.get(&m.to_string()); - - match inner_ty { - StdSome((_, inner_ty)) => self.push_value(VStackValue::LValue( - inner_ty.clone(), - LValue::Field(ty, Box::new(lval), m.clone()), - )), - _ => panic!("Cannot get member {} of {}", m, ty), - } - } - VStackValue::OpaqueAggregate(ty, loc) => { - let layout = self.tys.aggregate_layout(&ty).unwrap(); - let inner_ty = layout.fields.get(&m.to_string()).unwrap(); - - if (inner_ty.0 == 0) && layout.fields.iter().count() == 1 { - let val = self.opaque_value(&inner_ty.1, loc); - self.push_value(val); - } else { - todo!("opaque aggregate") - } - } - val => panic!("cannot get member {} of {}", m, val), - } - } - Expr::MemberIndirect(_) => todo!(), - Expr::Assign(cl) => { - let lval = self.vstack.pop_back().unwrap(); - let value = self.vstack.pop_back().unwrap(); - - let (_, lval) = match lval { - VStackValue::LValue(ty, lval) => (ty, lval), - val => panic!("Cannot assign to rvalue stack value {:?}", val), - }; - - match (lval, value) { - (LValue::Null, _) - | (LValue::Label(_), _) - | (LValue::StringLiteral(_, _), _) - | (_, VStackValue::Constant(Value::Invalid(_))) => { - self.inner.write_trap(Trap::Unreachable); - self.push_value(VStackValue::Trapped); - } - (_, VStackValue::Trapped) => { - self.push_value(VStackValue::Trapped); - } - (LValue::Local(n), val) => { - let loc = self.locals[n as usize].0.clone(); - - match loc { - VStackValue::Trapped => { 
- self.push_value(VStackValue::Trapped); - } - VStackValue::OpaqueScalar(_, loc) - | VStackValue::OpaqueAggregate(_, loc) - | VStackValue::Pointer(_, LValue::OpaquePointer(loc)) => { - self.move_val(val, loc); - } - _ => { - self.locals[n as usize].0 = val; - } - } - } - (LValue::OpaquePointer(loc), val) => self.store_val(loc, val, *cl), - (a, b) => todo!("store {:?} <- {:?}", a, b), - } - } - Expr::AsRValue(_) => { - let lvalue = self.pop_value().unwrap(); - let (ty, lvalue) = match lvalue { - VStackValue::LValue(ty, lvalue) => (ty, lvalue), - val => panic!("Invalid value for as_rvalue {:?}", val), - }; - - let (_, base, path) = self.get_field_paths(lvalue, &ty); - - let mut val = match base { - LValue::Null => { - self.inner.write_trap(Trap::Unreachable); - VStackValue::Trapped - } - LValue::Temporary(val) => Box::into_inner(val), - LValue::OpaquePointer(loc) => { - let dst = self.inner.allocate(&ty, false); - self.inner.load_val(loc, dst.clone()); - - self.opaque_value(&ty, dst) - } - - LValue::Local(n) => { - let local = self.locals[n as usize].0.clone(); - if let Some(loc) = local.opaque_location() { - let dst = self.inner.allocate(&ty, false); - self.inner.move_val(loc.clone(), dst.clone()); - self.opaque_value(&ty, dst) - } else { - local - } - } - LValue::GlobalAddress(_) => todo!("as_rvalue global_address"), - LValue::Label(_) => { - self.inner.write_trap(Trap::Unreachable); - VStackValue::Trapped - } - LValue::Field(_, _, _) => todo!("as_rvalue field"), - LValue::StringLiteral(_, _) => todo!("as_rvalue string_literal"), - LValue::Offset(_, _) => todo!("as_rvalue offset"), - LValue::TransparentAddr(_) => todo!("absolute addr"), - }; - - for f in &path { - val = match val { - VStackValue::Constant(_) => todo!(), - VStackValue::LValue(_, _) => todo!(), - VStackValue::Pointer(_, _) => todo!(), - VStackValue::OpaqueScalar(_, _) => todo!(), - VStackValue::AggregatePieced(base, mut fields) => { - let inner = fields.remove(f); - - if let StdSome(Pair(_, inner)) = inner { - inner - } else { - let fty = self.tys.get_field_type(&base, f).unwrap(); - self.tys.zero_init(&fty).unwrap().into_transparent_for() - } - } - VStackValue::OpaqueAggregate(_, _) => todo!(), - VStackValue::CompareResult(_, _) => todo!(), - VStackValue::Trapped => VStackValue::Trapped, - VStackValue::ArrayRepeat(_, _) => todo!(), - } - } - self.push_value(val); - } - Expr::CompoundAssign(op, v, acc) => { - let [rhs, lval] = self.pop_values_static().unwrap(); - - let (ty, lval) = match lval { - VStackValue::Trapped => { - self.push_value(VStackValue::Trapped); - return; - } - VStackValue::LValue(ty, lval) => (ty, lval), - val => panic!("Invalid value {}", val), - }; - - let (_, base, fields) = self.get_field_paths(lval, &ty); - - match base { - LValue::OpaquePointer(_) => todo!(), - LValue::Local(n) => { - let mut val = &mut self.locals[n as usize].0; - - let tys = &self.tys; - - let mut it = fields.into_iter(); - - for field in &mut it { - match val { - VStackValue::Constant(Value::Invalid(_)) => { - self.inner.write_trap(Trap::Unreachable); - self.push_value(VStackValue::Trapped); - return; - } - VStackValue::Constant(Value::Uninitialized(ty)) => { - let mut aggregate = HashMap::::new(); - - for Pair(name, (_, ty)) in - tys.aggregate_layout(ty).unwrap().fields - { - let _ = aggregate.insert( - name.into(), - VStackValue::Constant(Value::Uninitialized(ty)), - ); - } - - *val = VStackValue::AggregatePieced( - core::mem::replace(ty, Type::Null), - aggregate, - ); - - match val { - VStackValue::AggregatePieced(_, vals) => { 
- val = vals.get_mut(&field).unwrap() - } - _ => unsafe { core::hint::unreachable_unchecked() }, - } - } - VStackValue::AggregatePieced(ty, vals) => { - val = vals.get_or_insert_with_mut(field, |field| { - let field_ty = tys.get_field_type(ty, field).unwrap(); - tys.zero_init(&field_ty).unwrap().into_transparent_for() - }); - } - VStackValue::OpaqueAggregate(_, _) => todo!("opaque aggregate"), - VStackValue::Trapped => break, - val => panic!("Invalid value {}", val), - } - } - - if *acc == AccessClass::Normal { - let underlying = core::mem::replace(val, VStackValue::Trapped); - - match (underlying, rhs) { - (VStackValue::Constant(Value::Invalid(_)), _) - | (_, VStackValue::Constant(Value::Invalid(_))) => { - self.inner.write_trap(Trap::Unreachable); - } - (VStackValue::Trapped, _) | (_, VStackValue::Trapped) => {} - ( - underlying @ VStackValue::Constant(Value::Uninitialized(_)), - _, - ) - | ( - underlying @ VStackValue::OpaqueScalar(_, _), - VStackValue::Constant(Value::Uninitialized(_)), - ) => { - *val = underlying; - match *v { - OverflowBehaviour::Trap => { - self.inner.write_trap(Trap::Unreachable); - self.diverged = true; - } - OverflowBehaviour::Checked => { - let bool_sty = ScalarType { - header: ScalarTypeHeader { - bitsize: 1, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, - }; - self.push_value(VStackValue::Constant( - Value::Uninitialized(Type::Scalar(bool_sty)), - )) - } - _ => {} - } - } - (_, res @ VStackValue::Constant(Value::Uninitialized(_))) => { - *val = res; - match *v { - OverflowBehaviour::Trap => { - self.inner.write_trap(Trap::Unreachable); - self.diverged = true; - } - OverflowBehaviour::Checked => { - let bool_sty = ScalarType { - header: ScalarTypeHeader { - bitsize: 1, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, - }; - self.push_value(VStackValue::Constant( - Value::Uninitialized(Type::Scalar(bool_sty)), - )) - } - _ => {} - } - } - ( - VStackValue::Constant(Value::Integer { - ty: sty1, - val: val1, - }), - VStackValue::Constant(Value::Integer { - ty: sty2, - val: val2, - }), - ) => match (sty1, sty2) { - ( - ScalarType { - header: - ScalarTypeHeader { - bitsize: bit1, - vectorsize: None, - .. - }, - kind: ScalarTypeKind::Integer { signed: false, .. }, - }, - ScalarType { - header: - ScalarTypeHeader { - vectorsize: None, .. - }, - kind: ScalarTypeKind::Integer { signed: false, .. 
}, - }, - ) => { - let (res, overflow) = match *op{ - BinaryOp::Add => val1.overflowing_add(val2), - BinaryOp::Sub => val1.overflowing_sub(val2), - BinaryOp::Mul => val1.overflowing_mul(val2), - BinaryOp::Div => { - if val2==0{ - (0,true) - }else{ - val1.overflowing_div(val2) - } - } - BinaryOp::Mod => { - if val2==0{ - (0,true) - }else{ - val1.overflowing_rem(val2) - } - } - BinaryOp::BitAnd => (val1&val2,false), - BinaryOp::BitOr => (val1|val2,false), - BinaryOp::BitXor => (val1|val2,false), - BinaryOp::Lsh => (val1.wrapping_shl(val2 as u32),u16::try_from(val2).map(|val|val>bit1).unwrap_or(true)), - BinaryOp::Rsh => (val1.wrapping_shr(val2 as u32),u16::try_from(val2).map(|val|val>bit1).unwrap_or(true)), - op => panic!("Cannot perform binary operation {} as compound_assign",op) - }; - - let mask = (!0u128).wrapping_shr(128 - (bit1 as u32)); - - let mres = res & mask; - - let overflow = overflow || (mres != res); - - let res = match *v { - OverflowBehaviour::Saturate => { - let mres = if overflow { - if *op == BinaryOp::Rsh || *op == BinaryOp::Sub - { - 0 - } else { - mask - } - } else { - res - }; - VStackValue::Constant(Value::Integer { - ty: sty1, - val: mres, - }) - } - OverflowBehaviour::Wrap => { - VStackValue::Constant(Value::Integer { - ty: sty1, - val: mres, - }) - } - OverflowBehaviour::Checked => { - let bool_sty = ScalarType { - header: ScalarTypeHeader { - bitsize: 1, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, - }; - - self.vstack.push_back(VStackValue::Constant( - Value::Integer { - ty: bool_sty, - val: overflow as u128, - }, - )); - VStackValue::Constant(Value::Integer { - ty: sty1, - val: mres, - }) - } - OverflowBehaviour::Trap => { - if overflow { - self.inner.write_trap(Trap::Abort); - self.diverged = true; - return; - } else { - VStackValue::Constant(Value::Integer { - ty: sty1, - val: mres, - }) - } - } - OverflowBehaviour::Unchecked => { - if overflow { - VStackValue::Constant(Value::Uninitialized( - Type::Scalar(sty1), - )) - } else { - VStackValue::Constant(Value::Integer { - ty: sty1, - val: mres, - }) - } - } - _ => panic!("Invalid overflow behaviour {}", v), - }; - - *val = res; - } - ( - ScalarType { - header: - ScalarTypeHeader { - bitsize: bit1, - vectorsize: None, - .. - }, - kind: ScalarTypeKind::Integer { signed: true, .. }, - }, - ScalarType { - header: - ScalarTypeHeader { - vectorsize: None, .. - }, - kind: ScalarTypeKind::Integer { signed: true, .. 
}, - }, - ) => { - let val1 = val1 as i128; - let val2 = val2 as i128; - let (res, overflow) = match *op{ - BinaryOp::Add => val1.overflowing_add(val2), - BinaryOp::Sub => val1.overflowing_sub(val2), - BinaryOp::Mul => val1.overflowing_mul(val2), - BinaryOp::Div => { - if val2==0{ - (0,true) - }else{ - val1.overflowing_div(val2) - } - } - BinaryOp::Mod => { - if val2==0{ - (0,true) - }else{ - val1.overflowing_rem(val2) - } - } - BinaryOp::BitAnd => (val1&val2,false), - BinaryOp::BitOr => (val1|val2,false), - BinaryOp::BitXor => (val1|val2,false), - BinaryOp::Lsh => (val1.wrapping_shl(val2 as u32),u16::try_from(val2).map(|val|val>bit1).unwrap_or(true)), - BinaryOp::Rsh => (val1.wrapping_shr(val2 as u32),u16::try_from(val2).map(|val|val>bit1).unwrap_or(true)), - op => panic!("Cannot perform binary operation {} as compound_assign",op) - }; - - let mask = - (!0u128).wrapping_shr(128 - (bit1 as u32)) as i128; - - let mres = res & mask; - - let overflow = overflow - || ((128 - res.leading_ones() - res.leading_zeros()) - < 128 - (bit1 as u32)); - - let mres = ((0i128 - .wrapping_sub(mres.wrapping_shr(bit1.into()) & 1)) - .wrapping_shl(bit1.into())) - as u128; - - let res = match *v { - OverflowBehaviour::Saturate => { - let mres = if overflow { - if *op == BinaryOp::Rsh || *op == BinaryOp::Sub - { - 0 - } else { - mask as u128 - } - } else { - mres - }; - VStackValue::Constant(Value::Integer { - ty: sty1, - val: mres, - }) - } - OverflowBehaviour::Wrap => { - VStackValue::Constant(Value::Integer { - ty: sty1, - val: mres, - }) - } - OverflowBehaviour::Checked => { - let bool_sty = ScalarType { - header: ScalarTypeHeader { - bitsize: 1, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, - }; - - self.vstack.push_back(VStackValue::Constant( - Value::Integer { - ty: bool_sty, - val: overflow as u128, - }, - )); - VStackValue::Constant(Value::Integer { - ty: sty1, - val: mres, - }) - } - OverflowBehaviour::Trap => { - if overflow { - self.inner.write_trap(Trap::Abort); - self.diverged = true; - return; - } else { - VStackValue::Constant(Value::Integer { - ty: sty1, - val: mres, - }) - } - } - OverflowBehaviour::Unchecked => { - if overflow { - VStackValue::Constant(Value::Uninitialized( - Type::Scalar(sty1), - )) - } else { - VStackValue::Constant(Value::Integer { - ty: sty1, - val: mres, - }) - } - } - _ => panic!("Invalid overflow behaviour {}", v), - }; - - *val = res; - } - (sty1, sty2) => todo!("({},{})", sty1, sty2), - }, - (val1, val2) => panic!("Invalid values {}, {}", val1, val2), - } - } else { - todo!("compound_assign {}", acc) - } - } - LValue::Temporary(_) => todo!(), - LValue::GlobalAddress(_) => todo!(), - LValue::TransparentAddr(_) => todo!(), - _ => { - self.inner.write_trap(Trap::Unreachable); - self.push_value(VStackValue::Trapped); - } - } - } - Expr::FetchAssign(_, _, _) => todo!(), - Expr::LValueOp(_, _) => todo!(), - Expr::UnaryLValue(_, _, _) => todo!(), - Expr::Indirect => { - let val = self.pop_value().unwrap(); - match val { - VStackValue::Pointer(pty, lval) => { - self.push_value(VStackValue::LValue(Box::into_inner(pty.inner), lval)) - } - val => panic!("Invalid value for instruction {:?}", val), - } - } - Expr::AddrOf => { - let val = self.pop_value().unwrap(); - match val { - VStackValue::LValue(ty, lval) => self.push_value(VStackValue::Pointer( - PointerType { - inner: Box::new(ty), - ..Default::default() - }, - lval, - )), - val => panic!("Invalid value for instruction {:?}", val), - } - } - Expr::Sequence(_) 
=> {} - Expr::Fence(barrier) => self.inner.write_barrier(*barrier), - Expr::Switch(Switch::Linear(s)) => { - let ctrl = self.pop_value().unwrap(); - match ctrl { - VStackValue::Constant(Value::Uninitialized(_) | Value::Invalid(_)) => { - self.inner.write_trap(Trap::Unreachable); - } - VStackValue::Constant(Value::Integer { val, .. }) => { - let val = val.wrapping_sub(s.min); - let (idx, rem) = ((val / (s.scale as u128)), val % (s.scale as u128)); - let target = if rem != 0 || idx >= (s.cases.len() as u128) { - s.default - } else { - s.cases[usize::try_from(idx).unwrap()] - }; - - self.branch_to(target); - } - VStackValue::OpaqueScalar(ty, loc) => { - todo!("switch OpaqueScalar({:?},{:?})", ty, loc) - } - VStackValue::CompareResult(a, b) => { - todo!("switch CompareResult({:?},{:?})", a, b) - } - VStackValue::Trapped => {} - v => panic!("Invalid value for switch {:?}", v), - } - } - Expr::Switch(Switch::Hash(s)) => { - let ctrl = self.pop_value().unwrap(); - match ctrl { - VStackValue::Constant(Value::Uninitialized(_) | Value::Invalid(_)) => { - self.inner.write_trap(Trap::Unreachable); - } - VStackValue::Constant(v @ Value::Integer { .. }) => { - let mut found = None; - for Pair(case, target) in &s.cases { - if &v == case { - found = Some(*target); - break; - } - } - let target = found.unwrap_or(s.default); - - self.branch_to(target); - } - VStackValue::OpaqueScalar(ty, loc) => { - todo!("switch OpaqueScalar({:?},{:?})", ty, loc) - } - VStackValue::CompareResult(a, b) => { - todo!("switch CompareResult({:?},{:?})", a, b) - } - VStackValue::Trapped => {} - v => panic!("Invalid value for switch {:?}", v), - } - } - Expr::Tailcall(fnty) => { - let vals = self.pop_values(fnty.params.len()).unwrap(); - let target = self.pop_value().unwrap(); - match target { - VStackValue::Constant(Value::GlobalAddress { ty, item }) => { - let realty = match &ty { - Type::FnType(ty) => &**ty, - _ => fnty, - }; - self.call_fn(fnty, realty, &item, vals, true); - } - VStackValue::Constant(Value::Invalid(_)) - | VStackValue::Constant(Value::Uninitialized(_)) - | VStackValue::Constant(Value::LabelAddress(_)) => { - self.inner.write_trap(Trap::Unreachable); - } - VStackValue::Pointer(pty, lvalue) => { - let ty = &*pty.inner; - let realty = match &ty { - Type::FnType(ty) => &**ty, - _ => fnty, - }; - match lvalue { - LValue::OpaquePointer(loc) => { - self.call_indirect(fnty, realty, loc, vals, true) - } - LValue::GlobalAddress(path) => { - self.call_fn(fnty, realty, &path, vals, true) - } - LValue::TransparentAddr(_) => todo!("call abs"), - _ => { - self.inner.write_trap(Trap::Unreachable); - } - } - } - val => panic!("Invalid value {}", val), - } - } - Expr::Asm(asm) => self.write_asm(asm), - Expr::BeginStorage(n) => { - let local = &mut self.locals[(*n) as usize]; - local.0 = VStackValue::Constant(Value::Uninitialized(local.1.clone())); - } - Expr::EndStorage(n) => { - let local = &mut self.locals[(*n) as usize]; - local.0 = VStackValue::Constant(Value::Invalid(local.1.clone())); - } - Expr::Select(n) => { - let sel = self.pop_value().unwrap(); - let possible_vals = self.pop_values(*n as usize).unwrap(); - let tyof = possible_vals[0].value_type(); - match sel { - VStackValue::Trapped => { - self.push_value(VStackValue::Trapped); - } - VStackValue::Constant(Value::Invalid(_)) => { - self.inner.write_trap(Trap::Unreachable); - self.push_value(VStackValue::Trapped); - } - VStackValue::Constant(Value::Uninitialized(_)) => { - self.inner.write_trap(Trap::Unreachable); - self.push_value(VStackValue::Trapped); - } - 
VStackValue::Constant(Value::Integer { - ty: - ScalarType { - kind: ScalarTypeKind::Integer { .. }, - .. - }, - val, - }) => { - if val > (*n as u128) { - self.push_value(VStackValue::Constant(Value::Invalid(tyof))) - } else { - self.push_value(possible_vals.into_iter().nth(val as usize).unwrap()) - } - } - val => todo!("{:?}", val), - } - } - } + pub fn write_expr(&mut self, _: &Expr) { + unimplemented!() } /// Writes an asm-expr - pub fn write_asm(&mut self, asm: &AsmExpr) { - let nexprs = asm.inputs.len(); - - let inputs = self.pop_values(nexprs).unwrap(); - - let vals = self.inner.write_asm(asm, inputs); - - let vals = vals - .into_iter() - .zip(&asm.outputs) - .map(|(loc, ty)| self.opaque_value(&ty.ty, loc)) - .collect::>(); - - self.push_values(vals); - } - - /// Writes the body of a function to the codegen - pub fn write_function_body(&mut self, body: &FunctionBody) { - let fnty: FnType = self.fnty.clone(); - self.vstack.reserve(fnty.params.len()); - - for (i, ty) in fnty.params.iter().enumerate() { - let loc = - self.inner - .get_callconv() - .find_param(&fnty, &fnty, u32::try_from(i).unwrap(), true); - let val = self.opaque_value(ty, loc); - self.push_value(val); - } - - self.locals.reserve(body.locals.len()); - for ty in &body.locals { - self.locals.push(( - VStackValue::Constant(Value::Uninitialized(ty.clone())), - ty.clone(), - )) - } - self.write_block(&body.block, 0); - if !self.diverged { - self.inner.leave_function(); - } + pub fn write_asm(&mut self, _: &AsmExpr) { + unimplemented!() } /// Writes the elements of a block to the codegen, usually the top level block of a function - pub fn write_block(&mut self, block: &Block, _: u32) { - for item in &block.items { - match item { - xlang::ir::BlockItem::Target { num, stack } => { - self.cfg - .get_or_insert_with_mut(*num, |_| BranchToInfo::default()) - .fallthrough_from = self.ctarg; - - self.ctarg = *num; - - let values = stack - .iter() - .map(|item| match item { - StackItem { - kind: StackValueKind::LValue, - ty, - } => VStackValue::LValue( - ty.clone(), - LValue::OpaquePointer(self.inner.allocate_lvalue(false)), - ), - StackItem { - kind: StackValueKind::RValue, - ty, - } => { - let loc = self.inner.allocate(ty, false); - self.opaque_value(ty, loc) - } - }) - .collect(); - self.targets.insert(*num, values); - } - xlang::ir::BlockItem::Expr(expr) => match expr { - Expr::Branch { - cond: BranchCondition::Never, - .. - } => {} - Expr::Const(Value::LabelAddress(targ)) => { - self.cfg - .get_or_insert_with_mut(*targ, |_| BranchToInfo::default()) - .branch_from - .insert(self.ctarg); - } - Expr::Branch { target, .. } => { - self.cfg - .get_or_insert_with_mut(*target, |_| BranchToInfo::default()) - .branch_from - .insert(self.ctarg); - } - Expr::Switch(s) => match s { - Switch::Linear(LinearSwitch { default, cases, .. }) => { - self.cfg - .get_or_insert_with_mut(*default, |_| BranchToInfo::default()) - .branch_from - .insert(self.ctarg); - - for targ in cases { - self.cfg - .get_or_insert_with_mut(*targ, |_| BranchToInfo::default()) - .branch_from - .insert(self.ctarg); - } - } - Switch::Hash(HashSwitch { default, cases, .. 
}) => { - self.cfg - .get_or_insert_with_mut(*default, |_| BranchToInfo::default()) - .branch_from - .insert(self.ctarg); - - for Pair(_, targ) in cases { - self.cfg - .get_or_insert_with_mut(*targ, |_| BranchToInfo::default()) - .branch_from - .insert(self.ctarg); - } - } - }, - _ => {} - }, - } - } - self.ctarg = !0; // reset block position - for item in &block.items { - match item { - xlang::ir::BlockItem::Expr(expr) => self.write_expr(expr), - xlang::ir::BlockItem::Target { num, .. } => { - self.ctarg = *num; - if !self.locals_opaque { - let mut locals = std::mem::take(&mut self.locals); - - for (local, _) in &mut locals { - let val = core::mem::replace(local, VStackValue::Trapped); - - *local = self.make_opaque(val); - } - - self.locals = locals; - self.locals_opaque = true; - } - if !self.diverged { - let locs = self.targets[num].clone(); - let vals = self.pop_values(locs.len()).unwrap(); - for (val, stack_val) in vals.into_iter().zip(locs) { - self.move_val(val, stack_val.opaque_location().unwrap().clone()); - } - self.clear_stack(); - for val in self.targets[num].clone() { - self.push_value(val.clone()); - } - } else if let StdSome(br) = self.cfg.get(num) { - if !br.branch_from.is_empty() { - self.clear_stack(); - for val in self.targets[num].clone() { - self.push_value(val.clone()); - } - self.diverged = false; - } else { - self.clear_stack(); - } - } - - self.inner.write_target(*num); - } - } - } + pub fn write_block(&mut self, _: &Block, _: u32) { + unimplemented!() } } diff --git a/xlang/xlang_backend/src/mc.rs b/xlang/xlang_backend/src/mc.rs index d9bc31f0..067e1bda 100644 --- a/xlang/xlang_backend/src/mc.rs +++ b/xlang/xlang_backend/src/mc.rs @@ -8,9 +8,8 @@ use binfmt::{ }; use xlang::{ abi::{ - collection::HashMap, io::WriteAdapter, option::None as XLangNone, - option::Some as XLangSome, pair::Pair, result::Result::Ok as XLangOk, span::Span, - string::StringView, try_, + collection::HashMap, io::WriteAdapter, option::None as XLangNone, pair::Pair, + result::Result::Ok as XLangOk, span::Span, string::StringView, try_, }, ir::{ AccessClass, BinaryOp, FnType, Linkage, PathComponent, PointerKind, ScalarType, @@ -782,108 +781,9 @@ impl MCBackend { impl XLangPlugin for MCBackend { fn accept_ir( &mut self, - ir: &mut xlang::ir::File, + _: &mut xlang::ir::File, ) -> xlang::abi::result::Result<(), xlang::plugin::Error> { - let mut tys = TypeInformation::from_properties(self.properties.unwrap()); - for Pair(path, mem) in &ir.root.members { - match &mem.member_decl { - xlang::ir::MemberDeclaration::OpaqueAggregate(_) => { - tys.add_opaque_aggregate(path.clone()) - } - xlang::ir::MemberDeclaration::AggregateDefinition(defn) => { - tys.add_aggregate(path.clone(), defn.clone()) - } - _ => {} - } - } - let tys = Rc::new(tys); - for Pair(path, mem) in &ir.root.members { - match &mem.member_decl { - xlang::ir::MemberDeclaration::Function(f) => { - let features = self - .writer - .get_features(self.properties.unwrap(), self.feature); - let mangled_name = match &*path.components { - [PathComponent::Root, PathComponent::Text(n)] - | [PathComponent::Text(n)] => n.to_string(), - [PathComponent::Root, rest @ ..] | [rest @ ..] 
=> features.mangle(rest), - }; - - let linkage = f.linkage; - let body = if let XLangSome(body) = &f.body { - let section_spec = SectionSpec::GlobalSection; - - let innercg = MCFunctionCodegen { - fn_name: mangled_name.clone(), - inner: features, - next_loc_id: 0, - tys: tys.clone(), - mc_insns: Vec::new(), - strings: self.strings.clone(), - callconv: CallConvAdaptor(self.writer.get_call_conv( - &f.ty, - self.properties.unwrap(), - self.feature, - Rc::clone(&tys), - )), - fnty: f.ty.clone(), - }; - let mut fncg = FunctionCodegen::new( - innercg, - path.clone(), - f.ty.clone(), - self.properties.unwrap(), - tys.clone(), - ); - - fncg.write_function_body(body); - - Some((section_spec, fncg)) - } else { - None - }; - - self.functions - .insert(mangled_name, MCFunctionDecl { linkage, body }); - } - xlang::ir::MemberDeclaration::Static(st) => { - let features = self - .writer - .get_features(self.properties.unwrap(), self.feature); - let mangled_name = match &*path.components { - [PathComponent::Root, PathComponent::Text(n)] - | [PathComponent::Text(n)] => n.to_string(), - [PathComponent::Root, rest @ ..] | [rest @ ..] => features.mangle(rest), - }; - - let linkage = st.linkage; - - let init = match &st.init { - xlang::ir::Value::Empty => None, - val => { - let section_spec = SectionSpec::GlobalSection; - let space = tys.type_size(&st.ty).unwrap(); - let align = tys.type_align(&st.ty).unwrap(); - let init: xlang::ir::Value = val.clone(); - let specifier = st.specifiers; - Some(MCStaticDef { - section: section_spec, - init, - space, - align, - specifier, - }) - } - }; - - self.statics - .insert(mangled_name, MCStaticDecl { linkage, init }); - } - _ => {} - } - } - self.tys = Some(tys); - XLangOk(()) + unimplemented!() } fn set_target(&mut self, targ: &'static TargetProperties<'static>) { diff --git a/xlang/xlang_backend/src/ssa.rs b/xlang/xlang_backend/src/ssa.rs new file mode 100644 index 00000000..9c8296fa --- /dev/null +++ b/xlang/xlang_backend/src/ssa.rs @@ -0,0 +1,30 @@ +#![allow(missing_docs)] +use std::rc::Rc; + +use xlang::ir::*; + +#[derive(Clone, Debug, Hash, PartialEq, Eq)] +pub enum SsaTerminator { + Fallthrough(u32, Vec), + Jump(u32, Vec), + Exit(u16), + Tailcall(CallTarget, Vec), +} + +#[derive(Clone, Debug, Hash, PartialEq, Eq)] +pub struct CallTarget { + pub ptr: OpaquePtr, + pub real_ty: FnType, +} + +#[derive(Clone, Debug, Hash, PartialEq, Eq)] +pub enum OpaquePtr { + Symbol(String), + Pointer(OpaqueLocation), +} + +#[derive(Clone, Debug, Hash, PartialEq, Eq)] +pub struct OpaqueLocation { + pub ty: Rc, + pub num: u32, +} diff --git a/xlang/xlang_struct/Cargo.toml b/xlang/xlang_struct/Cargo.toml index 9328a522..252a6599 100644 --- a/xlang/xlang_struct/Cargo.toml +++ b/xlang/xlang_struct/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "xlang_struct" -version = "0.1.0" +version = "0.2.0" authors = ["Connor Horman "] edition = "2018" license = "BSD-2-Clause-Patent" diff --git a/xlang/xlang_struct/src/fmt.rs b/xlang/xlang_struct/src/fmt.rs index ec33a782..36420111 100644 --- a/xlang/xlang_struct/src/fmt.rs +++ b/xlang/xlang_struct/src/fmt.rs @@ -52,6 +52,51 @@ impl<'b, 'a> IrFormatter<'b, 'a> { Self(f) } + pub fn fmt_terminator(&mut self, term: &Terminator, tabs: Tabs) -> core::fmt::Result { + tabs.fmt(self)?; + match term { + Terminator::Jump(jump) => self.write_fmt(format_args!("jump {}", jump)), + Terminator::Branch(cond, then_targ, else_targ) => self.write_fmt(format_args!( + "branch {} {} else {}", + cond, then_targ, else_targ + )), + Terminator::BranchIndirect => 
self.write_str("branch indirect"), + Terminator::Call(flags, fnty, next) => { + self.write_fmt(format_args!("call {}function{} next {}", flags, fnty, next)) + } + Terminator::Tailcall(flags, fnty) => { + self.write_fmt(format_args!("tailcall {}function{}", flags, fnty)) + } + Terminator::Exit(vals) => self.write_fmt(format_args!("exit {}", vals)), + Terminator::Asm(asm) => asm.fmt(self), + Terminator::Switch(switch) => { + self.write_str("switch ")?; + let nested = tabs.nest(); + match switch { + Switch::Hash(switch) => { + self.write_str("hash\n")?; + for case in &switch.cases { + self.write_fmt(format_args!("{}{}: {}\n", nested, case.0, case.1))?; + } + self.write_fmt(format_args!("{}default {}\n", nested, switch.default))?; + } + Switch::Linear(switch) => { + self.write_fmt(format_args!( + "linear {} min {} scale {}\n", + switch.ty, switch.min, switch.scale + ))?; + for case in &switch.cases { + self.write_fmt(format_args!("{}{}", nested, case))?; + } + self.write_fmt(format_args!("{}default {}\n", nested, switch.default))?; + } + } + self.write_fmt(format_args!("{}end switch", tabs)) + } + Terminator::Unreachable => self.write_str("unreachable"), + } + } + /// Formats a scope member `mem` at the given `path` at the current nesting level given by `tabs` pub fn fmt_scope_member( &mut self, @@ -93,10 +138,30 @@ impl<'b, 'a> IrFormatter<'b, 'a> { self.write_fmt(format_args!("declare _{}: {};\n", local, ty))?; } - for item in &body.block.items { + for block in &body.blocks { nested.fmt(self)?; - item.fmt(self)?; + self.write_str("target @")?; + block.target.fmt(self)?; + self.write_str(" [")?; + let mut sep = ""; + for item in &block.incoming_stack { + self.write_str(sep)?; + sep = ", "; + item.fmt(self)?; + } + self.write_str("]{\n")?; + let inner_nest = nested.nest(); + + for expr in &block.expr { + inner_nest.fmt(self)?; + expr.fmt(self)?; + self.write_str("\n")?; + } + + self.fmt_terminator(&block.term, inner_nest)?; self.write_str("\n")?; + nested.fmt(self)?; + self.write_str("}\n")?; } tabs.fmt(self)?; diff --git a/xlang/xlang_struct/src/lib.rs b/xlang/xlang_struct/src/lib.rs index 8bd27f4e..c57c127b 100644 --- a/xlang/xlang_struct/src/lib.rs +++ b/xlang/xlang_struct/src/lib.rs @@ -1784,26 +1784,9 @@ pub enum Expr { /// Operands: [..]=>[..,Value] Const(Value), - /// Exits the function - /// - /// # Stack - /// - /// Type Checking: [..,T1,T2,...,Tn]=>diverged - /// - /// Operands: [..,v1,v2,...,vn]=>diverged - Exit { - values: u16, - }, - /// Computes BinaryOp(BinaryOp, OverflowBehaviour), UnaryOp(UnaryOp, OverflowBehaviour), - CallFunction(FnType), - Branch { - cond: BranchCondition, - target: u32, - }, - BranchIndirect, Convert(ConversionStrength, Type), Derive(PointerType, Box), Local(u32), @@ -1823,9 +1806,6 @@ pub enum Expr { AddrOf, Fence(AccessClass), - Switch(Switch), - Tailcall(FnType), - Asm(AsmExpr), BeginStorage(u32), EndStorage(u32), @@ -1851,14 +1831,8 @@ impl core::fmt::Display for Expr { f.write_str("const ")?; val.fmt(f) } - Self::Exit { values } => f.write_fmt(format_args!("exit {}", values)), Self::BinaryOp(op, v) => f.write_fmt(format_args!("{} {}", op, v)), Self::UnaryOp(op, v) => f.write_fmt(format_args!("{} {}", op, v)), - Self::CallFunction(fun) => f.write_fmt(format_args!("call {}", fun)), - Self::Branch { cond, target } => { - f.write_fmt(format_args!("branch {} @{}", cond, target)) - } - Self::BranchIndirect => f.write_str("branch indirect"), Self::Convert(strength, ty) => f.write_fmt(format_args!("convert {} {}", strength, ty)), Self::Derive(pty, inner) => 
f.write_fmt(format_args!("derive {} {}", pty, inner)), Self::Local(n) => f.write_fmt(format_args!("local _{}", n)), @@ -1895,9 +1869,6 @@ impl core::fmt::Display for Expr { Self::Sequence(AccessClass::Normal) => f.write_str("nop"), Self::Sequence(acc) => f.write_fmt(format_args!("sequence {}", acc)), Self::Fence(acc) => f.write_fmt(format_args!("fence {}", acc)), - Self::Switch(_) => todo!(), - Self::Tailcall(fun) => f.write_fmt(format_args!("tailcall {}", fun)), - Self::Asm(asm) => asm.fmt(f), Self::BeginStorage(n) => f.write_fmt(format_args!("begin storage _{}", n)), Self::EndStorage(n) => f.write_fmt(format_args!("end storage _{}", n)), Self::Select(n) => f.write_fmt(format_args!("select {}", n)), @@ -1905,6 +1876,107 @@ impl core::fmt::Display for Expr { } } +bitflags::bitflags! { + /// The flags for a jump target + /// + /// Matches the syntax: + /// ```abnf + /// jump-target-flag := "fallthrough" / "cold" / "continue" + /// ``` + #[repr(transparent)] + pub struct JumpTargetFlags : u32{ + /// The "fallthrough" flag. + /// Indicates that the jump does not perform a branch but instead continues on to the next basic block + /// + /// The behaviour is undefined if the `target` field of the [`JumpTarget`] is not the immediately adjacent basic block + const FALLTHROUGH = 1; + /// The "cold" flag. + /// Indicates that the branch is unlikely to be taken. + /// + /// The optimizer may pessimize the case where the branch is taken, to optimize the case where a different branch is taken + const COLD = 2; + } +} + +impl core::fmt::Display for JumpTargetFlags { + fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { + if self.contains(Self::FALLTHROUGH) { + f.write_str("fallthrough ")?; + } + if self.contains(Self::COLD) { + f.write_str("cold ")?; + } + Ok(()) + } +} + +/// The target of a jump, such as a branch, +/// Matches the syntax +/// ```abnf +/// jump-target := [*()] @ +/// ``` +#[repr(C)] +#[derive(Clone, Debug, Hash, PartialEq, Eq)] +pub struct JumpTarget { + /// The flags for the jump + pub flags: JumpTargetFlags, + /// The target basic block + pub target: u32, +} + +impl core::fmt::Display for JumpTarget { + fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { + self.flags.fmt(f)?; + f.write_str("@")?; + self.target.fmt(f) + } +} + +bitflags::bitflags! 
{ + #[repr(transparent)] + pub struct CallFlags: u32{ + const WILLRETURN = 1; + } +} + +impl core::fmt::Display for CallFlags { + fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { + if self.contains(Self::WILLRETURN) { + f.write_str("willreturn ")?; + } + Ok(()) + } +} + +/// A terminator of a [`Block`] +/// +/// Matchs the `terminator` production +#[repr(u32)] +#[derive(Clone, Debug, Hash, PartialEq, Eq)] +pub enum Terminator { + /// Jump to another basic block, uncondtionally + /// Matches the syntax + /// ```abnf + /// terminator := "jump" + /// ``` + Jump(JumpTarget), + /// Branch to one of two basic blocks, depending on the evaluation of a condition, + Branch(BranchCondition, JumpTarget, JumpTarget), + BranchIndirect, + Call(CallFlags, Box, JumpTarget), + Tailcall(CallFlags, Box), + Exit(u16), + Asm(AsmExpr), + Switch(Switch), + Unreachable, +} + +impl Default for Terminator { + fn default() -> Self { + Self::Unreachable + } +} + #[repr(u16)] #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub enum AsmConstraint { @@ -1991,9 +2063,10 @@ pub struct AsmExpr { pub access_class: AccessClass, pub string: String, pub clobbers: Vec, - pub targets: Vec, + pub targets: Vec, pub inputs: Vec, pub outputs: Vec, + pub next: Option, } impl core::fmt::Display for AsmExpr { @@ -2143,8 +2216,8 @@ pub enum Switch { #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct HashSwitch { - pub cases: Vec>, - pub default: u32, + pub cases: Vec>, + pub default: JumpTarget, } #[derive(Clone, Debug, Hash, PartialEq, Eq)] @@ -2152,8 +2225,8 @@ pub struct LinearSwitch { pub ty: Type, pub min: u128, pub scale: u32, - pub default: u32, - pub cases: Vec, + pub default: JumpTarget, + pub cases: Vec, } #[repr(u8)] @@ -2180,43 +2253,20 @@ impl core::fmt::Display for StackItem { } } -#[repr(u8)] -#[derive(Clone, Debug, Hash, PartialEq, Eq)] -pub enum BlockItem { - Expr(Expr), - Target { num: u32, stack: Vec }, -} - -impl core::fmt::Display for BlockItem { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::Expr(expr) => expr.fmt(f), - Self::Target { num, stack } => { - f.write_fmt(format_args!("target @{} [", num))?; - let mut sep = ""; - - for item in stack { - f.write_str(sep)?; - sep = ", "; - item.fmt(f)?; - } - f.write_str("]") - } - } - } -} - #[repr(C)] #[derive(Clone, Debug, Hash, PartialEq, Eq, Default)] pub struct Block { - pub items: Vec, + pub target: u32, + pub incoming_stack: Vec, + pub expr: Vec, + pub term: Terminator, } #[repr(C)] #[derive(Clone, Debug, Hash, PartialEq, Eq, Default)] pub struct FunctionBody { pub locals: Vec, - pub block: Block, + pub blocks: Vec, } #[repr(C)] From fb347a142ddb5dce01f0d2b2a9ec3c4721c363a5 Mon Sep 17 00:00:00 2001 From: Connor Horman Date: Sat, 27 Jan 2024 22:11:50 -0500 Subject: [PATCH 02/74] fix(xlang): Reintroduce dependency on xlang-interface cdylib --- Cargo.lock | 20 +++++++++++++++++++- lccc/src/exports.rs | 1 + xlang/Cargo.toml | 1 + 3 files changed, 21 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 1879da9e..9713e6ce 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -19,7 +19,7 @@ version = "0.1.0" dependencies = [ "install-dirs 0.3.2", "itertools", - "lccc-siphash", + "lccc-siphash 0.1.0 (git+https://github.com/lccc-project/lccc-siphash)", "libc", "serde", "serde_derive", @@ -293,6 +293,12 @@ dependencies = [ "xlang_host", ] +[[package]] +name = "lccc-siphash" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"15d185e10c787c464fc66c65e04e5ece989e1b654ab177bdad844248e9df71fc" + [[package]] name = "lccc-siphash" version = "0.1.0" @@ -660,6 +666,7 @@ dependencies = [ "target-tuples", "xlang_abi", "xlang_host", + "xlang_interface", "xlang_struct", "xlang_targets", ] @@ -704,6 +711,17 @@ dependencies = [ "cfg-match", ] +[[package]] +name = "xlang_interface" +version = "0.1.0" +dependencies = [ + "lazy_static", + "lccc-siphash 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "xlang_abi", + "xlang_host", + "xlang_targets", +] + [[package]] name = "xlang_opt" version = "0.1.0" diff --git a/lccc/src/exports.rs b/lccc/src/exports.rs index 395577cf..65544dee 100644 --- a/lccc/src/exports.rs +++ b/lccc/src/exports.rs @@ -1,4 +1,5 @@ xlang_host::rustcall! { + #[link(name = "xlang_interface", kind = "dylib")] extern "rustcall"{ pub fn __xlang_driver_init_rng(key: u64); } diff --git a/xlang/Cargo.toml b/xlang/Cargo.toml index efd8d547..e3c05071 100644 --- a/xlang/Cargo.toml +++ b/xlang/Cargo.toml @@ -13,4 +13,5 @@ xlang_abi = {path="xlang_abi"} xlang_struct = {path="xlang_struct"} xlang_targets = {path="xlang_targets"} xlang_host = {path="xlang_host"} +xlang_interface = {path="xlang_interface"} fake-enum ="0.1" From 3144a1b02b46ac9d86fc547f4e461abaa336426e Mon Sep 17 00:00:00 2001 From: Ray Redondo Date: Sat, 27 Jan 2024 21:47:53 -0600 Subject: [PATCH 03/74] feat(rust-irgen): start support for binary expr --- rust/src/irgen/xir_visitor.rs | 112 +++++++++++++++++++++++++++++++++- 1 file changed, 111 insertions(+), 1 deletion(-) diff --git a/rust/src/irgen/xir_visitor.rs b/rust/src/irgen/xir_visitor.rs index 3d5e3d28..77d09e9b 100644 --- a/rust/src/irgen/xir_visitor.rs +++ b/rust/src/irgen/xir_visitor.rs @@ -1723,7 +1723,19 @@ impl<'a> ExprVisitor for XirExprVisitor<'a> { } fn visit_binary_expr(&mut self) -> Option> { - todo!() + Some(Box::new(XirBinaryExprVisitor::new( + self.defs, + self.names, + self.properties, + self.deftys, + self.cur_fnty, + self.exprs, + self.locals, + self.ssa_tys, + self.stack_height, + self.var_heights, + self.var_stack, + ))) } fn visit_unary_expr(&mut self) -> Option> { @@ -1819,6 +1831,104 @@ impl<'a> Drop for XirTupleVisitor<'a> { } } +pub struct XirBinaryExprVisitor<'a> { + defs: &'a Definitions, + names: &'a NameMap, + properties: &'a TargetProperties<'a>, + deftys: &'a HashMap, + cur_fnty: &'a mut ir::FnType, + exprs: &'a mut Vec, + locals: &'a mut Vec, + ssa_tys: &'a mut HashMap, + stack_height: &'a mut u32, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, + op: Option, +} + +impl<'a> XirBinaryExprVisitor<'a> { + pub fn new( + defs: &'a Definitions, + names: &'a NameMap, + properties: &'a TargetProperties<'a>, + deftys: &'a HashMap, + cur_fnty: &'a mut ir::FnType, + exprs: &'a mut Vec, + locals: &'a mut Vec, + ssa_tys: &'a mut HashMap, + stack_height: &'a mut u32, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, + ) -> Self { + Self { + defs, + names, + properties, + deftys, + cur_fnty, + exprs, + locals, + ssa_tys, + stack_height, + var_heights, + var_stack, + op: None, + } + } +} + +impl<'a> BinaryExprVisitor for XirBinaryExprVisitor<'a> { + fn visit_op(&mut self, op: BinaryOp) { + self.op = Some(match op { + BinaryOp::Sub => ir::BinaryOp::Sub, + x => todo!("{:?}", x), + }); + } + + fn visit_lhs(&mut self) -> Option> { + Some(Box::new(XirExprVisitor::new( + self.defs, + self.names, + self.properties, + self.deftys, + self.cur_fnty, + self.exprs, + self.locals, + self.ssa_tys, + self.stack_height, + self.var_heights, + self.var_stack, + ))) 
+ } + + fn visit_rhs(&mut self) -> Option> { + Some(Box::new(XirExprVisitor::new( + self.defs, + self.names, + self.properties, + self.deftys, + self.cur_fnty, + self.exprs, + self.locals, + self.ssa_tys, + self.stack_height, + self.var_heights, + self.var_stack, + ))) + } +} + +impl<'a> Drop for XirBinaryExprVisitor<'a> { + fn drop(&mut self) { + *self.stack_height -= 1; + self.exprs.push(ir::Expr::BinaryOp( + self.op + .expect("BinaryExprVisitor::visit_op was never called"), + ir::OverflowBehaviour::Wrap, + )); + } +} + pub struct XirCastVisitor<'a> { defs: &'a Definitions, names: &'a NameMap, From 4b9e8aaec27dfb0765498665e7e0048a73f1d574 Mon Sep 17 00:00:00 2001 From: Ray Redondo Date: Sat, 27 Jan 2024 21:52:25 -0600 Subject: [PATCH 04/74] fix(rust-irgen): two problems: off by one errors. --- rust/src/irgen/xir_visitor.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rust/src/irgen/xir_visitor.rs b/rust/src/irgen/xir_visitor.rs index 77d09e9b..2fd66614 100644 --- a/rust/src/irgen/xir_visitor.rs +++ b/rust/src/irgen/xir_visitor.rs @@ -1920,7 +1920,7 @@ impl<'a> BinaryExprVisitor for XirBinaryExprVisitor<'a> { impl<'a> Drop for XirBinaryExprVisitor<'a> { fn drop(&mut self) { - *self.stack_height -= 1; + *self.stack_height -= 2; self.exprs.push(ir::Expr::BinaryOp( self.op .expect("BinaryExprVisitor::visit_op was never called"), From 2c7cf3958e1c59a512fb2d4096b49c857bcb3d74 Mon Sep 17 00:00:00 2001 From: Ray Redondo Date: Sat, 27 Jan 2024 22:52:07 -0600 Subject: [PATCH 05/74] feat(rust-irgen): initial support for unary exprs --- rust/src/irgen/xir_visitor.rs | 96 ++++++++++++++++++++++++++++++++++- 1 file changed, 95 insertions(+), 1 deletion(-) diff --git a/rust/src/irgen/xir_visitor.rs b/rust/src/irgen/xir_visitor.rs index 2fd66614..482d8249 100644 --- a/rust/src/irgen/xir_visitor.rs +++ b/rust/src/irgen/xir_visitor.rs @@ -1739,7 +1739,19 @@ impl<'a> ExprVisitor for XirExprVisitor<'a> { } fn visit_unary_expr(&mut self) -> Option> { - todo!() + Some(Box::new(XirUnaryExprVisitor::new( + self.defs, + self.names, + self.properties, + self.deftys, + self.cur_fnty, + self.exprs, + self.locals, + self.ssa_tys, + self.stack_height, + self.var_heights, + self.var_stack, + ))) } } @@ -1929,6 +1941,88 @@ impl<'a> Drop for XirBinaryExprVisitor<'a> { } } +pub struct XirUnaryExprVisitor<'a> { + defs: &'a Definitions, + names: &'a NameMap, + properties: &'a TargetProperties<'a>, + deftys: &'a HashMap, + cur_fnty: &'a mut ir::FnType, + exprs: &'a mut Vec, + locals: &'a mut Vec, + ssa_tys: &'a mut HashMap, + stack_height: &'a mut u32, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, + op: Option, +} + +impl<'a> XirUnaryExprVisitor<'a> { + pub fn new( + defs: &'a Definitions, + names: &'a NameMap, + properties: &'a TargetProperties<'a>, + deftys: &'a HashMap, + cur_fnty: &'a mut ir::FnType, + exprs: &'a mut Vec, + locals: &'a mut Vec, + ssa_tys: &'a mut HashMap, + stack_height: &'a mut u32, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, + ) -> Self { + Self { + defs, + names, + properties, + deftys, + cur_fnty, + exprs, + locals, + ssa_tys, + stack_height, + var_heights, + var_stack, + op: None, + } + } +} + +impl<'a> UnaryExprVisitor for XirUnaryExprVisitor<'a> { + fn visit_op(&mut self, op: UnaryOp) { + self.op = Some(match op { + UnaryOp::Neg => ir::UnaryOp::Minus, + x => todo!("{:?}", x), + }); + } + + fn visit_lhs(&mut self) -> Option> { + Some(Box::new(XirExprVisitor::new( + self.defs, + self.names, + self.properties, + self.deftys, + self.cur_fnty, + 
self.exprs, + self.locals, + self.ssa_tys, + self.stack_height, + self.var_heights, + self.var_stack, + ))) + } +} + +impl<'a> Drop for XirUnaryExprVisitor<'a> { + fn drop(&mut self) { + *self.stack_height -= 1; + self.exprs.push(ir::Expr::UnaryOp( + self.op + .expect("UnaryExprVisitor::visit_op was never called"), + ir::OverflowBehaviour::Wrap, + )); + } +} + pub struct XirCastVisitor<'a> { defs: &'a Definitions, names: &'a NameMap, From 7c3dccdd4b6d72f0cc00a073cec140929f02bc09 Mon Sep 17 00:00:00 2001 From: Ray Redondo Date: Sun, 28 Jan 2024 00:45:28 -0600 Subject: [PATCH 06/74] feat(rust-xir): struct support --- rust/src/irgen/xir_visitor.rs | 264 +++++++++++++++++++++++++++++++++- 1 file changed, 262 insertions(+), 2 deletions(-) diff --git a/rust/src/irgen/xir_visitor.rs b/rust/src/irgen/xir_visitor.rs index 482d8249..2ce664d4 100644 --- a/rust/src/irgen/xir_visitor.rs +++ b/rust/src/irgen/xir_visitor.rs @@ -1711,11 +1711,35 @@ impl<'a> ExprVisitor for XirExprVisitor<'a> { } fn visit_ctor(&mut self) -> Option> { - todo!() + Some(Box::new(XirConstructorVisitor::new( + self.defs, + self.names, + self.properties, + self.deftys, + self.cur_fnty, + self.exprs, + self.locals, + self.ssa_tys, + self.stack_height, + self.var_heights, + self.var_stack, + ))) } fn visit_field_subobject(&mut self) -> Option> { - todo!() + Some(Box::new(XirFieldSubobjectVisitor::new( + self.defs, + self.names, + self.properties, + self.deftys, + self.cur_fnty, + self.exprs, + self.locals, + self.ssa_tys, + self.stack_height, + self.var_heights, + self.var_stack, + ))) } fn visit_field_project(&mut self) -> Option> { @@ -1761,6 +1785,76 @@ impl<'a> Drop for XirExprVisitor<'a> { } } +pub struct XirFieldSubobjectVisitor<'a> { + defs: &'a Definitions, + names: &'a NameMap, + properties: &'a TargetProperties<'a>, + deftys: &'a HashMap, + cur_fnty: &'a mut ir::FnType, + exprs: &'a mut Vec, + locals: &'a mut Vec, + ssa_tys: &'a mut HashMap, + stack_height: &'a mut u32, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, +} + +impl<'a> XirFieldSubobjectVisitor<'a> { + pub fn new( + defs: &'a Definitions, + names: &'a NameMap, + properties: &'a TargetProperties<'a>, + deftys: &'a HashMap, + cur_fnty: &'a mut ir::FnType, + exprs: &'a mut Vec, + locals: &'a mut Vec, + ssa_tys: &'a mut HashMap, + stack_height: &'a mut u32, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, + ) -> Self { + Self { + defs, + names, + properties, + deftys, + cur_fnty, + exprs, + locals, + ssa_tys, + stack_height, + var_heights, + var_stack, + } + } +} + +impl<'a> FieldAccessVisitor for XirFieldSubobjectVisitor<'a> { + fn visit_base(&mut self) -> Option> { + Some(Box::new(XirExprVisitor::new( + self.defs, + self.names, + self.properties, + self.deftys, + self.cur_fnty, + self.exprs, + self.locals, + self.ssa_tys, + self.stack_height, + self.var_heights, + self.var_stack, + ))) + } + + fn visit_field(&mut self, field_name: &ty::FieldName) { + *self.stack_height -= 1; + self.exprs.push(ir::Expr::Member(match field_name { + ty::FieldName::Field(x) => x.to_string().into(), + x => todo!("{:?}", x), + })) + } +} + pub struct XirTupleVisitor<'a> { defs: &'a Definitions, names: &'a NameMap, @@ -1843,6 +1937,172 @@ impl<'a> Drop for XirTupleVisitor<'a> { } } +pub struct XirConstructorVisitor<'a> { + defs: &'a Definitions, + names: &'a NameMap, + properties: &'a TargetProperties<'a>, + deftys: &'a HashMap, + cur_fnty: &'a mut ir::FnType, + exprs: &'a mut Vec, + locals: &'a mut Vec, + ssa_tys: &'a mut HashMap, + stack_height: &'a mut u32, + 
var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, + ty: Option, + fields: Vec, +} + +impl<'a> XirConstructorVisitor<'a> { + pub fn new( + defs: &'a Definitions, + names: &'a NameMap, + properties: &'a TargetProperties<'a>, + deftys: &'a HashMap, + cur_fnty: &'a mut ir::FnType, + exprs: &'a mut Vec, + locals: &'a mut Vec, + ssa_tys: &'a mut HashMap, + stack_height: &'a mut u32, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, + ) -> Self { + Self { + defs, + names, + properties, + deftys, + cur_fnty, + exprs, + locals, + ssa_tys, + stack_height, + var_heights, + var_stack, + ty: None, + fields: vec![], + } + } +} + +impl<'a> ConstructorVisitor for XirConstructorVisitor<'a> { + fn visit_ctor_def(&mut self, defid: DefId) { + self.ty = Some(ir::Type::Named(ir::Path { + components: vec![ir::PathComponent::Text( + self.names[&defid].to_string().into(), + )], + })); + } + + fn visit_field(&mut self) -> Option> { + Some(Box::new(XirFieldInitVisitor::new( + self.defs, + self.names, + self.properties, + self.deftys, + self.cur_fnty, + self.exprs, + self.locals, + self.ssa_tys, + self.stack_height, + self.var_heights, + self.var_stack, + &mut self.fields, + ))) + } + + fn visit_init(&mut self) -> Option> { + todo!("..default() and similar aren't currently handled by irgen") + } +} + +impl<'a> Drop for XirConstructorVisitor<'a> { + fn drop(&mut self) { + let fields = std::mem::replace(&mut self.fields, vec![]); + *self.stack_height -= self.fields.len() as u32; + + self.exprs.push(ir::Expr::Aggregate(ir::AggregateCtor { + ty: self + .ty + .take() + .expect("ConstructorVisitor::visit_ty was never called"), + fields: fields.into_iter().map(String::into).collect(), + })); + } +} + +pub struct XirFieldInitVisitor<'a> { + defs: &'a Definitions, + names: &'a NameMap, + properties: &'a TargetProperties<'a>, + deftys: &'a HashMap, + cur_fnty: &'a mut ir::FnType, + exprs: &'a mut Vec, + locals: &'a mut Vec, + ssa_tys: &'a mut HashMap, + stack_height: &'a mut u32, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, + fields: &'a mut Vec, +} + +impl<'a> XirFieldInitVisitor<'a> { + pub fn new( + defs: &'a Definitions, + names: &'a NameMap, + properties: &'a TargetProperties<'a>, + deftys: &'a HashMap, + cur_fnty: &'a mut ir::FnType, + exprs: &'a mut Vec, + locals: &'a mut Vec, + ssa_tys: &'a mut HashMap, + stack_height: &'a mut u32, + var_heights: &'a mut HashMap, + var_stack: &'a mut Vec, + fields: &'a mut Vec, + ) -> Self { + Self { + defs, + names, + properties, + deftys, + cur_fnty, + exprs, + locals, + ssa_tys, + stack_height, + var_heights, + var_stack, + fields, + } + } +} + +impl<'a> FieldInitVisitor for XirFieldInitVisitor<'a> { + fn visit_field(&mut self, field_name: &ty::FieldName) { + self.fields.push(match field_name { + ty::FieldName::Field(x) => x.to_string(), + x => todo!("{:?}", x), + }); + } + + fn visit_value(&mut self) -> Option> { + Some(Box::new(XirExprVisitor::new( + self.defs, + self.names, + self.properties, + self.deftys, + self.cur_fnty, + self.exprs, + self.locals, + self.ssa_tys, + self.stack_height, + self.var_heights, + self.var_stack, + ))) + } +} + pub struct XirBinaryExprVisitor<'a> { defs: &'a Definitions, names: &'a NameMap, From 49b8fb201992ded1694cbe2c83190a7e7f9e7e6f Mon Sep 17 00:00:00 2001 From: Ray Redondo Date: Mon, 29 Jan 2024 14:29:24 -0600 Subject: [PATCH 07/74] feat(rust-parse): support char literals --- rust/src/lex.rs | 1 - rust/src/parse.rs | 69 +++++++++++++++++++++++++++++++++++++++++++---- 2 files changed, 64 insertions(+), 6 deletions(-) 
diff --git a/rust/src/lex.rs b/rust/src/lex.rs index b4b0c02c..420eee99 100644 --- a/rust/src/lex.rs +++ b/rust/src/lex.rs @@ -512,7 +512,6 @@ impl LexemeClass { AstFrag::Vis(_) => Self::AstFrag(Some(AstFragClass::Vis)), AstFrag::Meta(_) => Self::AstFrag(Some(AstFragClass::Meta)), }, - _ => todo!(), } } diff --git a/rust/src/parse.rs b/rust/src/parse.rs index 0092fbfe..9dedea18 100644 --- a/rust/src/parse.rs +++ b/rust/src/parse.rs @@ -16,8 +16,7 @@ use crate::{ }, interning::Symbol, lex::{ - AstFrag, AstFragClass, Group, GroupType, IsEof, Keyword, Lexeme, LexemeBody, LexemeClass, - Punctuation, StringType, Token, TokenType, + AstFrag, AstFragClass, CharType, Group, GroupType, IsEof, Keyword, Lexeme, LexemeBody, LexemeClass, Punctuation, StringType, Token, TokenType }, sema::ty::Mutability, span::{Pos, Span}, @@ -800,6 +799,7 @@ pub fn do_user_type_enum( let (startspan, var_id) = do_lexeme_token(&mut inner_tree, LexemeClass::Identifier)?; + // FIXME: What is this used for? let var_name = Spanned { body: var_id.body, span: startspan, @@ -1169,7 +1169,7 @@ pub fn do_statement( pub fn do_literal( tree: &mut PeekMoreIterator>, ) -> Result> { - // Only handling int and string lits for now + // Only handling int, string, and char lits for now match do_lexeme_class(tree, LexemeClass::Number) { Ok(x) => { let span = x.span; @@ -1195,7 +1195,19 @@ pub fn do_literal( span, }) } - Err(b) => Err(a | b)?, // TODO: Literally every other kind of useful literal + Err(b) => match do_char(tree) { + Ok((ch, ty)) => { + let span = ch.span; + Ok(Spanned { + body: Literal { + val: ch, + lit_kind: LiteralKind::Char(ty), + }, + span, + }) + } + Err(c) => Err(a | b | c)?, // TODO: Literally every other kind of useful literal + }, }, } } @@ -2639,7 +2651,7 @@ pub fn do_pattern_binding( }; let binding = match do_lexeme_classes(&mut tree, &[punct!(@), punct!(::)]) { - Ok((lex, punct!(@))) => Some(Box::new(do_pattern_param(&mut tree)?)), + Ok((_, punct!(@))) => Some(Box::new(do_pattern_param(&mut tree)?)), Ok((lex, punct!(::))) => { return Err(Error { expected: vec![punct!(@)], @@ -3217,6 +3229,53 @@ pub fn do_string( )) } +pub fn do_char( + tree: &mut PeekMoreIterator>, +) -> Result<(Spanned, StringType)> { + let full_str = do_lexeme_class(tree, LexemeClass::Character)?; + let chr_ty = *if let Lexeme { + body: + LexemeBody::Token(Token { + ty: TokenType::Character(chr_ty), + .. + }), + .. 
+ } = &full_str + { + chr_ty + } else { + unreachable!() + }; + let str = full_str.text().unwrap(); + let str = match chr_ty { + CharType::Default => &str[1..str.len() - 1], // Skip " and " + CharType::Byte => &str[2..str.len() - 1], // Skip b" and " + }; + let mut parsed = String::new(); + let mut str_iter = str.chars(); + while let Some(c) = str_iter.next() { + match c { + '\\' => match str_iter.next() { + Some('0') => parsed.push('\0'), + Some('n') => parsed.push('\n'), + None => todo!("throw an error"), + Some(x) => todo!("\\{}", x), + }, + x => parsed.push(x), + } + } + Ok(( + Spanned { + body: parsed.into(), + span: full_str.span, + }, + match chr_ty { + CharType::Default => StringType::Default, + CharType::Byte => StringType::Byte, + }, + )) +} + pub fn do_item_extern_block( tree: &mut PeekMoreIterator>, ) -> Result> { From 72a31934e8170874897e2fcab4670019347ff6ba Mon Sep 17 00:00:00 2001 From: Ray Redondo Date: Mon, 29 Jan 2024 14:45:56 -0600 Subject: [PATCH 08/74] feat(rust-hir): support char literals --- rust/src/ast.rs | 4 ++-- rust/src/parse.rs | 7 ++----- rust/src/sema/hir.rs | 16 +++++++++++++--- rust/src/sema/tyck.rs | 3 +++ 4 files changed, 20 insertions(+), 10 deletions(-) diff --git a/rust/src/ast.rs b/rust/src/ast.rs index 2d860c1a..00504dcb 100644 --- a/rust/src/ast.rs +++ b/rust/src/ast.rs @@ -3,7 +3,7 @@ use core::fmt; use crate::lex::Group; use crate::{interning::Symbol, lex::Lexeme}; -pub use crate::lex::StringType; +pub use crate::lex::{CharType, StringType}; pub use crate::span::Spanned; @@ -433,7 +433,7 @@ impl core::fmt::Display for Literal { #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] pub enum LiteralKind { String(StringType), - Char(StringType), + Char(CharType), Int(Option>), Float(Option>), Bool, diff --git a/rust/src/parse.rs b/rust/src/parse.rs index 9dedea18..7acf0a14 100644 --- a/rust/src/parse.rs +++ b/rust/src/parse.rs @@ -3231,7 +3231,7 @@ pub fn do_string( pub fn do_char( tree: &mut PeekMoreIterator>, -) -> Result<(Spanned, StringType)> { +) -> Result<(Spanned, CharType)> { let full_str = do_lexeme_class(tree, LexemeClass::Character)?; let chr_ty = *if let Lexeme { body: @@ -3269,10 +3269,7 @@ pub fn do_char( body: parsed.into(), span: full_str.span, }, - match chr_ty { - CharType::Default => StringType::Default, - CharType::Byte => StringType::Byte, - }, + chr_ty, )) } diff --git a/rust/src/sema/hir.rs b/rust/src/sema/hir.rs index 9e899172..116460f6 100644 --- a/rust/src/sema/hir.rs +++ b/rust/src/sema/hir.rs @@ -1,6 +1,6 @@ use xlang::abi::collection::HashMap; -use crate::ast::{self, Literal, LiteralKind, Mutability, Safety, StringType}; +use crate::ast::{self, CharType, Literal, LiteralKind, Mutability, Safety, StringType}; use crate::helpers::{FetchIncrement, TabPrinter}; use crate::interning::Symbol; use crate::span::{synthetic, Span}; @@ -85,6 +85,7 @@ pub enum HirExpr { Var(HirVarId), ConstInt(Option>, u128), ConstString(StringType, Spanned), + ConstChar(CharType, Spanned), Const(DefId, GenericArgs), #[allow(dead_code)] Unreachable, @@ -127,6 +128,11 @@ impl core::fmt::Display for HirExpr { s.escape_default().fmt(f)?; f.write_str("\"") } + HirExpr::ConstChar(_, s) => { + f.write_str("'")?; + s.escape_default().fmt(f)?; + f.write_str("'") + } HirExpr::Tuple(v) => { f.write_str("(")?; let mut sep = ""; @@ -710,9 +716,13 @@ impl<'a> HirLowerer<'a> { Ok(expr.copy_span(|_| HirExpr::ConstInt(ty, val))) } Literal { - lit_kind: LiteralKind::String(skind), + lit_kind: LiteralKind::String(sty), + val: sym, + } => Ok(expr.copy_span(|_| 
HirExpr::ConstString(sty, sym))), + Literal { + lit_kind: LiteralKind::Char(cty), val: sym, - } => Ok(expr.copy_span(|_| HirExpr::ConstString(skind, sym))), + } => Ok(expr.copy_span(|_| HirExpr::ConstChar(cty, sym))), _ => todo!("literal"), }, ast::Expr::Break(_, _) => todo!("break"), diff --git a/rust/src/sema/tyck.rs b/rust/src/sema/tyck.rs index 273863c9..9d71c40a 100644 --- a/rust/src/sema/tyck.rs +++ b/rust/src/sema/tyck.rs @@ -692,6 +692,9 @@ impl<'a> ThirConverter<'a> { cat: ValueCategory::Rvalue, }) } + &hir::HirExpr::ConstChar(cty, val) => { + todo!("const char"); + } hir::HirExpr::Const(defid, generics) => { let def = self.defs.definition(*defid); From 25937b5029bb9a5ca28d461de858e93887daba75 Mon Sep 17 00:00:00 2001 From: Connor Horman Date: Tue, 30 Jan 2024 23:25:08 -0500 Subject: [PATCH 09/74] feat(xlang-backend): Start rewrite for new format --- codegen-clever/src/lib.rs | 62 +- codegen-w65/src/lib.rs | 409 +---- codegen-x86/src/lib.rs | 86 +- lc-binutils | 2 +- lccc/src/main.rs | 1 + lccc/src/session.rs | 9 +- xlang/xlang_abi/src/vec.rs | 77 + xlang/xlang_backend/src/expr.rs | 150 +- xlang/xlang_backend/src/intrinsic.rs | 182 --- xlang/xlang_backend/src/lib.rs | 1932 +++--------------------- xlang/xlang_backend/src/mach.rs | 42 + xlang/xlang_backend/src/mc.rs | 1036 ------------- xlang/xlang_backend/src/mc/callconv.rs | 31 - xlang/xlang_backend/src/mc/regalloc.rs | 22 - xlang/xlang_backend/src/ssa.rs | 493 +++++- xlang/xlang_backend/src/ty.rs | 2 +- xlang/xlang_host/build.rs | 1 + 17 files changed, 1074 insertions(+), 3463 deletions(-) create mode 100644 xlang/xlang_backend/src/mach.rs delete mode 100644 xlang/xlang_backend/src/mc.rs delete mode 100644 xlang/xlang_backend/src/mc/callconv.rs delete mode 100644 xlang/xlang_backend/src/mc/regalloc.rs diff --git a/codegen-clever/src/lib.rs b/codegen-clever/src/lib.rs index 153443b4..9b351b27 100644 --- a/codegen-clever/src/lib.rs +++ b/codegen-clever/src/lib.rs @@ -1,11 +1,67 @@ -use xlang::plugin::XLangCodegen; -use xlang::prelude::v1::*; +use target_tuples::{Architecture, Target}; +use xlang::{ + abi::string::StringView, plugin::XLangCodegen, prelude::v1::*, + targets::properties::TargetProperties, +}; +use xlang_backend::{mach::Machine, SsaCodegenPlugin}; + +pub struct CleverAssignments {} + +pub struct CleverClobbers {} + +pub struct CleverMachine {} + +impl Machine for CleverMachine { + fn matches_target(&self, targ: StringView) -> bool { + Target::parse(&targ).arch() == Architecture::Clever + } + + fn init_from_target(&mut self, _: &TargetProperties) {} + + type Assignments = CleverAssignments; + + type BlockClobbers = CleverClobbers; + + fn new_assignments(&self) -> Self::Assignments { + todo!() + } + + fn assign_locations( + &self, + assignments: &mut Self::Assignments, + insns: &[xlang_backend::ssa::SsaInstruction], + incoming: &[xlang_backend::ssa::OpaqueLocation], + which: u32, + ) -> Self::BlockClobbers { + todo!() + } + + fn codegen_prologue( + &self, + assignments: &Self::Assignments, + out: &mut W, + ) -> std::io::Result<()> { + todo!() + } + + fn codegen_block std::prelude::v1::String>( + &self, + assignments: &Self::Assignments, + insns: &[xlang_backend::ssa::SsaInstruction], + block_clobbers: Self::BlockClobbers, + out: &mut W, + label_sym: F, + which: u32, + ) -> std::io::Result<()> { + todo!() + } +} xlang::host::rustcall! 
{ #[no_mangle] #[allow(improper_ctypes_definitions)] pub extern "rustcall" fn xlang_backend_main() -> DynBox { - todo!() + DynBox::unsize_box(xlang::abi::boxed::Box::new(SsaCodegenPlugin::new(CleverMachine{}))) }} xlang::plugin_abi_version!("0.1"); diff --git a/codegen-w65/src/lib.rs b/codegen-w65/src/lib.rs index e1191477..1883dbd5 100644 --- a/codegen-w65/src/lib.rs +++ b/codegen-w65/src/lib.rs @@ -1,410 +1,67 @@ -use std::{ - cell::RefCell, collections::HashSet, convert::TryFrom, ops::Deref, rc::Rc, str::FromStr, -}; - -use arch_ops::{ - traits::{Address, InsnWrite}, - w65::{W65Address, W65Encoder, W65Instruction, W65Mode, W65Opcode, W65Operand, W65Register}, -}; -use binfmt::{ - fmt::{FileType, Section, SectionType}, - sym::{SymbolKind, SymbolType}, -}; use target_tuples::{Architecture, Target}; use xlang::{ - abi::{ - option::Option::{None as XLangNone, Some as XLangSome}, - span::Span, - string::StringView, - }, - plugin::{OutputMode, XLangCodegen, XLangPlugin}, - prelude::v1::{DynBox, HashMap, Pair}, - targets::properties::{MachineProperties, TargetProperties}, -}; -use xlang_backend::{ - callconv::CallingConvention, - expr::ValLocation, - mangle::mangle_itanium, - mc::{MCBackend, MCWriter, MachineFeatures}, - str::StringMap, - ty::TypeInformation, - FunctionCodegen, FunctionRawCodegen, -}; -use xlang_struct::{ - AccessClass, AggregateDefinition, BinaryOp, FnType, FunctionDeclaration, PathComponent, - ScalarType, ScalarTypeHeader, ScalarTypeKind, Type, + abi::string::StringView, plugin::XLangCodegen, prelude::v1::*, + targets::properties::TargetProperties, }; +use xlang_backend::{mach::Machine, SsaCodegenPlugin}; -#[derive(Clone, Debug, Hash, PartialEq, Eq)] -pub enum W65ValLocation { - Register { - size: u16, - reg: W65Register, - }, - Indirect { - size: u16, - reg: W65Register, - offset: i64, - }, - Regs { - size: u64, - regs: Vec, - }, - SyntheticRegister { - size: u64, - base: u32, - }, - Null, - Unassigned(usize), -} - -fn gcd(a: i64, b: i64) -> i64 { - if a == 0 { - b - } else if b == 0 { - a - } else { - gcd(b, a % b) - } -} - -impl ValLocation for W65ValLocation { - fn addressible(&self) -> bool { - matches!(self, Self::Indirect { .. }) - } - - fn unassigned(n: usize) -> Self { - Self::Unassigned(n) - } -} - -#[derive(Clone, Debug, Hash, PartialEq, Eq)] -pub enum W65InstructionOrLabel { - Insn(W65Instruction), - Label(String), - FunctionEpilogue, - ClobberMode, - ClearModeFlags(W65Mode), - SetModeFlags(W65Mode), - ResetMode(W65Mode), - AssumeMode(W65Mode), -} - -impl From for W65InstructionOrLabel { - fn from(insn: W65Instruction) -> Self { - Self::Insn(insn) - } -} - -#[derive(Debug, Clone)] -pub struct TempSymbol( - String, - Option<&'static str>, - Option, - SymbolType, - SymbolKind, -); - -#[allow(dead_code)] -#[derive(Clone, Debug, Hash, PartialEq, Eq)] -enum RegisterStatus { - Free, - ToClobber, - MustSave, - InUse, - Saved { - loc: W65ValLocation, - next: Box, - }, -} - -pub struct W65CallConv { - tys: Rc, -} - -#[allow(dead_code)] -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] -pub enum TypeClass { - Integer, - Memory, - Zero, -} - -#[allow(clippy::missing_panics_doc)] // TODO: remove todo!() -#[must_use] -pub fn classify_type(ty: &Type) -> Option { - match ty { - Type::Scalar(ScalarType { - header: - ScalarTypeHeader { - vectorsize: XLangSome(1..=65535), - .. - }, - .. - }) => Some(TypeClass::Memory), - Type::Scalar(ScalarType { - kind: ScalarTypeKind::Float { .. }, - .. 
- }) => Some(TypeClass::Integer), - Type::Scalar(_) | Type::Pointer(_) => Some(TypeClass::Integer), - Type::Void | Type::FnType(_) | Type::Null => None, - Type::Array(ty) => classify_type(&ty.ty), - Type::TaggedType(_, ty) => classify_type(ty), - Type::Product(tys) => { - let mut infected = TypeClass::Zero; - for ty in tys { - infected = match (classify_type(ty)?, infected) { - (a, TypeClass::Zero) => a, - (_, TypeClass::Memory) => TypeClass::Memory, - (a, b) if a == b => a, - _ => TypeClass::Memory, - }; - } - Some(infected) - } - Type::Aligned(_, _) => todo!(), - Type::Aggregate(AggregateDefinition { fields, .. }) => { - let mut infected = TypeClass::Zero; - for ty in fields.iter().map(|field| &field.ty) { - infected = match (classify_type(ty)?, infected) { - (a, TypeClass::Zero) => a, - (_, TypeClass::Memory) => TypeClass::Memory, - (a, b) if a == b => a, - _ => TypeClass::Memory, - }; - } - Some(infected) - } - Type::Named(path) => todo!("named type {:?}", path), - } -} - -impl CallingConvention for W65CallConv { - type Loc = W65ValLocation; - - fn pass_return_place(&self, ty: &xlang_struct::Type) -> Option { - match (classify_type(ty)?, self.tys.type_size(ty)?) { - (TypeClass::Memory, _) => todo!("memory"), - (_, 0..=8) => None, - _ => todo!("Oversized values"), - } - } - - fn find_param( - &self, - fnty: &xlang_struct::FnType, - real: &xlang_struct::FnType, - param: u32, - infn: bool, - ) -> Self::Loc { - let mut ismallregs: &[W65Register] = &[W65Register::X, W65Register::Y]; - let mut iregs: &[u32] = &[1, 2, 3, 4, 5, 6]; - - let mut last_loc = W65ValLocation::Unassigned(0); - - let param = param as usize; - - if param > real.params.len() && real.variadic { - todo!("varargs are passed on the stack") - } - - for (i, ty) in fnty.params.iter().enumerate() { - match (classify_type(ty).unwrap(), self.tys.type_size(ty).unwrap()) { - (TypeClass::Memory, _) => todo!("memory"), - (TypeClass::Zero, _) => last_loc = W65ValLocation::Null, - (TypeClass::Integer, size @ (1 | 2)) => { - if ismallregs.is_empty() { - if let Some((first, rest)) = iregs.split_first() { - iregs = rest; - last_loc = W65ValLocation::SyntheticRegister { size, base: *first } - } else { - todo!("stack") - } - } else { - let reg = ismallregs[0]; - ismallregs = &ismallregs[1..]; - last_loc = W65ValLocation::Register { - size: size as u16, - reg, - } - } - } - (TypeClass::Integer, size @ 3..=4) => { - if let Some((first, rest)) = iregs.split_first() { - iregs = rest; - last_loc = W65ValLocation::SyntheticRegister { size, base: *first } - } else { - todo!("stack") - } - } - (TypeClass::Integer, size @ 5..=16) => { - let nregs = ((size as usize) & !3) >> 2; - - if iregs.len() < nregs { - todo!("stack") - } - - let (l, r) = iregs.split_at(nregs); - - iregs = r; +pub struct W65Assignments {} - last_loc = W65ValLocation::SyntheticRegister { size, base: l[0] } - } - _ => todo!("oversized values"), - } - } +pub struct W65Clobbers {} - last_loc - } - - fn find_return_val(&self, fnty: &xlang_struct::FnType) -> Self::Loc { - match ( - classify_type(&fnty.ret).unwrap(), - self.tys.type_size(&fnty.ret), - ) { - (TypeClass::Memory, _) => todo!("memory"), - (TypeClass::Zero, _) => W65ValLocation::Null, - (TypeClass::Integer, Some(size @ (1 | 2))) => W65ValLocation::Register { - size: size as u16, - reg: W65Register::A, - }, - (TypeClass::Integer, Some(size @ (3..=8))) => { - W65ValLocation::SyntheticRegister { size, base: 0 } - } - _ => todo!("oversized values"), - } - } -} - -impl W65CallConv { - pub fn return_mode(&self, fnty: 
&xlang_struct::FnType) -> W65Mode { - let mut mode = W65Mode::NONE; - - match (classify_type(&fnty.ret), self.tys.type_size(&fnty.ret)) { - (Some(TypeClass::Integer), Some(1)) => mode |= W65Mode::M, - _ => {} - } - - mode - } - - pub fn call_mode(&self, fnty: &xlang_struct::FnType) -> W65Mode { - let mut mode = W65Mode::M; - let mut hassize2 = false; - let mut nsize12vals = 0; - - for param in &fnty.params { - match (classify_type(param), self.tys.type_size(param)) { - (Some(TypeClass::Integer), Some(1)) if nsize12vals != 2 => { - nsize12vals += 1; - } - (Some(TypeClass::Integer), Some(1)) if nsize12vals != 2 => { - nsize12vals += 1; - } - _ if nsize12vals == 2 => break, - _ => continue, - } - } - - if !hassize2 && nsize12vals != 0 { - mode | W65Mode::X; - } - - mode - } -} - -pub struct W65MachineFeatures { - properties: &'static TargetProperties<'static>, -} - -impl MachineFeatures for W65MachineFeatures { - type Loc = W65ValLocation; - - type CallConv = W65CallConv; - - fn native_int_size(&self) -> u16 { - todo!() - } +pub struct W65Machine {} - fn native_float_size(&self) -> Option { - todo!() +impl Machine for W65Machine { + fn matches_target(&self, targ: StringView) -> bool { + Target::parse(&targ).arch() == Architecture::Wc65c816 } - fn lockfree_use_libatomic(&self, size: u64) -> bool { - todo!() - } + fn init_from_target(&mut self, _: &TargetProperties) {} - fn lockfree_cmpxchg_use_libatomic(&self, size: u64) -> bool { - todo!() - } + type Assignments = W65Assignments; - fn has_wait_free_compound(&self, op: BinaryOp, size: u64) -> bool { - todo!() - } + type BlockClobbers = W65Clobbers; - fn has_wait_free_compound_fetch(&self, op: BinaryOp, size: u64) -> bool { - todo!() + fn new_assignments(&self) -> Self::Assignments { + W65Assignments {} } - fn mangle(&self, path: &[PathComponent]) -> String { - todo!() - } -} - -pub struct W65MCWriter {} - -impl MCWriter for W65MCWriter { - fn get_features( - &self, - properties: &'static TargetProperties<'static>, - features: Span, - ) -> Self::Features { - W65MachineFeatures { properties } - } - - type Features = W65MachineFeatures; - - type Clobbers = (); - - fn resolve_locations( + fn assign_locations( &self, - insns: &mut [xlang_backend::mc::MCInsn<::Loc>], - callconv: &::CallConv, - ) -> Self::Clobbers { + assignments: &mut Self::Assignments, + insns: &[xlang_backend::ssa::SsaInstruction], + incoming: &[xlang_backend::ssa::OpaqueLocation], + which: u32, + ) -> Self::BlockClobbers { todo!() } - fn write_machine_code( + fn codegen_prologue( &self, - insns: &[xlang_backend::mc::MCInsn<::Loc>], - clobbers: Self::Clobbers, - tys: Rc, - out: &mut I, - sym_accepter: F, + assignments: &Self::Assignments, + out: &mut W, ) -> std::io::Result<()> { todo!() } - fn get_call_conv( + fn codegen_block std::prelude::v1::String>( &self, - realty: &FnType, - targ: &'static TargetProperties<'static>, - features: Span, - ty_info: Rc, - ) -> ::CallConv { + assignments: &Self::Assignments, + insns: &[xlang_backend::ssa::SsaInstruction], + block_clobbers: Self::BlockClobbers, + out: &mut W, + label_sym: F, + which: u32, + ) -> std::io::Result<()> { todo!() } - - fn target_matches(&self, name: &str) -> bool { - Target::parse(name).arch() == Architecture::Wc65c816 - } } xlang::host::rustcall! 
{ #[no_mangle] +#[allow(improper_ctypes_definitions)] pub extern "rustcall" fn xlang_backend_main() -> DynBox { - DynBox::unsize_box(xlang::abi::boxed::Box::new(MCBackend::new(W65MCWriter{}))) + DynBox::unsize_box(xlang::abi::boxed::Box::new(SsaCodegenPlugin::new(W65Machine{}))) }} xlang::plugin_abi_version!("0.1"); diff --git a/codegen-x86/src/lib.rs b/codegen-x86/src/lib.rs index cc216189..a9bd4269 100644 --- a/codegen-x86/src/lib.rs +++ b/codegen-x86/src/lib.rs @@ -1,15 +1,87 @@ -#![deny(warnings, clippy::all, clippy::pedantic, clippy::nursery)] - -pub mod mc; - +use arch_ops::x86::{X86Mode, X86Register}; +use target_tuples::{Architecture, Target}; use xlang::{ - plugin::XLangCodegen, - prelude::v1::{Box, DynBox}, + abi::string::StringView, plugin::XLangCodegen, prelude::v1::*, + targets::properties::TargetProperties, }; +use xlang_backend::{mach::Machine, SsaCodegenPlugin}; + +pub struct LocationAssignment {} + +pub struct X86Machine { + mode: Option, +} + +pub struct X86Assignments { + mode: X86Mode, + sp: X86Register, + available_int_registers: Vec, + stack_width: u32, + assigns: HashMap, +} + +pub struct X86Clobbers {} + +impl Machine for X86Machine { + fn matches_target(&self, targ: StringView) -> bool { + let arch = Target::parse(&targ).arch(); + arch.is_x86() || arch == Architecture::X86_64 + } + + fn init_from_target(&mut self, properties: &TargetProperties) { + let mode = match properties.arch.width { + 16 => X86Mode::Real, + 32 => X86Mode::Protected, + 64 => X86Mode::Long, + _ => panic!("Not an x86 target"), + }; + self.mode = Some(mode); + } + + type Assignments = X86Assignments; + + type BlockClobbers = X86Clobbers; + + fn new_assignments(&self) -> Self::Assignments { + todo!() + } + + fn assign_locations( + &self, + assignments: &mut Self::Assignments, + insns: &[xlang_backend::ssa::SsaInstruction], + incoming: &[xlang_backend::ssa::OpaqueLocation], + which: u32, + ) -> Self::BlockClobbers { + todo!() + } + + fn codegen_block std::prelude::v1::String>( + &self, + assignments: &Self::Assignments, + insns: &[xlang_backend::ssa::SsaInstruction], + block_clobbers: Self::BlockClobbers, + out: &mut W, + label_sym: F, + which: u32, + ) -> std::io::Result<()> { + todo!() + } + + fn codegen_prologue( + &self, + assignments: &Self::Assignments, + out: &mut W, + ) -> std::io::Result<()> { + todo!() + } +} xlang::host::rustcall! 
{ #[no_mangle] #[allow(improper_ctypes_definitions)] pub extern "rustcall" fn xlang_backend_main() -> DynBox { - DynBox::unsize_box(Box::new(xlang_backend::mc::MCBackend::new(mc::new_writer()))) + DynBox::unsize_box(xlang::abi::boxed::Box::new(SsaCodegenPlugin::new(X86Machine{mode: None}))) }} + +xlang::plugin_abi_version!("0.1"); diff --git a/lc-binutils b/lc-binutils index 23371857..d97de389 160000 --- a/lc-binutils +++ b/lc-binutils @@ -1 +1 @@ -Subproject commit 23371857b11f909a19af38a1a4873290b546a9d8 +Subproject commit d97de389bd9a941c222320229bbe41f41294230b diff --git a/lccc/src/main.rs b/lccc/src/main.rs index d6eb0e74..1dea9441 100644 --- a/lccc/src/main.rs +++ b/lccc/src/main.rs @@ -467,6 +467,7 @@ fn main() { if mode > Mode::Xir { for codegen_path in &codegen_paths { + println!("Opening codegen plugin: {}", codegen_path.display()); codegen_handles .push(Handle::open(codegen_path).expect("couldn't load frontend library")); } diff --git a/lccc/src/session.rs b/lccc/src/session.rs index 972eeb75..86a647c1 100644 --- a/lccc/src/session.rs +++ b/lccc/src/session.rs @@ -1,13 +1,15 @@ use core::cell::RefCell; use std::path::PathBuf; -use xlang::abi::io::Read; +use xlang::abi::io::{Read, Write}; use xlang::abi::prelude::v1::*; use xlang::abi::string::StringView; use xlang::plugin::{XLangCodegen, XLangFrontend, XLangPlugin}; use xlang::targets::properties::{MachineProperties, TargetProperties}; use xlang_host::dso::Handle; +use crate::Mode; + use super::{DebugLevel, OptimizeLevel}; pub struct MsvcSysroot { @@ -63,6 +65,9 @@ pub struct CompileSession<'a> { driver: &'a DriverSession<'a>, frontend: DynBox, backend: Option>, + middle_plugins: Vec>, file: PathBuf, - output_file: Box, + output_file: Box, } + +impl<'a> CompileSession<'a> {} diff --git a/xlang/xlang_abi/src/vec.rs b/xlang/xlang_abi/src/vec.rs index f2075c10..6ba70f29 100644 --- a/xlang/xlang_abi/src/vec.rs +++ b/xlang/xlang_abi/src/vec.rs @@ -6,6 +6,7 @@ use std::{ marker::PhantomData, mem::{size_of, ManuallyDrop}, ops::{Deref, DerefMut}, + ptr::NonNull, }; use crate::{ @@ -394,6 +395,32 @@ impl Vec { new } + /// Returns an iterator that moves out of the back `n` elements of `self`. + /// + /// The iterator takes ownership of the values immediately - if dropped, the unconsumed values are dropped, and if forgetten the unconsumed values will also be forgotten. + /// `self` remains borrowed for the lifetime of the iterator to avoid an extra allocation. 
+ /// + /// ## Panics + /// + /// Panics if `n` is greater than `len()` + /// + pub fn drain_back(&mut self, n: usize) -> DrainBack { + if n > self.len { + panic!( + "Index {} is out of range for a vector of length {}", + n, self.len + ) + } + self.len -= n; + let base = unsafe { self.ptr.as_ptr().add(self.len) }; + + DrainBack { + ptr: unsafe { NonNull::new_unchecked(base) }, + remaining: n, + _phantom: PhantomData, + } + } + /// /// Resizes the vector, shrinking it to `nlen`, dropping any excees elements /// @@ -744,6 +771,56 @@ impl From> for Vec { } } +/// An iterator that takes ownership of elements at the end of a [`Vec`] +pub struct DrainBack<'a, T> { + ptr: core::ptr::NonNull, + remaining: usize, + _phantom: PhantomData<&'a mut [T]>, +} + +impl<'a, T> Iterator for DrainBack<'a, T> { + type Item = T; + fn next(&mut self) -> Option { + if self.remaining == 0 { + None + } else { + let val = unsafe { core::ptr::read(self.ptr.as_ptr()) }; + self.remaining -= 1; + self.ptr = unsafe { NonNull::new_unchecked(self.ptr.as_ptr().add(1)) }; + + Some(val) + } + } + + fn size_hint(&self) -> (usize, Option) { + (self.remaining, Some(self.remaining)) + } +} + +impl<'a, T> Drop for DrainBack<'a, T> { + fn drop(&mut self) { + if core::mem::needs_drop::() { + for i in 0..self.remaining { + unsafe { core::ptr::drop_in_place(self.ptr.as_ptr().add(i)) } + } + } + } +} + +impl<'a, T> ExactSizeIterator for DrainBack<'a, T> {} +impl<'a, T> FusedIterator for DrainBack<'a, T> {} +impl<'a, T> DoubleEndedIterator for DrainBack<'a, T> { + fn next_back(&mut self) -> Option { + if self.remaining == 0 { + None + } else { + self.remaining -= 1; + let val = unsafe { core::ptr::read(self.ptr.as_ptr().add(self.remaining)) }; + Some(val) + } + } +} + /// An iterator over the values of a [`Vec`] pub struct IntoIter { ptr: Unique, diff --git a/xlang/xlang_backend/src/expr.rs b/xlang/xlang_backend/src/expr.rs index dacf46a0..9a832d8a 100644 --- a/xlang/xlang_backend/src/expr.rs +++ b/xlang/xlang_backend/src/expr.rs @@ -1,6 +1,6 @@ use xlang::{ + abi::{collection::HashMap, option::None as XLangNone, pair::Pair}, ir::{ArrayType, Path, PointerType, ScalarType, ScalarTypeHeader, ScalarTypeKind, Type, Value}, - prelude::v1::*, }; use core::{fmt::Debug, hash::Hash}; @@ -12,9 +12,6 @@ use crate::str::Encoding; pub trait ValLocation: Eq + Debug + Clone { /// Checks if this location is addressable (is not a register) fn addressible(&self) -> bool; - - /// Gets an unassigned location, used by [`super::FunctionCodegen`] to keep track of values before asking the raw codegen to assign locations. 
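// A quick sketch of how the `Vec::drain_back` added above behaves. Illustrative
// only: it assumes the usual `new`/`push`/`len` API on `xlang_abi::vec::Vec`;
// everything else follows from the implementation shown in the hunk.
fn drain_back_example() {
    use xlang_abi::vec::Vec;

    let mut v = Vec::new();
    for x in 1..=5 {
        v.push(x);
    }

    // Detach the last two elements; they are yielded front-to-back (4, then 5)
    // and `v` is left holding [1, 2, 3].
    let tail: std::vec::Vec<i32> = v.drain_back(2).collect();
    assert_eq!(tail, [4, 5]);
    assert_eq!(v.len(), 3);

    // An unconsumed iterator still removes (and drops) the drained values.
    drop(v.drain_back(1));
    assert_eq!(v.len(), 2);

    // `v.drain_back(10)` would panic, since `n` may not exceed `len()`.
}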
- fn unassigned(n: usize) -> Self; } /// The pointee of a pointer @@ -22,8 +19,6 @@ pub trait ValLocation: Eq + Debug + Clone { pub enum LValue { /// An LValue from an Opaque pointer stored in `Loc` OpaquePointer(Loc), - /// A pointer to a temporary value (`as_temporary`) - Temporary(Box>), /// A pointer to a local variable Local(u32), /// A pointer to a global static/function @@ -32,11 +27,9 @@ pub enum LValue { Label(u32), /// Aggregate Element Field Field(Type, Box>, String), - /// A pointer to a string literal - StringLiteral(Encoding, Vec), /// Offset (in bytes) to some other lvalue - Offset(Box>, u64), + Offset(Box>, i64), /// A Null pointer Null, @@ -49,51 +42,12 @@ impl core::fmt::Display for LValue { fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { match self { LValue::OpaquePointer(loc) => f.write_fmt(format_args!("opaque({:?})", loc)), - LValue::Temporary(val) => f.write_fmt(format_args!("temporary({})", val)), LValue::Local(n) => f.write_fmt(format_args!("_{}", n)), LValue::GlobalAddress(path) => f.write_fmt(format_args!("global_addr({})", path)), LValue::Label(n) => f.write_fmt(format_args!("&&@{}", n)), LValue::Field(ty, lval, name) => { f.write_fmt(format_args!("{}.({})::{}", lval, ty, name)) } - LValue::StringLiteral(enc, bytes) => { - core::fmt::Display::fmt(enc, f)?; - f.write_str(" ")?; - match core::str::from_utf8(bytes) { - Ok(s) => f.write_fmt(format_args!(" \"{}\"", s.escape_default())), - Err(mut e) => { - let mut bytes = &bytes[..]; - f.write_str(" \"")?; - while !bytes.is_empty() { - let (l, r) = bytes.split_at(e.valid_up_to()); - core::fmt::Display::fmt( - &core::str::from_utf8(l).unwrap().escape_default(), - f, - )?; - if let core::option::Option::Some(len) = e.error_len() { - let (ebytes, rest) = r.split_at(len); - bytes = rest; - for b in ebytes { - f.write_fmt(format_args!("\\x{:02x}", b))?; - } - } else { - let ebytes = core::mem::take(&mut bytes); - for b in ebytes { - f.write_fmt(format_args!("\\x{:02x}", b))?; - } - } - match core::str::from_utf8(bytes) { - Ok(s) => { - core::fmt::Display::fmt(&s.escape_default(), f)?; - break; - } - Err(next_err) => e = next_err, - } - } - f.write_str("\"") - } - } - } LValue::Offset(loc, off) => f.write_fmt(format_args!("{}+{}", loc, off)), LValue::Null => f.write_str("null"), LValue::TransparentAddr(addr) => f.write_fmt(format_args!("{:#x}", addr)), @@ -180,10 +134,47 @@ impl VStackValue { } } + /// Creates an opaque value corresponding to the specified [`Type`] + pub fn opaque_value(ty: Type, loc: Loc) -> Self { + match ty { + Type::Void | Type::FnType(_) | Type::Null => { + panic!("opaque_value requires a complete value type") + } + Type::Scalar(sty) => Self::OpaqueScalar(sty, loc), + Type::Aligned(_, ty) | Type::TaggedType(_, ty) => { + Self::opaque_value(xlang::abi::boxed::Box::into_inner(ty), loc) + } + Type::Pointer(pty) => Self::Pointer(pty, LValue::OpaquePointer(loc)), + ty => VStackValue::OpaqueAggregate(ty, loc), + } + } + + /// Creates an opaque [`VStackValue::LValue`] of the specified [`Type`] + pub fn opaque_lvalue(ty: Type, loc: Loc) -> Self { + Self::LValue(ty, LValue::OpaquePointer(loc)) + } + /// Obtains the type of the value pub fn value_type(&self) -> Type { match self { - VStackValue::Constant(_) => todo!(), + VStackValue::Constant(val) => match val { + Value::Invalid(ty) | Value::Uninitialized(ty) => ty.clone(), + Value::GenericParameter(_) => panic!("Cannot handle generic params this late"), + Value::Integer { ty, val } => Type::Scalar(*ty), + Value::GlobalAddress { ty, item } => 
{ + let mut pty = PointerType::default(); + *pty.inner = ty.clone(); + Type::Pointer(pty) + } + Value::ByteString { .. } => todo!("what type is byte string constant again?"), + Value::String { ty, .. } => ty.clone(), + Value::LabelAddress(_) => { + let mut pty = PointerType::default(); + *pty.inner = Type::Void; + Type::Pointer(pty) + } + Value::Empty => Type::Null, + }, VStackValue::LValue(ty, _) => ty.clone(), VStackValue::Pointer(ptrty, _) => Type::Pointer(ptrty.clone()), VStackValue::OpaqueScalar(scalar, _) => Type::Scalar(*scalar), @@ -196,15 +187,17 @@ impl VStackValue { }, kind: ScalarTypeKind::Integer { signed: false, - min: None, - max: None, + min: XLangNone, + max: XLangNone, }, }), - VStackValue::Trapped => Type::Void, - VStackValue::ArrayRepeat(val, len) => Type::Array(Box::new(ArrayType { - ty: val.value_type(), - len: len.clone(), - })), + VStackValue::Trapped => Type::Null, + VStackValue::ArrayRepeat(val, len) => { + Type::Array(xlang::abi::boxed::Box::new(ArrayType { + ty: val.value_type(), + len: len.clone(), + })) + } } } } @@ -222,6 +215,17 @@ pub enum Trap { Overflow, } +impl core::fmt::Display for Trap { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Trap::Unreachable => f.write_str("unreachable"), + Trap::Breakpoint => f.write_str("breakpoint"), + Trap::Abort => f.write_str("abort"), + Trap::Overflow => f.write_str("overflow"), + } + } +} + /// A ValLocation that cannot be instantiated - that is, no opaque values can be produced within this location #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] pub enum NoOpaque {} @@ -230,10 +234,6 @@ impl ValLocation for NoOpaque { fn addressible(&self) -> bool { match *self {} } - - fn unassigned(_: usize) -> Self { - panic!("Unassigned location") - } } impl VStackValue { @@ -254,14 +254,13 @@ impl VStackValue { ), VStackValue::OpaqueAggregate(_, loc) => match loc {}, VStackValue::CompareResult(left, right) => VStackValue::CompareResult( - Box::new(Box::into_inner(left).into_transparent_for()), - Box::new(Box::into_inner(right).into_transparent_for()), + Box::new((*left).into_transparent_for()), + Box::new((*right).into_transparent_for()), ), VStackValue::Trapped => VStackValue::Trapped, - VStackValue::ArrayRepeat(val, count) => VStackValue::ArrayRepeat( - Box::new(Box::into_inner(val).into_transparent_for()), - count, - ), + VStackValue::ArrayRepeat(val, count) => { + VStackValue::ArrayRepeat(Box::new((*val).into_transparent_for()), count) + } } } } @@ -271,22 +270,15 @@ impl LValue { pub fn into_transparent_for(self) -> LValue { match self { LValue::OpaquePointer(val) => match val {}, - LValue::Temporary(val) => { - LValue::Temporary(Box::new(Box::into_inner(val).into_transparent_for())) - } LValue::Local(n) => LValue::Local(n), LValue::GlobalAddress(path) => LValue::GlobalAddress(path), LValue::Label(n) => LValue::Label(n), - LValue::Field(ty, base, field) => LValue::Field( - ty, - Box::new(Box::into_inner(base).into_transparent_for()), - field, - ), - LValue::StringLiteral(enc, bytes) => LValue::StringLiteral(enc, bytes), - LValue::Offset(base, bytes) => LValue::Offset( - Box::new(Box::into_inner(base).into_transparent_for()), - bytes, - ), + LValue::Field(ty, base, field) => { + LValue::Field(ty, Box::new((*base).into_transparent_for()), field) + } + LValue::Offset(base, bytes) => { + LValue::Offset(Box::new((*base).into_transparent_for()), bytes) + } LValue::Null => LValue::Null, LValue::TransparentAddr(addr) => LValue::TransparentAddr(addr), } diff --git 
a/xlang/xlang_backend/src/intrinsic.rs b/xlang/xlang_backend/src/intrinsic.rs index df1c5823..8b137891 100644 --- a/xlang/xlang_backend/src/intrinsic.rs +++ b/xlang/xlang_backend/src/intrinsic.rs @@ -1,183 +1 @@ -use xlang::{ - abi::option::Some as XLangSome, - abi::pair::Pair, - abi::vec::Vec, - ir::{FnType, PathComponent}, - targets::properties::TargetProperties, -}; -use crate::{ - expr::{Trap, VStackValue}, - FunctionCodegen, FunctionRawCodegen, -}; - -/// -/// Calls an intrinsic function defined -/// -pub fn call_intrinsic( - path: &::xlang::ir::Path, - codegen: &mut crate::FunctionCodegen, - fnty: &::xlang::ir::FnType, - properties: &::xlang::targets::properties::TargetProperties, - params: &mut Vec>, -) -> bool { - if call_generic_intrinsic(path, codegen, fnty, properties, params) { - true - } else if call_nongeneric_intrinsic(path, codegen, fnty, properties, params) { - true - } else if call_target_intrinsic(path, codegen, fnty, properties, params) { - true - } else { - false - } -} - -macro_rules! define_generic_xlang_intrinsics{ - {$($($path:ident)::* :: <($($generics:pat),*)> | $codegen:pat, $properties:pat, $ty:pat, $params:pat | => $expr:expr),* $(,)?} => { - fn call_generic_intrinsic(path: &::xlang::ir::Path, codegen: &mut $crate::FunctionCodegen, fnty: &::xlang::ir::FnType,properties: &::xlang::targets::properties::TargetProperties, params: &mut Vec>) -> bool{ - match &*path.components{ - [::xlang::ir::PathComponent::Root,rest @ ..] - | [rest @ ..] => { - match rest{ - [rest @ .., ::xlang::ir::PathComponent::Generics(generics)] => { - $(match &[$(::core::stringify!($path)),*]{ - [idents @ ..] if rest.iter().map(|comp: &::xlang::ir::PathComponent| {match comp{ - ::xlang::ir::PathComponent::Text(__name) => ::std::option::Option::Some(&**__name), - _ => ::std::option::Option::None - }}).eq(idents.iter().map(|n|::std::option::Option::Some(*n)))=> return {(|$codegen: &mut $crate::FunctionCodegen, $properties: &::xlang::targets::properties::TargetProperties,$ty: &::xlang::ir::FnType, generics: &[::xlang::ir::GenericParameter], $params: Vec>| match generics{ - [$($generics),*] => $expr, - _ => panic!("invalid signature") - })(codegen, properties,fnty,generics, core::mem::take(params)); true}, - _ => {} - })* - } - _ => {} - } - } - - } - return false - } - } -} - -macro_rules! define_xlang_intrinsics{ - { - $($($path:ident)::* | $codegen:pat, $properties:pat, $ty:pat, $params:pat | => $expr:expr),* $(,)? - } => { - - #[allow(clippy::redundant_closure_call)] // needed for hygine - fn call_nongeneric_intrinsic(path: &::xlang::ir::Path, codegen: &mut $crate::FunctionCodegen, fnty: &::xlang::ir::FnType,properties: &::xlang::targets::properties::TargetProperties, params: &mut Vec>) -> bool{ - match &*path.components{ - [::xlang::ir::PathComponent::Root,rest @ ..] - | [rest @ ..] => { - $(match &[$(::core::stringify!($path)),*]{ - [idents @ ..] 
if rest.iter().map(|comp: &::xlang::ir::PathComponent| {match comp{ - ::xlang::ir::PathComponent::Text(__name) => ::std::option::Option::Some(&**__name), - _ => ::std::option::Option::None - }}).eq(idents.iter().map(|n|::std::option::Option::Some(*n)))=> return {(|$codegen: &mut $crate::FunctionCodegen, $properties: &::xlang::targets::properties::TargetProperties,$ty: &::xlang::ir::FnType, $params: Vec> | $expr)(codegen,properties,fnty, core::mem::take(params)); true}, - _ => {} - })* - } - } - return false - } - } -} - -fn call_target_intrinsic( - path: &xlang::ir::Path, - code: &mut FunctionCodegen, - _fnty: &FnType, - properties: &TargetProperties, - params: &mut Vec>, -) -> bool { - match &*path.components { - [PathComponent::Text(lccc), PathComponent::Text(intrinsics), PathComponent::Text(aname), PathComponent::Text(iname)] - if lccc.strip_suffix("__").unwrap_or(lccc) == "lccc" - && intrinsics - .strip_prefix("__") - .map_or(&**intrinsics, |s| s.strip_suffix("__").unwrap_or(s)) - == "intrinsics" => - { - for name in properties.arch.arch_names { - if name == &**aname { - for Pair(name, _) in properties.arch.builtins { - if name == &**iname { - let params = core::mem::take(params); - let val = code.raw_inner().write_intrinsic(*name, params); - code.push_value(val); - return true; - } - } - } - } - - return false; - } - [..] => false, - } -} - -define_xlang_intrinsics! { - __lccc::intrinsics::C::__builtin_trap | codegen, _, _, _ | => { - codegen.raw_inner().write_trap(Trap::Abort); - codegen.push_value(VStackValue::Trapped); - }, - __lccc::xlang::deoptimize |codegen, _, fnty,params| => { - let value = params.into_iter().next().unwrap(); - let ret = &fnty.ret; - - - - let loc = if let XLangSome(loc) = value.opaque_location(){ - loc.clone() - }else{ - let loc = codegen.raw_inner().allocate(ret,false); - codegen.move_val(value,loc.clone()); - loc - }; - - let loc = codegen.raw_inner().write_deoptimize(loc); - - let ret = codegen.opaque_value(ret, loc); - codegen.push_value(ret); - }, - __lccc::xlang::likely |codegen, _, _, params| => { - let value = params.into_iter().next().unwrap(); - codegen.push_value(value); - }, - __lccc::xlang::unlikely |codegen, _, _, params| => { - let value = params.into_iter().next().unwrap(); - codegen.push_value(value); - } -} - -define_generic_xlang_intrinsics! 
{ - __lccc::xlang::__atomic_is_always_lockfree::<(xlang::ir::GenericParameter::Type(ty))> | codegen, _, fnty, _ | => { - if fnty.params.len()!=0{panic!("bad signature for intrinsic __lccc::xlang::__atomic_is_always_lockfree")} - match &fnty.ret{ - xlang::ir::Type::Scalar(retty @ xlang::ir::ScalarType{kind: xlang::ir::ScalarTypeKind::Integer{..},..}) => { - - let val = codegen.get_type_information().atomic_is_lock_free(ty).expect("__lccc::xlang::__atomic_is_always_lockfree requires a complete value type") as u128; - - let val = VStackValue::Constant(xlang::ir::Value::Integer{ty: *retty, val}); - codegen.push_value(val); - } - _ => panic!("bad signature for intrinsic __lccc::xlang::__atomic_is_always_lockfree") - } - }, - __lccc::xlang::__atomic_required_alignment::<(xlang::ir::GenericParameter::Type(ty))> | codegen, _, fnty, _ | => { - if fnty.params.len()!=0{panic!("bad signature for intrinsic __lccc::xlang::__atomic_required_alignment")} - match &fnty.ret{ - xlang::ir::Type::Scalar(retty @ xlang::ir::ScalarType{kind: xlang::ir::ScalarTypeKind::Integer{..},..}) => { - - let val = codegen.get_type_information().atomic_required_alignment(ty).expect("__lccc::xlang::__atomic_required_alignment requires a complete value type") as u128; - - let val = VStackValue::Constant(xlang::ir::Value::Integer{ty: *retty, val}); - codegen.push_value(val); - } - _ => panic!("bad signature for intrinsic __lccc::xlang::__atomic_required_alignment") - } - } -} diff --git a/xlang/xlang_backend/src/lib.rs b/xlang/xlang_backend/src/lib.rs index f39a0888..595ef704 100644 --- a/xlang/xlang_backend/src/lib.rs +++ b/xlang/xlang_backend/src/lib.rs @@ -1,32 +1,26 @@ -#![deny(missing_docs, warnings)] // No clippy::nursery +// #![deny(missing_docs, warnings)] // No clippy::nursery #![allow(dead_code)] // I'm not deleting a bunch of randomly placed shit //! A helper crate for implementing [`xlang::plugin::XLangCodegen`]s without duplicating code (also can be used to evaluate constant expressions) //! the `xlang_backend` crate provides a general interface for writing expressions to an output. 
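// Rough shape of a backend under the new `mach`/`ssa` split, for orientation.
// This is a sketch modelled on the W65 and x86 impls in this patch: the names
// `MyMachine`, `MyAssignments`, and `MyClobbers` are placeholders, the
// `InsnWrite`/`Fn(u32) -> String` bounds are reconstructed from those impls,
// and the `mangle` signature is assumed from its use in `accept_ir` below.
mod example_backend {
    use arch_ops::traits::InsnWrite;
    use xlang::{abi::string::StringView, targets::properties::TargetProperties};
    use xlang_backend::{
        mach::Machine,
        ssa::{OpaqueLocation, SsaInstruction},
    };

    pub struct MyMachine {}
    pub struct MyAssignments {}
    pub struct MyClobbers {}

    impl Machine for MyMachine {
        type Assignments = MyAssignments;
        type BlockClobbers = MyClobbers;

        // Claim (or reject) the requested target tuple.
        fn matches_target(&self, _targ: StringView) -> bool {
            false
        }

        // Pick up per-target parameters (pointer width, register file, ...).
        fn init_from_target(&mut self, _properties: &TargetProperties) {}

        fn new_assignments(&self) -> Self::Assignments {
            MyAssignments {}
        }

        // Decide where each SSA value of basic block `which` lives, recording
        // anything the block clobbers.
        fn assign_locations(
            &self,
            _assignments: &mut Self::Assignments,
            _insns: &[SsaInstruction],
            _incoming: &[OpaqueLocation],
            _which: u32,
        ) -> Self::BlockClobbers {
            todo!()
        }

        // Emit the prologue once, then each block with its clobber set.
        fn codegen_prologue<W: InsnWrite>(
            &self,
            _assignments: &Self::Assignments,
            _out: &mut W,
        ) -> std::io::Result<()> {
            todo!()
        }

        fn codegen_block<W: InsnWrite, F: Fn(u32) -> String>(
            &self,
            _assignments: &Self::Assignments,
            _insns: &[SsaInstruction],
            _block_clobbers: Self::BlockClobbers,
            _out: &mut W,
            _label_sym: F,
            _which: u32,
        ) -> std::io::Result<()> {
            todo!()
        }

        // Symbol naming for paths that are not a plain root-level identifier
        // (signature assumed from the call site in `accept_ir`).
        fn mangle(&self, _path: &[xlang::ir::PathComponent]) -> String {
            todo!()
        }
    }

    // The plugin entry point then just wraps the machine, as the W65 and x86
    // hunks do:
    //   DynBox::unsize_box(Box::new(SsaCodegenPlugin::new(MyMachine {})))
}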
-use std::{ - collections::{HashSet, VecDeque}, - convert::TryInto, - fmt::Debug, - io::Write, - mem::MaybeUninit, - option::Option::Some as StdSome, - rc::Rc, -}; -use self::str::Encoding; -use callconv::CallingConvention; -use expr::{LValue, Trap, VStackValue, ValLocation}; +use std::rc::Rc; + +use arch_ops::traits::InsnWrite; +use mach::Machine; +use ssa::FunctionBuilder; use ty::TypeInformation; use xlang::{ - abi::string::StringView, - ir::{ - AccessClass, AsmExpr, BinaryOp, Block, BranchCondition, CharFlags, Expr, FnType, - OverflowBehaviour, Path, PointerType, ScalarType, ScalarTypeHeader, ScalarTypeKind, - ScalarValidity, Type, UnaryOp, Value, - }, - prelude::v1::*, + abi::{io::WriteAdapter, option::Some as XLangSome, pair::Pair, try_}, + ir::{self, Linkage}, + plugin::{XLangCodegen, XLangPlugin}, targets::properties::TargetProperties, }; +use binfmt::{ + fmt::{Section, SectionFlag}, + sym::{Symbol, SymbolKind, SymbolType}, +}; + /// Module for handling and internalizing string literal values pub mod str; @@ -45,1757 +39,261 @@ pub mod callconv; /// Module for name mangling pub mod mangle; -/// Module for generic Machine Code -pub mod mc; +/// Module for Machine Support +pub mod mach; /// Module for building SSA from XIR that can be readily lowered to machine code /// Does not use FunctionCodegen pub mod ssa; -/// -/// Basic Trait for creating the code generator -pub trait FunctionRawCodegen { - /// The type for Locations - type Loc: ValLocation; - - /// The type of calling conventions used by this backend - type CallConv: CallingConvention + ?Sized; - - /// Handles the `__lccc::xlang::deoptimize` intrinsic. Implemented as a no-op by default. - /// Implementations that generate IR that is run through a separate optimizer should override the default impl - fn write_deoptimize(&mut self, val: Self::Loc) -> Self::Loc { - val - } - - /// Writes an instruction corresponding to the given [`Trap`] - fn write_trap(&mut self, trap: Trap); - - /// Writes a full thread fence for the given AccessClass - fn write_barrier(&mut self, acc: AccessClass); - - /// Performs a binary operatation on a val location and a constant - fn write_int_binary_imm( - &mut self, - dest: Self::Loc, - a: Self::Loc, - b: u128, - ty: &Type, - op: BinaryOp, - ); - - /// Performs a binary operatation on two val locations - fn write_int_binary( - &mut self, - dest: Self::Loc, - src1: Self::Loc, - src2: Self::Loc, - ty: &Type, - op: BinaryOp, - ); - - /// Performs a unary operation on a val location - fn write_unary(&mut self, val: Self::Loc, ty: &Type, op: UnaryOp); - - /// Moves a value between two [`ValLocation`]s - fn move_val(&mut self, src: Self::Loc, dest: Self::Loc); - - /// Stores an immediate value into the given location - fn move_imm(&mut self, src: u128, dest: Self::Loc, ty: &Type); - - /// Stores an immediate value into the pointer in `ptr` - fn store_indirect_imm(&mut self, src: Value, ptr: Self::Loc); - - /// Loads a value into the pointer in the given value location - fn load_val(&mut self, lvalue: Self::Loc, loc: Self::Loc); - - /// Stores a value into the given value location - fn store_indirect(&mut self, lvalue: Self::Loc, loc: Self::Loc, ty: &Type); - - /// Obtains the calling convention for the current function - fn get_callconv(&self) -> &Self::CallConv; - - /// The maximum integer size (in bits) supported natively (without emulation) - fn native_int_size(&self) -> u16; - /// The maximum floating point size (in bits) supported natively, or None if no floating-point support exists - fn 
native_float_size(&self) -> Option; - - /// The maximum Vector size supported natively, in bytes - fn native_vec_size(&self) -> Option { - None - } - - /// Preferred Vector size of the current codegen, in total bytes - /// This need not be the same as the [`FunctionRawCodegen::native_vec_size`], for example, if some vector types incur a significant runtime performance penalty - /// (such as AVX-512) - fn preferred_vec_size(&self) -> Option { - None - } - - /// Writes a call to a target intrinsic (such as `x86::_mm_addp_i8`) - fn write_intrinsic( - &mut self, - name: StringView, - params: Vec>, - ) -> VStackValue; - - /// Writes a new target at the current location - fn write_target(&mut self, target: u32); - /// Performs a direct call to a named function - fn call_direct(&mut self, path: &Path, realty: &FnType); - /// Performs an indirect call to the pointer stored in `value` - fn call_indirect(&mut self, value: Self::Loc); - /// Performs a direct call to the given address - fn call_absolute(&mut self, addr: u128, realty: &FnType); - - /// Performs a guaranteed tail call to the target - /// Note: The signature is assumed to be compatible with the current function - fn tailcall_direct(&mut self, value: &Path, realty: &FnType); - - /// Performs a guaranteed tail call to the target - /// Note: The signature is assumed to be compatible with the current function - fn tailcall_indirect(&mut self, value: Self::Loc, realty: &FnType); - - /// Performs the exit sequence of a function - fn leave_function(&mut self); - - /// Performs a conditional branch to `target` based on `condition` and `val` - fn branch(&mut self, target: u32, condition: BranchCondition, val: Self::Loc); - /// Performs a conditional branch based on comparing `v1` and `v2` according to `condition` - /// This is used for the sequence `cmp; branch @` - fn branch_compare( - &mut self, - target: u32, - condition: BranchCondition, - v1: VStackValue, - v2: VStackValue, - ); - /// Branches to the `target` unconditionally (IE. when the condition is always, or based on constant-folded values) - fn branch_unconditional(&mut self, target: u32); - - /// Branches to the target given in `target` - fn branch_indirect(&mut self, target: Self::Loc); - - /// Computes the address of a global, and moves the pointer into `Self::Loc` - fn compute_global_address(&mut self, path: &Path, loc: Self::Loc); - - /// Computes the address of a label, and moves the pointer into `Self::Loc` - fn compute_label_address(&mut self, target: u32, loc: Self::Loc); - - /// Computes the address of a parameter and moves the pointer into `Self::Loc` - fn compute_parameter_address(&mut self, param: u32, loc: Self::Loc); - - /// Computes the address of a local variable in `inloc` (used only if addressable), and moves the pointer into `Self::Loc` - fn compute_local_address(&mut self, inloc: Self::Loc, loc: Self::Loc); - - /// Computes the address of a string literal - fn compute_string_address(&mut self, enc: Encoding, bytes: Vec, loc: Self::Loc); - - /// Marks the given location as freed and allows other allocations to use the location without clobbering it - fn free(&mut self, loc: Self::Loc); - - /// Clobbers the given location, saving the value and then freeing it. 
- fn clobber(&mut self, loc: Self::Loc); - - /// Allocates space to store a local variable or stack value of type `Type` - fn allocate(&mut self, ty: &Type, needs_addr: bool) -> Self::Loc; - - /// Allocates space to store an lvalue - fn allocate_lvalue(&mut self, needs_addr: bool) -> Self::Loc; - - /// Prepares the stack frame (as necessary) for a call to a function with the given `callty` and `realty` - fn prepare_call_frame(&mut self, callty: &FnType, realty: &FnType); - - /// Whether or not lock-free atomic ops of some size should issue a call to libatomic for this backend. - fn lockfree_use_libatomic(&mut self, size: u64) -> bool; - - /// Whether or not lock-free atomic rmws use libatomic - fn lockfree_cmpxchg_use_libatomic(&mut self, size: u64) -> bool; - - /// Whether or not BinaryOp can be implemented directly by the CPU - fn has_wait_free_compound(&mut self, op: BinaryOp, size: u64) -> bool; - - /// Whether or not the fecth version of BinaryOp can be implemented directly by the CPU - fn has_wait_free_compound_fetch(&mut self, op: BinaryOp, size: u64) -> bool; - - /// Writes a Compare Exchange Instruction, according to the atomic Access class in `ord` - /// Padding bytes in `ctrl` and `val` are zeroed prior to the call to this funtion - /// dest and ctrl both contain pointers to the destination and the compare - fn compare_exchange( - &mut self, - dest: Self::Loc, - ctrl: Self::Loc, - val: Self::Loc, - ty: &Type, - ord: AccessClass, - ); - - /// Writes a Weak Compare Exchange Instruction, according to the atomic Access class in `ord` - /// Padding bytes in `ctrl` and `val` are zeroed prior to the call to this funtion - /// dest and ctrl both contain pointers to the destination and the compare - fn weak_compare_exchange( - &mut self, - dest: Self::Loc, - ctrl: Self::Loc, - val: Self::Loc, - ty: &Type, - ord: AccessClass, - ); +#[derive(Clone, Debug, Hash, PartialEq, Eq)] +pub enum SectionSpec { + Global, +} - /// Generates a sequence (compiler) fence - synchronizing with signal handlers on the current thread of execution - /// This is generally a runtime no-op - fn sequence(&mut self, _: AccessClass) { - /* by default, do nothing. This is for something like xlangcodegen-llvm that drops down to a lower-level IR */ +impl core::fmt::Display for SectionSpec { + fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { + match self { + Self::Global => Ok(()), + } } - - /// Writes an assembly expression - fn write_asm(&mut self, asm: &AsmExpr, inputs: Vec>) -> Vec; - - /// Converts between two scalar types - fn write_scalar_convert( - &mut self, - target_ty: ScalarType, - incoming_ty: ScalarType, - new_loc: Self::Loc, - old_loc: Self::Loc, - ); } -#[derive(Default, Debug)] -struct BranchToInfo { - fallthrough_from: u32, - branch_from: HashSet, +pub struct FunctionDef { + section: SectionSpec, + linkage: Linkage, + fnty: Rc, + body: Option>, } -/// A type for handling the generation of code for functions. 
-pub struct FunctionCodegen { - inner: F, - vstack: VecDeque>, - properties: &'static TargetProperties<'static>, - targets: HashMap>>, - diverged: bool, - locals: Vec<(VStackValue, Type)>, - fnty: FnType, - locals_opaque: bool, - tys: Rc, - ctarg: u32, - cfg: HashMap, + +pub struct SsaCodegenPlugin { + mach: Rc, + targ: Option<&'static TargetProperties<'static>>, + functions: Vec<(String, FunctionDef)>, } -impl FunctionCodegen { - /// Constructs a new [`FunctionCodegen`] with a given [`FunctionRawCodegen`], the given function name and signature, and the target properties - pub fn new( - inner: F, - _path: Path, - fnty: FnType, - properties: &'static TargetProperties, - tys: Rc, - ) -> Self { +impl SsaCodegenPlugin { + pub fn new(mach: M) -> Self { Self { - inner, - properties, - vstack: VecDeque::new(), - targets: HashMap::new(), - diverged: false, - locals: Vec::new(), - fnty, - locals_opaque: false, - tys, - ctarg: !0, - cfg: HashMap::new(), - } - } - - fn get_type_information(&self) -> &TypeInformation { - &self.tys - } - - fn print_vstack(&self) { - let mut iter = self.vstack.iter(); - let stdout = std::io::stdout(); - let mut stdout = stdout.lock(); - core::write!(stdout, "[").unwrap(); - if let StdSome(val) = iter.next() { - core::write!(stdout, "{}", val).unwrap(); - } - - for val in iter { - core::write!(stdout, ", {}", val).unwrap(); - } - - core::writeln!(stdout, "]").unwrap(); - } - - /// Obtains the target properties. - /// Convience Method for some code generators - pub fn properties(&self) -> &'static TargetProperties { - self.properties - } - - /// Obtains a mutable reference to the inner `F` - pub fn raw_inner(&mut self) -> &mut F { - &mut self.inner - } - - /// Obtains the inner `F` from self - pub fn into_inner(self) -> F { - self.inner - } - - /// Obtains the inner `F` from self - pub fn is_atomic_lock_free(&self, asize: u64) -> bool { - let lockfree_mask = self.properties.primitives.lock_free_atomic_mask - | self.properties.arch.lock_free_atomic_masks; - - if asize > (1 << 15) { - return false; - } else { - let bits = (asize.next_power_of_two() - 1).count_ones(); - - (lockfree_mask & (1 << bits)) != 0 - } - } - - /// Writes a value according to access class to the pointer in `ptr` - pub fn store_val(&mut self, ptr: F::Loc, val: VStackValue, cl: AccessClass) { - if cl.0 & 0xF != 0 { - let ty = self.type_of(&val); - let size = self.tys.type_size(&ty).unwrap(); - - let align = ty::scalar_align(size, self.properties.primitives.max_atomic_align); - let asize = ty::align_size(size, align); - - if !self.is_atomic_lock_free(asize) || self.inner.lockfree_use_libatomic(asize) { - todo!("libatomic call") - } - } - match val { - VStackValue::Constant(Value::Invalid(_)) => { - self.inner.write_trap(Trap::Unreachable); - self.vstack.push_back(VStackValue::Trapped); - } - VStackValue::Constant(Value::Uninitialized(_)) | VStackValue::Trapped => {} - VStackValue::Constant(val) => { - self.inner.store_indirect_imm(val, ptr); - } - VStackValue::LValue(_, _) => panic!("Cannot store an lvalue"), - VStackValue::Pointer(ty, val) => match val { - LValue::OpaquePointer(loc) => { - self.inner.store_indirect(ptr, loc, &Type::Pointer(ty)) - } - lval => { - let loc = self.inner.allocate_lvalue(false); - self.move_val(VStackValue::Pointer(ty.clone(), lval), loc.clone()); - self.inner.store_indirect(ptr, loc, &Type::Pointer(ty)); - } - }, - VStackValue::OpaqueScalar(ty, loc) => { - self.inner.store_indirect(ptr, loc, &Type::Scalar(ty)) - } - VStackValue::AggregatePieced(_, _) => todo!(), - 
VStackValue::OpaqueAggregate(ty, loc) => self.inner.store_indirect(ptr, loc, &ty), - VStackValue::CompareResult(_, _) => todo!(), - VStackValue::ArrayRepeat(_, _) => todo!(), - } - } - - /// Moves a given value into the given value location - pub fn move_val(&mut self, val: VStackValue, loc: F::Loc) { - match val { - VStackValue::Constant(Value::Invalid(_)) => { - self.inner.write_trap(Trap::Unreachable); - self.vstack.push_back(VStackValue::Trapped); - } - VStackValue::Constant(Value::Uninitialized(_)) | VStackValue::Trapped => {} - VStackValue::Constant(Value::GlobalAddress { item, .. }) => { - self.inner.compute_global_address(&item, loc) - } - VStackValue::Constant(Value::LabelAddress(n)) => { - self.inner.compute_label_address(n, loc) - } - VStackValue::Constant(Value::String { - encoding, - utf8, - ty: Type::Pointer(_), - }) => { - self.inner - .compute_string_address(Encoding::XLang(encoding), utf8.into_bytes(), loc) - } - VStackValue::Constant(Value::String { ty, .. }) => todo!("string {:?}", ty), - VStackValue::Constant(Value::ByteString { content }) => self - .inner - .compute_string_address(Encoding::Byte, content, loc), - VStackValue::Constant(Value::Integer { val, ty }) => { - self.inner.move_imm(val, loc, &Type::Scalar(ty)) - } - VStackValue::Constant(Value::GenericParameter(n)) => todo!("%{}", n), - VStackValue::Constant(Value::Empty) => panic!("Cannot move an empty value"), - VStackValue::Pointer(pty, lvalue) => match lvalue { - LValue::OpaquePointer(loc2) => self.inner.move_val(loc2, loc), - LValue::Temporary(_) => todo!("temporary address"), - LValue::Local(n) => todo!("local {:?}", n), - LValue::GlobalAddress(item) => self.inner.compute_global_address(&item, loc), - LValue::Label(n) => self.inner.compute_label_address(n, loc), - LValue::Field(_, _, _) => todo!("field"), - LValue::StringLiteral(enc, bytes) => { - self.inner.compute_string_address(enc, bytes, loc) - } - LValue::Offset(_, _) => todo!("offset"), - LValue::Null => self.inner.move_imm(0, loc, &Type::Pointer(pty)), - LValue::TransparentAddr(addr) => { - self.inner.move_imm(addr.get(), loc, &Type::Pointer(pty)) - } - }, - VStackValue::LValue(ty, lvalue) => { - let pty = PointerType { - inner: Box::new(ty), - ..Default::default() - }; - match lvalue { - LValue::OpaquePointer(loc2) => self.inner.move_val(loc2, loc), - LValue::Temporary(_) => todo!("temporary address"), - LValue::Local(n) => todo!("local {:?}", n), - LValue::GlobalAddress(item) => self.inner.compute_global_address(&item, loc), - LValue::Label(n) => self.inner.compute_label_address(n, loc), - LValue::Field(_, _, _) => todo!("field"), - LValue::StringLiteral(enc, bytes) => { - self.inner.compute_string_address(enc, bytes, loc) - } - LValue::Offset(_, _) => todo!("offset"), - LValue::Null => self.inner.move_imm(0, loc, &Type::Pointer(pty)), - LValue::TransparentAddr(addr) => { - self.inner.move_imm(addr.get(), loc, &Type::Pointer(pty)) - } - } - } - VStackValue::OpaqueScalar(_, loc2) => self.inner.move_val(loc2, loc), - VStackValue::AggregatePieced(ty, fields) => { - if self.tys.type_size(&ty) != StdSome(0) { - let fields = fields.iter().collect::>(); - - if fields.len() == 1 { - self.move_val(fields[0].1.clone(), loc); - } else if fields.len() == 0 { - } else { - todo!("pieced aggregate") - } - } - } - VStackValue::OpaqueAggregate(_, loc2) => self.inner.move_val(loc2, loc), - VStackValue::CompareResult(_, _) => todo!("compare result"), - VStackValue::ArrayRepeat(_, _) => todo!("array repeat"), - } - } - - /// - /// Pushes a single value onto the 
vstack - pub fn push_value(&mut self, val: VStackValue) { - self.vstack.push_back(val) - } - - /// - /// Pops a single value from the vstack - pub fn pop_value(&mut self) -> Option> { - self.vstack.pop_back().into() - } - - /// - /// Pops `n` values from the vstack - pub fn pop_values(&mut self, n: usize) -> Option>> { - let len = self.vstack.len(); - if len < n { - None - } else { - Some(self.vstack.drain((len - n)..).collect()) - } - } - - /// Pops `N` values from the stack, and returns them in a statically-sized array, or otherwise returns `None`. - pub fn pop_values_static(&mut self) -> Option<[VStackValue; N]> { - let len = self.vstack.len(); - if len < N { - None - } else { - let mut array = MaybeUninit::<[VStackValue; N]>::uninit(); - let ptr = array.as_mut_ptr().cast::>(); - - let vals = self.vstack.drain((len - N)..); - - for (i, val) in vals.enumerate() { - // SAFETY: - // i is less than the length of the array - unsafe { - ptr.add(i).write(val); - } - } - - // SAFETY: - // The loop above has initialized array - Some(unsafe { array.assume_init() }) - } - } - - /// - /// Pushes all of the incoming values to the stack, in order - pub fn push_values>>(&mut self, vals: I) { - self.vstack.extend(vals); - } - - /// Obtains an opaque value of the given type that is placed in `loc` - pub fn opaque_value(&mut self, ty: &Type, loc: F::Loc) -> VStackValue { - match ty { - Type::Null | Type::Void | Type::FnType(_) => panic!("Invalid type"), - Type::Scalar(sty) => VStackValue::OpaqueScalar(*sty, loc), - Type::Pointer(pty) => VStackValue::Pointer(pty.clone(), LValue::OpaquePointer(loc)), - Type::Array(_) => todo!("array"), - Type::TaggedType(_, ty) => self.opaque_value(ty, loc), - Type::Product(_) | Type::Aggregate(_) => VStackValue::OpaqueAggregate(ty.clone(), loc), - Type::Aligned(_, ty) => self.opaque_value(ty, loc), - Type::Named(_) => VStackValue::OpaqueAggregate(ty.clone(), loc), - } - } - - /// Determines the type of a vstack value - pub fn type_of(&mut self, val: &VStackValue) -> Type { - match val { - VStackValue::Constant(val) => match val { - Value::Invalid(ty) | Value::Uninitialized(ty) => ty.clone(), - Value::GenericParameter(_) => panic!("Generic Parameter held too late"), - Value::Integer { ty, .. } => Type::Scalar(*ty), - Value::GlobalAddress { ty, .. } => Type::Pointer(PointerType { - inner: Box::new(ty.clone()), - ..Default::default() - }), - Value::ByteString { .. } => todo!("byte string"), - Value::String { ty, .. } => ty.clone(), - Value::LabelAddress(_) => Type::Pointer(PointerType { - inner: Box::new(Type::Void), - ..Default::default() - }), - Value::Empty => panic!("Cannot use an empty value"), - }, - VStackValue::LValue(_, _) => panic!("Cannot typeof an lvalue"), - VStackValue::Pointer(pty, _) => Type::Pointer(pty.clone()), - VStackValue::OpaqueScalar(sty, _) => Type::Scalar(*sty), - VStackValue::AggregatePieced(ty, _) => ty.clone(), - VStackValue::OpaqueAggregate(ty, _) => ty.clone(), - VStackValue::CompareResult(_, _) => todo!("compare result"), - VStackValue::Trapped => Type::Null, - VStackValue::ArrayRepeat(_, _) => todo!("array repeat"), - } - } - - /// Makes the given value opaque, if it is not already. 
- pub fn make_opaque(&mut self, val: VStackValue) -> VStackValue { - match val { - VStackValue::Constant(v) => match v { - Value::Invalid(ty) => { - self.inner.write_trap(Trap::Unreachable); - let loc = self.inner.allocate(&ty, false); - self.opaque_value(&ty, loc) - } - Value::Uninitialized(ty) => { - let loc = self.inner.allocate(&ty, false); - self.opaque_value(&ty, loc) - } - Value::GenericParameter(_) => todo!("generic parameters"), - Value::Integer { ty, val } => { - let loc = self.inner.allocate(&Type::Scalar(ty), false); - self.move_val( - VStackValue::Constant(Value::Integer { ty, val }), - loc.clone(), - ); - VStackValue::OpaqueScalar(ty, loc) - } - Value::GlobalAddress { ty, item } => { - let pty = PointerType { - inner: Box::new(ty.clone()), - ..Default::default() - }; - let loc = self.inner.allocate(&Type::Pointer(pty.clone()), false); - self.move_val( - VStackValue::Constant(Value::GlobalAddress { ty, item }), - loc.clone(), - ); - VStackValue::Pointer(pty, LValue::OpaquePointer(loc)) - } - Value::ByteString { content } => { - let pty = PointerType { - inner: Box::new(Type::Scalar(ScalarType { - header: ScalarTypeHeader { - bitsize: 8, - ..Default::default() - }, - kind: ScalarTypeKind::Char { - flags: CharFlags::empty(), - }, - })), - ..Default::default() - }; - let loc = self.inner.allocate(&Type::Pointer(pty.clone()), false); - self.move_val( - VStackValue::Constant(Value::ByteString { content }), - loc.clone(), - ); - VStackValue::Pointer(pty, LValue::OpaquePointer(loc)) - } - Value::String { encoding, utf8, ty } => { - let loc = self.inner.allocate(&ty, false); - self.move_val( - VStackValue::Constant(Value::String { - encoding, - utf8, - ty: ty.clone(), - }), - loc.clone(), - ); - self.opaque_value(&ty, loc) - } - Value::LabelAddress(n) => { - let pty = PointerType { - inner: Box::new(Type::Void), - ..Default::default() - }; - let loc = self.inner.allocate(&Type::Pointer(pty.clone()), false); - self.move_val(VStackValue::Constant(Value::LabelAddress(n)), loc.clone()); - VStackValue::Pointer(pty, LValue::OpaquePointer(loc)) - } - Value::Empty => panic!("Cannot use an empty value"), - }, - VStackValue::LValue(ty, LValue::OpaquePointer(loc)) => { - VStackValue::LValue(ty, LValue::OpaquePointer(loc)) - } - VStackValue::Pointer(ty, LValue::OpaquePointer(loc)) => { - VStackValue::Pointer(ty, LValue::OpaquePointer(loc)) - } - VStackValue::LValue(ty, lval) => { - let loc = self.inner.allocate_lvalue(false); - self.move_val(VStackValue::LValue(ty.clone(), lval), loc.clone()); - VStackValue::LValue(ty, LValue::OpaquePointer(loc)) - } - VStackValue::Pointer(ty, lval) => { - let loc = self.inner.allocate_lvalue(false); - self.move_val(VStackValue::Pointer(ty.clone(), lval), loc.clone()); - VStackValue::Pointer(ty, LValue::OpaquePointer(loc)) - } - VStackValue::OpaqueScalar(sty, loc) => VStackValue::OpaqueScalar(sty, loc), - VStackValue::AggregatePieced(ty, pieces) => { - let loc = self.inner.allocate(&ty, false); - self.move_val( - VStackValue::AggregatePieced(ty.clone(), pieces), - loc.clone(), - ); - VStackValue::OpaqueAggregate(ty, loc) - } - VStackValue::OpaqueAggregate(ty, loc) => VStackValue::OpaqueAggregate(ty, loc), - VStackValue::CompareResult(_, _) => todo!("compare results"), - VStackValue::Trapped => VStackValue::Trapped, - VStackValue::ArrayRepeat(_, _) => todo!("array repeat"), - } - } - - /// Pushes an opaque value of the given type - pub fn push_opaque(&mut self, ty: &Type, loc: F::Loc) { - let val = self.opaque_value(ty, loc); - self.push_value(val); - } - - /// 
Clears the expression stack - pub fn clear_stack(&mut self) { - self.vstack.clear() - } - - /// Calls a function by memory address stored in `loc` - pub fn call_indirect( - &mut self, - callty: &FnType, - realty: &FnType, - loc: F::Loc, - vals: Vec>, - is_tailcall: bool, - ) { - self.inner.prepare_call_frame(callty, realty); - if let std::option::Option::Some(place) = - self.inner.get_callconv().pass_return_place(&callty.ret) - { - todo!("return place {:?}", place); - } - - for (i, val) in vals.into_iter().enumerate() { - let param_loc = - self.inner - .get_callconv() - .find_param(callty, realty, i.try_into().unwrap(), false); - self.move_val(val, param_loc); - } - - if is_tailcall { - self.inner.tailcall_indirect(loc, realty); - self.diverged = true; - } else { - self.inner.call_indirect(loc); - match &callty.ret { - Type::Void => {} - Type::Scalar(ScalarType { - kind: kind @ ScalarTypeKind::Integer { .. }, - header: header @ ScalarTypeHeader { bitsize: 0, .. }, - }) if header.validity.contains(ScalarValidity::NONZERO) => { - // special case uint nonzero(0)/int nonzero(0) - self.push_value(VStackValue::Constant(Value::Uninitialized(Type::Scalar( - ScalarType { - kind: *kind, - header: *header, - }, - )))); - } - ty => { - let retloc = self.inner.get_callconv().find_return_val(callty); - self.push_opaque(ty, retloc); - } - } + mach: Rc::new(mach), + targ: None, + functions: Vec::new(), } } +} - /// Calls a function by name - pub fn call_fn( +impl XLangPlugin for SsaCodegenPlugin { + fn accept_ir( &mut self, - callty: &FnType, - realty: &FnType, - path: &Path, - mut vals: Vec>, - is_tailcall: bool, - ) { - if intrinsic::call_intrinsic(path, self, realty, self.properties, &mut vals) { - if is_tailcall { - self.write_exit(1); - self.diverged = true; - } - return; - } - - self.inner.prepare_call_frame(callty, realty); - if let std::option::Option::Some(place) = - self.inner.get_callconv().pass_return_place(&callty.ret) - { - todo!("return place {:?}", place); - } - - for (i, val) in vals.into_iter().enumerate() { - let param_loc = - self.inner - .get_callconv() - .find_param(callty, realty, i.try_into().unwrap(), false); - self.move_val(val, param_loc); - } - if is_tailcall { - self.inner.tailcall_direct(path, realty); - self.diverged = true; - } else { - self.inner.call_direct(path, realty); - match &callty.ret { - Type::Void => {} - Type::Scalar(ScalarType { - kind: kind @ ScalarTypeKind::Integer { .. }, - header: header @ ScalarTypeHeader { bitsize: 0, .. 
}, - }) if header.validity.contains(ScalarValidity::NONZERO) => { - // special case uint nonzero(0)/int nonzero(0) - self.push_value(VStackValue::Constant(Value::Invalid(Type::Scalar( - ScalarType { - kind: *kind, - header: *header, - }, - )))); - } - ty => { - let retloc = self.inner.get_callconv().find_return_val(callty); - self.push_opaque(ty, retloc); - } - } - } - } - - /// Writes the exit point of the given block with the given number of values - pub fn write_exit(&mut self, values: u16) { - self.diverged = true; - if values == 1 { - let val = self.pop_value().unwrap(); - match val { - VStackValue::Constant(Value::Invalid(_)) => { - self.inner.write_trap(Trap::Unreachable); - return; - } - VStackValue::Trapped => return, - val => { - let loc = self.inner.get_callconv().find_return_val(&self.fnty); - self.move_val(val, loc); - } + ir: &mut ir::File, + ) -> xlang::abi::result::Result<(), xlang::plugin::Error> { + let targ = self.targ.expect("set_target must be called first"); + let mut tys = TypeInformation::from_properties(targ); + for Pair(path, field) in &ir.root.members { + match &field.member_decl { + ir::MemberDeclaration::Scope(_) => todo!("non-root scope"), + ir::MemberDeclaration::OpaqueAggregate(_) => tys.add_opaque_aggregate(path.clone()), + ir::MemberDeclaration::AggregateDefinition(def) => { + tys.add_aggregate(path.clone(), def.clone()) + } + ir::MemberDeclaration::Function(_) + | ir::MemberDeclaration::Static(_) + | ir::MemberDeclaration::Empty => {} } - self.inner.leave_function(); - } else if values == 0 { - self.inner.leave_function(); - } else { - panic!("Attempt to exit function with more than one value"); } - } - - /// Writes the given binary operator to the stream - pub fn write_binary_op(&mut self, op: BinaryOp, v: OverflowBehaviour) { - let [val1, val2] = self.pop_values_static().unwrap(); - - match (val1, val2) { - (VStackValue::Trapped, _) | (_, VStackValue::Trapped) => { - self.push_value(VStackValue::Trapped) - } - (VStackValue::LValue(_, _), _) | (_, VStackValue::LValue(_, _)) => { - panic!("Cannot apply {:?} to an lvalue", op) - } - (VStackValue::Constant(Value::Invalid(_)), _) - | (_, VStackValue::Constant(Value::Invalid(_))) => { - self.inner.write_trap(Trap::Unreachable); - self.push_value(VStackValue::Trapped); - } - (VStackValue::Constant(Value::Uninitialized(ty)), _) - | (_, VStackValue::Constant(Value::Uninitialized(ty))) => match op { - BinaryOp::Cmp | BinaryOp::CmpInt => self.push_value(VStackValue::Constant( - Value::Uninitialized(Type::Scalar(ScalarType { - header: ScalarTypeHeader { - bitsize: 32, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: true, - min: None, - max: None, - }, - })), - )), - BinaryOp::CmpLt - | BinaryOp::CmpGt - | BinaryOp::CmpLe - | BinaryOp::CmpGe - | BinaryOp::CmpEq - | BinaryOp::CmpNe => self.push_value(VStackValue::Constant(Value::Uninitialized( - Type::Scalar(ScalarType { - header: ScalarTypeHeader { - bitsize: 32, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: true, - min: None, - max: None, - }, - }), - ))), - _ => match v { - OverflowBehaviour::Wrap | OverflowBehaviour::Unchecked => { - self.push_value(VStackValue::Constant(Value::Uninitialized(ty))) - } - OverflowBehaviour::Trap => { - self.inner.write_trap(Trap::Unreachable); - self.push_value(VStackValue::Trapped); - } - OverflowBehaviour::Checked => { - self.push_values([ - VStackValue::Constant(Value::Uninitialized(ty)), - VStackValue::Constant(Value::Uninitialized(Type::Scalar(ScalarType { - header: 
ScalarTypeHeader { - bitsize: 1, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, - }))), - ]); - } - v => todo!("Unexpected Overflow behaviour {:?}", v), - }, - }, - ( - VStackValue::Constant(Value::Integer { - ty: - ty1 @ ScalarType { - header: - ScalarTypeHeader { - vectorsize: None, .. - }, - kind: ScalarTypeKind::Integer { signed: false, .. }, - }, - val: val1, - }), - VStackValue::Constant(Value::Integer { - ty: - ty2 @ ScalarType { - header: - ScalarTypeHeader { - vectorsize: None, .. - }, - kind: ScalarTypeKind::Integer { signed: false, .. }, - }, - val: val2, - }), - ) if ty1.header.bitsize == ty2.header.bitsize => match op { - BinaryOp::Cmp | BinaryOp::CmpInt => { - let sty = ScalarType { - header: ScalarTypeHeader { - bitsize: 32, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, - }; - let val = match val1.cmp(&val2) { - std::cmp::Ordering::Less => !0, - std::cmp::Ordering::Equal => 0, - std::cmp::Ordering::Greater => 1, - }; - - self.push_value(VStackValue::Constant(Value::Integer { ty: sty, val })) - } - BinaryOp::CmpLt => { - let sty = ScalarType { - header: ScalarTypeHeader { - bitsize: 32, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, - }; - let val = (val1 < val2) as u128; - - self.push_value(VStackValue::Constant(Value::Integer { ty: sty, val })) - } - BinaryOp::CmpGt => { - let sty = ScalarType { - header: ScalarTypeHeader { - bitsize: 32, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, - }; - let val = (val1 > val2) as u128; - self.push_value(VStackValue::Constant(Value::Integer { ty: sty, val })) - } - BinaryOp::CmpLe => { - let sty = ScalarType { - header: ScalarTypeHeader { - bitsize: 32, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, - }; - let val = (val1 <= val2) as u128; - - self.push_value(VStackValue::Constant(Value::Integer { ty: sty, val })) - } - BinaryOp::CmpGe => { - let sty = ScalarType { - header: ScalarTypeHeader { - bitsize: 32, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, - }; - let val = (val1 >= val2) as u128; - - self.push_value(VStackValue::Constant(Value::Integer { ty: sty, val })) - } - BinaryOp::CmpEq => { - let sty = ScalarType { - header: ScalarTypeHeader { - bitsize: 32, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, - }; - let val = (val1 == val2) as u128; - - self.push_value(VStackValue::Constant(Value::Integer { ty: sty, val })) - } - BinaryOp::CmpNe => { - let sty = ScalarType { - header: ScalarTypeHeader { - bitsize: 32, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, - }; - let val = (val1 != val2) as u128; - - self.push_value(VStackValue::Constant(Value::Integer { ty: sty, val })) - } - _ => { - let (val, overflow) = match op { - BinaryOp::Add => val1.overflowing_add(val2), - BinaryOp::Sub => val1.overflowing_sub(val2), - BinaryOp::Mul => val1.overflowing_mul(val2), - BinaryOp::Div => { - if val2 == 0 { - (0, true) - } else { - val1.overflowing_div(val2) - } + let tys = Rc::new(tys); + for Pair(path, field) in &ir.root.members { + match &field.member_decl { + ir::MemberDeclaration::Scope(_) => todo!("non-root scope"), + 
ir::MemberDeclaration::Function(f) => { + let sym = match &*path.components { + [ir::PathComponent::Text(name)] + | [ir::PathComponent::Root, ir::PathComponent::Text(name)] => { + name.to_string() } - BinaryOp::Mod => { - if val2 == 0 { - (0, true) - } else { - val1.overflowing_rem(val2) - } + [ir::PathComponent::Root, rest @ ..] | [rest @ ..] => { + self.mach.mangle(rest) } - BinaryOp::BitOr => (val1 | val2, false), - BinaryOp::BitAnd => (val1 & val2, false), - BinaryOp::BitXor => (val1 ^ val2, false), - BinaryOp::Lsh => ( - val1.wrapping_shl(val2 as u32), - val2 > (ty1.header.bitsize.into()), - ), - BinaryOp::Rsh => ( - val1.wrapping_shr(val2 as u32), - val2 > (ty1.header.bitsize.into()), - ), - BinaryOp::Cmp - | BinaryOp::CmpInt - | BinaryOp::CmpLt - | BinaryOp::CmpLe - | BinaryOp::CmpEq - | BinaryOp::CmpNe - | BinaryOp::CmpGe - | BinaryOp::CmpGt => unreachable!(), - op => todo!("{:?}", op), }; - - let overflow = overflow || (val.leading_zeros() < (ty1.header.bitsize as u32)); - let val = val & (!((!0u128).wrapping_shl(128 - (ty1.header.bitsize as u32)))); - - match v { - OverflowBehaviour::Wrap => self - .vstack - .push_back(VStackValue::Constant(Value::Integer { ty: ty1, val })), - OverflowBehaviour::Unchecked => { - if overflow { - self.vstack - .push_back(VStackValue::Constant(Value::Uninitialized( - Type::Scalar(ty1), - ))) - } else { - self.vstack.push_back(VStackValue::Constant(Value::Integer { - ty: ty1, - val, - })) - } + let ty = Rc::new(f.ty.clone()); + + let section = SectionSpec::Global; + let linkage = f.linkage; + let body = if let XLangSome(body) = &f.body { + let mut builder = ssa::FunctionBuilder::new( + sym.clone(), + self.mach.clone(), + tys.clone(), + targ, + ty.clone(), + ); + for local in &body.locals { + builder.push_local(local.clone()); } - OverflowBehaviour::Checked => { - self.vstack - .push_back(VStackValue::Constant(Value::Integer { ty: ty1, val })); - self.vstack.push_back(VStackValue::Constant(Value::Integer { - ty: ScalarType { - header: ScalarTypeHeader { - bitsize: 1, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, - }, - val: overflow as u128, - })); + for block in &body.blocks { + builder.push_incoming(block.target, &block.incoming_stack); } - OverflowBehaviour::Trap => { - if overflow { - self.inner.write_trap(Trap::Overflow); - self.vstack.push_back(VStackValue::Trapped); - } else { - self.vstack.push_back(VStackValue::Constant(Value::Integer { - ty: ty1, - val, - })); + for block in &body.blocks { + let block_builder = + builder.new_basic_block(block.target, &block.incoming_stack); + for expr in &block.expr { + block_builder.write_expr(expr); } + block_builder.write_terminator(&block.term); } - OverflowBehaviour::Saturate => { - if (op == BinaryOp::Sub || op == BinaryOp::Rsh) && overflow { - self.vstack.push_back(VStackValue::Constant(Value::Integer { - ty: ty1, - val: 0, - })); - } else if overflow { - self.vstack.push_back(VStackValue::Constant(Value::Integer { - ty: ty1, - val: !((!0u128) - .wrapping_shl(128 - (ty1.header.bitsize as u32))), - })); - } else { - self.vstack.push_back(VStackValue::Constant(Value::Integer { - ty: ty1, - val, - })); - } - } - v => todo!("{:?} {:?}", op, v), - } - } - }, - ( - VStackValue::Constant(Value::Integer { - ty: - ty1 @ ScalarType { - header: - ScalarTypeHeader { - vectorsize: None, .. - }, - kind: ScalarTypeKind::Integer { signed: true, .. 
}, - }, - val: val1, - }), - VStackValue::Constant(Value::Integer { - ty: - ty2 @ ScalarType { - header: - ScalarTypeHeader { - vectorsize: None, .. - }, - kind: ScalarTypeKind::Integer { signed: true, .. }, - }, - val: val2, - }), - ) if ty1.header.bitsize == ty2.header.bitsize => match op { - BinaryOp::Cmp | BinaryOp::CmpInt => { - let sty = ScalarType { - header: ScalarTypeHeader { - bitsize: 32, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, - }; - let val = match (val1 as i128).cmp(&(val2 as i128)) { - std::cmp::Ordering::Less => !0, - std::cmp::Ordering::Equal => 0, - std::cmp::Ordering::Greater => 1, - }; - - self.push_value(VStackValue::Constant(Value::Integer { ty: sty, val })) - } - BinaryOp::CmpLt => { - let sty = ScalarType { - header: ScalarTypeHeader { - bitsize: 32, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, - }; - let val = ((val1 as i128) < (val2 as i128)) as u128; - - self.push_value(VStackValue::Constant(Value::Integer { ty: sty, val })) - } - BinaryOp::CmpGt => { - let sty = ScalarType { - header: ScalarTypeHeader { - bitsize: 32, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, - }; - let val = ((val1 as i128) > (val2 as i128)) as u128; - self.push_value(VStackValue::Constant(Value::Integer { ty: sty, val })) - } - BinaryOp::CmpLe => { - let sty = ScalarType { - header: ScalarTypeHeader { - bitsize: 32, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, - }; - let val = ((val1 as i128) <= (val2 as i128)) as u128; - - self.push_value(VStackValue::Constant(Value::Integer { ty: sty, val })) - } - BinaryOp::CmpGe => { - let sty = ScalarType { - header: ScalarTypeHeader { - bitsize: 32, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, - }; - let val = ((val1 as i128) >= (val2 as i128)) as u128; - - self.push_value(VStackValue::Constant(Value::Integer { ty: sty, val })) - } - BinaryOp::CmpEq => { - let sty = ScalarType { - header: ScalarTypeHeader { - bitsize: 32, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, + Some(builder) + } else { + None }; - let val = (val1 == val2) as u128; - self.push_value(VStackValue::Constant(Value::Integer { ty: sty, val })) - } - BinaryOp::CmpNe => { - let sty = ScalarType { - header: ScalarTypeHeader { - bitsize: 32, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, + self.functions.push(( + sym, + FunctionDef { + section, + linkage, + fnty: ty, + body, }, - }; - let val = (val1 != val2) as u128; - - self.push_value(VStackValue::Constant(Value::Integer { ty: sty, val })) - } - _ => { - let val1 = val1 as i128; - let val2 = val2 as i128; - let (val, overflow) = match op { - BinaryOp::Add => val1.overflowing_add(val2), - BinaryOp::Sub => val1.overflowing_sub(val2), - BinaryOp::Mul => val1.overflowing_mul(val2), - BinaryOp::Div => { - if val2 == 0 { - (0, true) - } else { - val1.overflowing_div(val2) - } - } - BinaryOp::Mod => { - if val2 == 0 { - (0, true) - } else { - val1.overflowing_rem(val2) - } - } - BinaryOp::BitOr => (val1 | val2, false), - BinaryOp::BitAnd => (val1 & val2, false), - BinaryOp::BitXor => (val1 ^ val2, false), - BinaryOp::Lsh => ( - val1.wrapping_shl(val2 as u32), - val2 > (ty1.header.bitsize.into()), - 
), - BinaryOp::Rsh => ( - val1.wrapping_shr(val2 as u32), - val2 > (ty1.header.bitsize.into()), - ), - BinaryOp::Cmp - | BinaryOp::CmpInt - | BinaryOp::CmpLt - | BinaryOp::CmpLe - | BinaryOp::CmpEq - | BinaryOp::CmpNe - | BinaryOp::CmpGe - | BinaryOp::CmpGt => unreachable!(), - op => todo!("{:?}", op), - }; - - let overflow = overflow - || ((val.leading_zeros() < (ty1.header.bitsize as u32)) - && (val.leading_ones() < (ty1.header.bitsize as u32))); - let val = val & (!((!0i128).wrapping_shl(128 - (ty1.header.bitsize as u32)))); - - let val = (((val as i128) << ((128 - (ty1.header.bitsize - 1)) as u32)) - >> ((128 - (ty1.header.bitsize - 1)) as u32)) - as u128; // sign extend signed integers. This makes implementing Cmp et. al above easier - - match v { - OverflowBehaviour::Wrap => self - .vstack - .push_back(VStackValue::Constant(Value::Integer { ty: ty1, val })), - OverflowBehaviour::Unchecked => { - if overflow { - self.vstack - .push_back(VStackValue::Constant(Value::Uninitialized( - Type::Scalar(ty1), - ))) - } else { - self.vstack.push_back(VStackValue::Constant(Value::Integer { - ty: ty1, - val, - })) - } - } - OverflowBehaviour::Checked => { - self.vstack - .push_back(VStackValue::Constant(Value::Integer { ty: ty1, val })); - self.vstack.push_back(VStackValue::Constant(Value::Integer { - ty: ScalarType { - header: ScalarTypeHeader { - bitsize: 1, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, - }, - val: overflow as u128, - })); - } - OverflowBehaviour::Trap => { - if overflow { - self.inner.write_trap(Trap::Abort); - self.vstack.push_back(VStackValue::Trapped); - } else { - self.vstack.push_back(VStackValue::Constant(Value::Integer { - ty: ty1, - val, - })); - } - } - OverflowBehaviour::Saturate => { - if (op == BinaryOp::Sub || op == BinaryOp::Rsh) && overflow { - self.vstack.push_back(VStackValue::Constant(Value::Integer { - ty: ty1, - val: 0, - })); - } else if overflow { - self.vstack.push_back(VStackValue::Constant(Value::Integer { - ty: ty1, - val: !((!0u128) - .wrapping_shl(128 - (ty1.header.bitsize as u32))), - })); - } else { - self.vstack.push_back(VStackValue::Constant(Value::Integer { - ty: ty1, - val, - })); - } - } - v => todo!("{:?} {:?}", op, v), - } + )); } - }, - ( - VStackValue::OpaqueScalar( - st @ ScalarType { - kind: ScalarTypeKind::Integer { .. }, - .. 
- }, - loc, - ), - VStackValue::Constant(Value::Integer { ty, val }), - ) if st == ty => { - let header = st.header; - match header.vectorsize { - None => { - if header.bitsize.is_power_of_two() - && header.bitsize <= self.inner.native_int_size() - { - match v { - OverflowBehaviour::Wrap | OverflowBehaviour::Unchecked => { - let new_loc = match op { - BinaryOp::CmpEq - | BinaryOp::CmpNe - | BinaryOp::CmpGt - | BinaryOp::CmpLt - | BinaryOp::CmpGe - | BinaryOp::CmpLe => self.inner.allocate( - &Type::Scalar(ScalarType { - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, - header: ScalarTypeHeader { - bitsize: 1, - vectorsize: None, - validity: Default::default(), - }, - }), - false, - ), - BinaryOp::CmpInt => self.inner.allocate( - &Type::Scalar(ScalarType { - kind: ScalarTypeKind::Integer { - signed: true, - min: None, - max: None, - }, - header: ScalarTypeHeader { - bitsize: 32, - vectorsize: None, - validity: Default::default(), - }, - }), - false, - ), - _ => self.inner.allocate(&Type::Scalar(st), false), - }; - self.inner.write_int_binary_imm( - new_loc.clone(), - loc, - val, - &Type::Scalar(st), - op, - ); - self.push_value(VStackValue::OpaqueScalar(st, new_loc)); - } - v => todo!("{:?} {:?}", op, v), - } - } else { - todo!("Non-native integer") - } - } - Some(vector) => todo!("vectorsize({:?})", vector), - } - } - (a, b) => todo!("{:?}: {:?}, {:?}", op, a, b), - } - } - - fn branch_to(&mut self, target: u32) { - let values = self.pop_values(self.targets[&target].len()).unwrap(); - for (targ_val, val) in self.targets[&target].clone().into_iter().zip(values) { - let loc = targ_val.opaque_location().unwrap().clone(); - self.move_val(val, loc); - } - - if !self.locals_opaque { - let mut locals = std::mem::take(&mut self.locals); - - for (local, _) in &mut locals { - let val = core::mem::replace(local, VStackValue::Trapped); - - *local = self.make_opaque(val); - } - - self.locals = locals; - self.locals_opaque = true; - } - - self.diverged = true; - if let StdSome(targ) = self.cfg.get(&target) { - if targ.fallthrough_from == self.ctarg { - return; - } - } - - self.inner.branch_unconditional(target); - } - - fn branch_conditional_to(&mut self, target: u32, cond: BranchCondition, loc: F::Loc) { - let values = self.pop_values(self.targets[&target].len()).unwrap(); - for (targ_val, val) in self.targets[&target].clone().into_iter().zip(values) { - let loc = targ_val.opaque_location().unwrap().clone(); - self.move_val(val, loc); - self.push_value(targ_val); - } - - if !self.locals_opaque { - let mut locals = std::mem::take(&mut self.locals); - - for (local, _) in &mut locals { - let val = core::mem::replace(local, VStackValue::Trapped); - - *local = self.make_opaque(val); + ir::MemberDeclaration::Static(_) => todo!("static"), + ir::MemberDeclaration::OpaqueAggregate(_) + | ir::MemberDeclaration::AggregateDefinition(_) + | ir::MemberDeclaration::Empty => {} } - - self.locals = locals; - self.locals_opaque = true; } - if let StdSome(targ) = self.cfg.get(&target) { - if targ.fallthrough_from == self.ctarg { - return; + for (name, def) in &self.functions { + print!("{}{} {}{}", def.section, def.linkage, name, def.fnty); + if let Some(body) = &def.body { + println!("{}", body); + } else { + println!(";"); } } - self.inner.branch(target, cond, loc); + xlang::abi::result::Ok(()) } - /// Writes a (potentially conditional) branch to `target` based on `cond` - pub fn write_branch(&mut self, cond: BranchCondition, target: u32) { - match cond { - BranchCondition::Always => { - 
self.branch_to(target); - } - BranchCondition::Never => {} - cond => { - let control = self.pop_value().unwrap(); - match control { - VStackValue::Constant(Value::Uninitialized(_)) - | VStackValue::Constant(Value::Invalid(_)) => { - self.inner.write_trap(Trap::Unreachable); - self.vstack.push_back(VStackValue::Trapped); - } - VStackValue::Constant(Value::Integer { - ty: - ScalarType { - kind: ScalarTypeKind::Integer { signed, .. }, - .. - }, - val, - }) => { - let taken = match cond { - BranchCondition::Equal => val == 0, - BranchCondition::NotEqual => val != 0, - BranchCondition::Less => signed && ((val as i128) < 0), - BranchCondition::LessEqual => { - (signed && ((val as i128) <= 0)) || val == 0 - } - BranchCondition::Greater => { - if signed { - (val as i128) > 0 - } else { - val > 0 - } - } - BranchCondition::GreaterEqual => (!signed) || ((val as i128) >= 0), - _ => unreachable!(), - }; - - if taken { - self.branch_to(target); - } - } - VStackValue::OpaqueScalar(_, loc) => { - self.branch_conditional_to(target, cond, loc); - } - VStackValue::CompareResult(_, _) => todo!("compare"), - VStackValue::Trapped => { - self.push_value(VStackValue::Trapped); - } - val => panic!("Invalid Branch Control {:?}", val), - } - } - } + fn set_target(&mut self, targ: &'static xlang::targets::properties::TargetProperties<'static>) { + Rc::get_mut(&mut self.mach).unwrap().init_from_target(targ); + self.targ = Some(targ); } +} - fn get_field_paths( - &self, - lval: LValue, - ty: &Type, - ) -> (Type, LValue, Vec) { - match lval { - LValue::Field(base_ty, base, field) => { - let base_type = self.tys.get_field_type(&base_ty, &field).unwrap(); - if &base_type == ty { - let (inner_ty, base, mut fields) = - self.get_field_paths(Box::into_inner(base), &base_ty); - fields.push(field); - (inner_ty, base, fields) - } else { - ( - ty.clone(), - LValue::Field(base_ty, base, field), - xlang::abi::vec![], - ) - } - } - lval => (ty.clone(), lval, xlang::abi::vec![]), - } +impl XLangCodegen for SsaCodegenPlugin { + fn target_matches(&self, x: xlang::abi::string::StringView) -> bool { + self.mach.matches_target(x) } - /// Writes a unary operator - pub fn write_unary_op(&mut self, op: UnaryOp, v: OverflowBehaviour) { - let val = self.pop_value().unwrap(); - match val { - VStackValue::Constant(Value::Invalid(_)) => { - self.inner.write_trap(Trap::Unreachable); - self.push_value(VStackValue::Trapped); - } - VStackValue::Constant(Value::Uninitialized(ty)) => match v { - OverflowBehaviour::Checked => { - let check_ty = ScalarType { - header: ScalarTypeHeader { - bitsize: 1, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, - }; - self.push_value(VStackValue::Constant(Value::Uninitialized(ty))); - self.push_value(VStackValue::Constant(Value::Uninitialized(Type::Scalar( - check_ty, - )))); - } - OverflowBehaviour::Trap => { - self.inner.write_trap(Trap::Unreachable); - self.push_value(VStackValue::Trapped); - } - _ => self.push_value(VStackValue::Constant(Value::Uninitialized(ty))), - }, - VStackValue::Constant(Value::Integer { - ty: - sty @ ScalarType { - kind: ScalarTypeKind::Integer { signed: false, .. }, - .. 
- }, - val, - }) => { - let base_val = match op { - UnaryOp::Minus => (-(val as i128)) as u128, - UnaryOp::BitNot => !val, - UnaryOp::LogicNot => (val == 0) as u128, - op => panic!("Invalid unary op {:?}", op), + fn write_output( + &mut self, + x: xlang::prelude::v1::DynMut, + mode: xlang::plugin::OutputMode, + ) -> xlang::abi::io::Result<()> { + let targ = self.targ.expect("set_target must have been called first"); + if mode == xlang::plugin::OutputMode::Obj { + let mut writer = WriteAdapter::new(x); + let fmt = + binfmt::format_by_name(&targ.link.obj_binfmt).expect("obj_binfmt is not supported"); + + let mut output = fmt.create_file(binfmt::fmt::FileType::Relocatable); + + let mut sections = vec![ + Section { + name: format!(".text"), + align: 1024, + ty: binfmt::fmt::SectionType::ProgBits, + flags: Some(SectionFlag::Alloc | SectionFlag::Executable), + ..Section::default() + }, + Section { + name: format!(".rodata"), + align: 1024, + ty: binfmt::fmt::SectionType::ProgBits, + flags: Some(SectionFlag::Alloc.into()), + ..Section::default() + }, + Section { + name: format!(".data"), + align: 1024, + ty: binfmt::fmt::SectionType::ProgBits, + flags: Some(SectionFlag::Alloc | SectionFlag::Writable), + ..Section::default() + }, + Section { + name: format!(".bss"), + align: 1024, + ty: binfmt::fmt::SectionType::NoBits, + flags: Some(SectionFlag::Alloc | SectionFlag::Writable), + ..Section::default() + }, + ]; + let mut syms = vec![]; + + for (sym_name, def) in core::mem::take(&mut self.functions) { + let sym_kind = match def.linkage { + Linkage::External => SymbolKind::Global, + Linkage::Internal => SymbolKind::Local, + Linkage::Constant => SymbolKind::Local, + Linkage::Weak => SymbolKind::Weak, }; - let mask = (!0u128).wrapping_shr((128 - sty.header.bitsize) as u32); + if let Some(mut body) = def.body { + let section = match def.section { + SectionSpec::Global => 0, + }; + let offset = sections[section as usize].offset(); + let sym_idx = syms.len(); + let sym = Symbol::new( + sym_name, + section, + offset as u128, + SymbolType::Function, + sym_kind, + ); + syms.push(sym); - self.push_value(VStackValue::Constant(Value::Integer { - ty: sty, - val: base_val & mask, - })); + try_!(body + .write(&mut sections[section as usize], |name, offset| { + let inner_sym = Symbol::new( + name, + section, + offset, + SymbolType::Null, + SymbolKind::Local, + ); + syms.push(inner_sym); + }) + .map_err(Into::into)); - if v == OverflowBehaviour::Checked { - let check_ty = ScalarType { - header: ScalarTypeHeader { - bitsize: 1, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, - }; - self.push_value(VStackValue::Constant(Value::Integer { - ty: check_ty, - val: 1, - })); + *syms[sym_idx].size_mut() = + Some((sections[section as usize].offset() - offset) as u64); + } else if sym_kind == SymbolKind::Weak { + syms.push(Symbol::new_undef(sym_name, SymbolType::Function, sym_kind)); } } - VStackValue::Constant(Value::Integer { - ty: - sty @ ScalarType { - kind: ScalarTypeKind::Integer { signed: true, .. }, - .. 
- }, - val, - }) => { - let base_val = match op { - UnaryOp::Minus => (-(val as i128)) as u128, - UnaryOp::BitNot => !val, - UnaryOp::LogicNot => (val == 0) as u128, - op => panic!("Invalid unary op {:?}", op), - }; - - let overflow = op == UnaryOp::Minus && (base_val == val); - - let mask = (!0u128).wrapping_shr((128 - sty.header.bitsize) as u32); - - let val = base_val & mask; - - let val = (((val as i128) << ((128 - (sty.header.bitsize - 1)) as u32)) - >> ((128 - (sty.header.bitsize - 1)) as u32)) as u128; + let mut section_map = vec![]; + for section in sections { + let new_off = output + .add_section(section) + .expect("Could not add a section"); + section_map.push(new_off) + } - match (v, overflow) { - (OverflowBehaviour::Wrap | OverflowBehaviour::Saturate, _) - | (OverflowBehaviour::Unchecked | OverflowBehaviour::Trap, false) => { - self.push_value(VStackValue::Constant(Value::Integer { ty: sty, val })) - } - (OverflowBehaviour::Unchecked, true) => self.push_value(VStackValue::Constant( - Value::Uninitialized(Type::Scalar(sty)), - )), - (OverflowBehaviour::Trap, true) => { - self.inner.write_trap(Trap::Abort); - self.push_value(VStackValue::Trapped); - } - (OverflowBehaviour::Checked, overflow) => { - self.push_value(VStackValue::Constant(Value::Integer { ty: sty, val })); - let check_ty = ScalarType { - header: ScalarTypeHeader { - bitsize: 1, - ..Default::default() - }, - kind: ScalarTypeKind::Integer { - signed: false, - min: None, - max: None, - }, - }; - self.push_value(VStackValue::Constant(Value::Integer { - ty: check_ty, - val: overflow as u128, - })); - } - (v, _) => panic!("Invalid overflow behaviour {:?}", v), + for sym in syms.iter_mut() { + if let Some(sect) = sym.section_mut() { + *sect = section_map[(*sect) as usize]; } } - VStackValue::OpaqueScalar(sty, loc) => { - todo!("OpaqueScalar({},{:?})", sty, loc); - } - val => panic!("Invalid value {}", val), + output.add_symbols(syms).expect("Could not add symbols"); + try_!(fmt.write_file(&mut writer, &output).map_err(Into::into)); + xlang::abi::result::Ok(()) + } else { + todo!("asm file") } } - - /// Writes an expression in linear order into the codegen - pub fn write_expr(&mut self, _: &Expr) { - unimplemented!() - } - - /// Writes an asm-expr - pub fn write_asm(&mut self, _: &AsmExpr) { - unimplemented!() - } - - /// Writes the elements of a block to the codegen, usually the top level block of a function - pub fn write_block(&mut self, _: &Block, _: u32) { - unimplemented!() - } } diff --git a/xlang/xlang_backend/src/mach.rs b/xlang/xlang_backend/src/mach.rs new file mode 100644 index 00000000..e5b7f516 --- /dev/null +++ b/xlang/xlang_backend/src/mach.rs @@ -0,0 +1,42 @@ +use xlang::{abi::string::StringView, targets::properties::TargetProperties}; + +use crate::{ + mangle::mangle_itanium, + ssa::{OpaqueLocation, SsaInstruction}, +}; + +use arch_ops::traits::InsnWrite; + +use std::io::Result; + +pub trait Machine { + type Assignments; + type BlockClobbers; + fn matches_target(&self, targ: StringView) -> bool; + fn init_from_target(&mut self, targ: &TargetProperties); + fn new_assignments(&self) -> Self::Assignments; + fn assign_locations( + &self, + assignments: &mut Self::Assignments, + insns: &[SsaInstruction], + incoming: &[OpaqueLocation], + which: u32, + ) -> Self::BlockClobbers; + fn codegen_prologue( + &self, + assignments: &Self::Assignments, + out: &mut W, + ) -> std::io::Result<()>; + fn codegen_block String>( + &self, + assignments: &Self::Assignments, + insns: &[SsaInstruction], + block_clobbers: 
Self::BlockClobbers, + out: &mut W, + label_sym: F, + which: u32, + ) -> std::io::Result<()>; + fn mangle(&self, path: &[xlang::ir::PathComponent]) -> String { + mangle_itanium(path) + } +} diff --git a/xlang/xlang_backend/src/mc.rs b/xlang/xlang_backend/src/mc.rs deleted file mode 100644 index 067e1bda..00000000 --- a/xlang/xlang_backend/src/mc.rs +++ /dev/null @@ -1,1036 +0,0 @@ -use core::convert::TryFrom; -use std::{cell::RefCell, io::Write, rc::Rc}; - -use arch_ops::traits::InsnWrite; -use binfmt::{ - fmt::{FileType, Section}, - sym::Symbol, -}; -use xlang::{ - abi::{ - collection::HashMap, io::WriteAdapter, option::None as XLangNone, pair::Pair, - result::Result::Ok as XLangOk, span::Span, string::StringView, try_, - }, - ir::{ - AccessClass, BinaryOp, FnType, Linkage, PathComponent, PointerKind, ScalarType, - ScalarTypeHeader, ScalarTypeKind, Type, UnaryOp, - }, - plugin::{OutputMode, XLangCodegen, XLangPlugin}, - targets::properties::{StackAttributeControlStyle, TargetProperties}, -}; - -use crate::{ - callconv::CallingConvention, - expr::{Trap, VStackValue, ValLocation}, - str::StringMap, - ty::TypeInformation, - FunctionCodegen, FunctionRawCodegen, -}; - -/// Register Allocation -pub mod regalloc; - -/// Calling Convention Abstraction -pub mod callconv; - -/// Converts the u128 `val` into bytes according to the platform endianness in `props` -pub fn u128_to_targ_bytes(val: u128, props: &TargetProperties) -> [u8; 16] { - match props.arch.byte_order { - xlang::targets::properties::ByteOrder::LittleEndian => val.to_le_bytes(), - xlang::targets::properties::ByteOrder::BigEndian => val.to_be_bytes(), - xlang::targets::properties::ByteOrder::MiddleEndian => { - let mut bytes = val.to_be_bytes(); - for i in 0..8 { - bytes.swap(2 * i, 2 * i + 1); - } - bytes - } - } -} - -/// Basic Queries about Machine Features -pub trait MachineFeatures { - /// The type of Value Locations - type Loc: ValLocation; - - /// The type of Value Locations - type CallConv: CallingConvention; - - /// The maximum integer size (in bits) supported natively (without emulation) - fn native_int_size(&self) -> u16; - /// The maximum floating point size (in bits) supported natively, or None if no floating-point support exists - fn native_float_size(&self) -> Option; - - /// The maximum Vector size supported natively, in bytes - fn native_vec_size(&self) -> Option { - None - } - - /// Preferred Vector size of the current codegen, in total bytes - /// This need not be the same as the [`FunctionRawCodegen::native_vec_size`], for example, if some vector types incur a significant runtime performance penalty - /// (such as AVX-512) - fn preferred_vec_size(&self) -> Option { - None - } - - /// Whether or not lock-free atomic ops of some size should issue a call to libatomic for this backend. 
- fn lockfree_use_libatomic(&self, size: u64) -> bool; - - /// Whether or not lock-free atomic rmws use libatomic - fn lockfree_cmpxchg_use_libatomic(&self, size: u64) -> bool; - - /// Whether or not BinaryOp can be implemented directly by the CPU - fn has_wait_free_compound(&self, op: BinaryOp, size: u64) -> bool; - - /// Whether or not the fecth version of BinaryOp can be implemented directly by the CPU - fn has_wait_free_compound_fetch(&self, op: BinaryOp, size: u64) -> bool; - - /// Whether or not volatile has any effect on codegen - fn propagate_side_effects(&self) -> bool { - false - } - - /// Whether or not the `sequence` XIR instruction has any effect on codegen - fn propagate_sequence(&self) -> bool { - false - } - - /// Mangle the given path. Note: This has already stripped the `Root` and won't be non-mangled paths - fn mangle(&self, path: &[PathComponent]) -> String; -} - -/// An abstract machine instruction, converted by `xlang_backend` from XIR. -#[derive(Clone, Debug, Hash, PartialEq, Eq)] -#[non_exhaustive] -pub enum MCInsn { - /// Performs no-operation - Null, - /// Move a value from one location to another - Mov { - /// The destination location - dest: MaybeResolved, - /// The source location - src: MaybeResolved, - }, - /// Moves an immediate value into a location - MovImm { - /// The destination location - dest: MaybeResolved, - /// The Source Value - src: u128, - }, - /// Stores from src into the pointer in `dest_ptr` - StoreIndirect { - /// The destination pointer - dest_ptr: MaybeResolved, - /// The Source value - src: MaybeResolved, - /// Class - cl: AccessClass, - }, - /// Stores of an immediate integer into the pointer in `dest_ptr` - StoreIndirectImm { - /// The destination pointer - dest_ptr: MaybeResolved, - /// The source value - src: u128, - }, - /// Emits a trap - Trap { - /// The kind of trap - kind: Trap, - }, - /// Emits a Barrier (Runtime Fence) - Barrier(AccessClass), - /// Performs a binary operation, with an immediate as the rhs - BinaryOpImm { - /// The destination - dest: MaybeResolved, - /// The first operand - src: MaybeResolved, - /// The second operand (an immediate) - val: u128, - /// The Operation performed - op: BinaryOp, - }, - /// Performs a binary operation with unknown values on both sides - BinaryOp { - /// The destination - dest: MaybeResolved, - /// The first operand - src1: MaybeResolved, - /// The second operand - src2: MaybeResolved, - /// The Operation performed - op: BinaryOp, - }, - /// Performs a unary operation - UnaryOp { - /// The operand - dest: MaybeResolved, - /// The operation performed - op: UnaryOp, - }, - /// Calls the given String by address - CallSym(String, String), - /// Tailcalls the given String by address - TailcallSym(String), - /// Cleans up the frame and does a return - Return, - /// Loads the address of a symbol into the given location - LoadSym { - /// The location in which to place the symbol - loc: MaybeResolved, - /// The symbol to resolve - sym: String, - }, - - /// A Label - Label(String), - /// Unconditional Branch - UnconditionalBranch(String), - /// A Load operation that reads from a pointer - LoadIndirect { - /// The (immediate) destination location - dest: MaybeResolved, - /// The location with the pointer to load from - src_ptr: MaybeResolved, - /// The Access Class - cl: AccessClass, - }, - /// Zero-extends (or truncates) from src to dest - ZeroExtend { - /// Destination register - dest: MaybeResolved, - /// Source register - src: MaybeResolved, - /// Width of the destination value - new_width: 
u16, - /// Wdith of the source value - old_width: u16, - }, - /// Conditional Branch - Branch { - /// Target upon successful condition check - targ: String, - /// Condition for branching - cond: xlang::ir::BranchCondition, - /// Value to test - val: MaybeResolved, - }, -} - -impl Default for MCInsn { - fn default() -> Self { - Self::Null - } -} - -/// A location (register) allocated by the backend -#[derive(Clone, Debug, Hash, PartialEq, Eq)] -pub struct Location { - id: u32, - has_addr: bool, - ty: Type, - size: u64, - align: u64, -} - -impl Location { - /// The id of the `Location` - pub const fn id(&self) -> u32 { - self.id - } - - /// Whether or not the location needs/has an address - pub const fn has_addr(&self) -> bool { - self.has_addr - } - - /// Yields the type of the `Location` - pub const fn type_of(&self) -> &Type { - &self.ty - } - - /// Yields the size of the `Location` - pub const fn size_of(&self) -> u64 { - self.size - } - - /// Yields the alignment requirement of the `Location` - pub const fn align_of(&self) -> u64 { - self.align - } -} - -impl ValLocation for Location { - fn addressible(&self) -> bool { - self.has_addr - } - - fn unassigned(n: usize) -> Self { - Self { - id: u32::try_from(n).unwrap(), - has_addr: false, - ty: Type::Null, - size: 0, - align: 1, - } - } -} - -/// A value location that is either unresolved or a resolved machine-specific location (such as a register or stack memory) -#[derive(Clone, Debug, Hash, PartialEq, Eq)] -pub enum MaybeResolved { - /// An unresolved location that is assigned during output - Unresolved(Location), - /// A known, resolved, location - Resolved(Type, Loc), -} - -impl ValLocation for MaybeResolved { - fn addressible(&self) -> bool { - match self { - Self::Unresolved(loc) => loc.has_addr, - Self::Resolved(_, loc) => loc.addressible(), - } - } - - fn unassigned(n: usize) -> Self { - Self::Unresolved(Location::unassigned(n)) - } -} - -/// An Adaptor to a calling convention that yields [`MaybeResolved`] as the location type -pub struct CallConvAdaptor(C); - -impl CallingConvention for CallConvAdaptor { - type Loc = MaybeResolved; - - fn pass_return_place(&self, ty: &Type) -> Option { - self.0 - .pass_return_place(ty) - .map(|loc| MaybeResolved::Resolved(ty.clone(), loc)) - } - - fn find_param( - &self, - fnty: &xlang::ir::FnType, - real: &xlang::ir::FnType, - param: u32, - infn: bool, - ) -> Self::Loc { - MaybeResolved::Resolved( - real.params[usize::try_from(param).unwrap()].clone(), - self.0.find_param(fnty, real, param, infn), - ) - } - - fn find_return_val(&self, fnty: &xlang::ir::FnType) -> Self::Loc { - MaybeResolved::Resolved(fnty.ret.clone(), self.0.find_return_val(fnty)) - } -} - -/// Raw codegen for generating MCIR from XIR -pub struct MCFunctionCodegen { - inner: F, - next_loc_id: u32, - tys: Rc, - mc_insns: Vec>, - callconv: CallConvAdaptor, - strings: Rc>, - fn_name: String, - fnty: FnType, -} - -#[allow(unused_variables)] -impl FunctionRawCodegen for MCFunctionCodegen { - type Loc = MaybeResolved; - - type CallConv = CallConvAdaptor; - - fn write_trap(&mut self, trap: crate::expr::Trap) { - self.mc_insns.push(MCInsn::Trap { kind: trap }); - } - - fn write_barrier(&mut self, acc: xlang::ir::AccessClass) { - self.mc_insns.push(MCInsn::Barrier(acc)); - } - - fn write_int_binary_imm( - &mut self, - dest: Self::Loc, - a: Self::Loc, - b: u128, - _: &Type, - op: xlang::ir::BinaryOp, - ) { - self.mc_insns.push(MCInsn::BinaryOpImm { - dest, - src: a, - val: b, - op, - }); - } - - fn write_int_binary( - &mut self, - dest: 
Self::Loc, - src1: Self::Loc, - src2: Self::Loc, - _: &Type, - op: xlang::ir::BinaryOp, - ) { - self.mc_insns.push(MCInsn::BinaryOp { - dest, - src1, - src2, - op, - }); - } - - fn write_unary(&mut self, val: Self::Loc, _: &Type, op: xlang::ir::UnaryOp) { - self.mc_insns.push(MCInsn::UnaryOp { dest: val, op }); - } - - fn move_val(&mut self, src: Self::Loc, dest: Self::Loc) { - self.mc_insns.push(MCInsn::Mov { dest, src }); - } - - fn move_imm(&mut self, src: u128, dest: Self::Loc, ty: &Type) { - self.mc_insns.push(MCInsn::MovImm { dest, src }); - } - - fn store_indirect_imm(&mut self, src: xlang::ir::Value, ptr: Self::Loc) { - match src { - xlang::ir::Value::Integer { ty, val } => self.mc_insns.push(MCInsn::StoreIndirectImm { - dest_ptr: ptr, - src: val, - }), - _ => todo!(), - } - } - - fn load_val(&mut self, lvalue: Self::Loc, loc: Self::Loc) { - self.mc_insns.push(MCInsn::LoadIndirect { - dest: loc, - src_ptr: lvalue, - cl: AccessClass::Normal, - }) - } - - fn store_indirect(&mut self, lvalue: Self::Loc, loc: Self::Loc, _: &Type) { - self.mc_insns.push(MCInsn::StoreIndirect { - dest_ptr: lvalue, - src: loc, - cl: AccessClass::Normal, - }) - } - - fn get_callconv(&self) -> &Self::CallConv { - &self.callconv - } - - fn native_int_size(&self) -> u16 { - self.inner.native_int_size() - } - - fn native_float_size(&self) -> xlang::abi::option::Option { - self.inner.native_float_size().into() - } - - fn write_intrinsic( - &mut self, - name: xlang::abi::string::StringView, - params: xlang::vec::Vec>, - ) -> crate::expr::VStackValue { - todo!() - } - - fn write_target(&mut self, target: u32) { - let targ = format!("{}._T{}", self.fn_name, target); - self.mc_insns.push(MCInsn::Label(targ)); - } - - fn call_direct(&mut self, path: &xlang::ir::Path, realty: &xlang::ir::FnType) { - let addr = match &*path.components { - [PathComponent::Text(a)] | [PathComponent::Root, PathComponent::Text(a)] => { - a.to_string() - } - [PathComponent::Root, rest @ ..] | [rest @ ..] => self.inner.mangle(rest), - }; - self.mc_insns - .push(MCInsn::CallSym(realty.tag.to_string(), addr)) - } - - fn call_indirect(&mut self, value: Self::Loc) { - todo!() - } - - fn call_absolute(&mut self, addr: u128, realty: &xlang::ir::FnType) { - todo!() - } - - fn tailcall_direct(&mut self, path: &xlang::ir::Path, realty: &xlang::ir::FnType) { - let addr = match &*path.components { - [PathComponent::Text(a)] | [PathComponent::Root, PathComponent::Text(a)] => { - a.clone().to_string() - } - [PathComponent::Root, rest @ ..] | [rest @ ..] 
=> self.inner.mangle(rest), - }; - if self.callconv.can_tail(realty, &self.fnty) { - self.mc_insns.push(MCInsn::TailcallSym(addr)) - } else { - self.mc_insns - .push(MCInsn::CallSym(realty.tag.to_string(), addr)); - self.mc_insns.push(MCInsn::Return); - } - } - - fn tailcall_indirect(&mut self, value: Self::Loc, realty: &xlang::ir::FnType) { - todo!("tailcall_indirect") - } - - fn leave_function(&mut self) { - self.mc_insns.push(MCInsn::Return) - } - - fn branch(&mut self, target: u32, cond: xlang::ir::BranchCondition, val: Self::Loc) { - let targ = format!("{}._T{}", self.fn_name, target); - self.mc_insns.push(MCInsn::Branch { targ, cond, val }); - } - - fn branch_compare( - &mut self, - target: u32, - condition: xlang::ir::BranchCondition, - v1: crate::expr::VStackValue, - v2: crate::expr::VStackValue, - ) { - todo!() - } - - fn branch_unconditional(&mut self, target: u32) { - let targ = format!("{}._T{}", self.fn_name, target); - self.mc_insns.push(MCInsn::UnconditionalBranch(targ)); - } - - fn branch_indirect(&mut self, target: Self::Loc) { - todo!() - } - - fn compute_global_address(&mut self, path: &xlang::ir::Path, loc: Self::Loc) { - let sym = match &*path.components { - [xlang::ir::PathComponent::Root, xlang::ir::PathComponent::Text(text)] - | [xlang::ir::PathComponent::Text(text)] => text.to_string(), - [xlang::ir::PathComponent::Root, rest @ ..] | [rest @ ..] => self.inner.mangle(rest), - }; - - self.mc_insns.push(MCInsn::LoadSym { loc, sym }); - } - - fn compute_label_address(&mut self, target: u32, loc: Self::Loc) { - todo!() - } - - fn compute_parameter_address(&mut self, param: u32, loc: Self::Loc) { - todo!() - } - - fn compute_local_address(&mut self, inloc: Self::Loc, loc: Self::Loc) { - todo!() - } - - fn compute_string_address( - &mut self, - enc: crate::str::Encoding, - bytes: xlang::vec::Vec, - loc: Self::Loc, - ) { - let sym = self - .strings - .borrow_mut() - .get_string_symbol(bytes, enc) - .to_string(); - - self.mc_insns.push(MCInsn::LoadSym { loc, sym }); - } - - fn free(&mut self, loc: Self::Loc) { - todo!() - } - - fn clobber(&mut self, loc: Self::Loc) { - todo!() - } - - fn allocate(&mut self, ty: &Type, needs_addr: bool) -> Self::Loc { - let id = self.next_loc_id; - self.next_loc_id += 1; - let size = self.tys.type_size(ty).unwrap(); - let align = self.tys.type_align(ty).unwrap(); - MaybeResolved::Unresolved(Location { - id, - size, - align, - has_addr: needs_addr, - ty: ty.clone(), - }) - } - - fn allocate_lvalue(&mut self, needs_addr: bool) -> Self::Loc { - let size = self.tys.pointer_size(&Type::Void, PointerKind::Default); - let align = self.tys.pointer_align(&Type::Void, PointerKind::Default); - - let ty = Type::Pointer(xlang::ir::PointerType { - inner: xlang::abi::boxed::Box::new(Type::Void), - ..Default::default() - }); - - let id = self.next_loc_id; - self.next_loc_id += 1; - MaybeResolved::Unresolved(Location { - id, - size, - align, - has_addr: needs_addr, - ty, - }) - } - - fn prepare_call_frame(&mut self, callty: &xlang::ir::FnType, realty: &xlang::ir::FnType) {} - - fn lockfree_use_libatomic(&mut self, size: u64) -> bool { - todo!() - } - - fn lockfree_cmpxchg_use_libatomic(&mut self, size: u64) -> bool { - todo!() - } - - fn has_wait_free_compound(&mut self, op: xlang::ir::BinaryOp, size: u64) -> bool { - todo!() - } - - fn has_wait_free_compound_fetch(&mut self, op: xlang::ir::BinaryOp, size: u64) -> bool { - todo!() - } - - fn compare_exchange( - &mut self, - dest: Self::Loc, - ctrl: Self::Loc, - val: Self::Loc, - ty: &Type, - ord: 
xlang::ir::AccessClass, - ) { - todo!() - } - - fn weak_compare_exchange( - &mut self, - dest: Self::Loc, - ctrl: Self::Loc, - val: Self::Loc, - ty: &Type, - ord: xlang::ir::AccessClass, - ) { - todo!() - } - - fn write_asm( - &mut self, - _: &xlang::ir::AsmExpr, - _: xlang::vec::Vec::Loc>>>, - ) -> xlang::vec::Vec { - todo!() - } - - fn write_scalar_convert( - &mut self, - target_ty: xlang::ir::ScalarType, - incoming_ty: xlang::ir::ScalarType, - new_loc: Self::Loc, - old_loc: Self::Loc, - ) { - match (target_ty, incoming_ty) { - ( - ScalarType { - header: - ScalarTypeHeader { - bitsize: new_width, - vectorsize: XLangNone, - .. - }, - kind: ScalarTypeKind::Integer { signed, .. }, - }, - ScalarType { - header: - ScalarTypeHeader { - bitsize: old_width, - vectorsize: XLangNone, - .. - }, - kind: ScalarTypeKind::Integer { signed: false, .. }, - }, - ) => { - self.mc_insns.push(MCInsn::ZeroExtend { - dest: new_loc, - src: old_loc, - new_width, - old_width, - }); - } - _ => todo!(), - } - } -} - -/// Trait for types that can write [`MCInsn`]s to an [`InsnWrite`] -pub trait MCWriter { - /// The [`MachineFeatures`] type - type Features: MachineFeatures; - /// The type used to indicate what locations were clobbered at what instruction. - type Clobbers; - /// Resolves locations in-place for the given machine code - fn resolve_locations( - &self, - insns: &mut [MCInsn<::Loc>], - callconv: &::CallConv, - ) -> Self::Clobbers; - /// Writes the machine code to the given stream - fn write_machine_code( - &self, - insns: &[MCInsn<::Loc>], - clobbers: Self::Clobbers, - tys: Rc, - out: &mut I, - sym_accepter: F, - ) -> std::io::Result<()>; - /// Obtains the calling convention for the given `realty` - fn get_call_conv( - &self, - realty: &FnType, - targ: &'static TargetProperties<'static>, - features: Span, - ty_info: Rc, - ) -> ::CallConv; - /// Obtains the features from the target - fn get_features( - &self, - properties: &'static TargetProperties<'static>, - features: Span, - ) -> Self::Features; - - /// Checks if the given target name matches - fn target_matches(&self, name: &str) -> bool; -} - -#[allow(dead_code)] // These will be used more properly later -enum SectionSpec { - GlobalSection, - UniqueSection, - Comdat(String), - Section(String), -} - -struct MCFunctionDecl { - linkage: Linkage, - body: Option<(SectionSpec, FunctionCodegen>)>, -} - -struct MCStaticDef { - section: SectionSpec, - init: xlang::ir::Value, - space: u64, - align: u64, - specifier: xlang::ir::StaticSpecifier, -} - -struct MCStaticDecl { - linkage: Linkage, - init: Option, -} - -/// Backend that generates MCIR from XIR then writes to a binary file -pub struct MCBackend { - properties: Option<&'static TargetProperties<'static>>, - feature: Span<'static, StringView<'static>>, - strings: Rc>, - writer: W, - functions: HashMap>, - statics: HashMap, - tys: Option>, -} - -impl MCBackend { - /// Creates a new backend writer - pub fn new(x: W) -> Self { - Self { - properties: None, - feature: Span::empty(), - strings: Rc::new(RefCell::new(StringMap::new())), - writer: x, - functions: HashMap::new(), - statics: HashMap::new(), - tys: None, - } - } -} - -impl XLangPlugin for MCBackend { - fn accept_ir( - &mut self, - _: &mut xlang::ir::File, - ) -> xlang::abi::result::Result<(), xlang::plugin::Error> { - unimplemented!() - } - - fn set_target(&mut self, targ: &'static TargetProperties<'static>) { - self.properties = Some(targ); - } -} - -impl XLangCodegen for MCBackend { - fn target_matches(&self, x: StringView) -> bool { - 
self.writer.target_matches(&x) - } - - fn write_output( - &mut self, - x: xlang::abi::traits::DynMut, - mode: xlang::plugin::OutputMode, - ) -> xlang::abi::io::Result<()> { - assert!(matches!(mode, OutputMode::Obj)); - let mut syms = Vec::new(); - let props = self.properties.unwrap(); - let binfmt = binfmt::format_by_name(&props.link.obj_binfmt).unwrap(); - - let features = self.writer.get_features(props, self.feature); - - let mut binfile = binfmt.create_file(FileType::Relocatable); - - let mut rodata = Section { - name: String::from(".rodata"), - align: 1024, - ty: binfmt::fmt::SectionType::ProgBits, - flags: Some(binfmt::fmt::SectionFlag::Alloc.into()), - ..Default::default() - }; - - let mut text = Section { - name: String::from(".text"), - align: 1024, - ty: binfmt::fmt::SectionType::ProgBits, - flags: Some(binfmt::fmt::SectionFlag::Alloc | binfmt::fmt::SectionFlag::Executable), - ..Default::default() - }; - let mut data = Section { - name: String::from(".data"), - align: 1024, - ty: binfmt::fmt::SectionType::NoBits, - flags: Some(binfmt::fmt::SectionFlag::Alloc | binfmt::fmt::SectionFlag::Writable), - ..Default::default() - }; - let mut bss = Section { - name: String::from(".bss"), - align: 1024, - ty: binfmt::fmt::SectionType::NoBits, - flags: Some(binfmt::fmt::SectionFlag::Alloc | binfmt::fmt::SectionFlag::Writable), - ..Default::default() - }; - - for (enc, name, content) in self.strings.borrow().symbols() { - let st = Symbol::new( - name.to_string(), - Some(0), - Some(rodata.offset() as u128), - binfmt::sym::SymbolType::Object, - binfmt::sym::SymbolKind::Local, - ); - let bytes = enc.encode_utf8(content); - try_!(rodata.write_all(&bytes).map_err(Into::into)); - syms.push(st); - } - - for Pair(name, func) in core::mem::take(&mut self.functions) { - let symty = match func.linkage { - Linkage::External => binfmt::sym::SymbolKind::Global, - Linkage::Internal | Linkage::Constant => binfmt::sym::SymbolKind::Local, - Linkage::Weak => binfmt::sym::SymbolKind::Weak, - }; - - if let Some((_, func)) = func.body { - let mut inner = func.into_inner(); - let cc = &inner.callconv; - let clobbers = self.writer.resolve_locations(&mut inner.mc_insns, &cc.0); - - let sym = Symbol::new( - name, - Some(1), - Some(text.offset() as u128), - binfmt::sym::SymbolType::Function, - symty, - ); - - try_!(self - .writer - .write_machine_code( - &inner.mc_insns, - clobbers, - self.tys.clone().unwrap(), - &mut text, - |label, offset| { - syms.push(Symbol::new( - label, - Some(1), - Some(offset as u128), - binfmt::sym::SymbolType::Function, - binfmt::sym::SymbolKind::Local, - )) - } - ) - .map_err(Into::into)); - - syms.push(sym); - } else if func.linkage == Linkage::Weak { - let sym = Symbol::new(name, None, None, binfmt::sym::SymbolType::Function, symty); - - syms.push(sym); - } - } - - for Pair(name, decl) in core::mem::take(&mut self.statics) { - let symty = match decl.linkage { - Linkage::External => binfmt::sym::SymbolKind::Global, - Linkage::Internal | Linkage::Constant => binfmt::sym::SymbolKind::Local, - Linkage::Weak => binfmt::sym::SymbolKind::Weak, - }; - - if let Some(init) = decl.init { - let (secno, section) = match init.section { - SectionSpec::GlobalSection => { - if let xlang::ir::Value::Uninitialized(_) = init.init { - (3, &mut bss) - } else if init - .specifier - .contains(xlang::ir::StaticSpecifier::IMMUTABLE) - { - (0, &mut rodata) - } else { - (2, &mut data) - } - } - _ => todo!(), - }; - - section.align = section.align.max(init.align as usize); - let total_len = section.content.len() + 
section.tail_size; - let size = init.space as usize; - if (total_len as u64 & (init.align - 1)) != 0 { - section.tail_size += - (init.align - (total_len as u64 & (init.align - 1))) as usize; - } - let sym = Symbol::new( - name, - Some(secno), - Some(section.offset() as u128), - binfmt::sym::SymbolType::Object, - symty, - ); - - match init.init { - xlang::ir::Value::Invalid(_) | xlang::ir::Value::Uninitialized(_) => { - section.tail_size += size; - } - xlang::ir::Value::GenericParameter(_) => panic!("late generic"), - xlang::ir::Value::Integer { val, .. } => { - let val = u128_to_targ_bytes(val, props); - - let leading = val.len().min(size); - - try_!(section.write_all(&val[..leading]).map_err(Into::into)); - if leading < size { - try_!(section.write_zeroes(size - leading).map_err(Into::into)); - } - } - xlang::ir::Value::GlobalAddress { item, .. } => { - let mangled_name = match &*item.components { - [xlang::ir::PathComponent::Text(name)] - | [xlang::ir::PathComponent::Root, xlang::ir::PathComponent::Text(name)] => { - name.to_string() - } - [xlang::ir::PathComponent::Root, rest @ ..] | rest => { - features.mangle(rest) - } - }; - - try_!(section - .write_addr( - size * 8, - arch_ops::traits::Address::Symbol { - name: mangled_name, - disp: 0 - }, - false - ) - .map_err(Into::into)); - } - xlang::ir::Value::ByteString { .. } => todo!("byte string"), - xlang::ir::Value::String { .. } => todo!("string"), - xlang::ir::Value::LabelAddress(_) => { - panic!("Cannot use label_address in a global static") - } - xlang::ir::Value::Empty => unreachable!(), - } - syms.push(sym); - } else if decl.linkage == Linkage::Weak { - let sym = Symbol::new(name, None, None, binfmt::sym::SymbolType::Object, symty); - - syms.push(sym); - } - } - - let rodatano = binfile.add_section(rodata).unwrap(); - let textno = binfile.add_section(text).unwrap(); - let datano = binfile.add_section(data).unwrap(); - let bssno = binfile.add_section(bss).unwrap(); - - if let StackAttributeControlStyle::GnuStack = props.link.stack_attribute_control { - let note_gnustack = Section { - name: String::from(".note.GNU-stack"), - align: 1024, - ty: binfmt::fmt::SectionType::NoBits, - flags: Some(binfmt::fmt::SectionFlags::default()), - ..Default::default() - }; - - binfile.add_section(note_gnustack).unwrap(); - } - - for mut sym in syms { - match sym.section_mut() { - Some(x @ 0) => *x = rodatano, - Some(x @ 1) => *x = textno, - Some(x @ 2) => *x = datano, - Some(x @ 3) => *x = bssno, - None => {} - Some(x) => panic!("Unexpected section number for symbol {}", x), - } - - let insym = binfile.get_or_create_symbol(sym.name()).unwrap(); - - if sym.value().is_some() || insym.value().is_none() { - *insym = sym; - } - } - - let mut adaptor = WriteAdapter::new(x); - - try_!(binfmt - .write_file(&mut adaptor, &binfile) - .map_err(Into::into)); - - XLangOk(()) - } -} diff --git a/xlang/xlang_backend/src/mc/callconv.rs b/xlang/xlang_backend/src/mc/callconv.rs deleted file mode 100644 index 96fb2e31..00000000 --- a/xlang/xlang_backend/src/mc/callconv.rs +++ /dev/null @@ -1,31 +0,0 @@ -// use xlang::ir::FloatFormat; - -// pub enum TypeMode { -// Null, -// Memory, -// Integer, -// Float(FloatFormat), -// IntVector, -// FloatVector, -// Struct(Vec<(u64, TypeMode)>), -// Union(Vec), -// ComplexFloat(FloatFormat), -// } - -// pub trait CallConvTag { -// type Register: Clone + PartialEq + Hash; - -// type CallMode: Clone + PartialEq + Hash; - -// /// All modes available -// fn all_modes(&self) -> &[Self::CallMode]; - -// /// All registers available for a 
particular call mode -// fn param_registers_for_mode(&self, mode: Self::CallMode) -> &[Self::Register]; - -// /// Converts a [`TypeMode`] into the appropriate [`CallConvTag::CallMode`] -// fn call_mode(&self, ty_mode: &TypeMode) -> Self::CallMode; - -// /// Maximum Size of values in the registers for the [`CallConvTag::CallMode`] -// fn register_width(&self, mode: Self::CallMode) -> u64; -// } diff --git a/xlang/xlang_backend/src/mc/regalloc.rs b/xlang/xlang_backend/src/mc/regalloc.rs deleted file mode 100644 index 276af408..00000000 --- a/xlang/xlang_backend/src/mc/regalloc.rs +++ /dev/null @@ -1,22 +0,0 @@ -use core::hash::Hash; -use std::num::NonZeroU64; - -/// A set of registers available for use by regalloc -pub trait RegSet { - /// The register type to use - type Register: Clone + PartialEq + Hash; - - /// All of the registers in the set - fn all_registers(&self) -> &[Self::Register]; - - /// The maximum size of values that can be stored in the register - fn max_size(&self) -> u64; - - /// The maximum alignment of values that can be stored in the register. - /// - /// Typically, this is the same as [`RegSet::max_size`], and [`None`] means this value. - /// This may be smaller if the register is a synthetic one placed in memory and does not have the required alignment. - /// - /// Returns a power of 2 less than [`RegSet::max_size`] - fn max_align(&self) -> Option; -} diff --git a/xlang/xlang_backend/src/ssa.rs b/xlang/xlang_backend/src/ssa.rs index 9c8296fa..820b1b75 100644 --- a/xlang/xlang_backend/src/ssa.rs +++ b/xlang/xlang_backend/src/ssa.rs @@ -1,20 +1,105 @@ #![allow(missing_docs)] +use core::cell::{Cell, RefCell}; +use std::panic::Location; use std::rc::Rc; -use xlang::ir::*; +use crate::expr::ValLocation; +use crate::expr::*; +use crate::mach::Machine; +use crate::ty::TypeInformation; + +use arch_ops::traits::InsnWrite; +use xlang::targets::properties::TargetProperties; + +use xlang::ir; + +use xlang::abi::{boxed::Box as XLangBox, collection::HashMap, vec::Vec}; + +struct SharedCounter(Cell); + +impl SharedCounter { + const fn new() -> Self { + Self(Cell::new(0)) + } + fn next(&self) -> u32 { + let val = self.0.get(); + self.0.set(val + 1); + + val + } +} #[derive(Clone, Debug, Hash, PartialEq, Eq)] -pub enum SsaTerminator { - Fallthrough(u32, Vec), +pub enum SsaInstruction { Jump(u32, Vec), - Exit(u16), + Fallthrough(u32, Vec), + Exit(Vec), Tailcall(CallTarget, Vec), + Trap(Trap), + LoadImmediate(OpaqueLocation, u128), +} + +impl core::fmt::Display for SsaInstruction { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + SsaInstruction::Jump(val, stack) => { + f.write_fmt(format_args!("jump @{} [", val))?; + let mut sep = ""; + for item in stack { + f.write_str(sep)?; + sep = ", "; + item.fmt(f)?; + } + f.write_str("]") + } + SsaInstruction::Fallthrough(val, stack) => { + f.write_fmt(format_args!("fallthrough @{} [", val))?; + let mut sep = ""; + for item in stack { + f.write_str(sep)?; + sep = ", "; + item.fmt(f)?; + } + f.write_str("]") + } + SsaInstruction::Exit(estack) => { + f.write_fmt(format_args!("exit ["))?; + let mut sep = ""; + for item in estack { + f.write_str(sep)?; + sep = ", "; + item.fmt(f)?; + } + f.write_str("]") + } + SsaInstruction::Tailcall(targ, params) => { + f.write_fmt(format_args!("tailcall {}(", targ))?; + let mut sep = ""; + for item in params { + f.write_str(sep)?; + sep = ", "; + item.fmt(f)?; + } + f.write_str(")") + } + SsaInstruction::Trap(trap) => f.write_fmt(format_args!("trap {}", trap)), + 
SsaInstruction::LoadImmediate(dest, val) => { + f.write_fmt(format_args!("loadimm {}, {}", dest, val)) + } + } + } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct CallTarget { pub ptr: OpaquePtr, - pub real_ty: FnType, + pub real_ty: ir::FnType, +} + +impl core::fmt::Display for CallTarget { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_fmt(format_args!("{}: {}", self.ptr, self.real_ty)) + } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] @@ -23,8 +108,404 @@ pub enum OpaquePtr { Pointer(OpaqueLocation), } +impl core::fmt::Display for OpaquePtr { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + OpaquePtr::Symbol(s) => f.write_str(s), + OpaquePtr::Pointer(ptr) => f.write_fmt(format_args!("[{}]", ptr)), + } + } +} + #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct OpaqueLocation { - pub ty: Rc, + pub ty: Rc, + pub kind: ir::StackValueKind, pub num: u32, + has_addr: bool, +} + +impl core::fmt::Display for OpaqueLocation { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_fmt(format_args!("%{}: ", self.num))?; + if self.kind == ir::StackValueKind::LValue { + f.write_str("lvalue ")?; + } + self.ty.fmt(f) + } +} + +impl ValLocation for OpaqueLocation { + fn addressible(&self) -> bool { + self.has_addr + } +} + +pub struct FunctionBuilder { + tys: Rc, + sym_name: String, + mach: Rc, + basic_blocks: Vec>, + target: &'static TargetProperties<'static>, + locals: Rc>, + loc_id_counter: Rc, + incoming_locations: HashMap>, + incoming_count: Rc>, + fnty: Rc, +} + +impl FunctionBuilder { + pub fn new( + sym_name: String, + mach: Rc, + tys: Rc, + target: &'static TargetProperties<'static>, + fnty: Rc, + ) -> Self { + Self { + sym_name, + mach, + tys, + target, + basic_blocks: Vec::new(), + locals: Rc::new(Vec::new()), + loc_id_counter: Rc::new(SharedCounter::new()), + incoming_locations: HashMap::new(), + fnty, + incoming_count: Rc::new(HashMap::new()), + } + } +} + +impl FunctionBuilder { + pub fn push_local(&mut self, ty: ir::Type) { + Rc::get_mut(&mut self.locals) + .expect("No basic blocks may have been pushed yet") + .push(OpaqueLocation { + ty: Rc::new(ty), + kind: ir::StackValueKind::RValue, + num: self.loc_id_counter.next(), + has_addr: true, + }) + } + + pub fn push_incoming(&mut self, id: u32, incoming: &Vec) { + let incoming_count = Rc::get_mut(&mut self.incoming_count) + .expect("new_basic_block may not have been called yet"); + incoming_count.insert(id, incoming.len()); + } + + pub fn new_basic_block( + &mut self, + id: u32, + incoming: &Vec, + ) -> &mut BasicBlockBuilder { + let mut vstack = Vec::new(); + let mut incoming_locs = Vec::new(); + + for nloc in incoming { + let ty = nloc.ty.clone(); + let loc = OpaqueLocation { + ty: Rc::new(ty.clone()), + kind: nloc.kind, + num: self.loc_id_counter.next(), + has_addr: false, + }; + + match nloc.kind { + ir::StackValueKind::LValue => { + vstack.push(VStackValue::opaque_lvalue(ty, loc.clone())) + } + ir::StackValueKind::RValue => { + vstack.push(VStackValue::opaque_value(ty, loc.clone())) + } + } + incoming_locs.push(loc); + } + + self.incoming_locations.insert(id, incoming_locs); + + let builder = BasicBlockBuilder { + id, + tys: self.tys.clone(), + mach: self.mach.clone(), + insns: Vec::new(), + target: self.target, + locals: self.locals.clone(), + vstack, + incoming_count: self.incoming_count.clone(), + loc_id_counter: self.loc_id_counter.clone(), + }; + + self.basic_blocks.push_mut(builder) + } + + pub fn write( + &mut self, 
+ out: &mut W, + mut sym_accepter: F, + ) -> std::io::Result<()> { + let mut assigns = self.mach.new_assignments(); + let mut block_clobbers = vec![]; + for bb in &self.basic_blocks { + block_clobbers.push(self.mach.assign_locations( + &mut assigns, + &bb.insns, + self.incoming_locations.get(&bb.id).unwrap(), + bb.id, + )); + } + + self.mach.codegen_prologue(&assigns, out)?; + for (bb, block_clobbers) in self.basic_blocks.iter().zip(block_clobbers) { + let symbol = format!("{}._B{}", self.sym_name, bb.id); + self.mach.codegen_block( + &assigns, + &bb.insns, + block_clobbers, + out, + |id| format!("{}._B{}", self.sym_name, id), + bb.id, + )?; + } + Ok(()) + } +} + +impl core::fmt::Display for FunctionBuilder { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str("{\n")?; + for (i, loc) in self.locals.iter().enumerate() { + f.write_fmt(format_args!(" local _{}: {};\n", i, loc.ty))?; + } + for bb in &self.basic_blocks { + bb.fmt(f)?; + } + f.write_str("}\n")?; + Ok(()) + } +} + +pub struct BasicBlockBuilder { + id: u32, + tys: Rc, + mach: Rc, + insns: Vec, + target: &'static TargetProperties<'static>, + locals: Rc>, + vstack: Vec>, + incoming_count: Rc>, + loc_id_counter: Rc, +} + +impl BasicBlockBuilder { + pub fn move_into(&mut self, val: VStackValue, loc: OpaqueLocation) { + match val { + VStackValue::Constant(val) => match val { + ir::Value::Invalid(_) => self.insns.push(SsaInstruction::Trap(Trap::Unreachable)), + ir::Value::Uninitialized(_) => {} + ir::Value::GenericParameter(_) => panic!("Cannot handle generics this late"), + ir::Value::Integer { val, .. } => { + self.insns.push(SsaInstruction::LoadImmediate(loc, val)) + } + ir::Value::GlobalAddress { ty, item } => todo!(), + ir::Value::ByteString { content } => todo!(), + ir::Value::String { encoding, utf8, ty } => todo!(), + ir::Value::LabelAddress(_) => todo!(), + ir::Value::Empty => panic!("Empty IR value"), + }, + VStackValue::LValue(_, _) => todo!(), + VStackValue::Pointer(_, _) => todo!(), + VStackValue::OpaqueScalar(_, _) => todo!(), + VStackValue::AggregatePieced(_, _) => todo!(), + VStackValue::OpaqueAggregate(_, _) => todo!(), + VStackValue::CompareResult(_, _) => todo!(), + VStackValue::Trapped => todo!(), + VStackValue::ArrayRepeat(_, _) => todo!(), + } + } + + pub fn push(&mut self, val: VStackValue) { + self.vstack.push(val); + } + + pub fn pop(&mut self) -> VStackValue { + self.vstack + .pop() + .expect("BasicBlockBuilder::pop called with an empty stack") + } + + pub fn pop_values_static(&mut self) -> [VStackValue; N] { + use core::mem::MaybeUninit; + let mut val = MaybeUninit::<[_; N]>::uninit(); + let mut ptr = val.as_mut_ptr().cast::>(); + + for val in self.vstack.drain_back(N) { + unsafe { ptr.write(val) }; + ptr = unsafe { ptr.add(1) }; + } + // SAFETY: We just initialized all `N` elements + unsafe { val.assume_init() } + } + + pub fn pop_values(&mut self, n: usize) -> Vec> { + self.vstack.split_off_back(n) + } + pub fn pop_opaque(&mut self, n: usize) -> Vec { + let mut vstack = core::mem::take(&mut self.vstack); + + let ret = vstack + .drain_back(n) + .map(|val| self.make_opaque(val)) + .collect(); + self.vstack = vstack; + ret + } + + pub fn make_opaque(&mut self, val: VStackValue) -> OpaqueLocation { + if let Some(loc) = val.opaque_location() { + loc.clone() + } else { + let loc = match &val { + VStackValue::LValue(ty, _) => OpaqueLocation { + ty: Rc::new(ty.clone()), + kind: ir::StackValueKind::LValue, + num: self.loc_id_counter.next(), + has_addr: false, + }, + val => 
OpaqueLocation { + ty: Rc::new(val.value_type()), + kind: ir::StackValueKind::RValue, + num: self.loc_id_counter.next(), + has_addr: false, + }, + }; + + self.move_into(val, loc.clone()); + loc + } + } + + pub fn write_call( + &mut self, + targ: VStackValue, + params: Vec>, + next: Option, + ) { + match targ { + VStackValue::Constant(ir::Value::GlobalAddress { ty, item }) => { + let real_ty = match ty { + ir::Type::FnType(fnty) => XLangBox::into_inner(fnty), + ty => panic!("Cannot call a global constant of type {}", ty), + }; + + let params = params + .into_iter() + .map(|val| self.make_opaque(val)) + .collect::>(); + + let sym = match &*item.components { + [ir::PathComponent::Text(name)] + | [ir::PathComponent::Root, ir::PathComponent::Text(name)] => name.to_string(), + [ir::PathComponent::Root, rest @ ..] | [rest @ ..] => self.mach.mangle(rest), + }; + + if let Some(next) = next { + todo!("Call with next") + } else { + self.insns.push(SsaInstruction::Tailcall( + CallTarget { + ptr: OpaquePtr::Symbol(sym), + real_ty, + }, + params, + )); + } + } + VStackValue::Constant(val) => todo!("{}", val), + VStackValue::LValue(_, _) => todo!(), + VStackValue::Pointer(_, _) => todo!(), + VStackValue::OpaqueScalar(_, _) => todo!(), + VStackValue::AggregatePieced(_, _) => todo!(), + VStackValue::OpaqueAggregate(_, _) => todo!(), + VStackValue::CompareResult(_, _) => todo!(), + VStackValue::Trapped => todo!(), + VStackValue::ArrayRepeat(_, _) => todo!(), + } + } + + pub fn write_expr(&mut self, expr: &ir::Expr) { + match expr { + ir::Expr::Sequence(_) => todo!("sequence"), + ir::Expr::Const(val) => { + self.push(VStackValue::Constant(val.clone())); + } + ir::Expr::BinaryOp(_, _) => todo!("binary op"), + ir::Expr::UnaryOp(_, _) => todo!("unary op"), + ir::Expr::Convert(_, _) => todo!("convert"), + ir::Expr::Derive(_, _) => todo!("derive"), + ir::Expr::Local(_) => todo!("local"), + ir::Expr::Pop(_) => todo!("pop"), + ir::Expr::Dup(_) => todo!("dup"), + ir::Expr::Pivot(_, _) => todo!("pivot"), + ir::Expr::Aggregate(_) => todo!("aggregate"), + ir::Expr::Member(_) => todo!("member"), + ir::Expr::MemberIndirect(_) => todo!("member indirect"), + ir::Expr::Assign(_) => todo!("assign"), + ir::Expr::AsRValue(_) => todo!("as_rvalue"), + ir::Expr::CompoundAssign(_, _, _) => todo!("compound_assign"), + ir::Expr::FetchAssign(_, _, _) => todo!("fetch_assign"), + ir::Expr::LValueOp(_, _) => todo!("lvalue op"), + ir::Expr::UnaryLValue(_, _, _) => todo!("unary lvalue op"), + ir::Expr::Indirect => todo!("indirect"), + ir::Expr::AddrOf => todo!("addr_of"), + ir::Expr::Fence(_) => todo!("fence"), + ir::Expr::BeginStorage(_) => todo!("begin storage"), + ir::Expr::EndStorage(_) => todo!("end storage"), + ir::Expr::Select(_) => todo!("select"), + } + } + + pub fn write_terminator(&mut self, term: &ir::Terminator) { + match term { + ir::Terminator::Jump(targ) => { + let vals = self.incoming_count[&targ.target]; + + let vals = self.pop_opaque(vals); + + if targ.flags.contains(ir::JumpTargetFlags::FALLTHROUGH) { + self.insns + .push(SsaInstruction::Fallthrough(targ.target, vals)); + } else { + self.insns.push(SsaInstruction::Jump(targ.target, vals)); + } + } + ir::Terminator::Branch(_, _, _) => todo!("branch"), + ir::Terminator::BranchIndirect => todo!("branch indirect"), + ir::Terminator::Call(_, _, _) => todo!("call"), + ir::Terminator::Tailcall(_, call_fnty) => { + let params_count = call_fnty.params.len(); + let vals = self.pop_values(params_count); + + let target = self.pop(); + + self.write_call(target, vals, None); + } + 
ir::Terminator::Exit(_) => todo!("exit"), + ir::Terminator::Asm(_) => todo!("asm"), + ir::Terminator::Switch(_) => todo!("switch"), + ir::Terminator::Unreachable => todo!("unreachable"), + } + } +} + +impl core::fmt::Display for BasicBlockBuilder { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_fmt(format_args!(" target @{}:\n", self.id))?; + + for insn in &self.insns { + f.write_fmt(format_args!(" {};\n", insn))?; + } + Ok(()) + } } diff --git a/xlang/xlang_backend/src/ty.rs b/xlang/xlang_backend/src/ty.rs index 5e9ae22d..54402799 100644 --- a/xlang/xlang_backend/src/ty.rs +++ b/xlang/xlang_backend/src/ty.rs @@ -5,7 +5,7 @@ use xlang::{ AggregateDefinition, AggregateKind, AnnotationItem, Path, PointerAliasingRule, PointerKind, ScalarType, ScalarTypeHeader, ScalarTypeKind, ScalarValidity, Type, ValidRangeType, Value, }, - prelude::v1::{format, Box, HashMap, Pair, Some as XLangSome}, + prelude::v1::{HashMap, Pair, Some as XLangSome}, targets::properties::TargetProperties, }; diff --git a/xlang/xlang_host/build.rs b/xlang/xlang_host/build.rs index 9ce9f1cc..cfdd8f7a 100644 --- a/xlang/xlang_host/build.rs +++ b/xlang/xlang_host/build.rs @@ -1,6 +1,7 @@ use std::path::PathBuf; fn main() { + println!("cargo:rerun-if-changed=build.rs"); let rustc = std::env::var_os("RUSTC").unwrap(); let rustflags = std::env::var("CARGO_ENCODED_RUSTFLAGS") .ok() From 54c02a9477a74d27310faba6a2269861bb6b69d0 Mon Sep 17 00:00:00 2001 From: Connor Horman Date: Wed, 31 Jan 2024 23:25:18 -0500 Subject: [PATCH 10/74] feat(x86-codegen): Implement support for Tailcall and Jump --- codegen-clever/src/lib.rs | 5 +- codegen-w65/src/lib.rs | 5 +- codegen-x86/src/callconv.rs | 491 ++++++++++++------------- codegen-x86/src/lib.rs | 273 +++++++++++++- lc-binutils | 2 +- lcrust/libraries/liballoc/src/alloc.rs | 5 +- xlang/xlang_backend/src/callconv.rs | 428 +++++++++++++++++---- xlang/xlang_backend/src/lib.rs | 21 +- xlang/xlang_backend/src/mach.rs | 9 +- xlang/xlang_backend/src/ssa.rs | 9 +- xlang/xlang_backend/src/ty.rs | 139 +++++-- 11 files changed, 993 insertions(+), 394 deletions(-) diff --git a/codegen-clever/src/lib.rs b/codegen-clever/src/lib.rs index 9b351b27..fde692bd 100644 --- a/codegen-clever/src/lib.rs +++ b/codegen-clever/src/lib.rs @@ -3,7 +3,7 @@ use xlang::{ abi::string::StringView, plugin::XLangCodegen, prelude::v1::*, targets::properties::TargetProperties, }; -use xlang_backend::{mach::Machine, SsaCodegenPlugin}; +use xlang_backend::{mach::Machine, ssa::OpaqueLocation, ty::TypeInformation, SsaCodegenPlugin}; pub struct CleverAssignments {} @@ -32,6 +32,8 @@ impl Machine for CleverMachine { insns: &[xlang_backend::ssa::SsaInstruction], incoming: &[xlang_backend::ssa::OpaqueLocation], which: u32, + incoming_set: &HashMap>, + tys: &TypeInformation, ) -> Self::BlockClobbers { todo!() } @@ -52,6 +54,7 @@ impl Machine for CleverMachine { out: &mut W, label_sym: F, which: u32, + tys: &TypeInformation, ) -> std::io::Result<()> { todo!() } diff --git a/codegen-w65/src/lib.rs b/codegen-w65/src/lib.rs index 1883dbd5..5fee482a 100644 --- a/codegen-w65/src/lib.rs +++ b/codegen-w65/src/lib.rs @@ -3,7 +3,7 @@ use xlang::{ abi::string::StringView, plugin::XLangCodegen, prelude::v1::*, targets::properties::TargetProperties, }; -use xlang_backend::{mach::Machine, SsaCodegenPlugin}; +use xlang_backend::{mach::Machine, ssa::OpaqueLocation, ty::TypeInformation, SsaCodegenPlugin}; pub struct W65Assignments {} @@ -32,6 +32,8 @@ impl Machine for W65Machine { insns: 
&[xlang_backend::ssa::SsaInstruction], incoming: &[xlang_backend::ssa::OpaqueLocation], which: u32, + incoming_set: &HashMap>, + tys: &TypeInformation, ) -> Self::BlockClobbers { todo!() } @@ -52,6 +54,7 @@ impl Machine for W65Machine { out: &mut W, label_sym: F, which: u32, + tys: &TypeInformation, ) -> std::io::Result<()> { todo!() } diff --git a/codegen-x86/src/callconv.rs b/codegen-x86/src/callconv.rs index 1689fd6f..5ecdce9f 100644 --- a/codegen-x86/src/callconv.rs +++ b/codegen-x86/src/callconv.rs @@ -1,299 +1,280 @@ -use std::{collections::HashSet, rc::Rc}; - -use arch_ops::x86::{features::X86Feature, X86Register, X86RegisterClass}; - -use xlang::{ - prelude::v1::{Pair, Some as XLangSome}, - targets::properties::TargetProperties, -}; -use xlang_backend::{callconv::CallingConvention, ty::TypeInformation}; -use xlang_struct::{ - AggregateDefinition, FloatFormat, FnType, ScalarType, ScalarTypeHeader, ScalarTypeKind, Type, +use arch_ops::x86::{X86Mode, X86Register, X86RegisterClass}; +use xlang_backend::callconv::{ + CallConvInfo, ClassifyAggregateDisposition, ParamPosition, RegisterDisposition, + ReturnPointerBehaviour, StackedParamsOrder, Tag, }; -use crate::ValLocation; +use xlang::abi::option::Some as XLangSome; -#[allow(dead_code)] -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] -pub enum TypeClass { - Float, +#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +pub enum X86TypeClass { + NoClass, X87, + X87Up, Sse, + SseUp, + SseWide(X86RegisterClass), + Float, Integer, Memory, - Zero, } -#[allow(clippy::missing_panics_doc)] // TODO: remove todo!() -#[must_use] -pub fn classify_type(ty: &Type) -> Option { - match ty { - Type::Scalar(ScalarType { - header: - ScalarTypeHeader { - vectorsize: XLangSome(1..=65535), - .. - }, - .. - }) => Some(TypeClass::Sse), - Type::Scalar(ScalarType { - header: ScalarTypeHeader { bitsize: 80, .. }, - kind: - ScalarTypeKind::Float { - format: FloatFormat::IeeeExtPrecision, - }, - .. - }) => Some(TypeClass::X87), - Type::Scalar(ScalarType { - header: ScalarTypeHeader { - bitsize: 65..=128, .. - }, - kind: ScalarTypeKind::Float { .. }, - }) => Some(TypeClass::Sse), - Type::Scalar(ScalarType { - kind: ScalarTypeKind::Float { .. }, - .. - }) => Some(TypeClass::Float), - Type::Scalar(_) | Type::Pointer(_) => Some(TypeClass::Integer), - Type::Void | Type::FnType(_) | Type::Null => None, - Type::Array(ty) => classify_type(&ty.ty), - Type::TaggedType(_, ty) => classify_type(ty), - Type::Product(tys) => { - let mut infected = TypeClass::Zero; - for ty in tys { - infected = match (classify_type(ty)?, infected) { - (a, TypeClass::Zero) => a, - (_, TypeClass::Memory) => TypeClass::Memory, - (TypeClass::Float, TypeClass::Sse) => TypeClass::Sse, - (TypeClass::Float, TypeClass::X87) => TypeClass::X87, - (a, b) if a == b => a, - _ => TypeClass::Memory, - }; - } - Some(infected) - } - Type::Aligned(_, _) => todo!(), - Type::Aggregate(AggregateDefinition { fields, .. 
}) => { - let mut infected = TypeClass::Zero; - for ty in fields.iter().map(|Pair(_, ty)| ty) { - infected = match (classify_type(ty)?, infected) { - (a, TypeClass::Zero) => a, - (_, TypeClass::Memory) => TypeClass::Memory, - (TypeClass::Float, TypeClass::Sse) => TypeClass::Sse, - (TypeClass::Float, TypeClass::X87) => TypeClass::X87, - (a, b) if a == b => a, - _ => TypeClass::Memory, - }; - } - Some(infected) - } - Type::Named(path) => todo!("named type {:?}", path), - } +#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +pub enum X86Tag { + SysV64, } -pub trait X86CallConv { - fn prepare_stack(&self, ty: &FnType, frame_size: usize) -> usize; - fn find_parameter(&self, off: u32, ty: &FnType, infn: bool) -> ValLocation; - fn find_return_val(&self, ty: &Type) -> Option; - fn pass_return_place(&self, ty: &Type, frame_size: usize) -> Option; - fn with_tag(&self, tag: &str) -> Option>; - fn callee_saved(&self) -> &[X86Register]; -} +impl Tag for X86Tag { + type Register = X86Register; + type TypeClass = X86TypeClass; -#[derive(Clone, Debug)] -pub struct SysV64CC( - &'static TargetProperties<'static>, - HashSet, - Rc, -); - -impl X86CallConv for SysV64CC { - fn prepare_stack(&self, _ty: &FnType, _frame_size: usize) -> usize { - todo!() + fn tag_name(&self) -> &'static str { + match self { + Self::SysV64 => "SysV64", + } } - #[allow(clippy::no_effect_underscore_binding)] // TODO: use xmm_regs - fn find_parameter(&self, off: u32, ty: &FnType, _: bool) -> ValLocation { - let mut int_regs: &[X86Register] = &[ - X86Register::Rdi, - X86Register::Rsi, - X86Register::Rdx, - X86Register::Rcx, - X86Register::R8, - X86Register::R9, - ]; - let mut _xmm_regs: &[X86Register] = &[ - X86Register::Xmm(0), - X86Register::Xmm(1), - X86Register::Xmm(2), - X86Register::Xmm(3), - X86Register::Xmm(4), - X86Register::Xmm(5), - ]; + fn param_regs_for_class(&self, cl: &Self::TypeClass) -> &[Self::Register] { + match (self, cl) { + (X86Tag::SysV64, X86TypeClass::SseWide(X86RegisterClass::Tmm)) => &[], + (X86Tag::SysV64, X86TypeClass::Sse) + | (X86Tag::SysV64, X86TypeClass::SseUp) + | (X86Tag::SysV64, X86TypeClass::SseWide(X86RegisterClass::Xmm)) + | (X86Tag::SysV64, X86TypeClass::Float) => &[ + X86Register::Xmm(0), + X86Register::Xmm(1), + X86Register::Xmm(2), + X86Register::Xmm(3), + X86Register::Xmm(4), + X86Register::Xmm(5), + ], + (X86Tag::SysV64, X86TypeClass::SseWide(X86RegisterClass::Ymm)) => &[ + X86Register::Ymm(0), + X86Register::Ymm(1), + X86Register::Ymm(2), + X86Register::Ymm(3), + X86Register::Ymm(4), + X86Register::Ymm(5), + ], + (X86Tag::SysV64, X86TypeClass::SseWide(X86RegisterClass::Zmm)) => &[ + X86Register::Zmm(0), + X86Register::Zmm(1), + X86Register::Zmm(2), + X86Register::Zmm(3), + X86Register::Zmm(4), + X86Register::Zmm(5), + ], + (X86Tag::SysV64, X86TypeClass::Integer) => &[ + X86Register::Rdi, + X86Register::Rsi, + X86Register::Rdx, + X86Register::Rcx, + X86Register::R8, + X86Register::R9, + ], + (X86Tag::SysV64, _) => &[], + } + } - let has_return_param = self.pass_return_place(&ty.ret, 0).is_some(); - if has_return_param { - int_regs = &int_regs[1..]; + fn return_regs_for_class(&self, cl: &Self::TypeClass) -> &[Self::Register] { + match (self, cl) { + (X86Tag::SysV64, X86TypeClass::X87) => &[X86Register::Fp(0), X86Register::Fp(1)], + (X86Tag::SysV64, X86TypeClass::Sse) + | (X86Tag::SysV64, X86TypeClass::SseUp) + | (X86Tag::SysV64, X86TypeClass::Float) => &[X86Register::Xmm(0), X86Register::Xmm(1)], + (X86Tag::SysV64, X86TypeClass::SseWide(X86RegisterClass::Tmm)) => &[], + (X86Tag::SysV64, 
X86TypeClass::SseWide(X86RegisterClass::Xmm)) => { + &[X86Register::Xmm(0)] + } + (X86Tag::SysV64, X86TypeClass::SseWide(X86RegisterClass::Ymm)) => { + &[X86Register::Ymm(0)] + } + (X86Tag::SysV64, X86TypeClass::SseWide(X86RegisterClass::Zmm)) => { + &[X86Register::Zmm(0)] + } + (X86Tag::SysV64, X86TypeClass::Integer) => &[X86Register::Rax, X86Register::Rdx], + (X86Tag::SysV64, _) => &[], } - let mut last_val = ValLocation::Unassigned(0); - for ty in ty.params.iter().take(off as usize + 1) { - match (classify_type(ty).unwrap(), self.2.type_size(ty).unwrap()) { - (_, 0) => last_val = ValLocation::Null, - (TypeClass::Integer, 1) => { - int_regs.get(0).map_or_else( - || todo!(), - |reg| { - int_regs = &int_regs[1..]; - let reg = - X86Register::from_class(X86RegisterClass::ByteRex, reg.regnum()) - .unwrap(); - last_val = ValLocation::Register(reg); - }, - ); - } - (TypeClass::Integer, 2) => { - int_regs.get(0).map_or_else( - || todo!(), - |reg| { - int_regs = &int_regs[1..]; - let reg = X86Register::from_class(X86RegisterClass::Word, reg.regnum()) - .unwrap(); - last_val = ValLocation::Register(reg); - }, - ); - } - (TypeClass::Integer, 3 | 4) => { - int_regs.get(0).map_or_else( - || todo!(), - |reg| { - int_regs = &int_regs[1..]; - let reg = - X86Register::from_class(X86RegisterClass::Double, reg.regnum()) - .unwrap(); - last_val = ValLocation::Register(reg); - }, - ); - } - (TypeClass::Integer, 5..=8) => { - int_regs.get(0).map_or_else( - || todo!(), - |reg| { - int_regs = &int_regs[1..]; - last_val = ValLocation::Register(*reg); - }, - ); - } - (TypeClass::Integer, 9..=16) => { - int_regs.get(0..2).map_or_else( - || todo!(), - |reg| { - int_regs = &int_regs[2..]; - last_val = ValLocation::Regs(reg.to_owned()); - }, - ); - } - (TypeClass::Integer, 17..=24) => { - int_regs.get(0..3).map_or_else( - || todo!(), - |reg| { - int_regs = &int_regs[3..]; - last_val = ValLocation::Regs(reg.to_owned()); - }, - ); - } - (TypeClass::Integer, 25..=32) => { - int_regs.get(0..4).map_or_else( - || todo!(), - |reg| { - int_regs = &int_regs[4..]; - last_val = ValLocation::Regs(reg.to_owned()); - }, - ); + } + + fn replace_param_with_pointer(&self, cl: &[Self::TypeClass]) -> Option { + eprintln!("replace_param_with_pointer({:?})", cl); + match self { + Self::SysV64 => { + if cl.len() > 2 { + Some(X86TypeClass::Integer) + } else if cl.iter().any(|x| { + *x == X86TypeClass::Memory + || *x == X86TypeClass::X87 + || *x == X86TypeClass::X87Up + }) { + Some(X86TypeClass::Integer) + } else { + None } - _ => todo!(), } } - - last_val } - #[allow(clippy::unnested_or_patterns)] - fn find_return_val(&self, ty: &Type) -> Option { - match (classify_type(ty), self.2.type_size(ty)) { - (None, Some(_)) | (Some(_), None) => unreachable!(), - (Some(TypeClass::Zero), Some(0)) => Some(ValLocation::Null), - (Some(TypeClass::Zero), Some(_)) => { - panic!("Impossible situation (type has zst class, but has a size)") - } - (Some(TypeClass::Integer), Some(1)) => Some(ValLocation::Register(X86Register::Al)), - (Some(TypeClass::Integer), Some(2)) => Some(ValLocation::Register(X86Register::Ax)), - (Some(TypeClass::Integer), Some(4)) => Some(ValLocation::Register(X86Register::Eax)), - (Some(TypeClass::Integer), Some(8)) => Some(ValLocation::Register(X86Register::Rax)), - (Some(TypeClass::Integer), Some(16)) => { - Some(ValLocation::Regs(vec![X86Register::Rax, X86Register::Rdx])) + fn combine_wide(&self, cl: &[Self::TypeClass]) -> Option { + match cl { + [X86TypeClass::Sse, X86TypeClass::SseUp] => { + 
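+                // Descriptive note: an SSE eightbyte followed by SSEUP eightbytes is
+                // combined into one wide vector value: 2 eightbytes -> XMM here,
+                // 4 -> YMM and 8 -> ZMM in the arms below.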
Some(X86TypeClass::SseWide(X86RegisterClass::Xmm)) } - (Some(TypeClass::X87), Some(16)) => Some(ValLocation::Register(X86Register::Fp(0))), - (Some(TypeClass::Float), Some(4)) - | (Some(TypeClass::Float), Some(8)) - | (Some(TypeClass::Float), Some(16)) => { - Some(ValLocation::Register(X86Register::Xmm(0))) + [X86TypeClass::Sse, X86TypeClass::SseUp, X86TypeClass::SseUp, X86TypeClass::SseUp] => { + Some(X86TypeClass::SseWide(X86RegisterClass::Ymm)) } - (Some(TypeClass::Sse), Some(4)) - | (Some(TypeClass::Sse), Some(8)) - | (Some(TypeClass::Sse), Some(16)) => Some(ValLocation::Register(X86Register::Xmm(0))), - (Some(TypeClass::Sse), Some(32)) if self.1.contains(&X86Feature::Avx) => { - Some(ValLocation::Register(X86Register::Ymm(0))) + [X86TypeClass::Sse, X86TypeClass::SseUp, X86TypeClass::SseUp, X86TypeClass::SseUp, X86TypeClass::SseUp, X86TypeClass::SseUp, X86TypeClass::SseUp, X86TypeClass::SseUp] => { + Some(X86TypeClass::SseWide(X86RegisterClass::Zmm)) } - (Some(TypeClass::Sse), Some(64)) if self.1.contains(&X86Feature::Avx512f) => { - Some(ValLocation::Register(X86Register::Zmm(0))) + [X86TypeClass::X87, X86TypeClass::X87Up] + | [X86TypeClass::X87, X86TypeClass::X87Up, X86TypeClass::X87Up, X86TypeClass::X87Up] + | [X86TypeClass::X87, X86TypeClass::X87Up, X86TypeClass::X87Up, X86TypeClass::X87Up, X86TypeClass::X87Up, X86TypeClass::X87Up, X86TypeClass::X87Up, X86TypeClass::X87Up] => { + Some(X86TypeClass::X87) } _ => None, } } - fn pass_return_place(&self, _ty: &Type, _frame_size: usize) -> Option { - None // For now + fn replace_return_with_pointer( + &self, + cl: &[Self::TypeClass], + ) -> Option> { + match self { + Self::SysV64 => { + if cl.len() > 2 { + Some(ReturnPointerBehaviour::Param( + ParamPosition::First, + X86TypeClass::Integer, + )) + } else if cl.iter().any(|x| *x == X86TypeClass::Memory) { + Some(ReturnPointerBehaviour::Param( + ParamPosition::First, + X86TypeClass::Integer, + )) + } else { + None + } + } + } } - fn with_tag(&self, _: &str) -> Option> { - Some(Box::new((*self).clone())) + fn replace_class_as_varargs(&self, _: &Self::TypeClass) -> Option { + None + } + + fn register_disposition(&self, cl: &Self::TypeClass) -> RegisterDisposition { + match self { + X86Tag::SysV64 => match cl { + X86TypeClass::Sse + | X86TypeClass::SseUp + | X86TypeClass::SseWide(_) + | X86TypeClass::Float => RegisterDisposition::Consume, + _ => RegisterDisposition::Interleave, + }, + } } - fn callee_saved(&self) -> &[X86Register] { - &[ - X86Register::Rbx, - X86Register::Rbp, - X86Register::Rsp, // note: This is hardcoded in the codegen - X86Register::R12, - X86Register::R13, - X86Register::R14, - X86Register::R15, - ] + fn stacked_params_order(&self) -> StackedParamsOrder { + match self { + X86Tag::SysV64 => StackedParamsOrder::Rtl, + } } } -impl<'a> CallingConvention for dyn X86CallConv + 'a { - type Loc = ValLocation; +#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +pub struct X86CallConvInfo { + pub mode: X86Mode, +} + +impl CallConvInfo for X86CallConvInfo { + type Tag = X86Tag; + + type TypeClass = X86TypeClass; + + type Register = X86Register; + + fn get_tag(&self, tag: &str) -> Self::Tag { + match (self.mode, tag) { + (X86Mode::Long, "SysV64") => X86Tag::SysV64, + (mode, tag) => todo!("{} in {:?}", tag, mode), + } + } + + fn no_class(&self) -> Self::TypeClass { + X86TypeClass::NoClass + } + + fn classify_scalar(&self, sty: xlang_struct::ScalarType) -> Vec { + let width = self.mode.width(); + let scalar_regs = ((sty.header.bitsize + (width - 1)) as usize) >> 
(width.trailing_zeros()); + + if let XLangSome(elements) = sty.header.vectorsize { + let total_regs = (scalar_regs) * (elements as usize); + if total_regs > 0 { + let mut regs = Vec::with_capacity(total_regs); + regs.push(X86TypeClass::Sse); + for _ in 1..total_regs { + regs.push(X86TypeClass::SseUp) + } + regs + } else { + vec![X86TypeClass::NoClass] + } + } else { + if scalar_regs == 0 { + vec![X86TypeClass::NoClass] + } else { + match sty.kind { + xlang_struct::ScalarTypeKind::Empty => vec![X86TypeClass::NoClass; scalar_regs], + xlang_struct::ScalarTypeKind::Integer { .. } + | xlang_struct::ScalarTypeKind::Fixed { .. } + | xlang_struct::ScalarTypeKind::Char { .. } => { + vec![X86TypeClass::Integer; scalar_regs] + } + xlang_struct::ScalarTypeKind::Float { + format: xlang::ir::FloatFormat::IeeeExtPrecision, + } if sty.header.bitsize < 80 => { + let mut regs = Vec::with_capacity(scalar_regs); + regs.push(X86TypeClass::X87); + for _ in 1..scalar_regs { + regs.push(X86TypeClass::X87Up) + } + regs + } + xlang_struct::ScalarTypeKind::Float { .. } => { + vec![X86TypeClass::Float; scalar_regs] + } + xlang_struct::ScalarTypeKind::Posit => vec![X86TypeClass::Float; scalar_regs], + } + } + } + } - fn pass_return_place(&self, ty: &Type) -> Option { - self.pass_return_place(ty, 0) + fn classify_pointer(&self) -> Self::TypeClass { + X86TypeClass::Integer } - fn find_param(&self, fnty: &FnType, _: &FnType, param: u32, infn: bool) -> Self::Loc { - self.find_parameter(param, fnty, infn) + fn classify_aggregate_disposition( + &self, + ) -> xlang_backend::callconv::ClassifyAggregateDisposition { + ClassifyAggregateDisposition::SplitFlat((self.mode.width() as u64) >> 3) } - fn find_return_val(&self, fnty: &FnType) -> Self::Loc { - self.find_return_val(&fnty.ret).unwrap() + fn merge_class(&self, left: Self::TypeClass, right: Self::TypeClass) -> Self::TypeClass { + if left == right { + left + } else { + match (left, right) { + (X86TypeClass::NoClass, other) | (other, X86TypeClass::NoClass) => other, + (X86TypeClass::Memory, _) | (_, X86TypeClass::Memory) => X86TypeClass::Memory, + (X86TypeClass::Sse, X86TypeClass::SseUp) + | (X86TypeClass::SseUp, X86TypeClass::Sse) => X86TypeClass::Sse, + (X86TypeClass::Integer, _) | (_, X86TypeClass::Integer) => X86TypeClass::Integer, + _ => X86TypeClass::Memory, + } + } } -} -#[allow(clippy::module_name_repetitions)] -pub fn get_callconv( - _tag: &str, - target: &'static TargetProperties<'static>, - features: HashSet, - tys: Rc, -) -> Option> { - Some(Box::new(SysV64CC(target, features, tys))) + fn adjust_classes_after_combine(&self, _: &mut [Self::TypeClass]) {} } diff --git a/codegen-x86/src/lib.rs b/codegen-x86/src/lib.rs index a9bd4269..b7dfd6e1 100644 --- a/codegen-x86/src/lib.rs +++ b/codegen-x86/src/lib.rs @@ -1,12 +1,39 @@ -use arch_ops::x86::{X86Mode, X86Register}; +use arch_ops::{ + traits::{Address, InsnWrite}, + x86::{ + codegen::{X86CodegenOpcode, X86Encoder, X86Instruction, X86Operand}, + X86Mode, X86Register, X86RegisterClass, + }, +}; +use callconv::X86CallConvInfo; use target_tuples::{Architecture, Target}; use xlang::{ - abi::string::StringView, plugin::XLangCodegen, prelude::v1::*, + abi::{pair::Pair, string::StringView}, + plugin::XLangCodegen, + prelude::v1::{DynBox, HashMap}, targets::properties::TargetProperties, }; -use xlang_backend::{mach::Machine, SsaCodegenPlugin}; +use xlang_backend::{ + callconv::{compute_call_conv, CallConvLocation}, + expr::Trap, + mach::Machine, + ssa::OpaquePtr, + ty::TypeInformation, + SsaCodegenPlugin, +}; + +mod 
callconv; -pub struct LocationAssignment {} +#[derive(Clone, Debug, Hash, PartialEq, Eq)] +pub enum X86ValLocation { + Register(X86Register), +} + +pub struct LocationAssignment { + foreign_location: X86ValLocation, + owning_bb: u32, + change_owner: Vec<(usize, X86ValLocation)>, +} pub struct X86Machine { mode: Option, @@ -16,12 +43,93 @@ pub struct X86Assignments { mode: X86Mode, sp: X86Register, available_int_registers: Vec, - stack_width: u32, + stack_width: i32, assigns: HashMap, } pub struct X86Clobbers {} +fn move_opcode( + class: X86RegisterClass, + align: Option, + hint_vec_is_int: bool, +) -> X86CodegenOpcode { + match class { + X86RegisterClass::Xmm if align.unwrap_or(16) >= 16 => { + if hint_vec_is_int { + X86CodegenOpcode::Movdqa + } else { + X86CodegenOpcode::Movaps + } + } + X86RegisterClass::Ymm if align.unwrap_or(32) >= 32 => { + if hint_vec_is_int { + X86CodegenOpcode::Vmovdqa + } else { + X86CodegenOpcode::Vmovaps + } + } + X86RegisterClass::Zmm if align.unwrap_or(64) >= 64 => { + if hint_vec_is_int { + X86CodegenOpcode::Vmovdqa32 + } else { + X86CodegenOpcode::Vmovaps + } + } + X86RegisterClass::Xmm => { + if hint_vec_is_int { + X86CodegenOpcode::Movdqu + } else { + X86CodegenOpcode::Movups + } + } + X86RegisterClass::Ymm => { + if hint_vec_is_int { + X86CodegenOpcode::Vmovdqu + } else { + X86CodegenOpcode::Vmovups + } + } + X86RegisterClass::Zmm => { + if hint_vec_is_int { + X86CodegenOpcode::Vmovdqu32 + } else { + X86CodegenOpcode::Vmovups + } + } + X86RegisterClass::Tmm => todo!("tmmreg"), + X86RegisterClass::St => todo!("fpreg"), + _ => X86CodegenOpcode::Mov, + } +} + +impl X86Machine { + pub fn write_move( + &self, + writer: &mut X86Encoder, + dest: &X86ValLocation, + src: &X86ValLocation, + ) -> std::io::Result<()> { + if dest == src { + return Ok(()); + } + match (dest, src) { + (X86ValLocation::Register(dest), X86ValLocation::Register(src)) => { + if dest.class() != src.class() { + panic!("Cannot move between register classes") + } + + let opcode = move_opcode(dest.class(), None, false); + + writer.write_insn(X86Instruction::new( + opcode, + vec![X86Operand::Register(*dest), X86Operand::Register(*src)], + )) + } + } + } +} + impl Machine for X86Machine { fn matches_target(&self, targ: StringView) -> bool { let arch = Target::parse(&targ).arch(); @@ -43,7 +151,25 @@ impl Machine for X86Machine { type BlockClobbers = X86Clobbers; fn new_assignments(&self) -> Self::Assignments { - todo!() + let mode = self.mode.expect("Target must have been set"); + let gpr_size = mode.largest_gpr(); + let sp = X86Register::from_class(gpr_size, 4).unwrap(); + let max_gpr_num = match mode { + X86Mode::Long => 16, + _ => 8, + }; + let int_registers = (0..max_gpr_num) + .filter(|s| *s != 4) + .flat_map(|x| X86Register::from_class(gpr_size, x)) + .collect(); + + X86Assignments { + mode, + sp, + available_int_registers: int_registers, + stack_width: 0, + assigns: HashMap::new(), + } } fn assign_locations( @@ -52,8 +178,51 @@ impl Machine for X86Machine { insns: &[xlang_backend::ssa::SsaInstruction], incoming: &[xlang_backend::ssa::OpaqueLocation], which: u32, + incoming_set: &HashMap>, + tys: &TypeInformation, ) -> Self::BlockClobbers { - todo!() + let mut clobbers = X86Clobbers {}; + for (num, insn) in insns.iter().enumerate() { + match insn { + xlang_backend::ssa::SsaInstruction::Jump(targ, old_locs) + | xlang_backend::ssa::SsaInstruction::Fallthrough(targ, old_locs) => { + let foreign_locs = &incoming_set[targ]; + + for (old_loc, new_loc) in old_locs.iter().zip(foreign_locs) { + 
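+                        // Descriptive note: each value carried by the jump must be moved
+                        // from its location in this block to the location the successor
+                        // block expects (`foreign_locs`); that remapping is not
+                        // implemented yet.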
todo!("jump remap {} => {}", old_loc, new_loc) + } + } + xlang_backend::ssa::SsaInstruction::Exit(_) => todo!("exit"), + xlang_backend::ssa::SsaInstruction::Tailcall(targ, locs) => { + let callconv = X86CallConvInfo { + mode: assignments.mode, + }; + + let callconv = compute_call_conv(&callconv, &targ.real_ty, &targ.call_ty, tys); + + for (loc, call_loc) in locs.iter().zip(callconv.params()) { + match call_loc { + CallConvLocation::Null => {} + CallConvLocation::Register(reg) => { + assignments + .assigns + .get_or_insert_with_mut(loc.num, |_| LocationAssignment { + foreign_location: X86ValLocation::Register(*reg), + owning_bb: which, + change_owner: vec![], + }) + .change_owner + .push((num, X86ValLocation::Register(*reg))); + } + loc => todo!("{:?}", loc), + } + } + } + xlang_backend::ssa::SsaInstruction::Trap(_) => {} + xlang_backend::ssa::SsaInstruction::LoadImmediate(_, _) => {} + } + } + clobbers } fn codegen_block std::prelude::v1::String>( @@ -64,8 +233,83 @@ impl Machine for X86Machine { out: &mut W, label_sym: F, which: u32, + tys: &TypeInformation, ) -> std::io::Result<()> { - todo!() + let mut encoder = X86Encoder::new(out, assignments.mode); + let mut cur_locations = HashMap::<_, _>::new(); + for Pair(addr, loc) in &assignments.assigns { + if loc.owning_bb == which { + cur_locations.insert(*addr, loc.foreign_location.clone()); + } + } + for (num, insn) in insns.iter().enumerate() { + for Pair(addr, loc) in &assignments.assigns { + if loc.owning_bb == which { + for (at, new_loc) in &loc.change_owner { + if *at == num { + self.write_move(&mut encoder, new_loc, &cur_locations[addr])?; + break; + } + } + } + } + + match insn { + xlang_backend::ssa::SsaInstruction::Jump(targ, _) => { + encoder.write_insn(X86Instruction::new( + X86CodegenOpcode::Jmp, + vec![X86Operand::RelOffset(Address::Symbol { + name: label_sym(*targ), + disp: 0, + })], + ))?; + } + xlang_backend::ssa::SsaInstruction::Fallthrough(_, _) => {} + xlang_backend::ssa::SsaInstruction::Exit(_) => { + if assignments.stack_width > 0 { + encoder.write_insn(X86Instruction::new( + X86CodegenOpcode::Add, + vec![ + X86Operand::Register(assignments.sp), + X86Operand::Immediate(assignments.stack_width as i64), + ], + ))?; + } + encoder.write_insn(X86Instruction::Ret)?; + } + xlang_backend::ssa::SsaInstruction::Tailcall(targ, _) => match &targ.ptr { + OpaquePtr::Symbol(sym) => encoder.write_insn(X86Instruction::new( + X86CodegenOpcode::Jmp, + vec![X86Operand::RelOffset(Address::PltSym { name: sym.clone() })], + ))?, + OpaquePtr::Pointer(ptr) => todo!("indirect call"), + }, + xlang_backend::ssa::SsaInstruction::Trap(Trap::Breakpoint) => { + encoder.write_insn(X86Instruction::Int3)? + } + xlang_backend::ssa::SsaInstruction::Trap(_) => { + encoder.write_insn(X86Instruction::Ud2)? 
+ } + xlang_backend::ssa::SsaInstruction::LoadImmediate(loc, val) => { + if let Some(location) = cur_locations.get(&loc.num) { + match location { + X86ValLocation::Register(reg) => { + let mov = move_opcode(reg.class(), None, true); + encoder.write_insn(X86Instruction::new( + mov, + vec![ + X86Operand::Register(*reg), + X86Operand::Immediate((*val) as i64), + ], + ))?; + } + } + } + } + } + } + + Ok(()) } fn codegen_prologue( @@ -73,7 +317,18 @@ impl Machine for X86Machine { assignments: &Self::Assignments, out: &mut W, ) -> std::io::Result<()> { - todo!() + if assignments.stack_width != 0 { + let mut encoder = X86Encoder::new(out, assignments.mode); + encoder.write_insn(X86Instruction::new( + X86CodegenOpcode::Sub, + vec![ + X86Operand::Register(assignments.sp), + X86Operand::Immediate(-assignments.stack_width as i64), + ], + ))?; + } + + Ok(()) } } diff --git a/lc-binutils b/lc-binutils index d97de389..967ded32 160000 --- a/lc-binutils +++ b/lc-binutils @@ -1 +1 @@ -Subproject commit d97de389bd9a941c222320229bbe41f41294230b +Subproject commit 967ded327dff8b16e8f04474b359fe565b3519f9 diff --git a/lcrust/libraries/liballoc/src/alloc.rs b/lcrust/libraries/liballoc/src/alloc.rs index b341282b..c542f34f 100644 --- a/lcrust/libraries/liballoc/src/alloc.rs +++ b/lcrust/libraries/liballoc/src/alloc.rs @@ -265,7 +265,10 @@ unsafe impl Allocator for Global { fn allocate(&self, layout: Layout) -> Result, AllocError> { if layout.size() == 0 { Ok(unsafe { - NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(1usize as *mut u8, 0)) + NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut( + layout.align() as *mut u8, + 0, + )) }) } else { let ptr = unsafe { self::alloc(layout) }; diff --git a/xlang/xlang_backend/src/callconv.rs b/xlang/xlang_backend/src/callconv.rs index fe514099..b92234e9 100644 --- a/xlang/xlang_backend/src/callconv.rs +++ b/xlang/xlang_backend/src/callconv.rs @@ -1,123 +1,389 @@ -use std::{rc::Rc, sync::Arc}; +use core::cmp::Eq; +use core::hash::Hash; -use xlang::ir::{FnType, Type}; +use xlang::{ + abi::{collection::HashMap, pair::Pair}, + ir::{ArrayType, FnType, ScalarType, Type}, +}; -use crate::expr::ValLocation; +use crate::ty::{AggregateLayout, FlattenFieldsOf, TypeInformation}; -/// Represents a calling convention, which can -pub trait CallingConvention { - /// The type for locations that the Calling Convention stores values - type Loc: ValLocation; +#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +pub enum ParamPosition { + First, + Last, +} - /// If a given type is passed in a return place, then obtain the location to store that place. 
- fn pass_return_place(&self, ty: &Type) -> Option; +#[derive(Clone, Debug, Hash, PartialEq, Eq)] +pub enum CallConvLocation { + Register(R), + Indirect(R), + StackOffset(i32), + Split(Vec>), + Null, +} - /// Find the `n`th parameter inside or outside of the context of the function - fn find_param(&self, fnty: &FnType, real: &FnType, param: u32, infn: bool) -> Self::Loc; +#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +pub enum ReturnPointerBehaviour { + Dedicated(R), + Param(ParamPosition, C), +} - /// Finds the return value location - fn find_return_val(&self, fnty: &FnType) -> Self::Loc; +#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +pub enum RegisterDisposition { + Interleave, + Consume, +} - /// Determines whether tailcall is possible +#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +pub enum StackedParamsOrder { + Rtl, + Ltr, +} - fn can_tail(&self, fnty: &FnType, base_ty: &FnType) -> bool { - self.pass_return_place(&fnty.ret).is_some() - || !self.pass_return_place(&base_ty.ret).is_some() - } +#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +pub enum ClassifyAggregateDisposition { + Single(C), + Recursive, + SplitFlat(u64), } -impl CallingConvention for &C { - type Loc = C::Loc; +pub trait Tag { + type Register: Clone + Eq; + type TypeClass; + fn tag_name(&self) -> &'static str; - fn pass_return_place(&self, ty: &Type) -> Option { - C::pass_return_place(self, ty) - } + fn param_regs_for_class(&self, cl: &Self::TypeClass) -> &[Self::Register]; - fn find_param(&self, fnty: &FnType, real: &FnType, param: u32, infn: bool) -> Self::Loc { - C::find_param(self, fnty, real, param, infn) - } + fn return_regs_for_class(&self, cl: &Self::TypeClass) -> &[Self::Register]; - fn find_return_val(&self, fnty: &FnType) -> Self::Loc { - C::find_return_val(self, fnty) - } -} + fn replace_param_with_pointer(&self, cl: &[Self::TypeClass]) -> Option; -impl CallingConvention for &mut C { - type Loc = C::Loc; + fn combine_wide(&self, cl: &[Self::TypeClass]) -> Option; - fn pass_return_place(&self, ty: &Type) -> Option { - C::pass_return_place(self, ty) - } + fn replace_return_with_pointer( + &self, + cl: &[Self::TypeClass], + ) -> Option>; - fn find_param(&self, fnty: &FnType, real: &FnType, param: u32, infn: bool) -> Self::Loc { - C::find_param(self, fnty, real, param, infn) - } + fn replace_class_as_varargs(&self, cl: &Self::TypeClass) -> Option; - fn find_return_val(&self, fnty: &FnType) -> Self::Loc { - C::find_return_val(self, fnty) - } -} + fn register_disposition(&self, cl: &Self::TypeClass) -> RegisterDisposition; -impl CallingConvention for Box { - type Loc = C::Loc; + fn stacked_params_order(&self) -> StackedParamsOrder; +} - fn pass_return_place(&self, ty: &Type) -> Option { - C::pass_return_place(self, ty) - } +pub trait CallConvInfo { + type Tag: Tag; + type TypeClass: Eq + Hash + Clone; + type Register: Eq + Clone; + fn get_tag(&self, tag: &str) -> Self::Tag; - fn find_param(&self, fnty: &FnType, real: &FnType, param: u32, infn: bool) -> Self::Loc { - C::find_param(self, fnty, real, param, infn) - } + fn no_class(&self) -> Self::TypeClass; + fn classify_scalar(&self, sty: ScalarType) -> Vec; + fn classify_pointer(&self) -> Self::TypeClass; + fn classify_aggregate_disposition(&self) -> ClassifyAggregateDisposition; - fn find_return_val(&self, fnty: &FnType) -> Self::Loc { - C::find_return_val(self, fnty) - } + fn merge_class(&self, left: Self::TypeClass, right: Self::TypeClass) -> Self::TypeClass; + fn adjust_classes_after_combine(&self, classes: &mut [Self::TypeClass]); } -impl 
CallingConvention for xlang::abi::boxed::Box { - type Loc = C::Loc; +pub struct CallConv { + params: Vec>, + stacked_params_count: u32, + ret_location: CallConvLocation, + tag: T, + is_varargs: bool, +} - fn pass_return_place(&self, ty: &Type) -> Option { - C::pass_return_place(self, ty) +impl CallConv { + pub fn tag(&self) -> &T { + &self.tag } - fn find_param(&self, fnty: &FnType, real: &FnType, param: u32, infn: bool) -> Self::Loc { - C::find_param(self, fnty, real, param, infn) + pub fn params(&self) -> &[CallConvLocation] { + &self.params } - fn find_return_val(&self, fnty: &FnType) -> Self::Loc { - C::find_return_val(self, fnty) + pub fn ret_location(&self) -> &CallConvLocation { + &self.ret_location } -} -impl CallingConvention for Rc { - type Loc = C::Loc; - - fn pass_return_place(&self, ty: &Type) -> Option { - C::pass_return_place(self, ty) + pub fn stacked_params_count(&self) -> u32 { + self.stacked_params_count } - fn find_param(&self, fnty: &FnType, real: &FnType, param: u32, infn: bool) -> Self::Loc { - C::find_param(self, fnty, real, param, infn) + pub fn is_varargs(&self) -> bool { + self.is_varargs } +} - fn find_return_val(&self, fnty: &FnType) -> Self::Loc { - C::find_return_val(self, fnty) +pub fn classify_type( + info: &I, + ty: &Type, + tys: &TypeInformation, +) -> Vec { + let mut classes = Vec::new(); + match ty { + Type::Null | Type::Void => classes.push(info.no_class()), + Type::Scalar(sty) => classes.extend(info.classify_scalar(*sty)), + Type::FnType(_) => panic!("Cannot classify a non-value type (other than void)"), + Type::Pointer(_) => classes.push(info.classify_pointer()), + Type::Array(_) => todo!(), + Type::TaggedType(_, ty) => classes = classify_type(info, ty, tys), + Type::Product(_) => todo!(), + Type::Aligned(_, _) => todo!(), + Type::Aggregate(_) => todo!(), + Type::Named(_) => todo!(), } -} -impl CallingConvention for Arc { - type Loc = C::Loc; + info.adjust_classes_after_combine(&mut classes); - fn pass_return_place(&self, ty: &Type) -> Option { - C::pass_return_place(self, ty) - } + classes +} - fn find_param(&self, fnty: &FnType, real: &FnType, param: u32, infn: bool) -> Self::Loc { - C::find_param(self, fnty, real, param, infn) +pub fn compute_call_conv( + info: &I, + real_ty: &FnType, + call_ty: &FnType, + tys: &TypeInformation, +) -> CallConv { + let tag = info.get_tag(&real_ty.tag); + + let is_varargs = real_ty.variadic; + let fixed_arity_params_count = real_ty.params.len(); + + let mut param_register_by_class = HashMap::<_, _>::new(); + + let mut return_register_by_class = HashMap::<_, _>::new(); + + let return_classes = classify_type(info, &real_ty.ret, tys); + + let mut return_loc = None; + + let mut total_params_consumed = 0; + + let mut param_locs = Vec::new(); + + let no_class = info.no_class(); + + if let Some(combined) = tag.combine_wide(&return_classes) { + if combined == no_class { + return_loc = Some(CallConvLocation::Null); + } else { + let regs = return_register_by_class + .get_or_insert_with_mut(combined, |cl| tag.return_regs_for_class(cl)); + + if let Some(val) = regs.first() { + return_loc = Some(CallConvLocation::Register(val.clone())); + } else { + panic!("Wide register return is non-existant") + } + } + } else if let Some(targ) = tag.replace_return_with_pointer(&return_classes) { + match targ { + ReturnPointerBehaviour::Dedicated(reg) => { + return_loc = Some(CallConvLocation::Register(reg)) + } + ReturnPointerBehaviour::Param(ParamPosition::First, cl) => { + let regs = param_register_by_class + 
.get_or_insert_with_mut(cl.clone(), |cl| tag.param_regs_for_class(cl)); + let reg = match tag.register_disposition(&cl) { + RegisterDisposition::Interleave => { + if let Some((l, r)) = regs.split_first() { + let l = l.clone(); + *regs = r; + Some(l) + } else { + None + } + } + RegisterDisposition::Consume => { + if let Some(val) = regs.get(total_params_consumed) { + total_params_consumed += 1; + Some(val.clone()) + } else { + None + } + } + }; + + if let Some(reg) = reg { + return_loc = Some(CallConvLocation::Indirect(reg)) + } else { + todo!("return in memory on stack") + } + } + ReturnPointerBehaviour::Param(_, cl) => todo!("non-start return pointer"), + } + } else { + let mut split_locs = vec![]; + + for cl in return_classes { + if cl == no_class { + split_locs.push(CallConvLocation::Null); + } else { + let regs = return_register_by_class + .get_or_insert_with_mut(cl, |cl| tag.return_regs_for_class(cl)); + + if let Some((reg, rest)) = regs.split_first() { + split_locs.push(CallConvLocation::Register(reg.clone())); + *regs = rest; + } else { + panic!("Register return is non-existant") + } + } + } + + if split_locs.is_empty() { + return_loc = Some(CallConvLocation::Null); + } else if split_locs.len() == 1 { + let val = split_locs.pop().unwrap(); + return_loc = Some(val); + } else { + return_loc = Some(CallConvLocation::Split(split_locs)); + } } - fn find_return_val(&self, fnty: &FnType) -> Self::Loc { - C::find_return_val(self, fnty) + for (pos, param) in call_ty.params.iter().enumerate() { + let class = classify_type(info, param, tys); + if let Some(combined) = tag.combine_wide(&class) { + let class = if pos >= fixed_arity_params_count { + if let Some(replace) = tag.replace_class_as_varargs(&combined) { + replace + } else { + combined + } + } else { + combined + }; + let regs = param_register_by_class + .get_or_insert_with_mut(class.clone(), |cl| tag.param_regs_for_class(cl)); + let reg = match tag.register_disposition(&class) { + RegisterDisposition::Interleave => { + if let Some((l, r)) = regs.split_first() { + let l = l.clone(); + *regs = r; + Some(l) + } else { + None + } + } + RegisterDisposition::Consume => { + if let Some(val) = regs.get(total_params_consumed) { + total_params_consumed += 1; + Some(val.clone()) + } else { + None + } + } + }; + + if let Some(reg) = reg { + param_locs.push(CallConvLocation::Register(reg)) + } else { + todo!("stack") + } + } else if let Some(ptr) = tag.replace_param_with_pointer(&class) { + let class = if pos >= fixed_arity_params_count { + if let Some(replace) = tag.replace_class_as_varargs(&ptr) { + replace + } else { + ptr + } + } else { + ptr + }; + let regs = param_register_by_class + .get_or_insert_with_mut(class.clone(), |cl| tag.param_regs_for_class(cl)); + let reg = match tag.register_disposition(&class) { + RegisterDisposition::Interleave => { + if let Some((l, r)) = regs.split_first() { + let l = l.clone(); + *regs = r; + Some(l) + } else { + None + } + } + RegisterDisposition::Consume => { + if let Some(val) = regs.get(total_params_consumed) { + total_params_consumed += 1; + Some(val.clone()) + } else { + None + } + } + }; + + if let Some(reg) = reg { + param_locs.push(CallConvLocation::Indirect(reg)) + } else { + todo!("stack") + } + } else { + let mut split_locs = Vec::new(); + for class in class { + if class == no_class { + split_locs.push(CallConvLocation::Null); + } else { + let class = if pos >= fixed_arity_params_count { + if let Some(replace) = tag.replace_class_as_varargs(&class) { + replace + } else { + class + } + } else { + 
class + }; + let regs = param_register_by_class + .get_or_insert_with_mut(class.clone(), |cl| tag.param_regs_for_class(cl)); + let reg = match tag.register_disposition(&class) { + RegisterDisposition::Interleave => { + if let Some((l, r)) = regs.split_first() { + let l = l.clone(); + *regs = r; + Some(l) + } else { + None + } + } + RegisterDisposition::Consume => { + if let Some(val) = regs.get(total_params_consumed) { + total_params_consumed += 1; + Some(val.clone()) + } else { + None + } + } + }; + + if let Some(reg) = reg { + param_locs.push(CallConvLocation::Register(reg)) + } else { + todo!("stack") + } + } + } + + if split_locs.is_empty() { + param_locs.push(CallConvLocation::Null); + } else if split_locs.len() == 1 { + let val = split_locs.pop().unwrap(); + param_locs.push(val); + } else { + param_locs.push(CallConvLocation::Split(split_locs)); + } + } + } + let return_loc = return_loc.expect("We have a return type, right"); + drop(return_register_by_class); // These are here for Borrowck: Now I'm missing the dropck_eyepatch on drop code for `HashMap` + drop(param_register_by_class); + CallConv { + params: param_locs, + stacked_params_count: 0, + ret_location: return_loc, + tag, + is_varargs, } } diff --git a/xlang/xlang_backend/src/lib.rs b/xlang/xlang_backend/src/lib.rs index 595ef704..bbdd7bc4 100644 --- a/xlang/xlang_backend/src/lib.rs +++ b/xlang/xlang_backend/src/lib.rs @@ -13,11 +13,11 @@ use xlang::{ abi::{io::WriteAdapter, option::Some as XLangSome, pair::Pair, try_}, ir::{self, Linkage}, plugin::{XLangCodegen, XLangPlugin}, - targets::properties::TargetProperties, + targets::properties::{StackAttributeControlStyle, TargetProperties}, }; use binfmt::{ - fmt::{Section, SectionFlag}, + fmt::{Section, SectionFlag, SectionType}, sym::{Symbol, SymbolKind, SymbolType}, }; @@ -275,7 +275,22 @@ impl XLangCodegen for SsaCodegenPlugin { syms.push(Symbol::new_undef(sym_name, SymbolType::Function, sym_kind)); } } - + match targ.link.stack_attribute_control { + StackAttributeControlStyle::NoExec => {} + StackAttributeControlStyle::CveFactory => { + eprintln!("Warning: Codegen for target selected sets Stack as Writable") + } + StackAttributeControlStyle::GnuStack => { + sections.push(Section { + name: format!(".note.GNU-stack"), + align: 1024, + ty: SectionType::NoBits, + flags: Some(SectionFlag::Writable.into()), + ..Default::default() + }); + } + ctrl => eprintln!("Warning: Unknown stack attribute control style {:?}", ctrl), + } let mut section_map = vec![]; for section in sections { let new_off = output diff --git a/xlang/xlang_backend/src/mach.rs b/xlang/xlang_backend/src/mach.rs index e5b7f516..e858ab98 100644 --- a/xlang/xlang_backend/src/mach.rs +++ b/xlang/xlang_backend/src/mach.rs @@ -1,8 +1,12 @@ -use xlang::{abi::string::StringView, targets::properties::TargetProperties}; +use xlang::{ + abi::collection::HashMap, abi::string::StringView, targets::properties::TargetProperties, + vec::Vec, +}; use crate::{ mangle::mangle_itanium, ssa::{OpaqueLocation, SsaInstruction}, + ty::TypeInformation, }; use arch_ops::traits::InsnWrite; @@ -21,6 +25,8 @@ pub trait Machine { insns: &[SsaInstruction], incoming: &[OpaqueLocation], which: u32, + incoming_set: &HashMap>, + tys: &TypeInformation, ) -> Self::BlockClobbers; fn codegen_prologue( &self, @@ -35,6 +41,7 @@ pub trait Machine { out: &mut W, label_sym: F, which: u32, + tys: &TypeInformation, ) -> std::io::Result<()>; fn mangle(&self, path: &[xlang::ir::PathComponent]) -> String { mangle_itanium(path) diff --git 
a/xlang/xlang_backend/src/ssa.rs b/xlang/xlang_backend/src/ssa.rs index 820b1b75..03262bd4 100644 --- a/xlang/xlang_backend/src/ssa.rs +++ b/xlang/xlang_backend/src/ssa.rs @@ -94,6 +94,7 @@ impl core::fmt::Display for SsaInstruction { pub struct CallTarget { pub ptr: OpaquePtr, pub real_ty: ir::FnType, + pub call_ty: ir::FnType, } impl core::fmt::Display for CallTarget { @@ -253,12 +254,15 @@ impl FunctionBuilder { &bb.insns, self.incoming_locations.get(&bb.id).unwrap(), bb.id, + &self.incoming_locations, + &self.tys, )); } self.mach.codegen_prologue(&assigns, out)?; for (bb, block_clobbers) in self.basic_blocks.iter().zip(block_clobbers) { let symbol = format!("{}._B{}", self.sym_name, bb.id); + sym_accepter(symbol, out.offset() as u128); self.mach.codegen_block( &assigns, &bb.insns, @@ -266,6 +270,7 @@ impl FunctionBuilder { out, |id| format!("{}._B{}", self.sym_name, id), bb.id, + &self.tys, )?; } Ok(()) @@ -390,6 +395,7 @@ impl BasicBlockBuilder { &mut self, targ: VStackValue, params: Vec>, + call_ty: ir::FnType, next: Option, ) { match targ { @@ -417,6 +423,7 @@ impl BasicBlockBuilder { CallTarget { ptr: OpaquePtr::Symbol(sym), real_ty, + call_ty, }, params, )); @@ -489,7 +496,7 @@ impl BasicBlockBuilder { let target = self.pop(); - self.write_call(target, vals, None); + self.write_call(target, vals, (**call_fnty).clone(), None); } ir::Terminator::Exit(_) => todo!("exit"), ir::Terminator::Asm(_) => todo!("asm"), diff --git a/xlang/xlang_backend/src/ty.rs b/xlang/xlang_backend/src/ty.rs index 54402799..93dc417d 100644 --- a/xlang/xlang_backend/src/ty.rs +++ b/xlang/xlang_backend/src/ty.rs @@ -2,8 +2,9 @@ use std::convert::TryInto; use xlang::{ ir::{ - AggregateDefinition, AggregateKind, AnnotationItem, Path, PointerAliasingRule, PointerKind, - ScalarType, ScalarTypeHeader, ScalarTypeKind, ScalarValidity, Type, ValidRangeType, Value, + self, AggregateDefinition, AggregateKind, AnnotationItem, Path, PointerAliasingRule, + PointerKind, ScalarType, ScalarTypeHeader, ScalarTypeKind, ScalarValidity, Type, + ValidRangeType, Value, }, prelude::v1::{HashMap, Pair, Some as XLangSome}, targets::properties::TargetProperties, @@ -11,6 +12,8 @@ use xlang::{ use crate::expr::{LValue, NoOpaque, VStackValue}; +use core::cell::RefCell; + pub(crate) fn scalar_align(size: u64, max_align: u16) -> u64 { if size <= (max_align as u64) { size.next_power_of_two() @@ -40,13 +43,54 @@ pub struct AggregateLayout { /// /// A map of information about the types on the system -#[derive(Clone, Debug, Hash, PartialEq, Eq)] +#[derive(Clone, Debug)] pub struct TypeInformation { aggregates: HashMap>, aliases: HashMap, + aggregate_layout_cache: RefCell>>, properties: &'static TargetProperties<'static>, } +pub struct FlattenFieldsOf<'a> { + fields_stack: Vec>, + + tys: &'a TypeInformation, +} + +impl<'a> FlattenFieldsOf<'a> { + fn push_fields_of(&mut self, base_offset: u64, agl: &'a AggregateLayout) { + let mut fields = vec![]; + + for Pair(_, (offset, ty)) in &agl.fields { + fields.push((*offset + base_offset, ty)); + } + + fields.sort_by_key(|(off, _)| *off); + + self.fields_stack.push(fields.into_iter()); + } + + fn push_fields_of_array(&mut self, base_off: u64, arr_ty: &'a ir::ArrayType) { + let mut fields = vec![]; + let ty = &arr_ty.ty; + let size = self + .tys + .type_size(ty) + .expect("array type must have a complete value type as a field"); + + let len = match &arr_ty.len { + Value::Integer { val, .. 
} => (*val) as u64, + val => panic!("Cannot determine length of array from {}", val), + }; + + for i in 0..len { + fields.push((base_off + i * size, ty)); + } + + self.fields_stack.push(fields.into_iter()); + } +} + impl TypeInformation { /// Constructs a new set of [`TypeInformation`] from the properties of a given target pub fn from_properties(properties: &'static TargetProperties) -> Self { @@ -54,6 +98,7 @@ impl TypeInformation { properties, aliases: HashMap::new(), aggregates: HashMap::new(), + aggregate_layout_cache: RefCell::new(HashMap::new()), } } @@ -190,7 +235,7 @@ impl TypeInformation { } /// Computes the layout of an aggregate type from it's definition - pub fn aggregate_layout_from_defn(&self, defn: &AggregateDefinition) -> AggregateLayout { + fn aggregate_layout_from_defn(&self, defn: &AggregateDefinition) -> AggregateLayout { let mut align = 1u64; let mut size = 0u64; let mut fields = HashMap::new(); @@ -262,44 +307,58 @@ impl TypeInformation { } /// Determines the total aggregate layout of a type - pub fn aggregate_layout(&self, ty: &Type) -> Option { - match ty { - Type::TaggedType(_, ty) => self.aggregate_layout(ty), - Type::Aligned(_, ty) => self.aggregate_layout(ty), - Type::Product(elems) => { - let mut elems = elems.clone(); - elems.sort_by_key(|ty| self.type_align(ty).unwrap()); - - let mut align = 1u64; - let mut size = 0u64; - let mut fields = HashMap::new(); - - for (i, field) in elems.into_iter().enumerate() { - align = self.type_align(&field)?.max(align); - let offset = size; - size += self.type_size(&field)?; - fields.insert(i.to_string(), (offset, field)); - } + pub fn aggregate_layout(&self, ty: &Type) -> Option<&AggregateLayout> { + let cache = self.aggregate_layout_cache.borrow(); + if let Some(layout) = cache.get(ty) { + unsafe { Some(&*((&**layout) as *const AggregateLayout)) } + } else { + drop(cache); + let layout = match ty { + Type::TaggedType(_, ty) => return self.aggregate_layout(ty), + Type::Aligned(_, ty) => return self.aggregate_layout(ty), + Type::Product(elems) => { + let mut elems = elems.clone(); + elems.sort_by_key(|ty| self.type_align(ty).unwrap()); + + let mut align = 1u64; + let mut size = 0u64; + let mut fields = HashMap::new(); + + for (i, field) in elems.into_iter().enumerate() { + align = self.type_align(&field)?.max(align); + let offset = size; + size += self.type_size(&field)?; + fields.insert(i.to_string(), (offset, field)); + } - Some(AggregateLayout { - total_size: size, - total_align: align, - fields, - transparent_over: None, - first_niche: None, - }) - } - Type::Aggregate(defn) => Some(self.aggregate_layout_from_defn(defn)), - Type::Named(p) => { - if let Some(ty) = self.aliases.get(p) { - self.aggregate_layout(ty) - } else if let Some(Some(defn)) = self.aggregates.get(p) { - Some(self.aggregate_layout_from_defn(defn)) - } else { - None + Some(AggregateLayout { + total_size: size, + total_align: align, + fields, + transparent_over: None, + first_niche: None, + }) + } + Type::Aggregate(defn) => Some(self.aggregate_layout_from_defn(defn)), + Type::Named(p) => { + if let Some(ty) = self.aliases.get(p) { + return self.aggregate_layout(ty); + } else if let Some(Some(defn)) = self.aggregates.get(p) { + Some(self.aggregate_layout_from_defn(defn)) + } else { + None + } } + _ => None, + }; + + if let Some(layout) = layout { + let mut cache = self.aggregate_layout_cache.borrow_mut(); + cache.insert(ty.clone(), Box::new(layout)); + unsafe { Some(&*((&*cache[ty]) as *const AggregateLayout)) } + } else { + None } - _ => None, } } @@ 
-403,7 +462,7 @@ impl TypeInformation { /// Gets the type of the field of `ty` with a given name pub fn get_field_type(&self, ty: &Type, name: &str) -> Option { self.aggregate_layout(ty) - .map(|layout| layout.fields) + .map(|layout| &layout.fields) .map(|fields| fields.get(name).cloned().map(|(_, ty)| ty)) .and_then(Into::into) } From 26331d4be09e152d4c22e8eec6d6b3916451bf18 Mon Sep 17 00:00:00 2001 From: Connor Horman Date: Fri, 2 Feb 2024 12:48:02 -0500 Subject: [PATCH 11/74] chore: update lc-binutils --- lc-binutils | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lc-binutils b/lc-binutils index 967ded32..39d2cf0a 160000 --- a/lc-binutils +++ b/lc-binutils @@ -1 +1 @@ -Subproject commit 967ded327dff8b16e8f04474b359fe565b3519f9 +Subproject commit 39d2cf0a5b1aba7dafb1fb5eada9e9ce4c4cc7e7 From b2d8ecdd2bb729e36e1d10cef04b39f511dbc424 Mon Sep 17 00:00:00 2001 From: Connor Horman Date: Fri, 2 Feb 2024 20:17:15 -0500 Subject: [PATCH 12/74] feat(rust-sema): Typecheck, lower, and visit character literals --- lccc/src/manifest.rs | 144 ++++++++++++++++++- lccc/src/manifest/parse.rs | 0 rust/src/irgen/visitor.rs | 17 +++ rust/src/irgen/xir_visitor.rs | 54 ++++++- rust/src/sema/hir.rs | 86 ++++++++++- rust/src/sema/mir.rs | 29 +++- rust/src/sema/mir/transform/unreachable.rs | 1 + rust/src/sema/mir_defs.rs | 1 + rust/src/sema/ty.rs | 3 +- rust/src/sema/tyck.rs | 159 ++++++++++++++++----- 10 files changed, 452 insertions(+), 42 deletions(-) create mode 100644 lccc/src/manifest/parse.rs diff --git a/lccc/src/manifest.rs b/lccc/src/manifest.rs index 3754ff84..cf42146a 100644 --- a/lccc/src/manifest.rs +++ b/lccc/src/manifest.rs @@ -1 +1,143 @@ -pub struct OptimizerManifest {} +pub mod parse; + +#[derive(Clone, Debug, Hash, PartialEq, Eq)] +pub enum MatcherFragment<'a> { + String(&'a str), + Glob(&'a str, Box>), + GlobChar(&'a str, Box>), +} + +impl<'a> MatcherFragment<'a> { + pub fn match_length(&self) -> (usize, Option) { + match self { + MatcherFragment::String(val) => (val.len(), Some(val.len())), + MatcherFragment::Glob(left, right) => { + let (right_min, _) = right.match_length(); + + (left.len() + right_min, None) + } + MatcherFragment::GlobChar(left, right) => { + let (right_min, right_max) = right.match_length(); + + ( + right_min + 1 + left.len(), + right_max.map(|max| max + 1 + left.len()), + ) + } + } + } + pub fn matches(&self, val: &'a str) -> bool { + match self { + MatcherFragment::String(x) => *x == val, + MatcherFragment::Glob(left, right) => { + if let Some(x) = val.strip_prefix(*left) { + let (right_min, right_max) = right.match_length(); + + let x_len = x.len(); + + let possible_base = if let Some(right_max) = right_max { + let mut off = x_len.saturating_sub(right_max); + while !x.is_char_boundary(off) { + off += 1; + } + &x[off..] 
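+                        // (longest tail of the remainder that the rest of the pattern
+                        // could still match, bounded by `right_max` and nudged forward
+                        // to the next char boundary)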
+ } else { + x + }; + + let mut iter = possible_base.chars(); + + loop { + if iter.as_str().len() < right_min { + break false; + } else if right.matches(iter.as_str()) { + break true; + } else { + iter.next(); + } + } + } else { + false + } + } + MatcherFragment::GlobChar(left, right) => { + if let Some(x) = val.strip_prefix(*left) { + if x.is_empty() { + false + } else { + let mut iter = x.chars(); + iter.next(); + right.matches(iter.as_str()) + } + } else { + false + } + } + } + } + + pub fn parse(x: &'a str) -> Self { + if let Some(val) = x.find(['*', '?']) { + let (l, r) = x.split_at(val); + let (c, r) = r.split_at(1); // We've matched an ASCII Char, so it's exactly 1 byte long + + let right = Box::new(Self::parse(r)); + + match c { + "*" => MatcherFragment::Glob(l, right), + "?" => MatcherFragment::GlobChar(l, right), + _ => unreachable!(), + } + } else { + Self::String(x) + } + } +} + +#[derive(Clone, Debug)] +pub struct SelectorManifest<'a> { + pub lines: Vec>, +} + +impl<'a> SelectorManifest<'a> { + pub fn parse(x: &'a str) -> Self { + Self { + lines: x + .lines() + .map(|x| { + if let Some((x, _comment)) = x.split_once(';') { + x + } else { + x + } + }) + .map(str::trim) + .filter(|x| !x.is_empty()) + .map(PluginSelector::parse) + .collect(), + } + } +} + +#[derive(Clone, Debug)] +pub struct PluginSelector<'a> { + pub matchers: Vec>, + pub plugin: &'a str, +} + +impl<'a> PluginSelector<'a> { + pub fn parse(x: &'a str) -> Self { + if let Some((matchers, plugins)) = x.split_once(':') { + let matchers = matchers + .split(',') + .map(|x| x.trim()) + .map(MatcherFragment::parse) + .collect(); + + let plugin = plugins.trim(); + Self { matchers, plugin } + } else { + panic!("Invalid plugin") + } + } +} diff --git a/lccc/src/manifest/parse.rs b/lccc/src/manifest/parse.rs new file mode 100644 index 00000000..e69de29b diff --git a/rust/src/irgen/visitor.rs b/rust/src/irgen/visitor.rs index c1ac4550..02a4806b 100644 --- a/rust/src/irgen/visitor.rs +++ b/rust/src/irgen/visitor.rs @@ -2,6 +2,7 @@ use ty::Spanned; use xlang::abi::pair::Pair; use crate::{ + ast::CharType, interning::Symbol, lex::StringType, sema::{ @@ -564,6 +565,16 @@ pub fn visit_expr(mut visitor: V, expr: &mir::MirExpr, defs: &De mir::MirExpr::UnaryExpr(op, lhs) => { visit_unary_expr(visitor.visit_unary_expr(), op, lhs, defs) } + mir::MirExpr::ConstChar(ty, val) => { + let mut visitor = visitor.visit_const_char(); + + if visitor.is_none() { + return; + } + visitor.visit_charty(*ty); + + visitor.visit_value(*val); + } } } @@ -726,6 +737,7 @@ def_visitors! { pub trait ExprVisitor { fn visit_unreachable(&mut self); fn visit_const_int(&mut self) -> Option>; + fn visit_const_char(&mut self) -> Option>; fn visit_const(&mut self, defid: DefId); fn visit_cast(&mut self) -> Option>; fn visit_const_string(&mut self) -> Option>; @@ -747,6 +759,11 @@ def_visitors! 
{ fn visit_value(&mut self, val: u128); } + pub trait ConstCharVisitor { + fn visit_charty(&mut self, ty: CharType); + fn visit_value(&mut self, val: u32); + } + pub trait CastVisitor { fn visit_inner(&mut self) -> Option>; fn visit_cast_type(&mut self) -> Option>; diff --git a/rust/src/irgen/xir_visitor.rs b/rust/src/irgen/xir_visitor.rs index 2ce664d4..802e0af1 100644 --- a/rust/src/irgen/xir_visitor.rs +++ b/rust/src/irgen/xir_visitor.rs @@ -16,6 +16,7 @@ use xlang::{ ir::PathComponent, }; +use crate::lex::CharType; use crate::sema::mir; use crate::sema::{cx, hir::BinaryOp, mir::SsaVarId}; use crate::sema::{generics, ty, UserTypeKind}; @@ -30,7 +31,7 @@ use crate::{lex::StringType, sema::Definitions}; use super::visitor::{ ArrayTyVisitor, AttrVisitor, BasicBlockVisitor, BinaryExprVisitor, BranchArmVisitor, - BranchVisitor, CallVisitor, CastVisitor, ConstIntVisitor, ConstStringVisitor, + BranchVisitor, CallVisitor, CastVisitor, ConstCharVisitor, ConstIntVisitor, ConstStringVisitor, ConstructorDefVisitor, ConstructorVisitor, ExprVisitor, FieldAccessVisitor, FieldInitVisitor, FieldVisitor, FunctionBodyVisitor, FunctionDefVisitor, FunctionTyVisitor, IntTyVisitor, JumpVisitor, LetStatementVisitor, ModVisitor, PointerTyVisitor, ReferenceTyVisitor, @@ -646,6 +647,17 @@ const NEVER: ir::Type = ir::Type::Scalar(ir::ScalarType { }, }); +const CHAR: ir::Type = ir::Type::Scalar(ir::ScalarType { + kind: ScalarTypeKind::Char { + flags: ir::CharFlags::UNICODE, + }, + header: ir::ScalarTypeHeader { + bitsize: 32, + vectorsize: XLangNone, + validity: ir::ScalarValidity::empty(), + }, +}); + impl<'a> XirTypeVisitor<'a> { fn new( defs: &'a Definitions, @@ -1635,6 +1647,21 @@ impl<'a> ExprVisitor for XirExprVisitor<'a> { ))) } + fn visit_const_char(&mut self) -> Option> { + let (intty, val) = match self.exprs.push_mut(ir::Expr::Const(ir::Value::Integer { + ty: ir::ScalarType::default(), + val: 0, + })) { + ir::Expr::Const(ir::Value::Integer { ty, val }) => (ty, val), + _ => unsafe { core::hint::unreachable_unchecked() }, + }; + Some(Box::new(XirConstIntVisitor::new( + self.properties, + val, + intty, + ))) + } + fn visit_const(&mut self, defid: DefId) { let name = self.names[&defid]; @@ -2443,3 +2470,28 @@ impl<'a> ConstIntVisitor for XirConstIntVisitor<'a> { *self.val = val; } } + +impl<'a> ConstCharVisitor for XirConstIntVisitor<'a> { + fn visit_charty(&mut self, ty: crate::lex::CharType) { + match ty { + CharType::Default => { + self.intty.kind = ir::ScalarTypeKind::Char { + flags: ir::CharFlags::UNICODE, + }; + self.intty.header.bitsize = 32; + } + CharType::Byte => { + self.intty.kind = ir::ScalarTypeKind::Integer { + signed: false, + min: XLangNone, + max: XLangNone, + }; + self.intty.header.bitsize = 8; + } + } + } + + fn visit_value(&mut self, val: u32) { + *self.val = val as u128; + } +} diff --git a/rust/src/sema/hir.rs b/rust/src/sema/hir.rs index 116460f6..8b58e303 100644 --- a/rust/src/sema/hir.rs +++ b/rust/src/sema/hir.rs @@ -85,7 +85,7 @@ pub enum HirExpr { Var(HirVarId), ConstInt(Option>, u128), ConstString(StringType, Spanned), - ConstChar(CharType, Spanned), + ConstChar(CharType, u32), Const(DefId, GenericArgs), #[allow(dead_code)] Unreachable, @@ -128,9 +128,22 @@ impl core::fmt::Display for HirExpr { s.escape_default().fmt(f)?; f.write_str("\"") } - HirExpr::ConstChar(_, s) => { + HirExpr::ConstChar(CharType::Default, val) => { f.write_str("'")?; - s.escape_default().fmt(f)?; + if let Some(c) = char::from_u32(*val) { + c.escape_default().fmt(f)?; + } else { + 
f.write_fmt(format_args!("\\u{{invalid char: {:04x}}}", val))?; + } + + f.write_str("'") + } + HirExpr::ConstChar(CharType::Byte, val) => { + f.write_str("'")?; + match val { + &val @ 0x20..=0x7F => (val as u8 as char).fmt(f)?, + val => f.write_fmt(format_args!("\\x{:02x}", val))?, + } f.write_str("'") } HirExpr::Tuple(v) => { @@ -722,7 +735,72 @@ impl<'a> HirLowerer<'a> { Literal { lit_kind: LiteralKind::Char(cty), val: sym, - } => Ok(expr.copy_span(|_| HirExpr::ConstChar(cty, sym))), + } => { + dbg!(sym); + let mut c = sym.chars().peekable(); + + let val = match c.next().expect("We have at least one character") { + '\\' => match c.next().expect("malformed escape sequence") { + 't' => '\t' as u32, + 'n' => '\n' as u32, + 'r' => '\r' as u32, + '\'' => '\'' as u32, + '"' => '"' as u32, + '0' => 0, + '\\' => '\\' as u32, + 'x' => { + let mut val = 0; + + match c + .next() + .expect("Malformed escape sequence - error in lexer") + { + c => val = c.to_digit(16).expect("Malformed escape sequence"), + } + + match c + .peek() + .expect("Malformed character literal - error in lexer") + { + '\'' => {} + &v => { + c.next(); + val <<= 4; + val |= v + .to_digit(16) + .expect("Malformed character literal - error in lexer"); + } + } + + val + } + 'u' => { + assert_eq!( + c.next(), + Some('{'), + "Malformed escape sequence - error in lexer" + ); + let mut val = 0; + for c in &mut c { + match c { + '}' => break, + c => { + val <<= 4; + val |= c.to_digit(16).expect( + "Malformed character literal - error in lexer", + ); + } + } + } + val + } + val => panic!("Expected an escape sequence, got {}", val), + }, + val => val as u32, + }; + + Ok(expr.copy_span(|_| HirExpr::ConstChar(cty, val))) + } _ => todo!("literal"), }, ast::Expr::Break(_, _) => todo!("break"), diff --git a/rust/src/sema/mir.rs b/rust/src/sema/mir.rs index 7741c539..35eea8de 100644 --- a/rust/src/sema/mir.rs +++ b/rust/src/sema/mir.rs @@ -4,7 +4,7 @@ use xlang::abi::{ }; use crate::{ - ast::{Mutability, StringType}, + ast::{CharType, Mutability, StringType}, helpers::{FetchIncrement, TabPrinter}, interning::Symbol, lex::Error, @@ -119,6 +119,24 @@ impl core::fmt::Display for MirExpr { MirExpr::ConstString(_, val) => { f.write_fmt(format_args!("\"{}\"", val.escape_default())) } + MirExpr::ConstChar(CharType::Default, val) => { + f.write_str("'")?; + if let Some(c) = char::from_u32(*val) { + c.escape_default().fmt(f)?; + } else { + f.write_fmt(format_args!("\\u{{invalid char: {:04x}}}", val))?; + } + + f.write_str("'") + } + MirExpr::ConstChar(CharType::Byte, val) => { + f.write_str("'")?; + match val { + &val @ 0x20..=0x7F => (val as u8 as char).fmt(f)?, + val => f.write_fmt(format_args!("\\x{:02x}", val))?, + } + f.write_str("'") + } MirExpr::Const(defid, generics) => f.write_fmt(format_args!("{}{}", defid, generics)), MirExpr::Retag(rk, mt, inner) => { f.write_str("&")?; @@ -525,6 +543,7 @@ impl<'a> MirConverter<'a> { ThirExprInner::Const(_, _) | ThirExprInner::ConstInt(_, _) | ThirExprInner::ConstString(_, _) + | ThirExprInner::ConstChar(_, _) | ThirExprInner::Cast(_, _) | ThirExprInner::Tuple(_) | ThirExprInner::Read(_) @@ -720,7 +739,8 @@ impl<'a> MirConverter<'a> { | ThirExprInner::BinaryExpr(_, _, _) | ThirExprInner::Array(_) | ThirExprInner::UnaryExpr(_, _) - | ThirExprInner::Index(_, _) => unreachable!("cannot access"), + | ThirExprInner::Index(_, _) + | ThirExprInner::ConstChar(_, _) => unreachable!("cannot access"), } } @@ -807,6 +827,10 @@ impl<'a> MirConverter<'a> { span, body: MirExpr::ConstString(sty, *val), }), + 
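+            // Descriptive note: character literals carry their char type (`char` or
+            // byte) and the already-decoded code point straight through to
+            // `MirExpr::ConstChar`.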
super::tyck::ThirExprInner::ConstChar(cty, val) => Ok(Spanned { + span, + body: MirExpr::ConstChar(cty, val), + }), super::tyck::ThirExprInner::Cast(inner, ty) => { let inner = self.lower_expr(*inner)?; @@ -1016,6 +1040,7 @@ impl<'a> MirConverter<'a> { | ThirExprInner::Const(_, _) | ThirExprInner::ConstInt(_, _) | ThirExprInner::ConstString(_, _) + | ThirExprInner::ConstChar(_, _) | ThirExprInner::Cast(_, _) | ThirExprInner::Tuple(_) | ThirExprInner::Ctor(_) diff --git a/rust/src/sema/mir/transform/unreachable.rs b/rust/src/sema/mir/transform/unreachable.rs index 0f085183..6f9034fc 100644 --- a/rust/src/sema/mir/transform/unreachable.rs +++ b/rust/src/sema/mir/transform/unreachable.rs @@ -33,6 +33,7 @@ impl PropagateUnreachable { } MirExpr::UnaryExpr(_, u) => Self::expr_contains_unreachable(u), MirExpr::GetSymbol(_) => false, + MirExpr::ConstChar(_, _) => false, } } } diff --git a/rust/src/sema/mir_defs.rs b/rust/src/sema/mir_defs.rs index 50777a11..c151bf97 100644 --- a/rust/src/sema/mir_defs.rs +++ b/rust/src/sema/mir_defs.rs @@ -65,6 +65,7 @@ pub enum MirExpr { AllocaDrop(Type, DropFlagState), ConstInt(IntType, u128), ConstString(StringType, Symbol), + ConstChar(CharType, u32), Const(DefId, GenericArgs), Retag(RefKind, Mutability, Box>), Cast(Box>, Type), diff --git a/rust/src/sema/ty.rs b/rust/src/sema/ty.rs index 667a301b..e5085ddb 100644 --- a/rust/src/sema/ty.rs +++ b/rust/src/sema/ty.rs @@ -92,7 +92,7 @@ pub enum Type { IncompleteAlias(DefId), Pointer(Spanned, Box>), Array(Box>, Spanned), - Inferable(InferId), + Inferable(Option), InferableInt(InferId), Reference( Option>>, @@ -723,6 +723,7 @@ impl core::fmt::Display for Type { pub fn convert_builtin_type(name: &str) -> Option { match name { + "_" => Some(Type::Inferable(None)), "char" => Some(Type::Char), "str" => Some(Type::Str), x if x.starts_with('i') || x.starts_with('u') => { diff --git a/rust/src/sema/tyck.rs b/rust/src/sema/tyck.rs index 9d71c40a..21221634 100644 --- a/rust/src/sema/tyck.rs +++ b/rust/src/sema/tyck.rs @@ -1,7 +1,7 @@ use xlang::abi::collection::{HashMap, HashSet}; use crate::{ - ast::{self, Mutability, Safety, StringType}, + ast::{self, CharType, Mutability, Safety, StringType}, helpers::{CyclicOperationStatus, FetchIncrement, TabPrinter}, interning::Symbol, span::Span, @@ -140,6 +140,7 @@ pub enum ThirExprInner { Const(DefId, GenericArgs), ConstInt(Option>, u128), ConstString(StringType, Spanned), + ConstChar(CharType, u32), Cast(Box>, Spanned), Tuple(Vec>), Ctor(Spanned), @@ -210,6 +211,24 @@ impl core::fmt::Display for ThirExprInner { str.escape_default().fmt(f)?; f.write_str("\"") } + ThirExprInner::ConstChar(CharType::Default, val) => { + f.write_str("'")?; + if let Some(c) = char::from_u32(*val) { + c.escape_default().fmt(f)?; + } else { + f.write_fmt(format_args!("\\u{{invalid char: {:04x}}}", val))?; + } + + f.write_str("'") + } + ThirExprInner::ConstChar(CharType::Byte, val) => { + f.write_str("'")?; + match val { + &val @ 0x20..=0x7F => (val as u8 as char).fmt(f)?, + val => f.write_fmt(format_args!("\\x{:02x}", val))?, + } + f.write_str("'") + } ThirExprInner::Cast(e, ty) => { f.write_str("(")?; e.body.fmt(f)?; @@ -610,6 +629,61 @@ impl<'a> ThirConverter<'a> { Ok(ret) } + pub fn convert_syntatic_type(&mut self, ty: Spanned) -> Spanned { + ty.map_span(|ty| match ty { + Type::Inferable(None) => { + Type::Inferable(Some(InferId(self.next_infer.fetch_increment()))) + } + + Type::IncompleteAlias(_) => todo!("incomplete alias"), + + Type::Tuple(tys) => Type::Tuple( + tys.into_iter() + .map(|ty| 
self.convert_syntatic_type(ty)) + .collect(), + ), + Type::FnPtr(mut fnty) => { + fnty.paramtys = fnty + .paramtys + .into_iter() + .map(|ty| self.convert_syntatic_type(ty)) + .collect(); + *fnty.retty = self.convert_syntatic_type(*fnty.retty); + + Type::FnPtr(fnty) + } + + Type::Pointer(mt, mut ty) => { + *ty = self.convert_syntatic_type(*ty); + + Type::Pointer(mt, ty) + } + Type::Array(mut ty, cx) => { + *ty = self.convert_syntatic_type(*ty); + + Type::Array(ty, cx) + } + Type::Reference(life, mt, mut ty) => { + *ty = self.convert_syntatic_type(*ty); + + Type::Reference(life, mt, ty) + } + val @ (Type::Inferable(Some(_)) + | Type::InferableInt(_) + | Type::Param(_) + | Type::TraitSelf(_) + | Type::DropFlags(_) + | Type::Bool + | Type::Int(_) + | Type::Float(_) + | Type::Char + | Type::Str + | Type::FnItem(_, _, _) + | Type::UserType(_, _) + | Type::Never) => val, + }) + } + pub fn convert_expr( &mut self, expr: &Spanned, @@ -692,8 +766,19 @@ impl<'a> ThirConverter<'a> { cat: ValueCategory::Rvalue, }) } - &hir::HirExpr::ConstChar(cty, val) => { - todo!("const char"); + hir::HirExpr::ConstChar(cty, val) => { + let ty = match cty { + CharType::Byte => Type::Int(IntType::u8), + CharType::Default => Type::Char, + }; + + let inner = ThirExprInner::ConstChar(*cty, *val); + + Ok(ThirExpr { + ty, + inner, + cat: ValueCategory::Rvalue, + }) } hir::HirExpr::Const(defid, generics) => { let def = self.defs.definition(*defid); @@ -718,7 +803,10 @@ impl<'a> ThirConverter<'a> { hir::HirExpr::Cast(expr, ty) => Ok(ThirExpr { ty: ty.body.clone(), cat: ValueCategory::Rvalue, - inner: ThirExprInner::Cast(Box::new(self.convert_rvalue(expr)?), ty.clone()), + inner: ThirExprInner::Cast( + Box::new(self.convert_rvalue(expr)?), + self.convert_syntatic_type(ty.clone()), + ), }), hir::HirExpr::Tuple(vals) => { let vals = vals @@ -826,7 +914,7 @@ impl<'a> ThirConverter<'a> { .next() .unwrap_or_else(|| { let inferid = InferId(self.next_infer.fetch_increment()); - Type::Inferable(inferid) + Type::Inferable(Some(inferid)) }); let inner = ThirExprInner::MemberAccess(Box::new(val), name.clone()); @@ -839,7 +927,7 @@ impl<'a> ThirConverter<'a> { Type::Int(x) => Type::Int(*x), Type::Float(x) => Type::Float(*x), Type::InferableInt(x) => Type::InferableInt(*x), - _ => Type::Inferable(InferId(self.next_infer.fetch_increment())), + _ => Type::Inferable(Some(InferId(self.next_infer.fetch_increment()))), }; Ok(ThirExpr { ty, @@ -854,7 +942,7 @@ impl<'a> ThirConverter<'a> { (Type::Int(x), y) if Type::Int(*x) == *y => Type::Int(*x), (Type::Float(x), y) if Type::Float(*x) == *y => Type::Float(*x), (Type::InferableInt(x), Type::InferableInt(_)) => Type::InferableInt(*x), - _ => Type::Inferable(InferId(self.next_infer.fetch_increment())), + _ => Type::Inferable(Some(InferId(self.next_infer.fetch_increment()))), }; Ok(ThirExpr { ty, @@ -869,7 +957,7 @@ impl<'a> ThirConverter<'a> { .collect::>>()?; if elements.len() == 0 { Ok(ThirExpr { - ty: Type::Inferable(InferId(self.next_infer.fetch_increment())), + ty: Type::Inferable(Some(InferId(self.next_infer.fetch_increment()))), cat: ValueCategory::Rvalue, inner: ThirExprInner::Array(elements), }) @@ -886,7 +974,7 @@ impl<'a> ThirConverter<'a> { Ok(ThirExpr { ty: match &base.ty { Type::Array(ty, _) => ty.body.clone(), - _ => Type::Inferable(InferId(self.next_infer.fetch_increment())), + _ => Type::Inferable(Some(InferId(self.next_infer.fetch_increment()))), }, cat: ValueCategory::Rvalue, inner: ThirExprInner::Index( @@ -907,7 +995,7 @@ impl<'a> ThirConverter<'a> { let matcher = match 
&matcher.body { hir::HirPatternMatcher::Hole => { let infer = InferId(self.next_infer.fetch_increment()); - let ty = Type::Inferable(infer); + let ty = Type::Inferable(Some(infer)); Spanned { body: ThirPatternMatcher { @@ -1008,14 +1096,18 @@ impl<'a> ThirConverter<'a> { var, ty, } => { - let ty = ty.as_ref().cloned().unwrap_or_else(|| { - let infer = InferId(self.next_infer.fetch_increment()); + let ty = ty + .as_ref() + .cloned() + .map(|ty| self.convert_syntatic_type(ty)) + .unwrap_or_else(|| { + let infer = InferId(self.next_infer.fetch_increment()); - Spanned { - span: var.span, - body: Type::Inferable(infer), - } - }); + Spanned { + span: var.span, + body: Type::Inferable(Some(infer)), + } + }); let mutability = *mutability; @@ -1204,13 +1296,13 @@ impl<'a> Inferer<'a> { ) -> super::Result { match (&mut *left, &mut *right) { (a, b) if a == b => Ok(CyclicOperationStatus::Complete), - (Type::Inferable(l), Type::Inferable(r)) => { + (Type::Inferable(Some(l)), Type::Inferable(Some(r))) => { let l = *l; let r = *r; // Note: We do RTL Propagation here. The left type is typically the assignee and the right type is typically the assigned // This *should* narrow the inference set quicker in general - *left = Type::Inferable(r); + *left = Type::Inferable(Some(r)); if let Some(ty) = self.inference_set.get_mut(&l) { let mut ty = core::mem::replace(ty, Type::Never); @@ -1225,12 +1317,12 @@ impl<'a> Inferer<'a> { self.inference_set.insert(r, ty); } } else { - self.inference_set.insert(l, Type::Inferable(r)); + self.inference_set.insert(l, Type::Inferable(Some(r))); } Ok(CyclicOperationStatus::Incomplete) } - (Type::InferableInt(l), Type::Inferable(r)) => { + (Type::InferableInt(l), Type::Inferable(Some(r))) => { let l = *l; let r = *r; //Note: We're doing LTR propagation here because the left type is more specific. @@ -1249,17 +1341,17 @@ impl<'a> Inferer<'a> { self.inference_set.insert(r, ty); } } else { - self.inference_set.insert(l, Type::Inferable(r)); + self.inference_set.insert(l, Type::Inferable(Some(r))); } Ok(CyclicOperationStatus::Incomplete) } - (Type::Inferable(l), Type::InferableInt(r)) => { + (Type::Inferable(Some(l)), Type::InferableInt(r)) => { let l = *l; let r = *r; //Note: We do RTL Propagation here because the right type is more specific. 
- *left = Type::Inferable(r); + *left = Type::Inferable(Some(r)); if let Some(ty) = self.inference_set.get_mut(&l) { let mut ty = core::mem::replace(ty, Type::Never); @@ -1275,12 +1367,12 @@ impl<'a> Inferer<'a> { self.inference_set.insert(r, ty); } } else { - self.inference_set.insert(l, Type::Inferable(r)); + self.inference_set.insert(l, Type::Inferable(Some(r))); } Ok(CyclicOperationStatus::Incomplete) } - (Type::Inferable(l), ty) => { + (Type::Inferable(Some(l)), ty) => { let l = *l; if let Some(gty) = self.inference_set.get_mut(&l) { @@ -1319,7 +1411,7 @@ impl<'a> Inferer<'a> { } Ok(CyclicOperationStatus::Incomplete) } - (ty, Type::Inferable(r)) => { + (ty, Type::Inferable(Some(r))) => { let r = *r; if let Some(gty) = self.inference_set.get_mut(&r) { @@ -1356,11 +1448,12 @@ impl<'a> Inferer<'a> { status &= self.unify_single_expr(inner)?; status &= self.unify_types(&mut inner.ty, &mut left.ty)?; } - ThirExprInner::Unreachable => {} - ThirExprInner::Var(_) => {} - ThirExprInner::Const(_, _) => {} - ThirExprInner::ConstInt(_, _) => {} - ThirExprInner::ConstString(_, _) => {} + ThirExprInner::Unreachable + | ThirExprInner::Var(_) + | ThirExprInner::Const(_, _) + | ThirExprInner::ConstInt(_, _) + | ThirExprInner::ConstString(_, _) + | ThirExprInner::ConstChar(_, _) => {} ThirExprInner::Cast(inner, ty) => { status &= self.unify_single_expr(inner)?; @@ -1753,7 +1846,7 @@ impl<'a> Inferer<'a> { } Type::Pointer(_, pty) => self.propagate_type(pty), Type::Array(inner, _) => self.propagate_type(inner), - Type::Inferable(infer) | Type::InferableInt(infer) => { + Type::Inferable(Some(infer)) | Type::InferableInt(infer) => { if let Some(subty) = self.inference_set.get(infer) { *ty = subty.clone(); Ok(Incomplete) From 07c92d5144429c7773c7d1355b44efb97663fb59 Mon Sep 17 00:00:00 2001 From: Ray Redondo Date: Fri, 2 Feb 2024 20:32:01 -0600 Subject: [PATCH 13/74] fix(rust-parse): add prefix to parsed character --- lc-binutils | 2 +- rust/src/lex.rs | 8 ++++---- rust/src/parse.rs | 5 +++-- rust/src/sema/hir.rs | 2 +- 4 files changed, 9 insertions(+), 8 deletions(-) diff --git a/lc-binutils b/lc-binutils index 39d2cf0a..967ded32 160000 --- a/lc-binutils +++ b/lc-binutils @@ -1 +1 @@ -Subproject commit 39d2cf0a5b1aba7dafb1fb5eada9e9ce4c4cc7e7 +Subproject commit 967ded327dff8b16e8f04474b359fe565b3519f9 diff --git a/rust/src/lex.rs b/rust/src/lex.rs index 420eee99..7fe4e806 100644 --- a/rust/src/lex.rs +++ b/rust/src/lex.rs @@ -837,19 +837,19 @@ fn do_lexeme(file: &mut Speekable>) -> Result } else if file.peek() == Some(&'\'') { file.next(); let mut result = do_char(file, start)?; + let span = result.span; match &mut result.body { LexemeBody::Token(Token { - ty: TokenType::Character(ref mut char_ty), - .. 
+ ty: TokenType::Character(_), + body, }) => { - *char_ty = CharType::Byte; + break Ok(Token::new(TokenType::Character(CharType::Byte), id + &body).with_span(span)); } LexemeBody::Token(_) => { todo!("validation error for \"byte lifetimes\""); } _ => unreachable!("do_char returns a Token always"), } - break Ok(result); } } break Ok(Token::new(ty, id).with_span(Span::new_simple( diff --git a/rust/src/parse.rs b/rust/src/parse.rs index 7acf0a14..d7cffa8c 100644 --- a/rust/src/parse.rs +++ b/rust/src/parse.rs @@ -3248,8 +3248,8 @@ pub fn do_char( }; let str = full_str.text().unwrap(); let str = match chr_ty { - CharType::Default => &str[1..str.len() - 1], // Skip " and " - CharType::Byte => &str[2..str.len() - 1], // Skip b" and " + CharType::Default => &str[1..str.len() - 1], // Skip ' and ' + CharType::Byte => &str[2..str.len() - 1], // Skip b' and ' }; let mut parsed = String::new(); let mut str_iter = str.chars(); @@ -3264,6 +3264,7 @@ pub fn do_char( x => parsed.push(x), } } + dbg!(&full_str); Ok(( Spanned { body: parsed.into(), diff --git a/rust/src/sema/hir.rs b/rust/src/sema/hir.rs index 8b58e303..5b537b68 100644 --- a/rust/src/sema/hir.rs +++ b/rust/src/sema/hir.rs @@ -739,7 +739,7 @@ impl<'a> HirLowerer<'a> { dbg!(sym); let mut c = sym.chars().peekable(); - let val = match c.next().expect("We have at least one character") { + let val = match c.next().expect("missing a character") { '\\' => match c.next().expect("malformed escape sequence") { 't' => '\t' as u32, 'n' => '\n' as u32, From a3d8d49cbb1ced6045918236c536a96ab862ccf6 Mon Sep 17 00:00:00 2001 From: Connor Horman Date: Sat, 3 Feb 2024 03:18:40 -0500 Subject: [PATCH 14/74] feat(rust-sema): Pass down the type checking for `HirExpr::Cast` properly --- lc-binutils | 2 +- rust/src/sema/tyck.rs | 16 ++++++++-------- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/lc-binutils b/lc-binutils index 967ded32..39d2cf0a 160000 --- a/lc-binutils +++ b/lc-binutils @@ -1 +1 @@ -Subproject commit 967ded327dff8b16e8f04474b359fe565b3519f9 +Subproject commit 39d2cf0a5b1aba7dafb1fb5eada9e9ce4c4cc7e7 diff --git a/rust/src/sema/tyck.rs b/rust/src/sema/tyck.rs index 21221634..4cdff619 100644 --- a/rust/src/sema/tyck.rs +++ b/rust/src/sema/tyck.rs @@ -800,14 +800,14 @@ impl<'a> ThirConverter<'a> { cat: ValueCategory::Rvalue, inner: ThirExprInner::Unreachable, }), - hir::HirExpr::Cast(expr, ty) => Ok(ThirExpr { - ty: ty.body.clone(), - cat: ValueCategory::Rvalue, - inner: ThirExprInner::Cast( - Box::new(self.convert_rvalue(expr)?), - self.convert_syntatic_type(ty.clone()), - ), - }), + hir::HirExpr::Cast(expr, ty) => { + let ty = self.convert_syntatic_type(ty.clone()); + Ok(ThirExpr { + ty: ty.body.clone(), + cat: ValueCategory::Rvalue, + inner: ThirExprInner::Cast(Box::new(self.convert_rvalue(expr)?), ty), + }) + } hir::HirExpr::Tuple(vals) => { let vals = vals .iter() From 66dddae22272f378baa677d77154afe2987eb0ed Mon Sep 17 00:00:00 2001 From: Ray Redondo Date: Sun, 4 Feb 2024 01:46:29 -0600 Subject: [PATCH 15/74] feat(xlang-backend-ssa): half-support calls --- xlang/xlang_backend/src/ssa.rs | 9 ++++++++- xlang/xlang_struct/src/lib.rs | 2 +- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/xlang/xlang_backend/src/ssa.rs b/xlang/xlang_backend/src/ssa.rs index 03262bd4..f855091a 100644 --- a/xlang/xlang_backend/src/ssa.rs +++ b/xlang/xlang_backend/src/ssa.rs @@ -489,7 +489,14 @@ impl BasicBlockBuilder { } ir::Terminator::Branch(_, _, _) => todo!("branch"), ir::Terminator::BranchIndirect => todo!("branch indirect"), - 
ir::Terminator::Call(_, _, _) => todo!("call"), + ir::Terminator::Call(_, call_fnty, next) => { + let params_count = call_fnty.params.len(); + let vals = self.pop_values(params_count); + + let target = self.pop(); + + self.write_call(target, vals, (**call_fnty).clone(), Some(*next)); + } ir::Terminator::Tailcall(_, call_fnty) => { let params_count = call_fnty.params.len(); let vals = self.pop_values(params_count); diff --git a/xlang/xlang_struct/src/lib.rs b/xlang/xlang_struct/src/lib.rs index c57c127b..92915fe1 100644 --- a/xlang/xlang_struct/src/lib.rs +++ b/xlang/xlang_struct/src/lib.rs @@ -1916,7 +1916,7 @@ impl core::fmt::Display for JumpTargetFlags { /// jump-target := [*()] @ /// ``` #[repr(C)] -#[derive(Clone, Debug, Hash, PartialEq, Eq)] +#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)] pub struct JumpTarget { /// The flags for the jump pub flags: JumpTargetFlags, From 2de6914e2e55ba78c91283825ed4637fd369f9ef Mon Sep 17 00:00:00 2001 From: Ray Redondo Date: Fri, 9 Feb 2024 23:25:50 -0600 Subject: [PATCH 16/74] feat(xlang-backend-ssa): start implementing non-tailcall --- codegen-x86/src/lib.rs | 2 ++ xlang/xlang_backend/src/ssa.rs | 47 +++++++++++++++++++++++++--------- 2 files changed, 37 insertions(+), 12 deletions(-) diff --git a/codegen-x86/src/lib.rs b/codegen-x86/src/lib.rs index b7dfd6e1..378a48d8 100644 --- a/codegen-x86/src/lib.rs +++ b/codegen-x86/src/lib.rs @@ -184,6 +184,7 @@ impl Machine for X86Machine { let mut clobbers = X86Clobbers {}; for (num, insn) in insns.iter().enumerate() { match insn { + xlang_backend::ssa::SsaInstruction::Call(_, _) => todo!("call"), xlang_backend::ssa::SsaInstruction::Jump(targ, old_locs) | xlang_backend::ssa::SsaInstruction::Fallthrough(targ, old_locs) => { let foreign_locs = &incoming_set[targ]; @@ -255,6 +256,7 @@ impl Machine for X86Machine { } match insn { + xlang_backend::ssa::SsaInstruction::Call(_, _) => todo!("call"), xlang_backend::ssa::SsaInstruction::Jump(targ, _) => { encoder.write_insn(X86Instruction::new( X86CodegenOpcode::Jmp, diff --git a/xlang/xlang_backend/src/ssa.rs b/xlang/xlang_backend/src/ssa.rs index f855091a..2a8334cd 100644 --- a/xlang/xlang_backend/src/ssa.rs +++ b/xlang/xlang_backend/src/ssa.rs @@ -9,7 +9,7 @@ use crate::mach::Machine; use crate::ty::TypeInformation; use arch_ops::traits::InsnWrite; -use xlang::targets::properties::TargetProperties; +use xlang::{ir::JumpTarget, targets::properties::TargetProperties}; use xlang::ir; @@ -31,6 +31,7 @@ impl SharedCounter { #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub enum SsaInstruction { + Call(CallTarget, Vec), Jump(u32, Vec), Fallthrough(u32, Vec), Exit(Vec), @@ -42,6 +43,16 @@ pub enum SsaInstruction { impl core::fmt::Display for SsaInstruction { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { + SsaInstruction::Call(targ, params) => { + f.write_fmt(format_args!("call {}(", targ))?; + let mut sep = ""; + for item in params { + f.write_str(sep)?; + sep = ", "; + item.fmt(f)?; + } + f.write_str(")") + } SsaInstruction::Jump(val, stack) => { f.write_fmt(format_args!("jump @{} [", val))?; let mut sep = ""; @@ -417,7 +428,15 @@ impl BasicBlockBuilder { }; if let Some(next) = next { - todo!("Call with next") + self.insns.push(SsaInstruction::Call( + CallTarget { + ptr: OpaquePtr::Symbol(sym), + real_ty, + call_ty, + }, + params, + )); + self.write_jump(&next); } else { self.insns.push(SsaInstruction::Tailcall( CallTarget { @@ -441,6 +460,19 @@ impl BasicBlockBuilder { } } + pub fn write_jump(&mut self, targ: &JumpTarget) { + let 
vals = self.incoming_count[&targ.target]; + + let vals = self.pop_opaque(vals); + + if targ.flags.contains(ir::JumpTargetFlags::FALLTHROUGH) { + self.insns + .push(SsaInstruction::Fallthrough(targ.target, vals)); + } else { + self.insns.push(SsaInstruction::Jump(targ.target, vals)); + } + } + pub fn write_expr(&mut self, expr: &ir::Expr) { match expr { ir::Expr::Sequence(_) => todo!("sequence"), @@ -476,16 +508,7 @@ impl BasicBlockBuilder { pub fn write_terminator(&mut self, term: &ir::Terminator) { match term { ir::Terminator::Jump(targ) => { - let vals = self.incoming_count[&targ.target]; - - let vals = self.pop_opaque(vals); - - if targ.flags.contains(ir::JumpTargetFlags::FALLTHROUGH) { - self.insns - .push(SsaInstruction::Fallthrough(targ.target, vals)); - } else { - self.insns.push(SsaInstruction::Jump(targ.target, vals)); - } + self.write_jump(targ); } ir::Terminator::Branch(_, _, _) => todo!("branch"), ir::Terminator::BranchIndirect => todo!("branch indirect"), From cfcaf57a01377df1426e67bfdb0593cdea14f019 Mon Sep 17 00:00:00 2001 From: Connor Horman Date: Mon, 12 Feb 2024 19:08:11 -0500 Subject: [PATCH 17/74] doc(xlang-backend): Add documentation for xlang_backed rewrite feat(xlang-backend): Add support for head values for `make_jump` --- codegen-clever/src/lib.rs | 10 ++ codegen-w65/src/lib.rs | 10 ++ codegen-x86/src/callconv.rs | 2 +- codegen-x86/src/lib.rs | 22 ++++ xlang/xlang_backend/src/callconv.rs | 133 ++++++++++++++++++++- xlang/xlang_backend/src/expr.rs | 6 +- xlang/xlang_backend/src/lib.rs | 22 +++- xlang/xlang_backend/src/mach.rs | 30 ++++- xlang/xlang_backend/src/ssa.rs | 174 ++++++++++++++++++++++++---- xlang/xlang_backend/src/ty.rs | 45 +------ 10 files changed, 372 insertions(+), 82 deletions(-) diff --git a/codegen-clever/src/lib.rs b/codegen-clever/src/lib.rs index fde692bd..836f3bfa 100644 --- a/codegen-clever/src/lib.rs +++ b/codegen-clever/src/lib.rs @@ -58,6 +58,16 @@ impl Machine for CleverMachine { ) -> std::io::Result<()> { todo!() } + + fn assign_call_conv( + &self, + assignments: &mut Self::Assignments, + incoming: &[OpaqueLocation], + fnty: &xlang_struct::FnType, + tys: &TypeInformation, + ) { + todo!() + } } xlang::host::rustcall! { diff --git a/codegen-w65/src/lib.rs b/codegen-w65/src/lib.rs index 5fee482a..e626c136 100644 --- a/codegen-w65/src/lib.rs +++ b/codegen-w65/src/lib.rs @@ -58,6 +58,16 @@ impl Machine for W65Machine { ) -> std::io::Result<()> { todo!() } + + fn assign_call_conv( + &self, + assignments: &mut Self::Assignments, + incoming: &[OpaqueLocation], + fnty: &xlang_struct::FnType, + tys: &TypeInformation, + ) { + todo!() + } } xlang::host::rustcall! 
{ diff --git a/codegen-x86/src/callconv.rs b/codegen-x86/src/callconv.rs index 5ecdce9f..2bae052c 100644 --- a/codegen-x86/src/callconv.rs +++ b/codegen-x86/src/callconv.rs @@ -251,7 +251,7 @@ impl CallConvInfo for X86CallConvInfo { } } - fn classify_pointer(&self) -> Self::TypeClass { + fn classify_pointer(&self, _: xlang::ir::PointerKind) -> Self::TypeClass { X86TypeClass::Integer } diff --git a/codegen-x86/src/lib.rs b/codegen-x86/src/lib.rs index 378a48d8..a76b3c3d 100644 --- a/codegen-x86/src/lib.rs +++ b/codegen-x86/src/lib.rs @@ -332,6 +332,28 @@ impl Machine for X86Machine { Ok(()) } + + fn assign_call_conv( + &self, + assignments: &mut Self::Assignments, + incoming: &[xlang_backend::ssa::OpaqueLocation], + fnty: &xlang::ir::FnType, + tys: &TypeInformation, + ) { + let callconv = X86CallConvInfo { + mode: assignments.mode, + }; + let callconv = compute_call_conv(&callconv, fnty, fnty, tys); + + for (param, incoming) in callconv.params().iter().zip(incoming) { + todo!() + } + + match callconv.ret_location() { + CallConvLocation::Null => {} + loc => todo!("Return in {:?}", loc), + } + } } xlang::host::rustcall! { diff --git a/xlang/xlang_backend/src/callconv.rs b/xlang/xlang_backend/src/callconv.rs index b92234e9..a432e364 100644 --- a/xlang/xlang_backend/src/callconv.rs +++ b/xlang/xlang_backend/src/callconv.rs @@ -2,92 +2,196 @@ use core::cmp::Eq; use core::hash::Hash; use xlang::{ - abi::{collection::HashMap, pair::Pair}, - ir::{ArrayType, FnType, ScalarType, Type}, + abi::collection::HashMap, + ir::{FnType, PointerKind, ScalarType, Type}, }; -use crate::ty::{AggregateLayout, FlattenFieldsOf, TypeInformation}; +use crate::ty::TypeInformation; +/// The position in the parameters list to insert a location #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +#[non_exhaustive] pub enum ParamPosition { + /// Inserts the location in the first parameter location First, + /// Inserts the location in the last parameter location Last, } +/// The location of a parameter or return value #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub enum CallConvLocation { + /// Passed in a register (which may be a synthetic register) Register(R), + /// Passed indirectly, with a pointer present in the given register Indirect(R), + /// Passed on the stack, starting at the given displacement from the start of the parameter area. StackOffset(i32), + /// Passed in multiple [`CallConvLocation`]s, typically registers. Split(Vec>), + /// An empty value passed in no location Null, } +/// Describes the location of a return pointer passed to a function #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +#[non_exhaustive] pub enum ReturnPointerBehaviour { + /// Specifies that a dedicated register is used. The [`Tag`] must ensure that this does not conflict with any register used by parameters Dedicated(R), + /// Specifies that the return pointer is passed as a parameter of the given class. Param(ParamPosition, C), } +/// Describes the behaviour when passing multiple different classes of values #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +#[non_exhaustive] pub enum RegisterDisposition { + /// Interleaves parameters of a given class with parameters of other classes + /// + /// With different classes mixing [`RegisterDisposition`]s, classes with [`RegisterDisposition::Interleave`] neither affect, nor are affected by parameter classes with [`RegisterDisposition::Consume`] Interleave, + /// Consumes parameters positionally, jointly with other classes with [`RegisterDisposition::Consume`]. 
+ /// + /// With different classes mixing [`RegisterDisposition`]s, only parameter classes with [`RegisterDisposition::Consume`] affect the list of consumed registers shared by those classes. Consume, } +/// Indicates the order that parameters passed on the stack are laid out (in the directionality of the stack) #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] pub enum StackedParamsOrder { + /// Indicates that the first slot for the parameter region is used by the rightmost parameter passed on the stack Rtl, + /// Indicates that the first slot for the parameter region is used by the leftmost parameter passed on the stack Ltr, } +/// The disposition of a [`Tag`] for classifying aggregate types #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +#[non_exhaustive] pub enum ClassifyAggregateDisposition { + /// Indicates that all aggregate types receive the given class. Single(C), + /// Classifies aggregate types by field, recursively, merging all fields to tag the whole aggregate as one single class Recursive, + /// Splits the aggregate type into chunks of the given size, and classifies each chunk according to contained scalar fields, merging the classes in each chunk. SplitFlat(u64), } +/// A trait for compiled calling convention [`Tag`]s. +/// [`Tag`]s decide how different classified values are ultimately passed or returned from functions, specifying which registers are used for which class, and when certain classes are replaced. +/// +/// The [`Tag`] is expected to uphold certain invariants about type classes and dispositions it indicates via +/// pub trait Tag { + /// The type of the registers used by the calling convention. type Register: Clone + Eq; + /// The type of the classification (or partial classification) of individual values that can be passed or returned type TypeClass; + + /// The name of the tag, for display purposes fn tag_name(&self) -> &'static str; + /// Returns the list of registers used for passing values (or parts thereof) of the specified `cl`, if any. + /// + /// It is a logic error if any two different type classes share the same (non-empty) lists of registers unless both are [`RegisterDisposition::Consume`], + /// or if any two different type classes use any of the same registers except as the same parameter position when both classes use [`RegisterDisposition::Consume`] fn param_regs_for_class(&self, cl: &Self::TypeClass) -> &[Self::Register]; + /// Returns the list of registers used for returning values (or parts thereof) of the specified `cl`, if any. fn return_regs_for_class(&self, cl: &Self::TypeClass) -> &[Self::Register]; + /// If the given list of classes should be passed indirectly, returns the type class of the pointer the parameter is replaced by. + /// Otherwise, returns [`None`]. + /// + /// It is a logic error of the implementation if this function returns a type class which, if given as the single input to this function, would return [`Some`]. fn replace_param_with_pointer(&self, cl: &[Self::TypeClass]) -> Option; + /// If the given list of classes was generated by splitting a large value that can be passed as a single unit, returns the type class of the combined unit. + /// Otherwise, returns [`None`]. + /// + /// It is a logic error of the implementation if this function returns a type class which, if given as the single input to [`Tag::replace_param_with_pointer`] or [`Tag::replace_return_with_pointer`], + /// would return [`None`].
fn combine_wide(&self, cl: &[Self::TypeClass]) -> Option; + /// If the given list of type classes should be returned indirectly, returns the disposition for passing the return pointer to the function. + /// Otherwise, returns [`None`]. + /// + /// It is a logic error of the implementation if the function returns [`ReturnPointerBehaviour::Param`], and the class of the parameter specified, + /// if given as the single input to [`Tag::return_regs_for_class`] would yield a slice with no registers or, if passed to this function, would return [`Some`]. + /// + /// It is further a logic error of the implementation if the function returns [`ReturnPointerBehaviour::Dedicated`], and the specified register collides with any register used for a parameter of any class. fn replace_return_with_pointer( &self, cl: &[Self::TypeClass], ) -> Option>; + /// If the given type class needs special handling when passed as a variadic parameter (a parameter after the fixed-arity parameter list), + /// returns the type class of the replaced segment. + /// + /// For parameters with multiple classes, each class is replaced individually. + /// + /// It is a logic error of the implementation if the function returns a type class which, when combined with the other classes for the full value and passed to [`Tag::replace_param_with_pointer`] would return [`Some`], + /// unless the function applied to the original set would return the same value. + /// Note that checks for [`Tag::replace_param_with_pointer`] are performed before calling this function, so a [`Some`] value becoming [`None`] after this function is not necessarily respected (but is considered well-defined). fn replace_class_as_varargs(&self, cl: &Self::TypeClass) -> Option; + /// Specifies how parameter parts of the given type class interact with other parameters passed in different type classes. + /// + /// Two options are provided: + /// * [`RegisterDisposition::Interleave`] causes all such type classes to be disjoint - only parameters passed in registers of the same type class will affect the registers used for other parameters + /// * [`RegisterDisposition::Consume`] causes a global (across all parameters of the same call) list to be used, and parameters are assigned positionally. + /// + /// A [`Tag`] may specify a single register disposition for all type classes, or may specify mixed dispositions. + /// The behaviour of mixed dispositions is described by the variants of [`RegisterDisposition`] fn register_disposition(&self, cl: &Self::TypeClass) -> RegisterDisposition; + /// Specifies the order that parameters are placed on the stack after all registers are consumed fn stacked_params_order(&self) -> StackedParamsOrder; } +/// A trait for information about calling conventions on a target. +/// +/// As opposed to [`Tag`], [`CallConvInfo`] contains the information needed to perform initial classification of parameters/return values, as well as how to convert tag strings to [`Tag`] values. pub trait CallConvInfo { + /// The type of the [`Tag`] associated with the calling conventions. type Tag: Tag; + /// The type of parameter/return value classes used by the [`CallConvInfo::Tag`] and by value classification type TypeClass: Eq + Hash + Clone; + /// The type of registers yielded by the [`CallConvInfo::Tag`] type Register: Eq + Clone; + /// Maps the specified `tag` to the associated [`Tag`] value. + /// + /// ## Panics + /// The implementation may panic (or otherwise return a nonsensical value) if the `tag` is invalid for the target.
fn get_tag(&self, tag: &str) -> Self::Tag; + /// Returns the [`CallConvInfo::TypeClass`] that an empty value (such as a portion of an aggregate that entirely consists of padding bytes) receives. + /// Anything classified as the returned class will be ignored for parameter passing or return purposes (passed in [`CallConvLocation::Null`]) fn no_class(&self) -> Self::TypeClass; + + /// Classifies the given scalar type, splitting it into sufficient individual pieces to allow passing it in registers. fn classify_scalar(&self, sty: ScalarType) -> Vec; - fn classify_pointer(&self) -> Self::TypeClass; + /// Classifies pointer types of the given `width`. + /// It is generally assumed that a pointer can always be passed as a single value + fn classify_pointer(&self, width: PointerKind) -> Self::TypeClass; + /// Indicates how the [`classify_type`] function should classify aggregates. + /// See documentation of [`ClassifyAggregateDisposition`] for details fn classify_aggregate_disposition(&self) -> ClassifyAggregateDisposition; + /// Merges two type classes into a single class. + /// + /// It is a logic error of the implementation if any of the following properties do not hold: + /// * Given the classes `a` and `b`, [`CallConvInfo::merge_class`]`(a,b)` is the same value as [`CallConvInfo::merge_class`]`(b,a)` + /// * Given the classes `a` and `b`, if either `a` or `b` does not equal [`CallConvInfo::no_class`], then [`CallConvInfo::merge_class`]`(a,b)` does not equal [`CallConvInfo::no_class`]`()` + /// fn merge_class(&self, left: Self::TypeClass, right: Self::TypeClass) -> Self::TypeClass; + + /// Performs a final merge step after all pieces of a value have been classified fn adjust_classes_after_combine(&self, classes: &mut [Self::TypeClass]); } +/// A type that stores the result of computing the full calling convention of a function type. pub struct CallConv { params: Vec>, stacked_params_count: u32, @@ -97,27 +201,35 @@ pub struct CallConv { } impl CallConv { + /// The computed [`Tag`] of the function type. pub fn tag(&self) -> &T { &self.tag } + /// The full list of locations to place (or find) the parameters of the function. + /// + /// Each parameter in the "call type" (at the call site) or function type (at the def site) has exactly one element in order. pub fn params(&self) -> &[CallConvLocation] { &self.params } + /// Specifies the location to place (or find) the return value of the function. pub fn ret_location(&self) -> &CallConvLocation { &self.ret_location } + /// Specifies the number of different parameter classes that were passed on the stack. pub fn stacked_params_count(&self) -> u32 { self.stacked_params_count } + /// Specifies whether or not the function was variadic. pub fn is_varargs(&self) -> bool { self.is_varargs } } +/// Classifies the given [`Type`] according to `info`.
Information about the types are derived from `tys` pub fn classify_type( info: &I, ty: &Type, @@ -128,7 +240,7 @@ pub fn classify_type( Type::Null | Type::Void => classes.push(info.no_class()), Type::Scalar(sty) => classes.extend(info.classify_scalar(*sty)), Type::FnType(_) => panic!("Cannot classify a non-value type (other than void)"), - Type::Pointer(_) => classes.push(info.classify_pointer()), + Type::Pointer(ptr) => classes.push(info.classify_pointer(ptr.kind)), Type::Array(_) => todo!(), Type::TaggedType(_, ty) => classes = classify_type(info, ty, tys), Type::Product(_) => todo!(), @@ -142,6 +254,14 @@ pub fn classify_type( classes } +/// Computes the [`CallConv`] of a function call site (or def site) with the given [`CallConvInfo`] using `real_ty` and `call_ty`. +/// +/// This function primarily computes calling convention for the call site of a function, with `real_ty` set to the type of the function being called, +/// and `call_ty` being the fn-type for the `call` or `tailcall` instruction when they differ (for example, for variadic parameters). +/// When computing the calling convention of a function inside that function, `real_ty` and `call_ty` should be set to the same value. +/// +/// +/// pub fn compute_call_conv( info: &I, real_ty: &FnType, @@ -159,6 +279,7 @@ pub fn compute_call_conv( let return_classes = classify_type(info, &real_ty.ret, tys); + #[allow(unused_assignments)] // ParamPosition::Last will be implemented later. let mut return_loc = None; let mut total_params_consumed = 0; @@ -214,7 +335,7 @@ pub fn compute_call_conv( todo!("return in memory on stack") } } - ReturnPointerBehaviour::Param(_, cl) => todo!("non-start return pointer"), + ReturnPointerBehaviour::Param(_, _cl) => todo!("non-start return pointer"), } } else { let mut split_locs = vec![]; diff --git a/xlang/xlang_backend/src/expr.rs b/xlang/xlang_backend/src/expr.rs index 9a832d8a..2a626f09 100644 --- a/xlang/xlang_backend/src/expr.rs +++ b/xlang/xlang_backend/src/expr.rs @@ -6,8 +6,6 @@ use xlang::{ use core::{fmt::Debug, hash::Hash}; use std::num::NonZeroU128; -use crate::str::Encoding; - /// Represents the location of opaque values both as locals and on the value stack pub trait ValLocation: Eq + Debug + Clone { /// Checks if this location is addressable (is not a register) @@ -160,8 +158,8 @@ impl VStackValue { VStackValue::Constant(val) => match val { Value::Invalid(ty) | Value::Uninitialized(ty) => ty.clone(), Value::GenericParameter(_) => panic!("Cannot handle generic params this late"), - Value::Integer { ty, val } => Type::Scalar(*ty), - Value::GlobalAddress { ty, item } => { + Value::Integer { ty, .. } => Type::Scalar(*ty), + Value::GlobalAddress { ty, .. } => { let mut pty = PointerType::default(); *pty.inner = ty.clone(); Type::Pointer(pty) diff --git a/xlang/xlang_backend/src/lib.rs b/xlang/xlang_backend/src/lib.rs index bbdd7bc4..3ec9caf1 100644 --- a/xlang/xlang_backend/src/lib.rs +++ b/xlang/xlang_backend/src/lib.rs @@ -1,5 +1,4 @@ -// #![deny(missing_docs, warnings)] // No clippy::nursery -#![allow(dead_code)] // I'm not deleting a bunch of randomly placed shit +#![deny(missing_docs, warnings)] // No clippy::nursery //! A helper crate for implementing [`xlang::plugin::XLangCodegen`]s without duplicating code (also can be used to evaluate constant expressions) //! the `xlang_backend` crate provides a general interface for writing expressions to an output. 
@@ -43,15 +42,29 @@ pub mod mangle; pub mod mach; /// Module for building SSA from XIR that can be readily lowered to machine code -/// Does not use FunctionCodegen pub mod ssa; +/// The section a symbol definition is placed in #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub enum SectionSpec { + /// The global section for the current type of definition, which (on ELF platforms), are generally: + /// * `.text` (RX) for functions + /// * `.data` (RW) for mutable statics + /// * `.rodata` (RO) for immutable statics + /// * `.bss` (RW - no data) for uninitialized statics + /// * `.tdata` (RW TLS) for thread-local statics + /// * `.tbss` (RW TLS - no data) for uninitialized thread-local statics + /// + /// Note that there is no guarantee as to the exact name of the sections, or which section a particular symbol is placed in if multiple sections are valid (For example, immutable statics may be placed in `.data` or `.bss`). + /// + /// However, the same set of sections will be used for all symbols defined in the [`SectionSpec::Global`]. + /// + /// Global, } impl core::fmt::Display for SectionSpec { + #[allow(unused_variables)] // we'll have more sections than `Global` at some point fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { match self { Self::Global => Ok(()), @@ -59,6 +72,7 @@ impl core::fmt::Display for SectionSpec { } } +/// A codegen definition of a [`ir::FunctionDeclaration`] pub struct FunctionDef { section: SectionSpec, linkage: Linkage, @@ -66,6 +80,7 @@ pub struct FunctionDef { body: Option>, } +/// an [`XLangCodegen`] implementation parameterized on a [`Machine`] that uses [`ssa::FunctionBuilder`] to generate machine code or assembly pub struct SsaCodegenPlugin { mach: Rc, targ: Option<&'static TargetProperties<'static>>, @@ -73,6 +88,7 @@ pub struct SsaCodegenPlugin { } impl SsaCodegenPlugin { + /// Constructs a new [`SsaCodegenPlugin`] based on `mach`. pub fn new(mach: M) -> Self { Self { mach: Rc::new(mach), diff --git a/xlang/xlang_backend/src/mach.rs b/xlang/xlang_backend/src/mach.rs index e858ab98..6a87585e 100644 --- a/xlang/xlang_backend/src/mach.rs +++ b/xlang/xlang_backend/src/mach.rs @@ -1,5 +1,5 @@ use xlang::{ - abi::collection::HashMap, abi::string::StringView, targets::properties::TargetProperties, + abi::collection::HashMap, abi::string::StringView, ir, targets::properties::TargetProperties, vec::Vec, }; @@ -11,14 +11,31 @@ use crate::{ use arch_ops::traits::InsnWrite; -use std::io::Result; - +/// A trait for representing the properties of a machine, and for generating code for the machine from [`SsaInstruction`]s pub trait Machine { + /// The type used per-function to track where each opaque location is placed. type Assignments; + /// The type used per-basic block to track where values need to be spilled or moved to a new assignment type BlockClobbers; + /// Determines whether the given named `targ` matches the current [`Machine`] fn matches_target(&self, targ: StringView) -> bool; + /// Initializes the machine from the properties specified in `targ`. + /// It may be assumed by the implementation that all functions, other than [`Machine::matches_target`], are called only after calling this function and that this function is called at most one. fn init_from_target(&mut self, targ: &TargetProperties); + /// Constructs a new (empty) state for [`Machine::Assignments`]. + /// A single return value is used for only one XIR function, and is not reused by multiple functions. 
fn new_assignments(&self) -> Self::Assignments; + + /// Assigns locations for incoming locations according to the calling convention of the function + fn assign_call_conv( + &self, + assignments: &mut Self::Assignments, + incoming: &[OpaqueLocation], + fnty: &ir::FnType, + tys: &TypeInformation, + ); + + /// Assigns the locations for a basic block, specifying the instructions that make up the lowered basic block, and the list of locations incoming to the basic block. fn assign_locations( &self, assignments: &mut Self::Assignments, @@ -28,11 +45,15 @@ pub trait Machine { incoming_set: &HashMap>, tys: &TypeInformation, ) -> Self::BlockClobbers; + /// Writes the prologue instructions for a function to the given [`InsnWrite`] fn codegen_prologue( &self, assignments: &Self::Assignments, out: &mut W, ) -> std::io::Result<()>; + /// Writes the machine instructions corresponding to the [`SsaInstruction`]s that make up a basic block. + /// + /// The function may generate a label string from its basic block id by using the `label_sym` callback. fn codegen_block String>( &self, assignments: &Self::Assignments, @@ -43,6 +64,9 @@ pub trait Machine { which: u32, tys: &TypeInformation, ) -> std::io::Result<()>; + + /// Mangles the components of a complex XIR [`Path`][xlang::ir::Path] + /// (Does not solely consist of an optional [`Root`][xlang::ir::PathComponent::Root] followed by a single bare [`Text`][xlang::ir::PathComponent::Text]) fn mangle(&self, path: &[xlang::ir::PathComponent]) -> String { mangle_itanium(path) } diff --git a/xlang/xlang_backend/src/ssa.rs b/xlang/xlang_backend/src/ssa.rs index 2a8334cd..8f09cf54 100644 --- a/xlang/xlang_backend/src/ssa.rs +++ b/xlang/xlang_backend/src/ssa.rs @@ -1,6 +1,5 @@ -#![allow(missing_docs)] -use core::cell::{Cell, RefCell}; -use std::panic::Location; +use core::cell::Cell; + use std::rc::Rc; use crate::expr::ValLocation; @@ -29,29 +28,49 @@ impl SharedCounter { } } +/// An aggregate type containing the parameter and return locations of an [`SsaInstruction::Call`] +#[derive(Clone, Debug, Hash, PartialEq, Eq)] +pub struct CallLocations { + /// The locations of the parameters + pub params: Vec, + /// The location of the return value + pub ret: OpaqueLocation, +} + +/// An instruction for SSA codegen lowered from an [`Expr`][xlang::ir::Expr] #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub enum SsaInstruction { - Call(CallTarget, Vec), + /// Performs a call to the given [`CallTarget`] with the parameters in the specified [`OpaqueLocation`]s Call(CallTarget, CallLocations), + /// Jumps to the destination basic block, mapping the given `OpaqueLocation`s to the incoming locations in the destination. Jump(u32, Vec), + /// Falls through to the destination basic block, mapping the given `OpaqueLocation`s to the incoming locations in the destination.
+ /// The code generator may generate code that is invalid if the destination basic block does not immediately follow the current basic block Fallthrough(u32, Vec), + /// Exits from the function, with the values in the given location Exit(Vec), + /// Performs a tailcall to the given [`CallTarget`] with the parameters in the specified [`OpaqueLocation`]s Tailcall(CallTarget, Vec), + /// Executes the specified [`Trap`] Trap(Trap), + /// Loads a scalar immediate value into the given [`OpaqueLocation`] LoadImmediate(OpaqueLocation, u128), } impl core::fmt::Display for SsaInstruction { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - SsaInstruction::Call(targ, params) => { + SsaInstruction::Call(targ, locs) => { f.write_fmt(format_args!("call {}(", targ))?; let mut sep = ""; - for item in params { + for item in &locs.params { f.write_str(sep)?; sep = ", "; item.fmt(f)?; } - f.write_str(")") + f.write_str(") -> ")?; + + locs.ret.fmt(f) } SsaInstruction::Jump(val, stack) => { f.write_fmt(format_args!("jump @{} [", val))?; @@ -101,10 +120,16 @@ impl core::fmt::Display for SsaInstruction { } } +/// Specifies the target of a call site for either [`SsaInstruction::Call`] or [`SsaInstruction::Tailcall`] #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct CallTarget { + /// The pointer to call, which may either be inside an indirect location or a symbol name pub ptr: OpaquePtr, + /// The real (def) type of the pointer, used for computing the calling convention of the function. + /// + /// This may differ from the `call_ty` if the function is variadic pub real_ty: ir::FnType, + /// The callsite type of the pointer, used for computing the calling convention of the function. pub call_ty: ir::FnType, } @@ -114,9 +139,12 @@ impl core::fmt::Display for CallTarget { } } +/// A pointer which can be directly called or read from #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub enum OpaquePtr { + /// A symbol, such as a global variable, function, or label address Symbol(String), + /// A pointer already stored in another location (where the value is unknown) Pointer(OpaqueLocation), } @@ -129,10 +157,14 @@ impl core::fmt::Display for OpaquePtr { } } +/// An opaque value location #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct OpaqueLocation { + /// The type of the stored value pub ty: Rc, + /// The kind (lvalue or rvalue) of the stored value. pub kind: ir::StackValueKind, + /// The unique identifier of the value location pub num: u32, has_addr: bool, } @@ -153,6 +185,7 @@ impl ValLocation for OpaqueLocation { } } +/// A builder that can convert [`ir::FunctionBody`]s into machine code, lowering through [`SsaInstruction`]s. pub struct FunctionBuilder { tys: Rc, sym_name: String, @@ -167,6 +200,9 @@ pub struct FunctionBuilder { } impl FunctionBuilder { + /// Constructs a new [`FunctionBuilder`] for the function, using the given [`Machine`], [`TypeInformation`], and [`TargetProperties`]. + /// + /// `sym_name` and `fnty` are the symbol (mangled) name and definition type of the function definition, respectively.
pub fn new( sym_name: String, mach: Rc, @@ -187,9 +223,8 @@ impl FunctionBuilder { incoming_count: Rc::new(HashMap::new()), } } -} -impl FunctionBuilder { + /// Inserts a new local variable of type `ty` pub fn push_local(&mut self, ty: ir::Type) { Rc::get_mut(&mut self.locals) .expect("No basic blocks may have been pushed yet") @@ -201,12 +236,14 @@ impl FunctionBuilder { }) } + /// Allocates space for the incoming set of basic block `id`, according to `incoming` pub fn push_incoming(&mut self, id: u32, incoming: &Vec) { let incoming_count = Rc::get_mut(&mut self.incoming_count) .expect("new_basic_block may not have been called yet"); incoming_count.insert(id, incoming.len()); } + /// Creates a new [`BasicBlockBuilder`] that refers to basic block `id`, with the given `incoming` stack values. pub fn new_basic_block( &mut self, id: u32, @@ -251,7 +288,10 @@ impl FunctionBuilder { self.basic_blocks.push_mut(builder) } +} +impl FunctionBuilder { + /// Writes the function to the given [`InsnWrite`], with the given function to accept a new symbol pub fn write( &mut self, out: &mut W, @@ -259,6 +299,16 @@ impl FunctionBuilder { ) -> std::io::Result<()> { let mut assigns = self.mach.new_assignments(); let mut block_clobbers = vec![]; + + if let Some(bb) = &self.basic_blocks.first() { + self.mach.assign_call_conv( + &mut assigns, + &self.incoming_locations[&bb.id], + &self.fnty, + &self.tys, + ) + } + for bb in &self.basic_blocks { block_clobbers.push(self.mach.assign_locations( &mut assigns, @@ -302,6 +352,8 @@ impl core::fmt::Display for FunctionBuilder { } } +/// A builder for code generation of [`ir::Block`]s into [`SsaInstruction`]s +#[allow(dead_code)] // ignore unused variables, they'll be used when there's less `todo!()` pub struct BasicBlockBuilder { id: u32, tys: Rc, @@ -315,6 +367,7 @@ pub struct BasicBlockBuilder { } impl BasicBlockBuilder { + /// Moves the given [`VStackValue`] into the specified [`OpaqueLocation`], generating appropriate loads and move instructions to place both transparent and opaque values in the new location pub fn move_into(&mut self, val: VStackValue, loc: OpaqueLocation) { match val { VStackValue::Constant(val) => match val { @@ -324,9 +377,9 @@ impl BasicBlockBuilder { ir::Value::Integer { val, .. } => { self.insns.push(SsaInstruction::LoadImmediate(loc, val)) } - ir::Value::GlobalAddress { ty, item } => todo!(), - ir::Value::ByteString { content } => todo!(), - ir::Value::String { encoding, utf8, ty } => todo!(), + ir::Value::GlobalAddress { .. } => todo!(), + ir::Value::ByteString { .. } => todo!(), + ir::Value::String { .. 
} => todo!(), ir::Value::LabelAddress(_) => todo!(), ir::Value::Empty => panic!("Empty IR value"), }, @@ -341,16 +394,31 @@ impl BasicBlockBuilder { } } + /// Pushes a single [`VStackValue`] to the basic block's eval stack pub fn push(&mut self, val: VStackValue) { self.vstack.push(val); } + /// Pushes all of the specified [`VStackValue`]s to the basic block's eval stack + + pub fn push_values>>(&mut self, vals: I) { + self.vstack.extend(vals) + } + + /// Pops a single [`VStackValue`] from the basic block's eval stack + /// + /// ## Panics + /// Panics if the eval stack is empty at the current location pub fn pop(&mut self) -> VStackValue { self.vstack .pop() .expect("BasicBlockBuilder::pop called with an empty stack") } + /// Pops `N` values from the basic block's eval stack as an array, to allow easier destructuring of several values + /// + /// ## Panics + /// Panics if the eval stack has fewer than `N` values at the current location pub fn pop_values_static(&mut self) -> [VStackValue; N] { use core::mem::MaybeUninit; let mut val = MaybeUninit::<[_; N]>::uninit(); @@ -364,9 +432,18 @@ impl BasicBlockBuilder { unsafe { val.assume_init() } } + /// Pops `n` values from the basic block's eval stack and returns them as a vector. + /// + /// ## Panics + /// Panics if the eval stack has fewer than `n` values at the current location pub fn pop_values(&mut self, n: usize) -> Vec> { self.vstack.split_off_back(n) } + + /// Pops `n` values as opaque locations from the basic block's eval stack and returns them as a vector, moving them into a new [`OpaqueLocation`] if necessary + /// + /// ## Panics + /// Panics if the eval stack has fewer than `n` values at the current location pub fn pop_opaque(&mut self, n: usize) -> Vec { let mut vstack = core::mem::take(&mut self.vstack); @@ -378,6 +455,8 @@ impl BasicBlockBuilder { ret } + /// Returns the [`OpaqueLocation`] currently storing the specified [`VStackValue`], if any, + /// or otherwise allocates a new [`OpaqueLocation`] and moves `val` into it as though by [`BasicBlockBuilder::move_into`] pub fn make_opaque(&mut self, val: VStackValue) -> OpaqueLocation { if let Some(loc) = val.opaque_location() { loc.clone() @@ -402,6 +481,10 @@ impl BasicBlockBuilder { } } + /// Writes a function call with the given parameters to the given target using the specified `call_ty`. + /// + /// If `next` is Some, treats this as a normal call, and ends by jumping to the `next`. + /// Otherwise, treats this as a tail call, with no following jump. pub fn write_call( &mut self, targ: VStackValue, @@ -428,15 +511,24 @@ impl BasicBlockBuilder { }; if let Some(next) = next { + let ret_loc = OpaqueLocation { + ty: Rc::new(real_ty.ret.clone()), + kind: ir::StackValueKind::RValue, + num: self.loc_id_counter.next(), + has_addr: false, + }; self.insns.push(SsaInstruction::Call( CallTarget { ptr: OpaquePtr::Symbol(sym), real_ty, call_ty, }, - params, + CallLocations { + params, + ret: ret_loc.clone(), + }, )); - self.write_jump(&next); + self.write_jump(&next, [ret_loc]); } else { self.insns.push(SsaInstruction::Tailcall( CallTarget { @@ -460,10 +552,23 @@ impl BasicBlockBuilder { } } - pub fn write_jump(&mut self, targ: &JumpTarget) { - let vals = self.incoming_count[&targ.target]; + /// Writes a (possibly [`FALLTHROUGH`][ir::JumpFlags::FALLTHROUGH]) jump to the specified target. 
+ /// + /// `head_vals` contains values placed on the head of the incoming target stack, which must be at most the total number of incoming values + pub fn write_jump>( + &mut self, + targ: &JumpTarget, + head_vals: H, + ) where + H::IntoIter: ExactSizeIterator, + { + let head_vals = head_vals.into_iter(); + let head_count = head_vals.len(); + let vals = self.incoming_count[&targ.target] - head_count; + + let mut vals = self.pop_opaque(vals); - let vals = self.pop_opaque(vals); + vals.extend(head_vals); if targ.flags.contains(ir::JumpTargetFlags::FALLTHROUGH) { self.insns .push(SsaInstruction::Fallthrough(targ.target, vals)); @@ -473,6 +578,11 @@ impl BasicBlockBuilder { } } + /// Writes the given [`ir::Expr`] to the block, generating all necessary instructions, and modifying the basic block's eval stack according to the results of the expression + /// + /// ## Panics + /// The function may panic (or otherwise have incorrect behaviour) if the current state of the basic block's eval stack violates the input constraints in the "Typechecking" specification for `expr`, + /// or if the basic block terminator has already been written. pub fn write_expr(&mut self, expr: &ir::Expr) { match expr { ir::Expr::Sequence(_) => todo!("sequence"), @@ -484,9 +594,22 @@ impl BasicBlockBuilder { ir::Expr::Convert(_, _) => todo!("convert"), ir::Expr::Derive(_, _) => todo!("derive"), ir::Expr::Local(_) => todo!("local"), - ir::Expr::Pop(_) => todo!("pop"), - ir::Expr::Dup(_) => todo!("dup"), - ir::Expr::Pivot(_, _) => todo!("pivot"), + ir::Expr::Pop(n) => { + self.pop_values(*n as usize); + } + ir::Expr::Dup(n) => { + let vals = self.pop_values(*n as usize); + + self.push_values(vals.clone()); + self.push_values(vals); + } + ir::Expr::Pivot(m, n) => { + let first = self.pop_values(*n as usize); + let second = self.pop_values(*m as usize); + + self.push_values(first); + self.push_values(second); + } ir::Expr::Aggregate(_) => todo!("aggregate"), ir::Expr::Member(_) => todo!("member"), ir::Expr::MemberIndirect(_) => todo!("member indirect"), @@ -505,10 +628,17 @@ impl BasicBlockBuilder { } } + /// Writes the given terminator to the basic block. + /// The eval stack is left in an unspecified state, + /// and operations that place constraints on the state of the eval stack may not be validly used (they may panic or produce incorrect results) + /// + /// ## Panics + /// The function may panic (or otherwise have incorrect behaviour) if the current state of the basic block's eval stack violates the input constraints in the "Typechecking" specification for `term`, + /// or if the basic block terminator has already been written.
pub fn write_terminator(&mut self, term: &ir::Terminator) { match term { ir::Terminator::Jump(targ) => { - self.write_jump(targ); + self.write_jump(targ, core::iter::empty()); } ir::Terminator::Branch(_, _, _) => todo!("branch"), ir::Terminator::BranchIndirect => todo!("branch indirect"), @@ -531,7 +661,7 @@ impl BasicBlockBuilder { ir::Terminator::Exit(_) => todo!("exit"), ir::Terminator::Asm(_) => todo!("asm"), ir::Terminator::Switch(_) => todo!("switch"), - ir::Terminator::Unreachable => todo!("unreachable"), + ir::Terminator::Unreachable => self.insns.push(SsaInstruction::Trap(Trap::Unreachable)), } } } diff --git a/xlang/xlang_backend/src/ty.rs b/xlang/xlang_backend/src/ty.rs index 93dc417d..c2df07ec 100644 --- a/xlang/xlang_backend/src/ty.rs +++ b/xlang/xlang_backend/src/ty.rs @@ -2,9 +2,8 @@ use std::convert::TryInto; use xlang::{ ir::{ - self, AggregateDefinition, AggregateKind, AnnotationItem, Path, PointerAliasingRule, - PointerKind, ScalarType, ScalarTypeHeader, ScalarTypeKind, ScalarValidity, Type, - ValidRangeType, Value, + AggregateDefinition, AggregateKind, AnnotationItem, Path, PointerAliasingRule, PointerKind, + ScalarType, ScalarTypeHeader, ScalarTypeKind, ScalarValidity, Type, ValidRangeType, Value, }, prelude::v1::{HashMap, Pair, Some as XLangSome}, targets::properties::TargetProperties, @@ -51,46 +50,6 @@ pub struct TypeInformation { properties: &'static TargetProperties<'static>, } -pub struct FlattenFieldsOf<'a> { - fields_stack: Vec>, - - tys: &'a TypeInformation, -} - -impl<'a> FlattenFieldsOf<'a> { - fn push_fields_of(&mut self, base_offset: u64, agl: &'a AggregateLayout) { - let mut fields = vec![]; - - for Pair(_, (offset, ty)) in &agl.fields { - fields.push((*offset + base_offset, ty)); - } - - fields.sort_by_key(|(off, _)| *off); - - self.fields_stack.push(fields.into_iter()); - } - - fn push_fields_of_array(&mut self, base_off: u64, arr_ty: &'a ir::ArrayType) { - let mut fields = vec![]; - let ty = &arr_ty.ty; - let size = self - .tys - .type_size(ty) - .expect("array type must have a complete value type as a field"); - - let len = match &arr_ty.len { - Value::Integer { val, .. } => (*val) as u64, - val => panic!("Cannot determine length of array from {}", val), - }; - - for i in 0..len { - fields.push((base_off + i * size, ty)); - } - - self.fields_stack.push(fields.into_iter()); - } -} - impl TypeInformation { /// Constructs a new set of [`TypeInformation`] from the properties of a given target pub fn from_properties(properties: &'static TargetProperties) -> Self { From d71b0fa901ad941d9070d3cf52103ab30f3bb0ef Mon Sep 17 00:00:00 2001 From: Connor Horman Date: Mon, 12 Feb 2024 22:15:58 -0500 Subject: [PATCH 18/74] doc(xlang-ir): Document some variants of `Expr` --- xlang/xlang_struct/src/lib.rs | 373 +++++++++++++++++++++++++++++++++- 1 file changed, 365 insertions(+), 8 deletions(-) diff --git a/xlang/xlang_struct/src/lib.rs b/xlang/xlang_struct/src/lib.rs index 92915fe1..91b1504b 100644 --- a/xlang/xlang_struct/src/lib.rs +++ b/xlang/xlang_struct/src/lib.rs @@ -1767,33 +1767,388 @@ pub enum Expr { /// /// ## Semantics /// - /// 1. If `access-class` contains `volatile`, the instruction performs a observable side effect (`[intro.abstract]#6`). - /// 2. If `access-class` contains `nontemporal`, hints that subsequent expressions are unlikely to access memory accessed or modified by preceeding expressions. - /// 3. 
If `access-class` contains an atomic access class, the instruction functions the same a fence instruction with that atomic access class, except that the fence does not *synchronize-with* atomic operations or fence instructions performed by another thread of execution.
+ /// 1. If `access-class` contains `volatile`, the expression performs an observable side effect (`[intro.abstract]#6`).
+ /// 2. If `access-class` contains `nontemporal`, then all preceding operations weakly-sequenced-before this expression *weakly-happens-before* this expression.
+ /// 3. If `access-class` contains an atomic access class, the expression functions the same as a fence expression with that atomic access class, except that the fence does not *synchronize-with* atomic operations or fence instructions performed by another thread of execution.
 /// 4. [_Note: This is suitable for communicating with signal or interrupt handlers executed on the current thread._]
- /// 5. [_Note: The `access-class` modifier `freeze` may appear, but is ignored by this expression. ]
+ /// 5. [_Note: The `access-class` modifier `freeze` may appear, but is ignored by this expression. _]
 /// 6. If `access-class` does not contain either `volatile`, `nontemporal`, or an atomic access class, the expression performs no operation.
 ///
+ /// ### Platform Notes
+ ///
+ /// Typically, a `sequence` expr corresponds to no generated machine code, regardless of access-class.
+ ///
 Sequence(AccessClass),
 /// Pushes a constant value.
 ///
- /// # Stack
+ /// ## Stack
+ ///
+ /// Type checking: `[..]`=>`[..,T]`
+ ///
+ /// Operands: `[..]`=>`[..,Value]`
+ ///
+ /// ## Syntax
 ///
- /// Type checking: [..]=>[..,T]
+ /// ```abnf
+ /// expr /= "const" [..,Value]
+ /// ```
+ ///
+ /// ## Semantics
+ /// Pushes a constant value, with no side effects.
 Const(Value),
- /// Computes
+ /// Computes the given binary op
+ ///
+ /// ## Stack
+ /// In all of the following, `S` means a scalar type, `P` means a pointer type, and `I` an integer type.
+ /// Each occurrence of `S`, `P`, or `I` in the same type checking specification refers to the same type.
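As a quick illustration of the stack-effect notation used in these docs (an editor's sketch assuming 32-bit wrapping arithmetic, not part of the patch): the sequence `const 2; const 3; add` takes the eval stack from `[..]` to `[.., 5]`.

```rust
// Model of the stack effect of `const 2; const 3; add` on an eval stack of i32:
// [..] => [.., 2] => [.., 2, 3] => [.., 5]
fn main() {
    let mut stack: Vec<i32> = Vec::new();

    // const 2  -- [..] => [.., T]
    stack.push(2);
    // const 3  -- [..] => [.., T]
    stack.push(3);
    // add (wrap)  -- [.., S, S] => [.., S]
    let b = stack.pop().unwrap();
    let a = stack.pop().unwrap();
    stack.push(a.wrapping_add(b));

    assert_eq!(stack, vec![5]);
}
```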
+ /// + /// ### Form 1 + /// + /// Type checking: `[..,S,S]`=>`[..,S]` + /// + /// Operands: `[..,a,b]`=>`[..,res]` + /// + /// (All except comparison ops or [`OverflowBehaviour::Checked`]) + /// + /// ### Form 2 + /// + /// Type checking: `[..,S, S]`=>`[..,S, uint(1)]` + /// + /// Operands: `[..,a,b]`=>`[..,res, v]` + /// + /// ([`OverflowBehaviour::Checked`], except for comarpison ops) + /// + /// `S` may not have a vectorsize + /// + /// ### Form 3 + /// + /// Type checking: `[..,S,S]` => `[..,uint(1)]` or `[..,P,P]` => `[..,uint(1)]` + /// + /// Operands: `[..,a,b]`=>`[..,res]` + /// + /// (Comparison Op, other than [`BinaryOp::Cmp`]) + /// + /// ### Form 4 + /// + /// Type checking: `[..,S, S]`=>`[..,int(32)]` or `[..P,P]`=>`[..,int(32)]` + /// + /// Operands: `[..,a,b]`=>`[..,res]` + /// + /// ([`BinaryOp::Cmp`] except [`OverflowBehaviour::Checked`]) + /// + /// ### Form 5 + /// + /// Type checking: `[..,S, S]`=>`[..,int(32), uint(0)]` + /// + /// Operands: `[..,a,b]`=>`[..,res,unordered]` + /// ([`BinaryOp::Cmp`] with [`OverflowBehaviour::Checked`]) + /// + /// ### Form 6 + /// + /// Type checking: `[..,P,I]`=>`[..,P]` or `[..,I,P]`=>`[..,P]` ([`BinaryOp::Add`] only) + /// + /// Operands: `[.., ptr, idx]`=>`[..,res]` or `[.., idx, ptr]`=>`[..,res]` ([`BinaryOp::Add`] only) + /// + /// [`BinaryOp::Add`] or [`BinaryOp::Sub`] only (pointer+integer) + /// + /// ### Form 7 + /// + /// Type checking: `[..,P, P]`=>`[..,DI]` + /// + /// (`DI` is the signed integer type with width equal to the `size_bits` target property) + /// + /// Operands: `[.., a, b]`=>`[..,res]` + /// + /// [`BinaryOp::Sub`] only (pointer+pointer) + /// + /// ## Syntax + /// + /// ```abnf + /// expr /= [] + /// ``` + /// + /// ## Semantics + /// + /// If any operand is `uninit`, all results are `uninit`. + /// + /// ### Form 1 + /// + /// Computes the value of the specified binary-op with `a,b` according to `overflow-behaviour` (defaults to `wrap` if not specified) + /// + /// If an operation overflows (or an integer/fixed-point operation attempts to divide by zero), the result is the following, based on the specified `overflow-behaviour`: + /// * `wrap`: Wraps modulo `2^N`. Division by zero yields `uninit` + /// * `unchecked`: Yields `uninit` + /// * `trap`: Causes abnormal program termination + /// * `saturate`: Saturates to Minimum/Maxmimum value of the type. Division by zero yields a quotient of the maximum value, and a remainder of the minimum value of the type. + /// + /// Overflow in this section is not the same as floating-point overflow. + /// Operations on floating-point or posit types are not considered to overflow for the purposes of this section, even if they would return +/-infinity from finite inputs or cause the `FE_OVERFLOW` floating-point exception. + /// Operations on rationals or fixed-point values *can* cause overflow. + /// + /// If `overflow-behaviour` is `trap`, and any operand is uninit, the behaviour is undefined. + /// + /// ### Form 2 + /// + /// Computes the value of the specified binary-op, checking for overflow or input errors. + /// If an operation overflow (or an integer/fixed-point operation attempts to divide by zero), + /// the result is wrapped modulo 2^N (or saturated for `div`/`rem`) and the overflow flag (`v`) is `1` (else `0`). + /// + /// ### Form 3 + /// + /// Compares two scalar or pointer values, and returns `0` if the comparison fails and `1` otherwise. + /// + /// `overflow-behaviour` is ignored, and may not be `checked`. 
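For Form 2, the `checked` behaviour (wrapped result plus a separate `uint(1)` overflow flag) lines up closely with Rust's `overflowing_*` operations; a rough model follows, with the function name chosen here purely for illustration.

```rust
// Rough model of `add checked` (Form 2): [.., a, b] => [.., res, v],
// where `res` wraps modulo 2^N and `v` is 1 on overflow, else 0.
fn add_checked_form2(stack: &mut Vec<u32>) {
    let b = stack.pop().expect("Form 2 needs two operands");
    let a = stack.pop().expect("Form 2 needs two operands");
    let (res, overflowed) = a.overflowing_add(b);
    stack.push(res);
    stack.push(overflowed as u32); // the uint(1) overflow flag `v`
}

fn main() {
    let mut stack = vec![u32::MAX, 1];
    add_checked_form2(&mut stack);
    assert_eq!(stack, vec![0, 1]); // wrapped result, overflow flag set
}
```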
+ ///
+ /// ### Form 4
+ ///
+ /// Performs 3-way comparison on a scalar or pointer value, returning `-1` if `a<b`, `0` if `a==b`, and `1` if `a>b`.
+ ///
+ /// `overflow-behaviour` is ignored. If the comparison of `a,b` is unordered, the result is `uninit`.
+ ///
+ /// ### Form 5
+ ///
+ /// Performs 3-way comparison on a scalar value, returning `-1` if `a<b`, `0` if `a==b`, and `1` if `a>b`.
+ ///
+ /// If the comparison of `a,b` is unordered, the result is `uninit` and `unord` is `1`; otherwise `unord` is `0`.
+ ///
+ /// ### Form 6
+ ///
+ /// Performs pointer addition/subtraction.
+ ///
+ /// Let `P` be a pointer to `T` where `T` is a complete value type.
+ ///
+ /// For `add`, the result is the pointer derived from `ptr` with address given by the offset from `ptr`
+ /// determined by the offset between an element `a` and the `idx`th following element `b`.
+ ///
+ /// For `sub`, the result is the pointer derived from `ptr` with address given by the offset from `ptr`
+ /// determined by the offset between an element `a` and the `idx`th previous element `b`.
+ ///
+ /// `overflow-behaviour` (default is `wrap`) must be either `wrap` or `unchecked`.
+ ///
+ /// If `overflow-behaviour` is `unchecked`, then the result is `uninit` if any of the following are true:
+ /// * The resulting pointer as above does not point into an object
+ /// * The resulting pointer as above points into a different complete object than `ptr`
+ /// * The offset between the elements of the array is the value `off`, such that `off` cannot be exactly represented as a value of type `DI`
+ /// * The resulting pointer's address wraps the address space (for a far pointer) or segment (for a near pointer)
+ ///
+ /// (`DI` is the signed integer type with width equal to the `sizebits` target property)
+ ///
+ /// ### Form 7
+ ///
+ /// Performs pointer subtraction.
+ ///
+ /// Let `P` be a pointer to `T`, where `T` is a complete value type.
+ ///
+ /// Let the offset between the addresses of `b` and `a` be `off`.
+ ///
+ /// Returns `idx`, such that the offset between some element `A` and the `idx`th previous element `B` is the value `off`.
+ ///
+ /// `overflow-behaviour` (default is `wrap`) must be either `wrap` or `unchecked`.
+ ///
+ /// If `overflow-behaviour` is `unchecked`, then the result is `uninit` if any of the following are true:
+ /// * Either `a` or `b` does not point into an object
+ /// * `a` and `b` do not point into the same complete object
+ /// * Either `off` or `idx` cannot be represented as a value of type `DI`
+ /// * The pointers are near pointers that refer to objects in different segments of the address space
+ ///
+ /// (`DI` is the signed integer type with width equal to the `sizebits` target property)
 BinaryOp(BinaryOp, OverflowBehaviour),
+ /// Performs the given unary operation
+ ///
+ /// ## Stack
+ /// In all of the following, `S` means a scalar type, `I` means an integer type.
+ /// Each occurrence of `S` or `I` in the same type checking specification refers to the same type.
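Forms 6 and 7 describe the usual element-offset pointer arithmetic. A small sketch of the same relationship using Rust raw-pointer methods (`wrapping_add`/`offset_from`), offered purely as an editor's aside:

```rust
fn main() {
    let arr: [u32; 8] = [0, 1, 2, 3, 4, 5, 6, 7];
    let ptr = arr.as_ptr();

    // Form 6 (`add`, wrap): derive a pointer to the `idx`th following element.
    let idx = 3usize;
    let p2 = ptr.wrapping_add(idx);

    // Form 7 (`sub`, pointer - pointer): recover `idx` as a signed element offset.
    // SAFETY: both pointers point into the same array, so `offset_from` is defined.
    let recovered: isize = unsafe { p2.offset_from(ptr) };
    assert_eq!(recovered, idx as isize);
}
```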
+ /// + /// ### Form 1 + /// + /// Type checking: `[..,S]` => `[..,S]` + /// + /// Operands: `[..,val]`=>`[..,res]` + /// + /// (Except for [`OverflowBehaviour::Checked`] or [`UnaryOp::LogicNot`]) + /// + /// ### Form 2 + /// + /// Type checking: `[..,S]`=>`[..,S, uint(1)]` + /// + /// Operands: `[..,val]`=>[..,res,v] + /// + /// ([`OverflowBehaviour::Checked`] except for [`UnaryOp::LogicNot`])) + /// + /// ### Form 3 + /// + /// Type checking: `[..I]`=>`[..,I]` + /// + /// Operands: `[..,val]`=>`[..,val]` + /// + /// ([`UnaryOp::LogicNot`] only) + /// + /// ## Syntax + /// + /// ```abnf + /// expr /= [] + /// ``` + /// + /// ## Semantics + /// + /// If `val` is `uninit`, all results are `uninit`. + /// `overflow-behaviour` defaults to `wrap` + /// + /// ### Form 1 + /// + /// Computes the given unary op on `val`, returning the result. + /// + /// If overflow occurs, the result is according to the `overflow-behaviour`: + /// * `wrap`: The value is wrapped modulo 2^N + /// * `unchecked`: The result is `uninit`. + /// * `trap`: The program terminates abnormally + /// * `saturate`: The value is saturated to the minimum/maximum value of the type. + /// + /// ### Form 2 + /// + /// Computes the given unary op on `val` checking for overflow. + /// + /// If overflow occurs, the result is wrapped modulo `2^N` and `v` is 1. + /// + /// ### Form 3 + /// + /// Computes the logical negation of `val`. + /// If `val` is `0`, then the result is `1` if the integer type is unsigned, and `-1` if the integer type is signed. + /// If `val` is non-zero then the result is `0`. + /// + /// If `val` cannot be represented as `I` (only possible for width `0` integer types), the result is `0`. + /// + /// The overflow-behaviour is ignored (overflow-behaviour cannot be `checked`) + /// UnaryOp(UnaryOp, OverflowBehaviour), Convert(ConversionStrength, Type), Derive(PointerType, Box), + /// Obtains an lvalue that designates the specified local variable + /// + /// ## Stack + /// + /// Type checking: `[..]`=>`[..,lvalue T]` + /// Operands: `[..]`=>`[..,local]` + /// + /// ## Syntax + /// ```abnf + /// local-id = + /// + /// expr :/= "local" + /// ``` + /// + /// where `local-id` matches `"_"*` + /// + /// ## Semantics + /// + /// Pushes an lvalue that designates the specified `` defined in the current function. + /// + /// Ill-formed if `` is not defined in the function + /// Local(u32), + /// The stack `pop n` operation + /// + /// ## Stack + /// + /// Type checking: `[..,T0,T1, ..., Tn]`=>`[..]` + /// + /// Operands: `[..,i0,i1, ..., in]`=>`[..]` + /// + /// ## Syntax + /// + /// ```abnf + /// expr /= "pop" [] + /// ``` + /// + /// ## Semantics + /// + /// For `pop n`, pops `n` values from the stack and discards them with no side effects. + /// + /// `pop` is an alias for `pop 1`. + /// Pop(u32), + /// The stack `dup n` operation + /// + /// ## Stack + /// + /// Type checking: `[.., T0, T1, ..., Tn]` => `[..,T0,T1, ..., Tn, T0, T1, ..., Tn]` + /// + /// Operands: `[..,v0,v1, ..., vn]`=>`[..,v0,v1, ..., vn,v0,v1, ..., vn]` + /// + /// ## Syntax + /// ```abnf + /// expr /= "dup" [] + /// ``` + /// + /// ## Semantics + /// + /// For `dup n`, pops `n` values from the stack, then pushes those `n` values twice in order. 
+ /// + /// `dup` is an alias for `dup 1` Dup(u32), + /// The stack `pivot m n` operation + /// + /// ## Stack + /// + /// Type checking: `[.., S0, S1, ..., Sm, T0, T1, ..., Tn]`=> `[.., T0, T1, ..., Tn, S0, S1, ..., Sm]` + /// + /// Operands: `[.., s0, s1, ..., sm, t0, t1, ..., tn]`=>`[.., t0, t1, ..., tn, s0, s1, ..., sm]` + /// + /// ## Syntax + /// + /// ```abnf + /// expr /= "pivot" [ []] + /// ``` + /// + /// ## Semantics + /// + /// For `pivot m n`, pops `n` values from the stack, preserves them in an unspecified location, pops `m` additional values, + /// then pushes the first set of values popped followed by the second set of values popped. Pivot(u32, u32), Aggregate(AggregateCtor), + /// Projects to a field of the type of an aggregate + /// + /// ## Stack + /// + /// In all of the following, `A` is an aggregate type or a product type. `F` is the type of the named field of `A`. + /// + /// The expression is invalid if `A` does not have the field named by `member-name`. + /// + /// ### Form 1 + /// Type checking: `[.., A]`=>`[..,F]` + /// + /// Operands: `[..,a]`=>`[..,f]` + /// + /// ### Form 2 + /// Type checking: `[.., lvalue A]`=>`[..,lvalue F]` + /// + /// Operands: `[..,a]`=>`[..,f]` + /// + /// ## Syntax + /// ```abnf + /// member-name := / / "(" ")" + /// expr /= "member" + /// ``` + /// + /// ## Semantics + /// + /// Projects to a named member of an aggregate type or product type. + /// + /// ### Form 1 + /// + /// The operand and the result are both rvalues. + /// Returns the value of the named member. + /// + /// ### Form 2 + /// The operand and the result are both lvalues. + /// Returns an lvalue that designates the named member of `a`. + /// If the member is a bitfield, then the resulting lvalue is an lvalue that designates a bitfield. + /// + /// The program is ill-formed if an lvalue that designates a bitfield is used in any of the following ways: + /// * As an operand to a `compound_assign`, `fetch_assign`, `assign`, `as_rvalue`, lvalue op, or unary lvalue op with an atomic access class + /// * As the operand of a `member` or `addr_of` expression + /// * As a value used as an incoming value for a jump target Member(String), MemberIndirect(String), Assign(AccessClass), @@ -1933,8 +2288,10 @@ impl core::fmt::Display for JumpTarget { } bitflags::bitflags! { + /// Flags for the call instruction #[repr(transparent)] pub struct CallFlags: u32{ + /// Indicates the the call or tailcall will definitely return in finite time const WILLRETURN = 1; } } From c9b3b039c24eff66894b524e43eefd3d08eee837 Mon Sep 17 00:00:00 2001 From: Ray Redondo Date: Tue, 13 Feb 2024 15:35:57 -0600 Subject: [PATCH 19/74] feat(xlang-backend-ssa): implement convert for pointers --- xlang/xlang_backend/src/ssa.rs | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/xlang/xlang_backend/src/ssa.rs b/xlang/xlang_backend/src/ssa.rs index 8f09cf54..31fb016e 100644 --- a/xlang/xlang_backend/src/ssa.rs +++ b/xlang/xlang_backend/src/ssa.rs @@ -379,7 +379,7 @@ impl BasicBlockBuilder { } ir::Value::GlobalAddress { .. } => todo!(), ir::Value::ByteString { .. } => todo!(), - ir::Value::String { .. } => todo!(), + ir::Value::String { .. 
} => todo!("{}", loc), ir::Value::LabelAddress(_) => todo!(), ir::Value::Empty => panic!("Empty IR value"), }, @@ -591,7 +591,27 @@ impl BasicBlockBuilder { } ir::Expr::BinaryOp(_, _) => todo!("binary op"), ir::Expr::UnaryOp(_, _) => todo!("unary op"), - ir::Expr::Convert(_, _) => todo!("convert"), + ir::Expr::Convert(strength, new_ty) => match (self.pop(), strength, new_ty) { + ( + VStackValue::Pointer(_, val), + ir::ConversionStrength::Reinterpret, + ir::Type::Pointer(new_ty), + ) => { + self.push(VStackValue::Pointer(new_ty.clone(), val)); + } + ( + VStackValue::Constant(ir::Value::String { encoding, utf8, .. }), + ir::ConversionStrength::Reinterpret, + ir::Type::Pointer(new_ty), + ) => { + self.push(VStackValue::Constant(ir::Value::String { + encoding, + utf8, + ty: ir::Type::Pointer(new_ty.clone()), + })); + } + x => todo!("{:?}", x), + }, ir::Expr::Derive(_, _) => todo!("derive"), ir::Expr::Local(_) => todo!("local"), ir::Expr::Pop(n) => { From 9ceac788dda4d69058a171472d146d978b3a4f78 Mon Sep 17 00:00:00 2001 From: Ray Redondo Date: Tue, 13 Feb 2024 15:41:49 -0600 Subject: [PATCH 20/74] feat(xlang-backend-ssa): implement exit --- xlang/xlang_backend/src/ssa.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/xlang/xlang_backend/src/ssa.rs b/xlang/xlang_backend/src/ssa.rs index 31fb016e..e98c7242 100644 --- a/xlang/xlang_backend/src/ssa.rs +++ b/xlang/xlang_backend/src/ssa.rs @@ -678,7 +678,10 @@ impl BasicBlockBuilder { self.write_call(target, vals, (**call_fnty).clone(), None); } - ir::Terminator::Exit(_) => todo!("exit"), + ir::Terminator::Exit(n) => { + let vals = self.pop_opaque(*n as usize); + self.insns.push(SsaInstruction::Exit(vals)); + } ir::Terminator::Asm(_) => todo!("asm"), ir::Terminator::Switch(_) => todo!("switch"), ir::Terminator::Unreachable => self.insns.push(SsaInstruction::Trap(Trap::Unreachable)), From 5aea21c5841ca0a7969f827e889730b18c4ab8f3 Mon Sep 17 00:00:00 2001 From: Connor Horman Date: Wed, 14 Feb 2024 21:35:27 -0500 Subject: [PATCH 21/74] chore: Move autobuild to a submodule --- .gitignore | 3 +- .gitmodules | 5 +- Cargo.lock | 19 +-- autobuild | 1 + autobuild.toml | 3 + autobuild/Cargo.lock | 154 -------------------- autobuild/Cargo.toml | 18 --- autobuild/build.rs | 6 - autobuild/src/config.rs | 239 ------------------------------- autobuild/src/hash.rs | 222 ----------------------------- autobuild/src/hash/blake.rs | 1 - autobuild/src/hash/sha.rs | 242 -------------------------------- autobuild/src/helpers.rs | 26 ---- autobuild/src/main.rs | 40 ------ autobuild/src/programs.rs | 1 - autobuild/src/programs/rustc.rs | 24 ---- autobuild/src/rand.rs | 20 --- autobuild/src/tools.rs | 58 -------- autobuild/src/tools/config.rs | 230 ------------------------------ 19 files changed, 15 insertions(+), 1297 deletions(-) create mode 160000 autobuild delete mode 100644 autobuild/Cargo.lock delete mode 100644 autobuild/Cargo.toml delete mode 100644 autobuild/build.rs delete mode 100644 autobuild/src/config.rs delete mode 100644 autobuild/src/hash.rs delete mode 100644 autobuild/src/hash/blake.rs delete mode 100644 autobuild/src/hash/sha.rs delete mode 100644 autobuild/src/helpers.rs delete mode 100644 autobuild/src/main.rs delete mode 100644 autobuild/src/programs.rs delete mode 100644 autobuild/src/programs/rustc.rs delete mode 100644 autobuild/src/rand.rs delete mode 100644 autobuild/src/tools.rs delete mode 100644 autobuild/src/tools/config.rs diff --git a/.gitignore b/.gitignore index 50ee3084..e6571968 100644 --- a/.gitignore +++ 
b/.gitignore @@ -5,4 +5,5 @@ autom4te.cache/ *.a a.out testfile -test-output.txt \ No newline at end of file +test-output.txt +/build*/ \ No newline at end of file diff --git a/.gitmodules b/.gitmodules index f0120532..3aa2b241 100644 --- a/.gitmodules +++ b/.gitmodules @@ -6,4 +6,7 @@ url = https://github.com/chorman0773/rust-target-tuples.git [submodule "rtlibs"] path = rtlibs - url = https://github.com/lccc-project/rtlibs.git \ No newline at end of file + url = https://github.com/lccc-project/rtlibs.git +[submodule "autobuild"] + path = autobuild + url = https://github.com/lccc-project/autobuild.git \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index 9713e6ce..8eae8725 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -17,6 +17,7 @@ dependencies = [ name = "autobuild" version = "0.1.0" dependencies = [ + "cfg-match", "install-dirs 0.3.2", "itertools", "lccc-siphash 0.1.0 (git+https://github.com/lccc-project/lccc-siphash)", @@ -33,15 +34,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" -[[package]] -name = "binary-io" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d48e3ca0c86b51d14834d49f91771531b31cce782f4d7774fd7908b044e44769" -dependencies = [ - "fake-enum", -] - [[package]] name = "binfmt" version = "0.1.0" @@ -207,11 +199,9 @@ dependencies = [ name = "frontend-xir" version = "0.1.0" dependencies = [ - "binary-io", - "peekmore", - "target-tuples", "unicode-xid", "xlang", + "xlang_frontend", "xlang_struct", ] @@ -264,9 +254,9 @@ version = "0.1.0" [[package]] name = "itertools" -version = "0.11.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" dependencies = [ "either", ] @@ -701,6 +691,7 @@ dependencies = [ name = "xlang_frontend" version = "0.1.0" dependencies = [ + "unicode-xid", "xlang", ] diff --git a/autobuild b/autobuild new file mode 160000 index 00000000..67d0bbc2 --- /dev/null +++ b/autobuild @@ -0,0 +1 @@ +Subproject commit 67d0bbc2c58160e8ccd5d20243eee49463d47d19 diff --git a/autobuild.toml b/autobuild.toml index 8820142f..2429cc3a 100644 --- a/autobuild.toml +++ b/autobuild.toml @@ -1,7 +1,10 @@ +env = ["RUSTC", "RUSTC_FOR_BUILD"] + [package] name = "lccc" version = "0.1" + [dirs] lcccdir = "{libdir}/lccc" xlangdir = "{lcccdir}/xlang" diff --git a/autobuild/Cargo.lock b/autobuild/Cargo.lock deleted file mode 100644 index 046be4b9..00000000 --- a/autobuild/Cargo.lock +++ /dev/null @@ -1,154 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 3 - -[[package]] -name = "autobuild" -version = "0.1.0" -dependencies = [ - "install-dirs", - "serde", - "serde_derive", - "toml", -] - -[[package]] -name = "equivalent" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" - -[[package]] -name = "hashbrown" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" - -[[package]] -name = "indexmap" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" -dependencies = [ - "equivalent", - "hashbrown", -] - -[[package]] -name = "install-dirs" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9bbf71970b5e249fb464339b5a835d35c03bac97a8fb99e8863d444297fa8bb9" - -[[package]] -name = "memchr" -version = "2.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5486aed0026218e61b8a01d5fbd5a0a134649abb71a0e53b7bc088529dced86e" - -[[package]] -name = "proc-macro2" -version = "1.0.66" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "quote" -version = "1.0.33" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "serde" -version = "1.0.188" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.188" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "serde_spanned" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96426c9936fd7a0124915f9185ea1d20aa9445cc9821142f0a73bc9207a2e186" -dependencies = [ - "serde", -] - -[[package]] -name = "syn" -version = "2.0.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c324c494eba9d92503e6f1ef2e6df781e78f6a7705a0202d9801b198807d518a" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "toml" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c17e963a819c331dcacd7ab957d80bc2b9a9c1e71c804826d2f283dd65306542" -dependencies = [ - "serde", - "serde_spanned", - "toml_datetime", - "toml_edit", -] - -[[package]] -name = "toml_datetime" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" -dependencies = [ - "serde", -] - -[[package]] -name = "toml_edit" -version = "0.19.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8123f27e969974a3dfba720fdb560be359f57b44302d280ba72e76a74480e8a" -dependencies = [ - "indexmap", - "serde", - "serde_spanned", - "toml_datetime", - "winnow", -] - -[[package]] -name = "unicode-ident" -version = "1.0.11" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" - -[[package]] -name = "winnow" -version = "0.5.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c2e3184b9c4e92ad5167ca73039d0c42476302ab603e2fec4487511f38ccefc" -dependencies = [ - "memchr", -] diff --git a/autobuild/Cargo.toml b/autobuild/Cargo.toml deleted file mode 100644 index f76601c6..00000000 --- a/autobuild/Cargo.toml +++ /dev/null @@ -1,18 +0,0 @@ -[package] -name = "autobuild" -version = "0.1.0" -edition = "2018" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -serde = "1" -serde_derive = "1" -toml = "0.8.8" -install-dirs= {version="0.3.2", features=["serde"]} -target-tuples = "0.5.12" -itertools = "0.11.0" -lccc-siphash = {git = "https://github.com/lccc-project/lccc-siphash"} - -[target.'cfg(unix)'.dependencies] -libc = "0.2.152" \ No newline at end of file diff --git a/autobuild/build.rs b/autobuild/build.rs deleted file mode 100644 index 504c2a02..00000000 --- a/autobuild/build.rs +++ /dev/null @@ -1,6 +0,0 @@ -fn main() { - let env = std::env::var("TARGET").unwrap(); - - println!("cargo:rustc-env=TARGET={}", env); - println!("cargo:rerun-if-changed=build.rs"); -} diff --git a/autobuild/src/config.rs b/autobuild/src/config.rs deleted file mode 100644 index f7545344..00000000 --- a/autobuild/src/config.rs +++ /dev/null @@ -1,239 +0,0 @@ -use std::{collections::HashMap, convert::TryFrom, ffi::OsString, path::PathBuf, str::FromStr}; - -use install_dirs::dirs::InstallDirs; -use serde_derive::{Deserialize, Serialize}; - -use target_tuples::{Target, UnknownError}; - -use crate::hash::{self, FileHash}; -use crate::programs::rustc::RustcVersion; -use crate::rand::Rand; - -#[derive(Clone, Debug)] -pub enum ConfigVarValue { - Set, - Unset, - Value(String), -} - -impl serde::Serialize for ConfigVarValue { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - match self { - ConfigVarValue::Set => serializer.serialize_bool(true), - ConfigVarValue::Unset => serializer.serialize_bool(false), - ConfigVarValue::Value(v) => serializer.serialize_str(v), - } - } -} - -impl<'de> serde::Deserialize<'de> for ConfigVarValue { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - struct ValueVisitor; - - impl<'de> serde::de::Visitor<'de> for ValueVisitor { - type Value = ConfigVarValue; - - fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { - formatter.write_str("a string or a boolean") - } - - fn visit_bool(self, v: bool) -> Result - where - E: serde::de::Error, - { - if v { - Ok(ConfigVarValue::Set) - } else { - Ok(ConfigVarValue::Unset) - } - } - - fn visit_string(self, v: String) -> Result - where - E: serde::de::Error, - { - Ok(ConfigVarValue::Value(v)) - } - - fn visit_str(self, v: &str) -> Result - where - E: serde::de::Error, - { - Ok(ConfigVarValue::Value(v.to_string())) - } - } - - deserializer.deserialize_any(ValueVisitor) - } -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct ConfigInstallDirs { - #[serde(flatten)] - pub install_dirs: InstallDirs, - #[serde(flatten)] - pub rest: HashMap, -} - -mod serde_target { - use serde::{ - de::{self, Expected}, - Deserialize, Deserializer, Serializer, - }; - use target_tuples::Target; - - pub fn serialize(targ: &Target, ser: S) -> Result - where - S: Serializer, - { - ser.serialize_str(targ.get_name()) - } 
- - pub fn deserialize<'de, D>(de: D) -> Result - where - D: Deserializer<'de>, - { - struct ExpectedTarget; - - impl de::Expected for ExpectedTarget { - fn fmt(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { - formatter.write_str("Expected a target in the form - or -- with sys being one of , , or followed by either or ") - } - } - let ty = <&str>::deserialize(de)?; - - ty.parse().map_err(|e| { - ::invalid_value(de::Unexpected::Str(ty), &ExpectedTarget) - }) - } -} - -#[derive(Clone, Debug, Hash, PartialEq, Eq, Deserialize, Serialize)] -pub struct ConfigTargets { - #[serde(with = "serde_target")] - pub build: Target, - #[serde(with = "serde_target")] - pub host: Target, - #[serde(with = "serde_target")] - pub target: Target, -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct ConfigFoundProgram { - pub location: PathBuf, - #[serde(flatten)] - pub info: Option, -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub enum ConfigProgramInfo { - Rustc(RustcVersion), -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct ConfigData { - pub src_dir: PathBuf, - pub dirs: ConfigInstallDirs, - pub env: HashMap, - pub programs: HashMap, - pub targets: ConfigTargets, - pub file_cache: HashMap, - pub config_vars: HashMap, - pub global_key: FileHash, -} - -impl ConfigData { - pub fn new(src_dir: PathBuf, dirs: ConfigInstallDirs, targets: ConfigTargets) -> Self { - Self { - src_dir, - dirs, - env: HashMap::new(), - programs: HashMap::new(), - targets, - file_cache: HashMap::new(), - config_vars: HashMap::new(), - global_key: FileHash::ZERO, - } - } -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct ExtraConfigDirs { - #[serde(flatten)] - pub dirs: HashMap, -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(untagged)] -pub enum ConfigDirSpec { - Plain(String), - Separator(Vec), -} - -#[derive(Clone, Debug, Deserialize, Serialize, Default)] -#[serde(default)] -pub struct ProgramSpec { - #[serde(rename = "type")] - pub ty: Option, - pub names: Vec, - pub target: Option, -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] -pub enum ProgramType { - Rustc, -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct BuildTargets { - pub groups: HashMap, - #[serde(flatten)] - pub targets: HashMap, -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct GroupSpec {} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct TargetSpec {} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct Manifest { - pub dirs: ExtraConfigDirs, - pub programs: HashMap, - pub target: BuildTargets, -} - -use std::io; - -#[derive(Clone, Debug)] -pub struct Config { - data: Box, - manifests: HashMap, - rand: Rand, -} - -impl Config { - pub fn new(mut data: Box) -> Self { - let mut rand = Rand::init(); - Self { - data, - manifests: HashMap::new(), - rand, - } - } - - pub fn check_up_to_date(&mut self, file: String) -> io::Result { - let mut buf = self.data.src_dir.clone(); - buf.push(&file); - - todo!() - } -} diff --git a/autobuild/src/hash.rs b/autobuild/src/hash.rs deleted file mode 100644 index ba6984b8..00000000 --- a/autobuild/src/hash.rs +++ /dev/null @@ -1,222 +0,0 @@ -use std::{ - fs, - io::{self, Read as _}, - path::Path, -}; - -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, Hash, PartialEq, Eq)] -#[repr(transparent)] -pub struct FileHash([u8; 32]); - -impl FileHash { - pub const ZERO: FileHash = FileHash([0; 32]); -} - -pub trait FileHasher { - type Output: 
AsRef<[u8]> + 'static; - - /// The size of blocks given to the hash function - /// It is a logic error if this is not a power of two (but the result is not undefined behaviour) - const BLOCK_SIZE: usize; - - fn update(&mut self, buf: &[u8]); - - fn do_final(self, buf: &[u8]) -> Self::Output; -} - -pub struct HashingReader { - buf: Vec, - state: S, - inner: R, -} - -impl HashingReader { - pub const fn new(state: S, inner: R) -> Self { - Self { - state, - inner, - buf: Vec::new(), - } - } - - pub fn into_inner(self) -> R { - self.inner - } -} - -impl HashingReader { - pub fn init(&mut self, k: FileHash) { - self.buf.clear(); - self.buf.reserve(S::BLOCK_SIZE); - - if S::BLOCK_SIZE < 32 { - for v in k.0.chunks_exact(S::BLOCK_SIZE) { - self.state.update(v); - } - } else { - self.buf.extend_from_slice(&k.0); - } - } - pub fn finish(self) -> FileHash { - let mut output = [0; 32]; - - let val = self.state.do_final(&self.buf); - - let val = val.as_ref(); - - let len = val.len().min(32); - - output[..len].copy_from_slice(&val[..len]); - - FileHash(output) - } -} - -impl io::Read for HashingReader { - fn read(&mut self, mut buf: &mut [u8]) -> io::Result { - let read = self.inner.read(buf)?; - - let mut bytes = &buf[..read]; - - let buf_pos = self.buf.len(); - - if bytes.len() > (S::BLOCK_SIZE - buf_pos) { - if buf_pos != 0 { - let (l, r) = bytes.split_at(S::BLOCK_SIZE - buf_pos); - self.buf.extend_from_slice(l); - bytes = r; - self.state.update(&self.buf); - self.buf.clear(); - } - - let ce = bytes.chunks_exact(S::BLOCK_SIZE); - self.buf.extend_from_slice(ce.remainder()); - for chunk in ce { - self.state.update(chunk); - } - } else { - self.buf.extend_from_slice(bytes); - self.buf.resize(S::BLOCK_SIZE, 0); - self.state.update(&self.buf); - self.buf.clear(); - } - - Ok(read) - } -} - -pub mod sha; - -pub fn hash_file>( - path: P, - hasher: S, - key: FileHash, -) -> io::Result { - let mut buf = vec![0; S::BLOCK_SIZE]; - let file = fs::File::open(path)?; - - let mut reader = HashingReader::new(hasher, file); - - reader.init(key); - - loop { - if reader.read(&mut buf)? 
== 0 { - break; - } - } - - Ok(reader.finish()) -} - -const ALPHA: [u8; 16] = [ - b'0', b'1', b'2', b'3', b'4', b'5', b'6', b'7', b'8', b'9', b'a', b'b', b'c', b'd', b'e', b'f', -]; - -impl Serialize for FileHash { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - let mut string = Vec::::with_capacity(64); - - for b in self.0.iter().rev() { - string.push(ALPHA[(b >> 4) as usize]); - string.push(ALPHA[(b & 0xf) as usize]); - } - - // SAFETY: The Vec is entirely ASCII - let string = unsafe { String::from_utf8_unchecked(string) }; - - serializer.serialize_str(&string) - } -} - -impl<'de> Deserialize<'de> for FileHash { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - pub struct HexStringVisitor; - impl<'de> serde::de::Visitor<'de> for HexStringVisitor { - type Value = [u8; 32]; - - fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { - formatter.write_str("a 32-byte value as a hex string") - } - - fn visit_str(self, v: &str) -> Result - where - E: serde::de::Error, - { - if v.len() != 64 { - return Err(E::invalid_value(serde::de::Unexpected::Str(v), &self)); - } - let mut bytes = [0u8; 32]; - let mut arrays = v - .as_bytes() - .chunks_exact(2) - .map(|octet| { - let hi = octet[0]; - let lo = octet[1]; - - match (hi, lo) { - ( - hi @ (b'0'..=b'9' | b'A'..=b'F' | b'a'..=b'f'), - lo @ (b'0'..=b'9' | b'A'..=b'F' | b'a'..=b'f'), - ) => { - let mut mask = (hi << 1) & 0xF0 | (lo >> 3) & 0xF; - - mask |= mask >> 1; - mask |= mask >> 2; - - let val = (hi << 4) & 0xF0 | lo & 0x0F; - - let val = val + (0xAA & mask); - - Ok(val) - } - _ => Err(E::invalid_value(serde::de::Unexpected::Str(v), &self)), - } - }) - .zip(bytes.iter_mut().rev()) - .map(|(b, r)| Ok(*r = b?)) - .collect::>()?; - - Ok(bytes) - } - - fn visit_string(self, v: String) -> Result - where - E: serde::de::Error, - { - self.visit_str(&v) - } - } - - let bytes = deserializer.deserialize_str(HexStringVisitor)?; - - Ok(FileHash(bytes)) - } -} diff --git a/autobuild/src/hash/blake.rs b/autobuild/src/hash/blake.rs deleted file mode 100644 index 8b137891..00000000 --- a/autobuild/src/hash/blake.rs +++ /dev/null @@ -1 +0,0 @@ - diff --git a/autobuild/src/hash/sha.rs b/autobuild/src/hash/sha.rs deleted file mode 100644 index 189930ef..00000000 --- a/autobuild/src/hash/sha.rs +++ /dev/null @@ -1,242 +0,0 @@ -use super::FileHasher; - -#[derive(Copy, Clone)] -pub struct Sha64State { - state: [u64; 8], - processed_bytes: u128, -} - -impl Sha64State { - pub const SHA512: Sha64State = Sha64State::with_init([ - 0x6a09e667f3bcc908, - 0xbb67ae8584caa73b, - 0x3c6ef372fe94f82b, - 0xa54ff53a5f1d36f1, - 0x510e527fade682d1, - 0x9b05688c2b3e6c1f, - 0x1f83d9abfb41bd6b, - 0x5be0cd19137e2179, - ]); - - pub const SHA384: Sha64State = Sha64State::with_init([ - 0xcbbb9d5dc1059ed8, - 0x629a292a367cd507, - 0x9159015a3070dd17, - 0x152fecd8f70e5939, - 0x67332667ffc00b31, - 0x8eb44a8768581511, - 0xdb0c2e0d64f98fa7, - 0x47b5481dbefa4fa4, - ]); - - pub const SHA512_256: Sha64State = Sha64State::with_init([ - 0x22312194FC2BF72C, - 0x9F555FA3C84C64C2, - 0x2393B86B6F53B151, - 0x963877195940EABD, - 0x96283EE2A88EFFE3, - 0xBE5E1E2553863992, - 0x2B0199FC2C85B8AA, - 0x0EB72DDC81C52CA2, - ]); - - pub const SHA512_224: Sha64State = Sha64State::with_init([ - 0x8C3D37C819544DA2, - 0x73E1996689DCD4D6, - 0x1DFAB7AE32FF9C82, - 0x679DD514582F9FCF, - 0x0F6D2B697BD44DA8, - 0x77E36F7304C48942, - 0x3F9D85A86A1D36C8, - 0x1112E6AD91D692A1, - ]); - - pub const fn with_init(state: [u64; 8]) -> 
Self { - Self { - state, - processed_bytes: 0, - } - } - - #[inline] - fn compress(&mut self, w: &[u64; 80]) { - static K: [u64; 80] = [ - 0x428a2f98d728ae22, - 0x7137449123ef65cd, - 0xb5c0fbcfec4d3b2f, - 0xe9b5dba58189dbbc, - 0x3956c25bf348b538, - 0x59f111f1b605d019, - 0x923f82a4af194f9b, - 0xab1c5ed5da6d8118, - 0xd807aa98a3030242, - 0x12835b0145706fbe, - 0x243185be4ee4b28c, - 0x550c7dc3d5ffb4e2, - 0x72be5d74f27b896f, - 0x80deb1fe3b1696b1, - 0x9bdc06a725c71235, - 0xc19bf174cf692694, - 0xe49b69c19ef14ad2, - 0xefbe4786384f25e3, - 0x0fc19dc68b8cd5b5, - 0x240ca1cc77ac9c65, - 0x2de92c6f592b0275, - 0x4a7484aa6ea6e483, - 0x5cb0a9dcbd41fbd4, - 0x76f988da831153b5, - 0x983e5152ee66dfab, - 0xa831c66d2db43210, - 0xb00327c898fb213f, - 0xbf597fc7beef0ee4, - 0xc6e00bf33da88fc2, - 0xd5a79147930aa725, - 0x06ca6351e003826f, - 0x142929670a0e6e70, - 0x27b70a8546d22ffc, - 0x2e1b21385c26c926, - 0x4d2c6dfc5ac42aed, - 0x53380d139d95b3df, - 0x650a73548baf63de, - 0x766a0abb3c77b2a8, - 0x81c2c92e47edaee6, - 0x92722c851482353b, - 0xa2bfe8a14cf10364, - 0xa81a664bbc423001, - 0xc24b8b70d0f89791, - 0xc76c51a30654be30, - 0xd192e819d6ef5218, - 0xd69906245565a910, - 0xf40e35855771202a, - 0x106aa07032bbd1b8, - 0x19a4c116b8d2d0c8, - 0x1e376c085141ab53, - 0x2748774cdf8eeb99, - 0x34b0bcb5e19b48a8, - 0x391c0cb3c5c95a63, - 0x4ed8aa4ae3418acb, - 0x5b9cca4f7763e373, - 0x682e6ff3d6b2b8a3, - 0x748f82ee5defb2fc, - 0x78a5636f43172f60, - 0x84c87814a1f0ab72, - 0x8cc702081a6439ec, - 0x90befffa23631e28, - 0xa4506cebde82bde9, - 0xbef9a3f7b2c67915, - 0xc67178f2e372532b, - 0xca273eceea26619c, - 0xd186b8c721c0c207, - 0xeada7dd6cde0eb1e, - 0xf57d4f7fee6ed178, - 0x06f067aa72176fba, - 0x0a637dc5a2c898a6, - 0x113f9804bef90dae, - 0x1b710b35131c471b, - 0x28db77f523047d84, - 0x32caab7b40c72493, - 0x3c9ebe0a15c9bebc, - 0x431d67c49c100d4c, - 0x4cc5d4becb3e42b6, - 0x597f299cfc657e2a, - 0x5fcb6fab3ad6faec, - 0x6c44198c4a475817, - ]; - let [mut a, mut b, mut c, mut d, mut e, mut f, mut g, mut h] = self.state; - - for i in 0..80 { - let S0 = (a.rotate_right(28)) ^ (a.rotate_right(34)) ^ (a.rotate_right(39)); - let S1 = (e.rotate_right(14)) ^ (e.rotate_right(18)) ^ (e.rotate_right(41)); - let ch = (e & f) ^ (!e & g); - let maj = (a & b) ^ (a & c) ^ (b & c); - let temp1 = h - .wrapping_add(S1) - .wrapping_add(ch) - .wrapping_add(K[i]) - .wrapping_add(w[i]); - let temp2 = S0.wrapping_add(maj); - - h = g; - g = f; - f = e; - e = d.wrapping_add(temp1); - d = c; - c = b; - b = a; - a = temp1.wrapping_add(temp2); - } - - for (h, v) in self.state.iter_mut().zip([a, b, c, d, e, f, g, h]) { - *h = (*h).wrapping_add(v); - } - } - - #[inline] - fn update_with(&mut self, msg: &[u8]) { - let mut w = [0u64; 80]; - - for (i, v) in msg.chunks_exact(8).enumerate() { - // SAFETY: chunks_exact returns exactly 8 byte chunks - w[i] = u64::from_be_bytes(unsafe { *(v as *const [u8] as *const [u8; 8]) }); - } - - for i in 16..80 { - let s0 = (w[i - 15].rotate_right(1)) ^ (w[i - 15].rotate_right(8)) ^ (w[i - 15] >> 7); - let s1 = (w[i - 2].rotate_right(19)) ^ (w[i - 2].rotate_right(61)) ^ (w[i - 2] >> 6); - - w[i] = w[i - 16] - .wrapping_add(s0) - .wrapping_add(w[i - 7]) - .wrapping_add(s1); - } - - self.compress(&w); - } -} - -impl FileHasher for Sha64State { - type Output = [u8; 64]; - const BLOCK_SIZE: usize = 128; - - #[inline] - fn update(&mut self, msg: &[u8]) { - assert!(msg.len() == 128); - - self.processed_bytes += 128; - - self.update_with(msg); - } - - #[inline] - fn do_final(mut self, msg_tail: &[u8]) -> [u8; 64] { - assert!(msg_tail.len() < 128); - - let mut real_msg 
= [0u8; 128]; - let tail_len = msg_tail.len(); - - let processed_bytes = self.processed_bytes + (tail_len as u128); - - real_msg[..tail_len].copy_from_slice(msg_tail); - - real_msg[tail_len] = 0x80; - - if tail_len < 17 { - self.update_with(&real_msg); - real_msg.fill(0); - } - - let processed_bits = processed_bytes << 3; - - real_msg[112..].copy_from_slice(&processed_bits.to_be_bytes()); - - self.update_with(&real_msg); - - let mut output = [0u8; 64]; - - for (output, val) in output.chunks_exact_mut(8).zip(self.state) { - output.copy_from_slice(&val.to_be_bytes()) - } - - output - } -} diff --git a/autobuild/src/helpers.rs b/autobuild/src/helpers.rs deleted file mode 100644 index b64ae668..00000000 --- a/autobuild/src/helpers.rs +++ /dev/null @@ -1,26 +0,0 @@ -pub trait SplitOnceOwned: Sized { - fn split_once_take(&mut self, pat: &str) -> Option<(Self, Self)>; - fn split_once_owned(mut self, pat: &str) -> Result<(Self, Self), Self> { - self.split_once_take(pat).ok_or(self) - } -} - -impl SplitOnceOwned for String { - fn split_once_take(&mut self, pat: &str) -> Option<(Self, Self)> { - if let Some(pos) = self.find(pat) { - let mut new_str = Vec::new(); - - let off = pos + pat.len(); - - new_str.extend_from_slice(&self.as_bytes()[off..]); - - self.truncate(pos); - - Some((core::mem::take(self), unsafe { - String::from_utf8_unchecked(new_str) - })) - } else { - None - } - } -} diff --git a/autobuild/src/main.rs b/autobuild/src/main.rs deleted file mode 100644 index fbd5adec..00000000 --- a/autobuild/src/main.rs +++ /dev/null @@ -1,40 +0,0 @@ -use std::env::Args; -use std::io; - -mod config; -mod hash; -mod helpers; -mod programs; -mod rand; -mod tools; - -fn main() { - let mut args = std::env::args(); - - let mut prg_name = args.next().unwrap(); - - if prg_name == "cargo" { - prg_name.push(' '); - prg_name += &args.next().unwrap(); - } - - match real_main(&prg_name, args) { - Ok(()) => {} - Err(e) => { - eprintln!("{}: {}", prg_name, e); - - std::process::exit(1); - } - } -} - -fn real_main(prg_name: &str, mut args: Args) -> io::Result<()> { - let subcommand = args - .next() - .ok_or_else(|| ()) - .or_else(|_| tools::help_subcommands())?; - - let subcommand_entry = tools::find_tool(&subcommand)?; - - subcommand_entry(prg_name, args) -} diff --git a/autobuild/src/programs.rs b/autobuild/src/programs.rs deleted file mode 100644 index 3d8b2d42..00000000 --- a/autobuild/src/programs.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod rustc; diff --git a/autobuild/src/programs/rustc.rs b/autobuild/src/programs/rustc.rs deleted file mode 100644 index f4377481..00000000 --- a/autobuild/src/programs/rustc.rs +++ /dev/null @@ -1,24 +0,0 @@ -use std::collections::HashSet; - -use serde_derive::{Deserialize, Serialize}; - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct RustcVersion { - pub supported_editions: HashSet, - pub features_available: HashSet, -} - -#[derive(Clone, Debug, Hash, PartialEq, Eq, Deserialize, Serialize)] -#[serde(rename_all = "lowercase")] -pub enum RustEdition { - Rust2015, - Rust2018, - Rust2021, - Rust2024, -} - -#[derive(Clone, Debug, Hash, PartialEq, Eq, Deserialize, Serialize)] -#[serde(rename_all = "lowercase")] -pub enum RustcFeature { - AllowNightly, -} diff --git a/autobuild/src/rand.rs b/autobuild/src/rand.rs deleted file mode 100644 index 92b33ca0..00000000 --- a/autobuild/src/rand.rs +++ /dev/null @@ -1,20 +0,0 @@ -use lccc_siphash::siphash::sys::SipHashState; - -#[derive(Clone, Debug)] -pub struct Rand(SipHashState); - -impl Rand { - pub fn init() -> Self { - let mut 
keys; - - #[cfg(unix)] - unsafe { - keys = [0, 0]; - libc::getrandom(keys.as_mut_ptr().cast(), 16, libc::GRND_RANDOM); - } - - let [k0, k1] = keys; - - Self(SipHashState::from_keys(k0, k1)) - } -} diff --git a/autobuild/src/tools.rs b/autobuild/src/tools.rs deleted file mode 100644 index 3b5bb8b6..00000000 --- a/autobuild/src/tools.rs +++ /dev/null @@ -1,58 +0,0 @@ -use std::env::Args; -use std::io; - -macro_rules! def_tools{ - { - $(tool $tool:ident;)* - } => { - $(pub mod $tool;)* - - pub fn find_tool(x: &str) -> io::Resultio::Result<()>>{ - match x{ - $(::core::stringify!($tool) => Ok($tool::main),)* - x => Err(io::Error::new(io::ErrorKind::InvalidInput, format!("No such subcommand {}", x))) - } - } - - pub fn print_help(prg_name: &str, tool_name: &str, help_cb: fn()){ - println!("Usage: {} ",prg_name); - println!("Available Subcommands:"); - $(println!("\t{}", ::core::stringify!($tool));)* - - println!("{} {} Usage:", prg_name, tool_name); - help_cb() - } - - pub fn help_subcommands() -> io::Result{ - Err(io::Error::new(io::ErrorKind::InvalidInput, { - use core::fmt::Write; - let mut st = String::new(); - - let _ = writeln!(st, "Subcommands:"); - $(let _ = writeln!(st, "\t{}", ::core::stringify!($tool));)* - - st - })) - } - } -} - -def_tools! { - tool config; -} - -pub fn print_version() { - println!("cargo-autobuild v{}", env!("CARGO_PKG_VERSION")); - println!("Copyright (C) 2024 LCCC Maintainers"); - println!("This Package is released under the terms of the 2BSD License + Patent Grant"); - println!("See LICENSE for details"); -} - -pub fn require_arg>(flag: &str, args: &mut I) -> io::Result { - args.next().ok_or_else(|| { - io::Error::new( - io::ErrorKind::InvalidInput, - format!("{} requires an argument", flag), - ) - }) -} diff --git a/autobuild/src/tools/config.rs b/autobuild/src/tools/config.rs deleted file mode 100644 index d4961ca1..00000000 --- a/autobuild/src/tools/config.rs +++ /dev/null @@ -1,230 +0,0 @@ -use std::collections::HashMap; -use std::env::Args; -use std::fs::File; -use std::io::{self, Read}; - -use std::path::{Path, PathBuf}; - -use install_dirs::dirs::InstallDirs; -use target_tuples::Target; - -use crate::config::{ConfigData, ConfigInstallDirs, ConfigTargets, ConfigVarValue}; -use crate::helpers::SplitOnceOwned; - -fn help() { - println!() -} - -pub fn main(prg_name: &str, mut args: Args) -> io::Result<()> { - let mut base_dir = None; - let mut src_dir = None; - let mut cfg_dir = None; - let mut config_vars = HashMap::new(); - let mut extra_install_dirs = HashMap::new(); - - let mut prefix_set = false; - - let mut install_dirs = InstallDirs::defaults(); - - let mut build_alias = None; - let mut host_alias = None; - let mut target_alias = None; - - install_dirs.read_env(); - - while let Some(arg) = args.next() { - match &*arg { - "--help" => { - super::print_help(prg_name, "config", help); - return Ok(()); - } - "--version" => { - super::print_version(); - return Ok(()); - } - "--set" => { - let mut val = super::require_arg("--set", &mut args)?; - - if let Some((k, v)) = val.split_once_take("=") { - config_vars.insert(k, ConfigVarValue::Value(v)); - } else { - config_vars.insert(val, ConfigVarValue::Set); - } - } - "--unset" => { - let val = super::require_arg("--unset", &mut args)?; - - config_vars.insert(val, ConfigVarValue::Unset); - } - "--install" => { - let mut val = super::require_arg("--install", &mut args)?; - - let (k,v) = val.split_once_owned("=") - .map_err(|val| io::Error::new(io::ErrorKind::InvalidInput, format!("--install requires an argument of 
the form dir=path, but got `{}` instead", val)))?; - - extra_install_dirs.insert(k, PathBuf::from(v)); - } - "--src-dir" => { - let val = super::require_arg("--src-dir", &mut args)?; - - src_dir = Some(PathBuf::from(val)); - } - - "--config-dir" => { - let val = super::require_arg("--config-dir", &mut args)?; - - cfg_dir = Some(PathBuf::from(val)); - } - - "--prefix" => { - let val = super::require_arg("--prefix", &mut args)?; - - install_dirs.prefix = PathBuf::from(val); - - prefix_set = true - } - "--build" => { - let val = super::require_arg("--build", &mut args)?; - - build_alias = Some(val); - } - - "--host" => { - let val = super::require_arg("--host", &mut args)?; - - host_alias = Some(val); - } - - "--target" => { - let val = super::require_arg("--target", &mut args)?; - - target_alias = Some(val); - } - - "--" => { - base_dir = args.next().map(PathBuf::from); - break; - } - x if x.starts_with("--") => { - let val = super::require_arg(x, &mut args)?; - - install_dirs.set_from_arg(x, val).map_err(|_| { - io::Error::new( - io::ErrorKind::InvalidInput, - format!("Unrecognized option {}", x), - ) - })?; - } - - _ => { - base_dir = Some(PathBuf::from(arg)); - break; - } - } - } - - let base_dir = base_dir.ok_or(()).or_else(|_| std::env::current_dir())?; - - let base_dir = base_dir.canonicalize()?; - - let cfg_dir = match cfg_dir { - Some(cfg_dir) => cfg_dir.canonicalize()?, - None => { - let mut base_dir_config = base_dir.clone(); - base_dir_config.push(".config.toml"); - - if std::fs::metadata(base_dir_config).is_ok() { - base_dir.clone() - } else { - std::env::current_dir()? - } - } - }; - - let cfg_file = { - let mut buf = cfg_dir.clone(); - buf.push(".config.toml"); - buf - }; - - let mut cfg_data = match File::open(&cfg_file) { - Ok(mut file) => { - let mut st = String::new(); - file.read_to_string(&mut st)?; - Box::new( - toml::from_str::(&st) - .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?, - ) - } - Err(_) => { - let src_dir = match src_dir { - Some(src_dir) => src_dir.canonicalize()?, - None => { - let mut src_dir = base_dir; - - loop { - let mut autobuild_file = src_dir.clone(); - autobuild_file.push("autobuild.toml"); - - if std::fs::metadata(autobuild_file).is_ok() { - break; - } else if &src_dir == Path::new("/") { - return Err(io::Error::new( - io::ErrorKind::InvalidInput, - "Could not find source manifest (autobuild.toml)", - )); - } else { - src_dir.pop(); - } - } - - src_dir - } - }; - - let build = match build_alias.as_ref() { - Some(alias) => alias.parse().map_err(|_| { - io::Error::new( - io::ErrorKind::InvalidInput, - format!("Target `{}` is not recognized or cannot be parsed", alias), - ) - })?, - None => target_tuples::from_env!("TARGET"), - }; - - let host = match host_alias.as_ref() { - Some(alias) => alias.parse().map_err(|_| { - io::Error::new( - io::ErrorKind::InvalidInput, - format!("Target `{}` is not recognized or cannot be parsed", alias), - ) - })?, - None => build.clone(), - }; - - let target = match target_alias.as_ref() { - Some(alias) => alias.parse().map_err(|_| { - io::Error::new( - io::ErrorKind::InvalidInput, - format!("Target `{}` is not recognized or cannot be parsed", alias), - ) - })?, - None => host.clone(), - }; - - let dirs = ConfigInstallDirs { - install_dirs, - rest: extra_install_dirs, - }; - - let targets = ConfigTargets { - build, - host, - target, - }; - Box::new(ConfigData::new(src_dir, dirs, targets)) - } - }; - - Ok(()) -} From f6853ceefbaaa5656c79c23add74799cfba7cac1 Mon Sep 17 00:00:00 2001 From: Connor Horman 
Date: Sun, 25 Feb 2024 21:14:22 -0500 Subject: [PATCH 22/74] feat: Add `RUSTFLAGS` to autobuild.toml --- Cargo.lock | 37 ++++++++++++++----------------------- Cargo.toml | 1 + autobuild | 2 +- autobuild.toml | 3 +-- 4 files changed, 17 insertions(+), 26 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8eae8725..75371f8c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -19,13 +19,13 @@ version = "0.1.0" dependencies = [ "cfg-match", "install-dirs 0.3.2", - "itertools", "lccc-siphash 0.1.0 (git+https://github.com/lccc-project/lccc-siphash)", "libc", "serde", "serde_derive", "target-tuples", - "toml 0.8.8", + "toml 0.8.10", + "with_builtin_macros", ] [[package]] @@ -216,15 +216,15 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.14.0" +version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" [[package]] name = "indexmap" -version = "2.0.0" +version = "2.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" +checksum = "233cf39063f058ea2caae4091bf4a3ef70a653afbc026f5c4a4135d114e3c177" dependencies = [ "equivalent", "hashbrown", @@ -252,15 +252,6 @@ dependencies = [ name = "interning-static-syms" version = "0.1.0" -[[package]] -name = "itertools" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" -dependencies = [ - "either", -] - [[package]] name = "lazy_static" version = "1.4.0" @@ -320,9 +311,9 @@ dependencies = [ [[package]] name = "memchr" -version = "2.6.2" +version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5486aed0026218e61b8a01d5fbd5a0a134649abb71a0e53b7bc088529dced86e" +checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" [[package]] name = "once_cell" @@ -500,9 +491,9 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.8" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1a195ec8c9da26928f773888e0742ca3ca1040c6cd859c919c9f59c1954ab35" +checksum = "9a9aad4a3066010876e8dcf5a8a06e70a558751117a145c6ce2b82c2e2054290" dependencies = [ "serde", "serde_spanned", @@ -521,9 +512,9 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.21.0" +version = "0.22.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d34d383cd00a163b4a5b85053df514d45bc330f6de7737edfe0a93311d1eaa03" +checksum = "2c1b5fd4128cc8d3e0cb74d4ed9a9cc7c7284becd4df68f5f940e1ad123606f6" dependencies = [ "indexmap", "serde", @@ -621,9 +612,9 @@ checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" [[package]] name = "winnow" -version = "0.5.15" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c2e3184b9c4e92ad5167ca73039d0c42476302ab603e2fec4487511f38ccefc" +checksum = "7a4191c47f15cc3ec71fcb4913cb83d58def65dd3787610213c649283b5ce178" dependencies = [ "memchr", ] diff --git a/Cargo.toml b/Cargo.toml index a2e2e1cf..13bb7a81 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,3 +14,4 @@ members = [ "rust/interning-static-syms", "rust/rust_mir_macro", ] +exclude = [] diff --git a/autobuild b/autobuild index 67d0bbc2..fb101710 160000 --- a/autobuild +++ b/autobuild @@ -1 +1 @@ -Subproject commit 
67d0bbc2c58160e8ccd5d20243eee49463d47d19 +Subproject commit fb101710b7e09b79a2cd1ca62494204b4342b181 diff --git a/autobuild.toml b/autobuild.toml index 2429cc3a..1b0a7049 100644 --- a/autobuild.toml +++ b/autobuild.toml @@ -1,10 +1,9 @@ -env = ["RUSTC", "RUSTC_FOR_BUILD"] +env = ["RUSTC", "RUSTC_FOR_BUILD", "RUSTFLAGS", "RUSTFLAGS_FOR_BUILDS"] [package] name = "lccc" version = "0.1" - [dirs] lcccdir = "{libdir}/lccc" xlangdir = "{lcccdir}/xlang" From b41bcd859e3c6ab26945adc8d2182ef47d430504 Mon Sep 17 00:00:00 2001 From: Connor Horman Date: Tue, 27 Feb 2024 21:42:57 -0500 Subject: [PATCH 23/74] chore: Remove vendor directory from git --- vendor/bitflags/.cargo-checksum.json | 1 - vendor/bitflags/CHANGELOG.md | 206 - vendor/bitflags/CODE_OF_CONDUCT.md | 73 - vendor/bitflags/Cargo.toml | 58 - vendor/bitflags/LICENSE-APACHE | 201 - vendor/bitflags/LICENSE-MIT | 25 - vendor/bitflags/README.md | 32 - vendor/bitflags/src/example_generated.rs | 14 - vendor/bitflags/src/lib.rs | 1729 ----- vendor/bitflags/tests/basic.rs | 20 - .../bitflags/tests/compile-fail/impls/copy.rs | 10 - .../tests/compile-fail/impls/copy.stderr.beta | 27 - .../bitflags/tests/compile-fail/impls/eq.rs | 10 - .../tests/compile-fail/impls/eq.stderr.beta | 55 - .../non_integer_base/all_defined.rs | 123 - .../non_integer_base/all_defined.stderr.beta | 27 - .../non_integer_base/all_missing.rs | 13 - .../non_integer_base/all_missing.stderr.beta | 13 - .../compile-fail/visibility/private_field.rs | 13 - .../visibility/private_field.stderr.beta | 10 - .../compile-fail/visibility/private_flags.rs | 18 - .../visibility/private_flags.stderr.beta | 18 - .../compile-fail/visibility/pub_const.rs | 9 - .../visibility/pub_const.stderr.beta | 5 - .../tests/compile-pass/impls/convert.rs | 17 - .../tests/compile-pass/impls/default.rs | 10 - .../compile-pass/impls/inherent_methods.rs | 15 - .../tests/compile-pass/redefinition/core.rs | 14 - .../compile-pass/redefinition/stringify.rs | 19 - vendor/bitflags/tests/compile-pass/repr/c.rs | 10 - .../tests/compile-pass/repr/transparent.rs | 10 - .../compile-pass/visibility/bits_field.rs | 11 - .../tests/compile-pass/visibility/pub_in.rs | 19 - vendor/bitflags/tests/compile.rs | 63 - vendor/bytemuck/.cargo-checksum.json | 1 - vendor/bytemuck/Cargo.toml | 41 - vendor/bytemuck/LICENSE-APACHE | 61 - vendor/bytemuck/LICENSE-MIT | 9 - vendor/bytemuck/LICENSE-ZLIB | 11 - vendor/bytemuck/README.md | 19 - vendor/bytemuck/changelog.md | 161 - vendor/bytemuck/rustfmt.toml | 16 - vendor/bytemuck/src/allocation.rs | 215 - vendor/bytemuck/src/contiguous.rs | 202 - vendor/bytemuck/src/lib.rs | 471 -- vendor/bytemuck/src/offset_of.rs | 135 - vendor/bytemuck/src/pod.rs | 119 - vendor/bytemuck/src/transparent.rs | 249 - vendor/bytemuck/src/zeroable.rs | 158 - vendor/bytemuck/tests/array_tests.rs | 12 - vendor/bytemuck/tests/cast_slice_tests.rs | 194 - vendor/bytemuck/tests/derive.rs | 25 - vendor/bytemuck/tests/doc_tests.rs | 121 - vendor/bytemuck/tests/offset_of_tests.rs | 59 - vendor/bytemuck/tests/std_tests.rs | 45 - vendor/bytemuck/tests/transparent.rs | 64 - vendor/bytemuck/tests/wrapper_forgets.rs | 13 - vendor/bytemuck_derive/.cargo-checksum.json | 1 - vendor/bytemuck_derive/Cargo.toml | 37 - vendor/bytemuck_derive/LICENSE-APACHE | 61 - vendor/bytemuck_derive/LICENSE-MIT | 9 - vendor/bytemuck_derive/LICENSE-ZLIB | 11 - vendor/bytemuck_derive/README.md | 4 - vendor/bytemuck_derive/changelog.md | 11 - vendor/bytemuck_derive/src/lib.rs | 179 - vendor/bytemuck_derive/src/traits.rs | 328 - 
vendor/bytemuck_derive/tests/basic.rs | 50 - vendor/fake-enum/.cargo-checksum.json | 1 - vendor/fake-enum/Cargo.toml | 22 - vendor/fake-enum/LICENSE-APACHE | 201 - vendor/fake-enum/LICENSE-MIT | 8 - vendor/fake-enum/Makefile.in | 179 - vendor/fake-enum/README.md | 13 - vendor/fake-enum/aclocal.m4 | 16 - vendor/fake-enum/build-dirs.sh | 4 - vendor/fake-enum/config.guess | 1700 ----- vendor/fake-enum/config.sub | 1879 ------ vendor/fake-enum/configure | 4207 ------------ vendor/fake-enum/configure.ac | 23 - vendor/fake-enum/install-sh | 541 -- vendor/fake-enum/m4/ax_prog_cc_for_build.m4 | 139 - vendor/fake-enum/m4/bindgen.m4 | 5 - vendor/fake-enum/m4/build_std.m4 | 38 - vendor/fake-enum/m4/lcrust_checks.m4 | 48 - vendor/fake-enum/m4/lcrust_prog_rustc.m4 | 585 -- vendor/fake-enum/run-tests.sh | 4 - vendor/fake-enum/src/lib.rs | 182 - vendor/install-dirs/.cargo-checksum.json | 1 - vendor/install-dirs/Cargo.toml | 24 - vendor/install-dirs/README.md | 3 - vendor/install-dirs/src/dirs.rs | 632 -- vendor/install-dirs/src/lib.rs | 1 - vendor/lazy_static/.cargo-checksum.json | 1 - vendor/lazy_static/Cargo.toml | 46 - vendor/lazy_static/LICENSE-APACHE | 201 - vendor/lazy_static/LICENSE-MIT | 25 - vendor/lazy_static/README.md | 79 - vendor/lazy_static/src/core_lazy.rs | 31 - vendor/lazy_static/src/inline_lazy.rs | 57 - vendor/lazy_static/src/lib.rs | 215 - vendor/lazy_static/tests/no_std.rs | 20 - vendor/lazy_static/tests/test.rs | 164 - vendor/proc-macro2/.cargo-checksum.json | 1 - vendor/proc-macro2/Cargo.toml | 41 - vendor/proc-macro2/LICENSE-APACHE | 201 - vendor/proc-macro2/LICENSE-MIT | 25 - vendor/proc-macro2/README.md | 93 - vendor/proc-macro2/build.rs | 172 - vendor/proc-macro2/src/detection.rs | 67 - vendor/proc-macro2/src/fallback.rs | 923 --- vendor/proc-macro2/src/lib.rs | 1272 ---- vendor/proc-macro2/src/marker.rs | 18 - vendor/proc-macro2/src/parse.rs | 866 --- vendor/proc-macro2/src/wrapper.rs | 966 --- vendor/proc-macro2/tests/comments.rs | 103 - vendor/proc-macro2/tests/features.rs | 8 - vendor/proc-macro2/tests/marker.rs | 92 - vendor/proc-macro2/tests/test.rs | 562 -- vendor/proc-macro2/tests/test_fmt.rs | 26 - vendor/quote/.cargo-checksum.json | 1 - vendor/quote/Cargo.toml | 40 - vendor/quote/LICENSE-APACHE | 201 - vendor/quote/LICENSE-MIT | 25 - vendor/quote/README.md | 261 - vendor/quote/src/ext.rs | 112 - vendor/quote/src/format.rs | 164 - vendor/quote/src/ident_fragment.rs | 86 - vendor/quote/src/lib.rs | 1267 ---- vendor/quote/src/runtime.rs | 402 -- vendor/quote/src/spanned.rs | 42 - vendor/quote/src/to_tokens.rs | 209 - vendor/quote/tests/compiletest.rs | 6 - vendor/quote/tests/test.rs | 459 -- .../ui/does-not-have-iter-interpolated-dup.rs | 9 - .../ui/does-not-have-iter-interpolated.rs | 9 - .../tests/ui/does-not-have-iter-separated.rs | 5 - vendor/quote/tests/ui/does-not-have-iter.rs | 5 - vendor/quote/tests/ui/not-quotable.rs | 7 - vendor/quote/tests/ui/not-repeatable.rs | 8 - vendor/quote/tests/ui/wrong-type-span.rs | 7 - vendor/serde/.cargo-checksum.json | 1 - vendor/serde/Cargo.toml | 44 - vendor/serde/LICENSE-APACHE | 201 - vendor/serde/LICENSE-MIT | 23 - vendor/serde/README.md | 111 - vendor/serde/build.rs | 139 - vendor/serde/crates-io.md | 62 - vendor/serde/src/de/ignored_any.rs | 243 - vendor/serde/src/de/impls.rs | 2664 -------- vendor/serde/src/de/mod.rs | 2275 ------- vendor/serde/src/de/seed.rs | 19 - vendor/serde/src/de/utf8.rs | 46 - vendor/serde/src/de/value.rs | 1635 ----- vendor/serde/src/integer128.rs | 82 - vendor/serde/src/lib.rs | 297 - 
vendor/serde/src/macros.rs | 236 - vendor/serde/src/private/de.rs | 2959 --------- vendor/serde/src/private/doc.rs | 159 - vendor/serde/src/private/mod.rs | 50 - vendor/serde/src/private/ser.rs | 1310 ---- vendor/serde/src/private/size_hint.rs | 21 - vendor/serde/src/ser/fmt.rs | 174 - vendor/serde/src/ser/impls.rs | 935 --- vendor/serde/src/ser/impossible.rs | 216 - vendor/serde/src/ser/mod.rs | 1986 ------ vendor/serde/src/std_error.rs | 48 - vendor/serde_derive/.cargo-checksum.json | 1 - vendor/serde_derive/Cargo.toml | 44 - vendor/serde_derive/LICENSE-APACHE | 201 - vendor/serde_derive/LICENSE-MIT | 23 - vendor/serde_derive/README.md | 111 - vendor/serde_derive/build.rs | 36 - vendor/serde_derive/crates-io.md | 62 - vendor/serde_derive/src/bound.rs | 408 -- vendor/serde_derive/src/de.rs | 3132 --------- vendor/serde_derive/src/dummy.rs | 48 - vendor/serde_derive/src/fragment.rs | 74 - vendor/serde_derive/src/internals/ast.rs | 202 - vendor/serde_derive/src/internals/attr.rs | 1954 ------ vendor/serde_derive/src/internals/case.rs | 197 - vendor/serde_derive/src/internals/check.rs | 420 -- vendor/serde_derive/src/internals/ctxt.rs | 62 - vendor/serde_derive/src/internals/mod.rs | 28 - vendor/serde_derive/src/internals/receiver.rs | 287 - vendor/serde_derive/src/internals/respan.rs | 16 - vendor/serde_derive/src/internals/symbol.rs | 68 - vendor/serde_derive/src/lib.rs | 107 - vendor/serde_derive/src/pretend.rs | 201 - vendor/serde_derive/src/ser.rs | 1338 ---- vendor/serde_derive/src/try.rs | 24 - vendor/static_assertions/.cargo-checksum.json | 1 - vendor/static_assertions/CHANGELOG.md | 181 - vendor/static_assertions/Cargo.toml | 39 - vendor/static_assertions/LICENSE-APACHE | 202 - vendor/static_assertions/LICENSE-MIT | 21 - vendor/static_assertions/README.md | 188 - vendor/static_assertions/src/assert_cfg.rs | 49 - .../static_assertions/src/assert_eq_align.rs | 45 - .../static_assertions/src/assert_eq_size.rs | 123 - vendor/static_assertions/src/assert_fields.rs | 72 - vendor/static_assertions/src/assert_impl.rs | 356 -- .../static_assertions/src/assert_obj_safe.rs | 76 - vendor/static_assertions/src/assert_trait.rs | 105 - vendor/static_assertions/src/assert_type.rs | 101 - vendor/static_assertions/src/const_assert.rs | 109 - vendor/static_assertions/src/lib.rs | 97 - vendor/syn/.cargo-checksum.json | 1 - vendor/syn/Cargo.toml | 97 - vendor/syn/LICENSE-APACHE | 201 - vendor/syn/LICENSE-MIT | 23 - vendor/syn/README.md | 285 - vendor/syn/benches/file.rs | 30 - vendor/syn/benches/rust.rs | 158 - vendor/syn/build.rs | 43 - vendor/syn/src/attr.rs | 664 -- vendor/syn/src/await.rs | 2 - vendor/syn/src/bigint.rs | 66 - vendor/syn/src/buffer.rs | 399 -- vendor/syn/src/custom_keyword.rs | 253 - vendor/syn/src/custom_punctuation.rs | 300 - vendor/syn/src/data.rs | 501 -- vendor/syn/src/derive.rs | 280 - vendor/syn/src/discouraged.rs | 194 - vendor/syn/src/error.rs | 412 -- vendor/syn/src/export.rs | 37 - vendor/syn/src/expr.rs | 3497 ---------- vendor/syn/src/ext.rs | 139 - vendor/syn/src/file.rs | 125 - vendor/syn/src/gen/clone.rs | 2234 ------- vendor/syn/src/gen/debug.rs | 3035 --------- vendor/syn/src/gen/eq.rs | 2288 ------- vendor/syn/src/gen/fold.rs | 3209 ---------- vendor/syn/src/gen/hash.rs | 2867 --------- vendor/syn/src/gen/visit.rs | 3775 ----------- vendor/syn/src/gen/visit_mut.rs | 3781 ----------- vendor/syn/src/gen_helper.rs | 154 - vendor/syn/src/generics.rs | 1290 ---- vendor/syn/src/group.rs | 282 - vendor/syn/src/ident.rs | 102 - vendor/syn/src/item.rs | 3360 ---------- 
vendor/syn/src/lib.rs | 1004 --- vendor/syn/src/lifetime.rs | 154 - vendor/syn/src/lit.rs | 1574 ----- vendor/syn/src/lookahead.rs | 166 - vendor/syn/src/mac.rs | 219 - vendor/syn/src/macros.rs | 168 - vendor/syn/src/op.rs | 234 - vendor/syn/src/parse.rs | 1287 ---- vendor/syn/src/parse_macro_input.rs | 179 - vendor/syn/src/parse_quote.rs | 143 - vendor/syn/src/pat.rs | 932 --- vendor/syn/src/path.rs | 842 --- vendor/syn/src/print.rs | 16 - vendor/syn/src/punctuated.rs | 1040 --- vendor/syn/src/reserved.rs | 44 - vendor/syn/src/sealed.rs | 4 - vendor/syn/src/span.rs | 67 - vendor/syn/src/spanned.rs | 114 - vendor/syn/src/stmt.rs | 322 - vendor/syn/src/thread.rs | 41 - vendor/syn/src/token.rs | 1013 --- vendor/syn/src/tt.rs | 107 - vendor/syn/src/ty.rs | 1238 ---- vendor/syn/src/verbatim.rs | 15 - vendor/syn/src/whitespace.rs | 65 - vendor/syn/tests/.gitignore | 1 - vendor/syn/tests/common/eq.rs | 630 -- vendor/syn/tests/common/mod.rs | 27 - vendor/syn/tests/common/parse.rs | 48 - vendor/syn/tests/debug/gen.rs | 5645 ----------------- vendor/syn/tests/debug/mod.rs | 118 - vendor/syn/tests/macros/mod.rs | 75 - vendor/syn/tests/repo/mod.rs | 137 - vendor/syn/tests/repo/progress.rs | 37 - vendor/syn/tests/test_asyncness.rs | 37 - vendor/syn/tests/test_attribute.rs | 336 - vendor/syn/tests/test_derive_input.rs | 892 --- vendor/syn/tests/test_expr.rs | 320 - vendor/syn/tests/test_generics.rs | 283 - vendor/syn/tests/test_grouping.rs | 52 - vendor/syn/tests/test_ident.rs | 85 - vendor/syn/tests/test_item.rs | 301 - vendor/syn/tests/test_iterators.rs | 49 - vendor/syn/tests/test_lit.rs | 271 - vendor/syn/tests/test_meta.rs | 376 -- vendor/syn/tests/test_parse_buffer.rs | 90 - vendor/syn/tests/test_parse_stream.rs | 12 - vendor/syn/tests/test_pat.rs | 67 - vendor/syn/tests/test_path.rs | 104 - vendor/syn/tests/test_precedence.rs | 416 -- vendor/syn/tests/test_receiver.rs | 127 - vendor/syn/tests/test_round_trip.rs | 200 - vendor/syn/tests/test_shebang.rs | 59 - vendor/syn/tests/test_should_parse.rs | 45 - vendor/syn/tests/test_size.rs | 29 - vendor/syn/tests/test_stmt.rs | 74 - vendor/syn/tests/test_token_trees.rs | 30 - vendor/syn/tests/test_ty.rs | 287 - vendor/syn/tests/test_visibility.rs | 148 - vendor/syn/tests/zzz_stable.rs | 33 - .../target-tuples-0.4.1/.cargo-checksum.json | 1 - vendor/target-tuples-0.4.1/Cargo.lock | 6 - vendor/target-tuples-0.4.1/Cargo.toml | 33 - vendor/target-tuples-0.4.1/LICENSE-APACHE | 0 vendor/target-tuples-0.4.1/LICENSE-MIT | 6 - vendor/target-tuples-0.4.1/Makefile.in | 133 - vendor/target-tuples-0.4.1/README.md | 30 - vendor/target-tuples-0.4.1/aclocal.m4 | 15 - vendor/target-tuples-0.4.1/config.guess | 1480 ----- vendor/target-tuples-0.4.1/config.sub | 1801 ------ vendor/target-tuples-0.4.1/config.sub.1 | 17 - vendor/target-tuples-0.4.1/configure | 4128 ------------ vendor/target-tuples-0.4.1/configure.ac | 17 - vendor/target-tuples-0.4.1/install-sh | 541 -- .../m4/lcrust_prog_rustc.m4 | 220 - vendor/target-tuples-0.4.1/run-tests.sh | 4 - vendor/target-tuples-0.4.1/src/config-sub.rs | 32 - vendor/target-tuples-0.4.1/src/lib.rs | 356 -- vendor/target-tuples-0.4.1/src/pieces.rs | 1011 --- .../target-tuples-0.4.1/tests/config-sub.data | 7 - vendor/target-tuples-0.4.1/tests/sub_tests.rs | 46 - vendor/target-tuples/.cargo-checksum.json | 1 - vendor/target-tuples/Cargo.lock | 7 - vendor/target-tuples/Cargo.toml | 33 - vendor/target-tuples/LICENSE-APACHE | 0 vendor/target-tuples/LICENSE-MIT | 6 - vendor/target-tuples/Makefile.in | 133 - vendor/target-tuples/README.md | 
30 - vendor/target-tuples/aclocal.m4 | 15 - vendor/target-tuples/config.guess | 1480 ----- vendor/target-tuples/config.sub | 1801 ------ vendor/target-tuples/config.sub.1 | 17 - vendor/target-tuples/configure | 4128 ------------ vendor/target-tuples/configure.ac | 17 - vendor/target-tuples/install-sh | 541 -- vendor/target-tuples/m4/lcrust_prog_rustc.m4 | 220 - vendor/target-tuples/run-tests.sh | 4 - vendor/target-tuples/src/config-sub.rs | 32 - vendor/target-tuples/src/lib.rs | 364 -- vendor/target-tuples/src/pieces.rs | 1044 --- vendor/target-tuples/tests/config-sub.data | 7 - vendor/target-tuples/tests/rustc-targets.data | 169 - vendor/target-tuples/tests/sub_tests.rs | 63 - vendor/temp-file/.cargo-checksum.json | 1 - vendor/temp-file/Cargo.toml | 27 - vendor/temp-file/LICENSE | 13 - vendor/temp-file/Readme.md | 98 - vendor/temp-file/src/lib.rs | 255 - vendor/temp-file/src/test.rs | 273 - vendor/toml/.cargo-checksum.json | 1 - vendor/toml/Cargo.lock | 101 - vendor/toml/Cargo.toml | 40 - vendor/toml/LICENSE-APACHE | 201 - vendor/toml/LICENSE-MIT | 25 - vendor/toml/README.md | 38 - vendor/toml/examples/decode.rs | 53 - vendor/toml/examples/enum_external.rs | 44 - vendor/toml/examples/toml2json.rs | 47 - vendor/toml/src/datetime.rs | 425 -- vendor/toml/src/de.rs | 2263 ------- vendor/toml/src/lib.rs | 180 - vendor/toml/src/macros.rs | 462 -- vendor/toml/src/map.rs | 595 -- vendor/toml/src/ser.rs | 1854 ------ vendor/toml/src/spanned.rs | 168 - vendor/toml/src/tokens.rs | 744 --- vendor/toml/src/value.rs | 1080 ---- .../toml/tests/enum_external_deserialize.rs | 258 - vendor/unicode-xid/.cargo-checksum.json | 1 - vendor/unicode-xid/COPYRIGHT | 7 - vendor/unicode-xid/Cargo.toml | 37 - vendor/unicode-xid/LICENSE-APACHE | 201 - vendor/unicode-xid/LICENSE-MIT | 25 - vendor/unicode-xid/README.md | 44 - vendor/unicode-xid/benches/xid.rs | 60 - vendor/unicode-xid/src/lib.rs | 92 - vendor/unicode-xid/src/tables.rs | 1419 ----- vendor/unicode-xid/src/tests.rs | 95 - vendor/unicode-xid/tests/exhaustive_tests.rs | 25 - 379 files changed, 140552 deletions(-) delete mode 100644 vendor/bitflags/.cargo-checksum.json delete mode 100644 vendor/bitflags/CHANGELOG.md delete mode 100644 vendor/bitflags/CODE_OF_CONDUCT.md delete mode 100644 vendor/bitflags/Cargo.toml delete mode 100644 vendor/bitflags/LICENSE-APACHE delete mode 100644 vendor/bitflags/LICENSE-MIT delete mode 100644 vendor/bitflags/README.md delete mode 100644 vendor/bitflags/src/example_generated.rs delete mode 100644 vendor/bitflags/src/lib.rs delete mode 100644 vendor/bitflags/tests/basic.rs delete mode 100644 vendor/bitflags/tests/compile-fail/impls/copy.rs delete mode 100644 vendor/bitflags/tests/compile-fail/impls/copy.stderr.beta delete mode 100644 vendor/bitflags/tests/compile-fail/impls/eq.rs delete mode 100644 vendor/bitflags/tests/compile-fail/impls/eq.stderr.beta delete mode 100644 vendor/bitflags/tests/compile-fail/non_integer_base/all_defined.rs delete mode 100644 vendor/bitflags/tests/compile-fail/non_integer_base/all_defined.stderr.beta delete mode 100644 vendor/bitflags/tests/compile-fail/non_integer_base/all_missing.rs delete mode 100644 vendor/bitflags/tests/compile-fail/non_integer_base/all_missing.stderr.beta delete mode 100644 vendor/bitflags/tests/compile-fail/visibility/private_field.rs delete mode 100644 vendor/bitflags/tests/compile-fail/visibility/private_field.stderr.beta delete mode 100644 vendor/bitflags/tests/compile-fail/visibility/private_flags.rs delete mode 100644 
vendor/bitflags/tests/compile-fail/visibility/private_flags.stderr.beta delete mode 100644 vendor/bitflags/tests/compile-fail/visibility/pub_const.rs delete mode 100644 vendor/bitflags/tests/compile-fail/visibility/pub_const.stderr.beta delete mode 100644 vendor/bitflags/tests/compile-pass/impls/convert.rs delete mode 100644 vendor/bitflags/tests/compile-pass/impls/default.rs delete mode 100644 vendor/bitflags/tests/compile-pass/impls/inherent_methods.rs delete mode 100644 vendor/bitflags/tests/compile-pass/redefinition/core.rs delete mode 100644 vendor/bitflags/tests/compile-pass/redefinition/stringify.rs delete mode 100644 vendor/bitflags/tests/compile-pass/repr/c.rs delete mode 100644 vendor/bitflags/tests/compile-pass/repr/transparent.rs delete mode 100644 vendor/bitflags/tests/compile-pass/visibility/bits_field.rs delete mode 100644 vendor/bitflags/tests/compile-pass/visibility/pub_in.rs delete mode 100644 vendor/bitflags/tests/compile.rs delete mode 100644 vendor/bytemuck/.cargo-checksum.json delete mode 100644 vendor/bytemuck/Cargo.toml delete mode 100644 vendor/bytemuck/LICENSE-APACHE delete mode 100644 vendor/bytemuck/LICENSE-MIT delete mode 100644 vendor/bytemuck/LICENSE-ZLIB delete mode 100644 vendor/bytemuck/README.md delete mode 100644 vendor/bytemuck/changelog.md delete mode 100644 vendor/bytemuck/rustfmt.toml delete mode 100644 vendor/bytemuck/src/allocation.rs delete mode 100644 vendor/bytemuck/src/contiguous.rs delete mode 100644 vendor/bytemuck/src/lib.rs delete mode 100644 vendor/bytemuck/src/offset_of.rs delete mode 100644 vendor/bytemuck/src/pod.rs delete mode 100644 vendor/bytemuck/src/transparent.rs delete mode 100644 vendor/bytemuck/src/zeroable.rs delete mode 100644 vendor/bytemuck/tests/array_tests.rs delete mode 100644 vendor/bytemuck/tests/cast_slice_tests.rs delete mode 100644 vendor/bytemuck/tests/derive.rs delete mode 100644 vendor/bytemuck/tests/doc_tests.rs delete mode 100644 vendor/bytemuck/tests/offset_of_tests.rs delete mode 100644 vendor/bytemuck/tests/std_tests.rs delete mode 100644 vendor/bytemuck/tests/transparent.rs delete mode 100644 vendor/bytemuck/tests/wrapper_forgets.rs delete mode 100644 vendor/bytemuck_derive/.cargo-checksum.json delete mode 100644 vendor/bytemuck_derive/Cargo.toml delete mode 100644 vendor/bytemuck_derive/LICENSE-APACHE delete mode 100644 vendor/bytemuck_derive/LICENSE-MIT delete mode 100644 vendor/bytemuck_derive/LICENSE-ZLIB delete mode 100644 vendor/bytemuck_derive/README.md delete mode 100644 vendor/bytemuck_derive/changelog.md delete mode 100644 vendor/bytemuck_derive/src/lib.rs delete mode 100644 vendor/bytemuck_derive/src/traits.rs delete mode 100644 vendor/bytemuck_derive/tests/basic.rs delete mode 100644 vendor/fake-enum/.cargo-checksum.json delete mode 100644 vendor/fake-enum/Cargo.toml delete mode 100644 vendor/fake-enum/LICENSE-APACHE delete mode 100644 vendor/fake-enum/LICENSE-MIT delete mode 100644 vendor/fake-enum/Makefile.in delete mode 100644 vendor/fake-enum/README.md delete mode 100644 vendor/fake-enum/aclocal.m4 delete mode 100755 vendor/fake-enum/build-dirs.sh delete mode 100755 vendor/fake-enum/config.guess delete mode 100755 vendor/fake-enum/config.sub delete mode 100755 vendor/fake-enum/configure delete mode 100644 vendor/fake-enum/configure.ac delete mode 100755 vendor/fake-enum/install-sh delete mode 100644 vendor/fake-enum/m4/ax_prog_cc_for_build.m4 delete mode 100644 vendor/fake-enum/m4/bindgen.m4 delete mode 100644 vendor/fake-enum/m4/build_std.m4 delete mode 100644 
vendor/fake-enum/m4/lcrust_checks.m4 delete mode 100644 vendor/fake-enum/m4/lcrust_prog_rustc.m4 delete mode 100755 vendor/fake-enum/run-tests.sh delete mode 100644 vendor/fake-enum/src/lib.rs delete mode 100644 vendor/install-dirs/.cargo-checksum.json delete mode 100644 vendor/install-dirs/Cargo.toml delete mode 100644 vendor/install-dirs/README.md delete mode 100644 vendor/install-dirs/src/dirs.rs delete mode 100644 vendor/install-dirs/src/lib.rs delete mode 100644 vendor/lazy_static/.cargo-checksum.json delete mode 100644 vendor/lazy_static/Cargo.toml delete mode 100644 vendor/lazy_static/LICENSE-APACHE delete mode 100644 vendor/lazy_static/LICENSE-MIT delete mode 100644 vendor/lazy_static/README.md delete mode 100644 vendor/lazy_static/src/core_lazy.rs delete mode 100644 vendor/lazy_static/src/inline_lazy.rs delete mode 100644 vendor/lazy_static/src/lib.rs delete mode 100644 vendor/lazy_static/tests/no_std.rs delete mode 100644 vendor/lazy_static/tests/test.rs delete mode 100644 vendor/proc-macro2/.cargo-checksum.json delete mode 100644 vendor/proc-macro2/Cargo.toml delete mode 100644 vendor/proc-macro2/LICENSE-APACHE delete mode 100644 vendor/proc-macro2/LICENSE-MIT delete mode 100644 vendor/proc-macro2/README.md delete mode 100644 vendor/proc-macro2/build.rs delete mode 100644 vendor/proc-macro2/src/detection.rs delete mode 100644 vendor/proc-macro2/src/fallback.rs delete mode 100644 vendor/proc-macro2/src/lib.rs delete mode 100644 vendor/proc-macro2/src/marker.rs delete mode 100644 vendor/proc-macro2/src/parse.rs delete mode 100644 vendor/proc-macro2/src/wrapper.rs delete mode 100644 vendor/proc-macro2/tests/comments.rs delete mode 100644 vendor/proc-macro2/tests/features.rs delete mode 100644 vendor/proc-macro2/tests/marker.rs delete mode 100644 vendor/proc-macro2/tests/test.rs delete mode 100644 vendor/proc-macro2/tests/test_fmt.rs delete mode 100644 vendor/quote/.cargo-checksum.json delete mode 100644 vendor/quote/Cargo.toml delete mode 100644 vendor/quote/LICENSE-APACHE delete mode 100644 vendor/quote/LICENSE-MIT delete mode 100644 vendor/quote/README.md delete mode 100644 vendor/quote/src/ext.rs delete mode 100644 vendor/quote/src/format.rs delete mode 100644 vendor/quote/src/ident_fragment.rs delete mode 100644 vendor/quote/src/lib.rs delete mode 100644 vendor/quote/src/runtime.rs delete mode 100644 vendor/quote/src/spanned.rs delete mode 100644 vendor/quote/src/to_tokens.rs delete mode 100644 vendor/quote/tests/compiletest.rs delete mode 100644 vendor/quote/tests/test.rs delete mode 100644 vendor/quote/tests/ui/does-not-have-iter-interpolated-dup.rs delete mode 100644 vendor/quote/tests/ui/does-not-have-iter-interpolated.rs delete mode 100644 vendor/quote/tests/ui/does-not-have-iter-separated.rs delete mode 100644 vendor/quote/tests/ui/does-not-have-iter.rs delete mode 100644 vendor/quote/tests/ui/not-quotable.rs delete mode 100644 vendor/quote/tests/ui/not-repeatable.rs delete mode 100644 vendor/quote/tests/ui/wrong-type-span.rs delete mode 100644 vendor/serde/.cargo-checksum.json delete mode 100644 vendor/serde/Cargo.toml delete mode 100644 vendor/serde/LICENSE-APACHE delete mode 100644 vendor/serde/LICENSE-MIT delete mode 100644 vendor/serde/README.md delete mode 100644 vendor/serde/build.rs delete mode 100644 vendor/serde/crates-io.md delete mode 100644 vendor/serde/src/de/ignored_any.rs delete mode 100644 vendor/serde/src/de/impls.rs delete mode 100644 vendor/serde/src/de/mod.rs delete mode 100644 vendor/serde/src/de/seed.rs delete mode 100644 vendor/serde/src/de/utf8.rs 
delete mode 100644 vendor/serde/src/de/value.rs delete mode 100644 vendor/serde/src/integer128.rs delete mode 100644 vendor/serde/src/lib.rs delete mode 100644 vendor/serde/src/macros.rs delete mode 100644 vendor/serde/src/private/de.rs delete mode 100644 vendor/serde/src/private/doc.rs delete mode 100644 vendor/serde/src/private/mod.rs delete mode 100644 vendor/serde/src/private/ser.rs delete mode 100644 vendor/serde/src/private/size_hint.rs delete mode 100644 vendor/serde/src/ser/fmt.rs delete mode 100644 vendor/serde/src/ser/impls.rs delete mode 100644 vendor/serde/src/ser/impossible.rs delete mode 100644 vendor/serde/src/ser/mod.rs delete mode 100644 vendor/serde/src/std_error.rs delete mode 100644 vendor/serde_derive/.cargo-checksum.json delete mode 100644 vendor/serde_derive/Cargo.toml delete mode 100644 vendor/serde_derive/LICENSE-APACHE delete mode 100644 vendor/serde_derive/LICENSE-MIT delete mode 100644 vendor/serde_derive/README.md delete mode 100644 vendor/serde_derive/build.rs delete mode 100644 vendor/serde_derive/crates-io.md delete mode 100644 vendor/serde_derive/src/bound.rs delete mode 100644 vendor/serde_derive/src/de.rs delete mode 100644 vendor/serde_derive/src/dummy.rs delete mode 100644 vendor/serde_derive/src/fragment.rs delete mode 100644 vendor/serde_derive/src/internals/ast.rs delete mode 100644 vendor/serde_derive/src/internals/attr.rs delete mode 100644 vendor/serde_derive/src/internals/case.rs delete mode 100644 vendor/serde_derive/src/internals/check.rs delete mode 100644 vendor/serde_derive/src/internals/ctxt.rs delete mode 100644 vendor/serde_derive/src/internals/mod.rs delete mode 100644 vendor/serde_derive/src/internals/receiver.rs delete mode 100644 vendor/serde_derive/src/internals/respan.rs delete mode 100644 vendor/serde_derive/src/internals/symbol.rs delete mode 100644 vendor/serde_derive/src/lib.rs delete mode 100644 vendor/serde_derive/src/pretend.rs delete mode 100644 vendor/serde_derive/src/ser.rs delete mode 100644 vendor/serde_derive/src/try.rs delete mode 100644 vendor/static_assertions/.cargo-checksum.json delete mode 100644 vendor/static_assertions/CHANGELOG.md delete mode 100644 vendor/static_assertions/Cargo.toml delete mode 100644 vendor/static_assertions/LICENSE-APACHE delete mode 100644 vendor/static_assertions/LICENSE-MIT delete mode 100644 vendor/static_assertions/README.md delete mode 100644 vendor/static_assertions/src/assert_cfg.rs delete mode 100644 vendor/static_assertions/src/assert_eq_align.rs delete mode 100644 vendor/static_assertions/src/assert_eq_size.rs delete mode 100644 vendor/static_assertions/src/assert_fields.rs delete mode 100644 vendor/static_assertions/src/assert_impl.rs delete mode 100644 vendor/static_assertions/src/assert_obj_safe.rs delete mode 100644 vendor/static_assertions/src/assert_trait.rs delete mode 100644 vendor/static_assertions/src/assert_type.rs delete mode 100644 vendor/static_assertions/src/const_assert.rs delete mode 100644 vendor/static_assertions/src/lib.rs delete mode 100644 vendor/syn/.cargo-checksum.json delete mode 100644 vendor/syn/Cargo.toml delete mode 100644 vendor/syn/LICENSE-APACHE delete mode 100644 vendor/syn/LICENSE-MIT delete mode 100644 vendor/syn/README.md delete mode 100644 vendor/syn/benches/file.rs delete mode 100644 vendor/syn/benches/rust.rs delete mode 100644 vendor/syn/build.rs delete mode 100644 vendor/syn/src/attr.rs delete mode 100644 vendor/syn/src/await.rs delete mode 100644 vendor/syn/src/bigint.rs delete mode 100644 vendor/syn/src/buffer.rs delete mode 100644 
vendor/syn/src/custom_keyword.rs delete mode 100644 vendor/syn/src/custom_punctuation.rs delete mode 100644 vendor/syn/src/data.rs delete mode 100644 vendor/syn/src/derive.rs delete mode 100644 vendor/syn/src/discouraged.rs delete mode 100644 vendor/syn/src/error.rs delete mode 100644 vendor/syn/src/export.rs delete mode 100644 vendor/syn/src/expr.rs delete mode 100644 vendor/syn/src/ext.rs delete mode 100644 vendor/syn/src/file.rs delete mode 100644 vendor/syn/src/gen/clone.rs delete mode 100644 vendor/syn/src/gen/debug.rs delete mode 100644 vendor/syn/src/gen/eq.rs delete mode 100644 vendor/syn/src/gen/fold.rs delete mode 100644 vendor/syn/src/gen/hash.rs delete mode 100644 vendor/syn/src/gen/visit.rs delete mode 100644 vendor/syn/src/gen/visit_mut.rs delete mode 100644 vendor/syn/src/gen_helper.rs delete mode 100644 vendor/syn/src/generics.rs delete mode 100644 vendor/syn/src/group.rs delete mode 100644 vendor/syn/src/ident.rs delete mode 100644 vendor/syn/src/item.rs delete mode 100644 vendor/syn/src/lib.rs delete mode 100644 vendor/syn/src/lifetime.rs delete mode 100644 vendor/syn/src/lit.rs delete mode 100644 vendor/syn/src/lookahead.rs delete mode 100644 vendor/syn/src/mac.rs delete mode 100644 vendor/syn/src/macros.rs delete mode 100644 vendor/syn/src/op.rs delete mode 100644 vendor/syn/src/parse.rs delete mode 100644 vendor/syn/src/parse_macro_input.rs delete mode 100644 vendor/syn/src/parse_quote.rs delete mode 100644 vendor/syn/src/pat.rs delete mode 100644 vendor/syn/src/path.rs delete mode 100644 vendor/syn/src/print.rs delete mode 100644 vendor/syn/src/punctuated.rs delete mode 100644 vendor/syn/src/reserved.rs delete mode 100644 vendor/syn/src/sealed.rs delete mode 100644 vendor/syn/src/span.rs delete mode 100644 vendor/syn/src/spanned.rs delete mode 100644 vendor/syn/src/stmt.rs delete mode 100644 vendor/syn/src/thread.rs delete mode 100644 vendor/syn/src/token.rs delete mode 100644 vendor/syn/src/tt.rs delete mode 100644 vendor/syn/src/ty.rs delete mode 100644 vendor/syn/src/verbatim.rs delete mode 100644 vendor/syn/src/whitespace.rs delete mode 100644 vendor/syn/tests/.gitignore delete mode 100644 vendor/syn/tests/common/eq.rs delete mode 100644 vendor/syn/tests/common/mod.rs delete mode 100644 vendor/syn/tests/common/parse.rs delete mode 100644 vendor/syn/tests/debug/gen.rs delete mode 100644 vendor/syn/tests/debug/mod.rs delete mode 100644 vendor/syn/tests/macros/mod.rs delete mode 100644 vendor/syn/tests/repo/mod.rs delete mode 100644 vendor/syn/tests/repo/progress.rs delete mode 100644 vendor/syn/tests/test_asyncness.rs delete mode 100644 vendor/syn/tests/test_attribute.rs delete mode 100644 vendor/syn/tests/test_derive_input.rs delete mode 100644 vendor/syn/tests/test_expr.rs delete mode 100644 vendor/syn/tests/test_generics.rs delete mode 100644 vendor/syn/tests/test_grouping.rs delete mode 100644 vendor/syn/tests/test_ident.rs delete mode 100644 vendor/syn/tests/test_item.rs delete mode 100644 vendor/syn/tests/test_iterators.rs delete mode 100644 vendor/syn/tests/test_lit.rs delete mode 100644 vendor/syn/tests/test_meta.rs delete mode 100644 vendor/syn/tests/test_parse_buffer.rs delete mode 100644 vendor/syn/tests/test_parse_stream.rs delete mode 100644 vendor/syn/tests/test_pat.rs delete mode 100644 vendor/syn/tests/test_path.rs delete mode 100644 vendor/syn/tests/test_precedence.rs delete mode 100644 vendor/syn/tests/test_receiver.rs delete mode 100644 vendor/syn/tests/test_round_trip.rs delete mode 100644 vendor/syn/tests/test_shebang.rs delete mode 100644 
vendor/syn/tests/test_should_parse.rs delete mode 100644 vendor/syn/tests/test_size.rs delete mode 100644 vendor/syn/tests/test_stmt.rs delete mode 100644 vendor/syn/tests/test_token_trees.rs delete mode 100644 vendor/syn/tests/test_ty.rs delete mode 100644 vendor/syn/tests/test_visibility.rs delete mode 100644 vendor/syn/tests/zzz_stable.rs delete mode 100644 vendor/target-tuples-0.4.1/.cargo-checksum.json delete mode 100644 vendor/target-tuples-0.4.1/Cargo.lock delete mode 100644 vendor/target-tuples-0.4.1/Cargo.toml delete mode 100644 vendor/target-tuples-0.4.1/LICENSE-APACHE delete mode 100644 vendor/target-tuples-0.4.1/LICENSE-MIT delete mode 100644 vendor/target-tuples-0.4.1/Makefile.in delete mode 100644 vendor/target-tuples-0.4.1/README.md delete mode 100644 vendor/target-tuples-0.4.1/aclocal.m4 delete mode 100755 vendor/target-tuples-0.4.1/config.guess delete mode 100755 vendor/target-tuples-0.4.1/config.sub delete mode 100644 vendor/target-tuples-0.4.1/config.sub.1 delete mode 100755 vendor/target-tuples-0.4.1/configure delete mode 100644 vendor/target-tuples-0.4.1/configure.ac delete mode 100755 vendor/target-tuples-0.4.1/install-sh delete mode 100644 vendor/target-tuples-0.4.1/m4/lcrust_prog_rustc.m4 delete mode 100755 vendor/target-tuples-0.4.1/run-tests.sh delete mode 100644 vendor/target-tuples-0.4.1/src/config-sub.rs delete mode 100644 vendor/target-tuples-0.4.1/src/lib.rs delete mode 100644 vendor/target-tuples-0.4.1/src/pieces.rs delete mode 100644 vendor/target-tuples-0.4.1/tests/config-sub.data delete mode 100644 vendor/target-tuples-0.4.1/tests/sub_tests.rs delete mode 100644 vendor/target-tuples/.cargo-checksum.json delete mode 100644 vendor/target-tuples/Cargo.lock delete mode 100644 vendor/target-tuples/Cargo.toml delete mode 100644 vendor/target-tuples/LICENSE-APACHE delete mode 100644 vendor/target-tuples/LICENSE-MIT delete mode 100644 vendor/target-tuples/Makefile.in delete mode 100644 vendor/target-tuples/README.md delete mode 100644 vendor/target-tuples/aclocal.m4 delete mode 100755 vendor/target-tuples/config.guess delete mode 100755 vendor/target-tuples/config.sub delete mode 100644 vendor/target-tuples/config.sub.1 delete mode 100755 vendor/target-tuples/configure delete mode 100644 vendor/target-tuples/configure.ac delete mode 100755 vendor/target-tuples/install-sh delete mode 100644 vendor/target-tuples/m4/lcrust_prog_rustc.m4 delete mode 100755 vendor/target-tuples/run-tests.sh delete mode 100644 vendor/target-tuples/src/config-sub.rs delete mode 100644 vendor/target-tuples/src/lib.rs delete mode 100644 vendor/target-tuples/src/pieces.rs delete mode 100644 vendor/target-tuples/tests/config-sub.data delete mode 100644 vendor/target-tuples/tests/rustc-targets.data delete mode 100644 vendor/target-tuples/tests/sub_tests.rs delete mode 100644 vendor/temp-file/.cargo-checksum.json delete mode 100644 vendor/temp-file/Cargo.toml delete mode 100644 vendor/temp-file/LICENSE delete mode 100644 vendor/temp-file/Readme.md delete mode 100644 vendor/temp-file/src/lib.rs delete mode 100644 vendor/temp-file/src/test.rs delete mode 100644 vendor/toml/.cargo-checksum.json delete mode 100644 vendor/toml/Cargo.lock delete mode 100644 vendor/toml/Cargo.toml delete mode 100644 vendor/toml/LICENSE-APACHE delete mode 100644 vendor/toml/LICENSE-MIT delete mode 100644 vendor/toml/README.md delete mode 100644 vendor/toml/examples/decode.rs delete mode 100644 vendor/toml/examples/enum_external.rs delete mode 100644 vendor/toml/examples/toml2json.rs delete mode 100644 
vendor/toml/src/datetime.rs delete mode 100644 vendor/toml/src/de.rs delete mode 100644 vendor/toml/src/lib.rs delete mode 100644 vendor/toml/src/macros.rs delete mode 100644 vendor/toml/src/map.rs delete mode 100644 vendor/toml/src/ser.rs delete mode 100644 vendor/toml/src/spanned.rs delete mode 100644 vendor/toml/src/tokens.rs delete mode 100644 vendor/toml/src/value.rs delete mode 100644 vendor/toml/tests/enum_external_deserialize.rs delete mode 100644 vendor/unicode-xid/.cargo-checksum.json delete mode 100644 vendor/unicode-xid/COPYRIGHT delete mode 100644 vendor/unicode-xid/Cargo.toml delete mode 100644 vendor/unicode-xid/LICENSE-APACHE delete mode 100644 vendor/unicode-xid/LICENSE-MIT delete mode 100644 vendor/unicode-xid/README.md delete mode 100644 vendor/unicode-xid/benches/xid.rs delete mode 100644 vendor/unicode-xid/src/lib.rs delete mode 100644 vendor/unicode-xid/src/tables.rs delete mode 100644 vendor/unicode-xid/src/tests.rs delete mode 100644 vendor/unicode-xid/tests/exhaustive_tests.rs diff --git a/vendor/bitflags/.cargo-checksum.json b/vendor/bitflags/.cargo-checksum.json deleted file mode 100644 index 7e8d470b..00000000 --- a/vendor/bitflags/.cargo-checksum.json +++ /dev/null @@ -1 +0,0 @@ -{"files":{"CHANGELOG.md":"d362fc1fccaaf4d421bcf0fe8b80ddb4f625dade0c1ee52d08bd0b95509a49d1","CODE_OF_CONDUCT.md":"42634d0f6d922f49857175af991802822f7f920487aefa2ee250a50d12251a66","Cargo.toml":"87aced7532a7974eb37ab5fe6037f0abafc36d6b2d74891ecd2bf2f14f50d11e","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","README.md":"baa8604f8afb34fd93b9c79729daafb884dedcaf34023e4af8ad037d916061fd","src/example_generated.rs":"e43eb59e90f317f38d436670a6067d2fd9eb35fb319fe716184e4a04e24ed1b2","src/lib.rs":"e6477688535ee326d27238aeedc9cb4320ac35b9d17a4deda09e0587b0ccdbd4","tests/basic.rs":"146f1cbf6279bc609242cd3349f29cb21b41294f5e4921875f5ec95bd83529a2","tests/compile-fail/impls/copy.rs":"b791371237ddc75a7c04d2130e03b462c9c00a80dca08bd45aa97433d9c0d13a","tests/compile-fail/impls/copy.stderr.beta":"77d83484ce221d4b6ff2f7de843929a452d779fcfff428122710dd8218c298e3","tests/compile-fail/impls/eq.rs":"0cee8b9e07d537890e0189710293b53972d0fab63c09366f33c391065afafa99","tests/compile-fail/impls/eq.stderr.beta":"381fc6143d45ce76d7cecc47aa59cb69fe5e79c0b60a4a85d5c6163b400b3cc7","tests/compile-fail/non_integer_base/all_defined.rs":"95e14cad9e94560262f2862c3c01865ac30369b69da1001b0e7285cb55e6cb75","tests/compile-fail/non_integer_base/all_defined.stderr.beta":"1760739a276690903bb03844025587d37939f5dfcbfab309db3c86f32bdbf748","tests/compile-fail/non_integer_base/all_missing.rs":"b3d9da619d23213731ba2581aa7999c796c3c79aaf4f0ee6b11ceec08a11537f","tests/compile-fail/non_integer_base/all_missing.stderr.beta":"37e102290d3867e175b21976be798939f294efb17580d5b51e7b17b590d55132","tests/compile-fail/visibility/private_field.rs":"38e4d3fe6471829360d12c8d09b097f6a21aa93fb51eac3b215d96bdae23316b","tests/compile-fail/visibility/private_field.stderr.beta":"5aa24a3ebb39326f31927721c5017b8beb66c3e501fb865a3fa814c9763bfa0f","tests/compile-fail/visibility/private_flags.rs":"2ce4235802aa4e9c96c4e77d9e31d8401ef58dcda4741325184f0764ab1fe393","tests/compile-fail/visibility/private_flags.stderr.beta":"f3eb9f7baf2689258f3519ff7ee5c6ec3c237264ebcfe63f40c40f2023e5022f","tests/compile-fail/visibility/pub_const.rs":"8f813a97ac518c5ea8ac65b184101912452384afaf7b8d6c5e62f8370eca3c0a","tests/compile-fail/visibility/pub_const.stderr.bet
a":"823976ae1794d7f5372e2ec9aabba497e7bb88004722904c38da342ed98e8962","tests/compile-pass/impls/convert.rs":"88fe80bfb9cd5779f0e1d92c9ec02a8b6bb67e334c07f2309e9c0ba5ef776eb0","tests/compile-pass/impls/default.rs":"c508f9a461691f44b45142fa5ad599f02326e1de4c0cbca6c0593f4652eba109","tests/compile-pass/impls/inherent_methods.rs":"ecc26388e9a394bfa7a5bb69a5d621ab3d4d1e53f28f657bb8e78fe79f437913","tests/compile-pass/redefinition/core.rs":"ff5b6e72f87acc6ebb12405d3c0f6e3fa62e669933656a454bb63b30ea44179c","tests/compile-pass/redefinition/stringify.rs":"1edbce42b900c14425d7ffa14e83e165ebe452d7dccd8c0a8a821bdec64f5c93","tests/compile-pass/repr/c.rs":"6fda17f7c2edfcd155314579e83d0fc8a16209e400f1f9a5ca77bd9a799041f2","tests/compile-pass/repr/transparent.rs":"6cdc87a2137d8a4e0c8ce9b6cba83c82255f8ea125951bf614418685600489ce","tests/compile-pass/visibility/bits_field.rs":"1f3e5ba5a047440066a9f6bf7b7af33f5b06f6b1da3dd9af6886168199a7ea0a","tests/compile-pass/visibility/pub_in.rs":"e95312ff60966d42ec4bc00225507895a9b8ec24056ce6a9edd9145be35d730f","tests/compile.rs":"f27c67a7dd183ca30efea1b6e0880e3469a6dd63b92b1fd711c082df182c9eec"},"package":"bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"} \ No newline at end of file diff --git a/vendor/bitflags/CHANGELOG.md b/vendor/bitflags/CHANGELOG.md deleted file mode 100644 index 12fea167..00000000 --- a/vendor/bitflags/CHANGELOG.md +++ /dev/null @@ -1,206 +0,0 @@ -# 1.3.2 - -- Allow `non_snake_case` in generated flags types ([#256]) - -[#252]: https://github.com/bitflags/bitflags/pull/256 - -# 1.3.1 - -- Revert unconditional `#[repr(transparent)]` ([#252]) - -[#252]: https://github.com/bitflags/bitflags/pull/252 - -# 1.3.0 (yanked) - -- Add `#[repr(transparent)]` ([#187]) - -- End `empty` doc comment with full stop ([#202]) - -- Fix typo in crate root docs ([#206]) - -- Document from_bits_unchecked unsafety ([#207]) - -- Let `is_all` ignore extra bits ([#211]) - -- Allows empty flag definition ([#225]) - -- Making crate accessible from std ([#227]) - -- Make `from_bits` a const fn ([#229]) - -- Allow multiple bitflags structs in one macro invocation ([#235]) - -- Add named functions to perform set operations ([#244]) - -- Fix typos in method docs ([#245]) - -- Modernization of the `bitflags` macro to take advantage of newer features and 2018 idioms ([#246]) - -- Fix regression (in an unreleased feature) and simplify tests ([#247]) - -- Use `Self` and fix bug when overriding `stringify!` ([#249]) - -[#187]: https://github.com/bitflags/bitflags/pull/187 -[#202]: https://github.com/bitflags/bitflags/pull/202 -[#206]: https://github.com/bitflags/bitflags/pull/206 -[#207]: https://github.com/bitflags/bitflags/pull/207 -[#211]: https://github.com/bitflags/bitflags/pull/211 -[#225]: https://github.com/bitflags/bitflags/pull/225 -[#227]: https://github.com/bitflags/bitflags/pull/227 -[#229]: https://github.com/bitflags/bitflags/pull/229 -[#235]: https://github.com/bitflags/bitflags/pull/235 -[#244]: https://github.com/bitflags/bitflags/pull/244 -[#245]: https://github.com/bitflags/bitflags/pull/245 -[#246]: https://github.com/bitflags/bitflags/pull/246 -[#247]: https://github.com/bitflags/bitflags/pull/247 -[#249]: https://github.com/bitflags/bitflags/pull/249 - -# 1.2.1 - -- Remove extraneous `#[inline]` attributes ([#194]) - -[#194]: https://github.com/bitflags/bitflags/pull/194 - -# 1.2.0 - -- Fix typo: {Lower, Upper}Exp - {Lower, Upper}Hex ([#183]) - -- Add support for "unknown" bits ([#188]) - -[#183]: 
https://github.com/rust-lang-nursery/bitflags/pull/183 -[#188]: https://github.com/rust-lang-nursery/bitflags/pull/188 - -# 1.1.0 - -This is a re-release of `1.0.5`, which was yanked due to a bug in the RLS. - -# 1.0.5 - -- Use compiletest_rs flags supported by stable toolchain ([#171]) - -- Put the user provided attributes first ([#173]) - -- Make bitflags methods `const` on newer compilers ([#175]) - -[#171]: https://github.com/rust-lang-nursery/bitflags/pull/171 -[#173]: https://github.com/rust-lang-nursery/bitflags/pull/173 -[#175]: https://github.com/rust-lang-nursery/bitflags/pull/175 - -# 1.0.4 - -- Support Rust 2018 style macro imports ([#165]) - - ```rust - use bitflags::bitflags; - ``` - -[#165]: https://github.com/rust-lang-nursery/bitflags/pull/165 - -# 1.0.3 - -- Improve zero value flag handling and documentation ([#157]) - -[#157]: https://github.com/rust-lang-nursery/bitflags/pull/157 - -# 1.0.2 - -- 30% improvement in compile time of bitflags crate ([#156]) - -- Documentation improvements ([#153]) - -- Implementation cleanup ([#149]) - -[#156]: https://github.com/rust-lang-nursery/bitflags/pull/156 -[#153]: https://github.com/rust-lang-nursery/bitflags/pull/153 -[#149]: https://github.com/rust-lang-nursery/bitflags/pull/149 - -# 1.0.1 -- Add support for `pub(restricted)` specifier on the bitflags struct ([#135]) -- Optimize performance of `all()` when called from a separate crate ([#136]) - -[#135]: https://github.com/rust-lang-nursery/bitflags/pull/135 -[#136]: https://github.com/rust-lang-nursery/bitflags/pull/136 - -# 1.0.0 -- **[breaking change]** Macro now generates [associated constants](https://doc.rust-lang.org/reference/items.html#associated-constants) ([#24]) - -- **[breaking change]** Minimum supported version is Rust **1.20**, due to usage of associated constants - -- After being broken in 0.9, the `#[deprecated]` attribute is now supported again ([#112]) - -- Other improvements to unit tests and documentation ([#106] and [#115]) - -[#24]: https://github.com/rust-lang-nursery/bitflags/pull/24 -[#106]: https://github.com/rust-lang-nursery/bitflags/pull/106 -[#112]: https://github.com/rust-lang-nursery/bitflags/pull/112 -[#115]: https://github.com/rust-lang-nursery/bitflags/pull/115 - -## How to update your code to use associated constants -Assuming the following structure definition: -```rust -bitflags! 
{ - struct Something: u8 { - const FOO = 0b01, - const BAR = 0b10 - } -} -``` -In 0.9 and older you could do: -```rust -let x = FOO.bits | BAR.bits; -``` -Now you must use: -```rust -let x = Something::FOO.bits | Something::BAR.bits; -``` - -# 0.9.1 -- Fix the implementation of `Formatting` traits when other formatting traits were present in scope ([#105]) - -[#105]: https://github.com/rust-lang-nursery/bitflags/pull/105 - -# 0.9.0 -- **[breaking change]** Use struct keyword instead of flags to define bitflag types ([#84]) - -- **[breaking change]** Terminate const items with semicolons instead of commas ([#87]) - -- Implement the `Hex`, `Octal`, and `Binary` formatting traits ([#86]) - -- Printing an empty flag value with the `Debug` trait now prints "(empty)" instead of nothing ([#85]) - -- The `bitflags!` macro can now be used inside of a fn body, to define a type local to that function ([#74]) - -[#74]: https://github.com/rust-lang-nursery/bitflags/pull/74 -[#84]: https://github.com/rust-lang-nursery/bitflags/pull/84 -[#85]: https://github.com/rust-lang-nursery/bitflags/pull/85 -[#86]: https://github.com/rust-lang-nursery/bitflags/pull/86 -[#87]: https://github.com/rust-lang-nursery/bitflags/pull/87 - -# 0.8.2 -- Update feature flag used when building bitflags as a dependency of the Rust toolchain - -# 0.8.1 -- Allow bitflags to be used as a dependency of the Rust toolchain - -# 0.8.0 -- Add support for the experimental `i128` and `u128` integer types ([#57]) -- Add set method: `flags.set(SOME_FLAG, true)` or `flags.set(SOME_FLAG, false)` ([#55]) - This may break code that defines its own set method - -[#55]: https://github.com/rust-lang-nursery/bitflags/pull/55 -[#57]: https://github.com/rust-lang-nursery/bitflags/pull/57 - -# 0.7.1 -*(yanked)* - -# 0.7.0 -- Implement the Extend trait ([#49]) -- Allow definitions inside the `bitflags!` macro to refer to items imported from other modules ([#51]) - -[#49]: https://github.com/rust-lang-nursery/bitflags/pull/49 -[#51]: https://github.com/rust-lang-nursery/bitflags/pull/51 - -# 0.6.0 -- The `no_std` feature was removed as it is now the default -- The `assignment_operators` feature was remove as it is now enabled by default -- Some clippy suggestions have been applied diff --git a/vendor/bitflags/CODE_OF_CONDUCT.md b/vendor/bitflags/CODE_OF_CONDUCT.md deleted file mode 100644 index f7add90a..00000000 --- a/vendor/bitflags/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,73 +0,0 @@ -# Contributor Covenant Code of Conduct - -## Our Pledge - -In the interest of fostering an open and welcoming environment, we as -contributors and maintainers pledge to making participation in our project and -our community a harassment-free experience for everyone, regardless of age, body -size, disability, ethnicity, gender identity and expression, level of experience, -education, socio-economic status, nationality, personal appearance, race, -religion, or sexual identity and orientation. 
- -## Our Standards - -Examples of behavior that contributes to creating a positive environment -include: - -* Using welcoming and inclusive language -* Being respectful of differing viewpoints and experiences -* Gracefully accepting constructive criticism -* Focusing on what is best for the community -* Showing empathy towards other community members - -Examples of unacceptable behavior by participants include: - -* The use of sexualized language or imagery and unwelcome sexual attention or - advances -* Trolling, insulting/derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or electronic - address, without explicit permission -* Other conduct which could reasonably be considered inappropriate in a - professional setting - -## Our Responsibilities - -Project maintainers are responsible for clarifying the standards of acceptable -behavior and are expected to take appropriate and fair corrective action in -response to any instances of unacceptable behavior. - -Project maintainers have the right and responsibility to remove, edit, or -reject comments, commits, code, wiki edits, issues, and other contributions -that are not aligned to this Code of Conduct, or to ban temporarily or -permanently any contributor for other behaviors that they deem inappropriate, -threatening, offensive, or harmful. - -## Scope - -This Code of Conduct applies both within project spaces and in public spaces -when an individual is representing the project or its community. Examples of -representing a project or community include using an official project e-mail -address, posting via an official social media account, or acting as an appointed -representative at an online or offline event. Representation of a project may be -further defined and clarified by project maintainers. - -## Enforcement - -Instances of abusive, harassing, or otherwise unacceptable behavior may be -reported by contacting the project team at coc@senaite.org. All -complaints will be reviewed and investigated and will result in a response that -is deemed necessary and appropriate to the circumstances. The project team is -obligated to maintain confidentiality with regard to the reporter of an incident. -Further details of specific enforcement policies may be posted separately. - -Project maintainers who do not follow or enforce the Code of Conduct in good -faith may face temporary or permanent repercussions as determined by other -members of the project's leadership. - -## Attribution - -This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, -available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html - -[homepage]: https://www.contributor-covenant.org \ No newline at end of file diff --git a/vendor/bitflags/Cargo.toml b/vendor/bitflags/Cargo.toml deleted file mode 100644 index 9d54c725..00000000 --- a/vendor/bitflags/Cargo.toml +++ /dev/null @@ -1,58 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies -# -# If you believe there's an error in this file please file an -# issue against the rust-lang/cargo repository. 
If you're -# editing this file be aware that the upstream Cargo.toml -# will likely look very different (and much more reasonable) - -[package] -edition = "2018" -name = "bitflags" -version = "1.3.2" -authors = ["The Rust Project Developers"] -exclude = ["bors.toml"] -description = "A macro to generate structures which behave like bitflags.\n" -homepage = "https://github.com/bitflags/bitflags" -documentation = "https://docs.rs/bitflags" -readme = "README.md" -keywords = ["bit", "bitmask", "bitflags", "flags"] -categories = ["no-std"] -license = "MIT/Apache-2.0" -repository = "https://github.com/bitflags/bitflags" -[package.metadata.docs.rs] -features = ["example_generated"] -[dependencies.compiler_builtins] -version = "0.1.2" -optional = true - -[dependencies.core] -version = "1.0.0" -optional = true -package = "rustc-std-workspace-core" -[dev-dependencies.rustversion] -version = "1.0" - -[dev-dependencies.serde] -version = "1.0" - -[dev-dependencies.serde_derive] -version = "1.0" - -[dev-dependencies.serde_json] -version = "1.0" - -[dev-dependencies.trybuild] -version = "1.0" - -[dev-dependencies.walkdir] -version = "2.3" - -[features] -default = [] -example_generated = [] -rustc-dep-of-std = ["core", "compiler_builtins"] diff --git a/vendor/bitflags/LICENSE-APACHE b/vendor/bitflags/LICENSE-APACHE deleted file mode 100644 index 16fe87b0..00000000 --- a/vendor/bitflags/LICENSE-APACHE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. 
For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/vendor/bitflags/LICENSE-MIT b/vendor/bitflags/LICENSE-MIT deleted file mode 100644 index 39d4bdb5..00000000 --- a/vendor/bitflags/LICENSE-MIT +++ /dev/null @@ -1,25 +0,0 @@ -Copyright (c) 2014 The Rust Project Developers - -Permission is hereby granted, free of charge, to any -person obtaining a copy of this software and associated -documentation files (the "Software"), to deal in the -Software without restriction, including without -limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software -is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice -shall be included in all copies or substantial portions -of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. diff --git a/vendor/bitflags/README.md b/vendor/bitflags/README.md deleted file mode 100644 index 0da0f853..00000000 --- a/vendor/bitflags/README.md +++ /dev/null @@ -1,32 +0,0 @@ -bitflags -======== - -[![Rust](https://github.com/bitflags/bitflags/workflows/Rust/badge.svg)](https://github.com/bitflags/bitflags/actions) -[![Join the chat at https://gitter.im/bitflags/Lobby](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/bitflags/Lobby?utm_source=badge&utm_medium=badge&utm_content=badge) -[![Latest version](https://img.shields.io/crates/v/bitflags.svg)](https://crates.io/crates/bitflags) -[![Documentation](https://docs.rs/bitflags/badge.svg)](https://docs.rs/bitflags) -![License](https://img.shields.io/crates/l/bitflags.svg) - -A Rust macro to generate structures which behave like a set of bitflags - -- [Documentation](https://docs.rs/bitflags) -- [Release notes](https://github.com/bitflags/bitflags/releases) - -## Usage - -Add this to your `Cargo.toml`: - -```toml -[dependencies] -bitflags = "1.3" -``` - -and this to your source code: - -```rust -use bitflags::bitflags; -``` - -## Rust Version Support - -The minimum supported Rust version is 1.46 due to use of associated constants and const functions. diff --git a/vendor/bitflags/src/example_generated.rs b/vendor/bitflags/src/example_generated.rs deleted file mode 100644 index cf188d99..00000000 --- a/vendor/bitflags/src/example_generated.rs +++ /dev/null @@ -1,14 +0,0 @@ -//! This module shows an example of code generated by the macro. **IT MUST NOT BE USED OUTSIDE THIS -//! CRATE**. - -bitflags! { - /// This is the same `Flags` struct defined in the [crate level example](../index.html#example). - /// Note that this struct is just for documentation purposes only, it must not be used outside - /// this crate. - pub struct Flags: u32 { - const A = 0b00000001; - const B = 0b00000010; - const C = 0b00000100; - const ABC = Self::A.bits | Self::B.bits | Self::C.bits; - } -} diff --git a/vendor/bitflags/src/lib.rs b/vendor/bitflags/src/lib.rs deleted file mode 100644 index 935e432f..00000000 --- a/vendor/bitflags/src/lib.rs +++ /dev/null @@ -1,1729 +0,0 @@ -// Copyright 2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! A typesafe bitmask flag generator useful for sets of C-style bitmask flags. -//! It can be used for creating typesafe wrappers around C APIs. -//! -//! The `bitflags!` macro generates `struct`s that manage a set of flags. The -//! flags should only be defined for integer types, otherwise unexpected type -//! errors may occur at compile time. -//! -//! # Example -//! -//! ``` -//! use bitflags::bitflags; -//! -//! bitflags! { -//! struct Flags: u32 { -//! const A = 0b00000001; -//! const B = 0b00000010; -//! const C = 0b00000100; -//! const ABC = Self::A.bits | Self::B.bits | Self::C.bits; -//! } -//! } -//! -//! fn main() { -//! let e1 = Flags::A | Flags::C; -//! let e2 = Flags::B | Flags::C; -//! assert_eq!((e1 | e2), Flags::ABC); // union -//! 
assert_eq!((e1 & e2), Flags::C); // intersection -//! assert_eq!((e1 - e2), Flags::A); // set difference -//! assert_eq!(!e2, Flags::A); // set complement -//! } -//! ``` -//! -//! See [`example_generated::Flags`](./example_generated/struct.Flags.html) for documentation of code -//! generated by the above `bitflags!` expansion. -//! -//! The generated `struct`s can also be extended with type and trait -//! implementations: -//! -//! ``` -//! use std::fmt; -//! -//! use bitflags::bitflags; -//! -//! bitflags! { -//! struct Flags: u32 { -//! const A = 0b00000001; -//! const B = 0b00000010; -//! } -//! } -//! -//! impl Flags { -//! pub fn clear(&mut self) { -//! self.bits = 0; // The `bits` field can be accessed from within the -//! // same module where the `bitflags!` macro was invoked. -//! } -//! } -//! -//! impl fmt::Display for Flags { -//! fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { -//! write!(f, "hi!") -//! } -//! } -//! -//! fn main() { -//! let mut flags = Flags::A | Flags::B; -//! flags.clear(); -//! assert!(flags.is_empty()); -//! assert_eq!(format!("{}", flags), "hi!"); -//! assert_eq!(format!("{:?}", Flags::A | Flags::B), "A | B"); -//! assert_eq!(format!("{:?}", Flags::B), "B"); -//! } -//! ``` -//! -//! # Visibility -//! -//! The generated structs and their associated flag constants are not exported -//! out of the current module by default. A definition can be exported out of -//! the current module by adding `pub` before `struct`: -//! -//! ``` -//! mod example { -//! use bitflags::bitflags; -//! -//! bitflags! { -//! pub struct Flags1: u32 { -//! const A = 0b00000001; -//! } -//! -//! # pub -//! struct Flags2: u32 { -//! const B = 0b00000010; -//! } -//! } -//! } -//! -//! fn main() { -//! let flag1 = example::Flags1::A; -//! let flag2 = example::Flags2::B; // error: const `B` is private -//! } -//! ``` -//! -//! # Attributes -//! -//! Attributes can be attached to the generated `struct`s by placing them -//! before the `struct` keyword. -//! -//! ## Representations -//! -//! It's valid to add a `#[repr(C)]` or `#[repr(transparent)]` attribute to a type -//! generated by `bitflags!`. In these cases, the type is guaranteed to be a newtype. -//! -//! ``` -//! use bitflags::bitflags; -//! -//! bitflags! { -//! #[repr(transparent)] -//! struct Flags: u32 { -//! const A = 0b00000001; -//! const B = 0b00000010; -//! const C = 0b00000100; -//! } -//! } -//! ``` -//! -//! # Trait implementations -//! -//! The `Copy`, `Clone`, `PartialEq`, `Eq`, `PartialOrd`, `Ord` and `Hash` -//! traits are automatically derived for the `struct`s using the `derive` attribute. -//! Additional traits can be derived by providing an explicit `derive` -//! attribute on `struct`. -//! -//! The `Extend` and `FromIterator` traits are implemented for the `struct`s, -//! too: `Extend` adds the union of the instances of the `struct` iterated over, -//! while `FromIterator` calculates the union. -//! -//! The `Binary`, `Debug`, `LowerHex`, `Octal` and `UpperHex` traits are also -//! implemented by displaying the bits value of the internal struct. -//! -//! ## Operators -//! -//! The following operator traits are implemented for the generated `struct`s: -//! -//! - `BitOr` and `BitOrAssign`: union -//! - `BitAnd` and `BitAndAssign`: intersection -//! - `BitXor` and `BitXorAssign`: toggle -//! - `Sub` and `SubAssign`: set difference -//! - `Not`: set complement -//! -//! # Methods -//! -//! The following methods are defined for the generated `struct`s: -//! -//! - `empty`: an empty set of flags -//! 
- `all`: the set of all defined flags -//! - `bits`: the raw value of the flags currently stored -//! - `from_bits`: convert from underlying bit representation, unless that -//! representation contains bits that do not correspond to a -//! defined flag -//! - `from_bits_truncate`: convert from underlying bit representation, dropping -//! any bits that do not correspond to defined flags -//! - `from_bits_unchecked`: convert from underlying bit representation, keeping -//! all bits (even those not corresponding to defined -//! flags) -//! - `is_empty`: `true` if no flags are currently stored -//! - `is_all`: `true` if currently set flags exactly equal all defined flags -//! - `intersects`: `true` if there are flags common to both `self` and `other` -//! - `contains`: `true` if all of the flags in `other` are contained within `self` -//! - `insert`: inserts the specified flags in-place -//! - `remove`: removes the specified flags in-place -//! - `toggle`: the specified flags will be inserted if not present, and removed -//! if they are. -//! - `set`: inserts or removes the specified flags depending on the passed value -//! - `intersection`: returns a new set of flags, containing only the flags present -//! in both `self` and `other` (the argument to the function). -//! - `union`: returns a new set of flags, containing any flags present in -//! either `self` or `other` (the argument to the function). -//! - `difference`: returns a new set of flags, containing all flags present in -//! `self` without any of the flags present in `other` (the -//! argument to the function). -//! - `symmetric_difference`: returns a new set of flags, containing all flags -//! present in either `self` or `other` (the argument -//! to the function), but not both. -//! - `complement`: returns a new set of flags, containing all flags which are -//! not set in `self`, but which are allowed for this type. -//! -//! ## Default -//! -//! The `Default` trait is not automatically implemented for the generated structs. -//! -//! If your default value is equal to `0` (which is the same value as calling `empty()` -//! on the generated struct), you can simply derive `Default`: -//! -//! ``` -//! use bitflags::bitflags; -//! -//! bitflags! { -//! // Results in default value with bits: 0 -//! #[derive(Default)] -//! struct Flags: u32 { -//! const A = 0b00000001; -//! const B = 0b00000010; -//! const C = 0b00000100; -//! } -//! } -//! -//! fn main() { -//! let derived_default: Flags = Default::default(); -//! assert_eq!(derived_default.bits(), 0); -//! } -//! ``` -//! -//! If your default value is not equal to `0` you need to implement `Default` yourself: -//! -//! ``` -//! use bitflags::bitflags; -//! -//! bitflags! { -//! struct Flags: u32 { -//! const A = 0b00000001; -//! const B = 0b00000010; -//! const C = 0b00000100; -//! } -//! } -//! -//! // explicit `Default` implementation -//! impl Default for Flags { -//! fn default() -> Flags { -//! Flags::A | Flags::C -//! } -//! } -//! -//! fn main() { -//! let implemented_default: Flags = Default::default(); -//! assert_eq!(implemented_default, (Flags::A | Flags::C)); -//! } -//! ``` -//! -//! # Zero Flags -//! -//! Flags with a value equal to zero will have some strange behavior that one should be aware of. -//! -//! ``` -//! use bitflags::bitflags; -//! -//! bitflags! { -//! struct Flags: u32 { -//! const NONE = 0b00000000; -//! const SOME = 0b00000001; -//! } -//! } -//! -//! fn main() { -//! let empty = Flags::empty(); -//! let none = Flags::NONE; -//! 
let some = Flags::SOME; -//! -//! // Zero flags are treated as always present -//! assert!(empty.contains(Flags::NONE)); -//! assert!(none.contains(Flags::NONE)); -//! assert!(some.contains(Flags::NONE)); -//! -//! // Zero flags will be ignored when testing for emptiness -//! assert!(none.is_empty()); -//! } -//! ``` -//! -//! Users should generally avoid defining a flag with a value of zero. - -#![cfg_attr(not(test), no_std)] -#![doc(html_root_url = "https://docs.rs/bitflags/1.3.2")] - -#[doc(hidden)] -pub extern crate core as _core; - -/// The macro used to generate the flag structures. -/// -/// See the [crate level docs](../bitflags/index.html) for complete documentation. -/// -/// # Example -/// -/// ``` -/// use bitflags::bitflags; -/// -/// bitflags! { -/// struct Flags: u32 { -/// const A = 0b00000001; -/// const B = 0b00000010; -/// const C = 0b00000100; -/// const ABC = Self::A.bits | Self::B.bits | Self::C.bits; -/// } -/// } -/// -/// fn main() { -/// let e1 = Flags::A | Flags::C; -/// let e2 = Flags::B | Flags::C; -/// assert_eq!((e1 | e2), Flags::ABC); // union -/// assert_eq!((e1 & e2), Flags::C); // intersection -/// assert_eq!((e1 - e2), Flags::A); // set difference -/// assert_eq!(!e2, Flags::A); // set complement -/// } -/// ``` -/// -/// The generated `struct`s can also be extended with type and trait -/// implementations: -/// -/// ``` -/// use std::fmt; -/// -/// use bitflags::bitflags; -/// -/// bitflags! { -/// struct Flags: u32 { -/// const A = 0b00000001; -/// const B = 0b00000010; -/// } -/// } -/// -/// impl Flags { -/// pub fn clear(&mut self) { -/// self.bits = 0; // The `bits` field can be accessed from within the -/// // same module where the `bitflags!` macro was invoked. -/// } -/// } -/// -/// impl fmt::Display for Flags { -/// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { -/// write!(f, "hi!") -/// } -/// } -/// -/// fn main() { -/// let mut flags = Flags::A | Flags::B; -/// flags.clear(); -/// assert!(flags.is_empty()); -/// assert_eq!(format!("{}", flags), "hi!"); -/// assert_eq!(format!("{:?}", Flags::A | Flags::B), "A | B"); -/// assert_eq!(format!("{:?}", Flags::B), "B"); -/// } -/// ``` -#[macro_export(local_inner_macros)] -macro_rules! bitflags { - ( - $(#[$outer:meta])* - $vis:vis struct $BitFlags:ident: $T:ty { - $( - $(#[$inner:ident $($args:tt)*])* - const $Flag:ident = $value:expr; - )* - } - - $($t:tt)* - ) => { - $(#[$outer])* - #[derive(Copy, PartialEq, Eq, Clone, PartialOrd, Ord, Hash)] - $vis struct $BitFlags { - bits: $T, - } - - __impl_bitflags! { - $BitFlags: $T { - $( - $(#[$inner $($args)*])* - $Flag = $value; - )* - } - } - - bitflags! { - $($t)* - } - }; - () => {}; -} - -// A helper macro to implement the `all` function. -#[macro_export(local_inner_macros)] -#[doc(hidden)] -macro_rules! __impl_all_bitflags { - ( - $BitFlags:ident: $T:ty { - $( - $(#[$attr:ident $($args:tt)*])* - $Flag:ident = $value:expr; - )+ - } - ) => { - // See `Debug::fmt` for why this approach is taken. - #[allow(non_snake_case)] - trait __BitFlags { - $( - const $Flag: $T = 0; - )+ - } - #[allow(non_snake_case)] - impl __BitFlags for $BitFlags { - $( - __impl_bitflags! { - #[allow(deprecated)] - $(? #[$attr $($args)*])* - const $Flag: $T = Self::$Flag.bits; - } - )+ - } - Self { bits: $(::$Flag)|+ } - }; - ( - $BitFlags:ident: $T:ty { } - ) => { - Self { bits: 0 } - }; -} - -#[macro_export(local_inner_macros)] -#[doc(hidden)] -macro_rules! 
__impl_bitflags { - ( - $BitFlags:ident: $T:ty { - $( - $(#[$attr:ident $($args:tt)*])* - $Flag:ident = $value:expr; - )* - } - ) => { - impl $crate::_core::fmt::Debug for $BitFlags { - fn fmt(&self, f: &mut $crate::_core::fmt::Formatter) -> $crate::_core::fmt::Result { - // This convoluted approach is to handle #[cfg]-based flag - // omission correctly. For example it needs to support: - // - // #[cfg(unix)] const A: Flag = /* ... */; - // #[cfg(windows)] const B: Flag = /* ... */; - - // Unconditionally define a check for every flag, even disabled - // ones. - #[allow(non_snake_case)] - trait __BitFlags { - $( - #[inline] - fn $Flag(&self) -> bool { false } - )* - } - - // Conditionally override the check for just those flags that - // are not #[cfg]ed away. - #[allow(non_snake_case)] - impl __BitFlags for $BitFlags { - $( - __impl_bitflags! { - #[allow(deprecated)] - #[inline] - $(? #[$attr $($args)*])* - fn $Flag(&self) -> bool { - if Self::$Flag.bits == 0 && self.bits != 0 { - false - } else { - self.bits & Self::$Flag.bits == Self::$Flag.bits - } - } - } - )* - } - - let mut first = true; - $( - if ::$Flag(self) { - if !first { - f.write_str(" | ")?; - } - first = false; - f.write_str($crate::_core::stringify!($Flag))?; - } - )* - let extra_bits = self.bits & !Self::all().bits(); - if extra_bits != 0 { - if !first { - f.write_str(" | ")?; - } - first = false; - f.write_str("0x")?; - $crate::_core::fmt::LowerHex::fmt(&extra_bits, f)?; - } - if first { - f.write_str("(empty)")?; - } - Ok(()) - } - } - impl $crate::_core::fmt::Binary for $BitFlags { - fn fmt(&self, f: &mut $crate::_core::fmt::Formatter) -> $crate::_core::fmt::Result { - $crate::_core::fmt::Binary::fmt(&self.bits, f) - } - } - impl $crate::_core::fmt::Octal for $BitFlags { - fn fmt(&self, f: &mut $crate::_core::fmt::Formatter) -> $crate::_core::fmt::Result { - $crate::_core::fmt::Octal::fmt(&self.bits, f) - } - } - impl $crate::_core::fmt::LowerHex for $BitFlags { - fn fmt(&self, f: &mut $crate::_core::fmt::Formatter) -> $crate::_core::fmt::Result { - $crate::_core::fmt::LowerHex::fmt(&self.bits, f) - } - } - impl $crate::_core::fmt::UpperHex for $BitFlags { - fn fmt(&self, f: &mut $crate::_core::fmt::Formatter) -> $crate::_core::fmt::Result { - $crate::_core::fmt::UpperHex::fmt(&self.bits, f) - } - } - - #[allow(dead_code)] - impl $BitFlags { - $( - $(#[$attr $($args)*])* - pub const $Flag: Self = Self { bits: $value }; - )* - - /// Returns an empty set of flags. - #[inline] - pub const fn empty() -> Self { - Self { bits: 0 } - } - - /// Returns the set containing all flags. - #[inline] - pub const fn all() -> Self { - __impl_all_bitflags! { - $BitFlags: $T { - $( - $(#[$attr $($args)*])* - $Flag = $value; - )* - } - } - } - - /// Returns the raw value of the flags currently stored. - #[inline] - pub const fn bits(&self) -> $T { - self.bits - } - - /// Convert from underlying bit representation, unless that - /// representation contains bits that do not correspond to a flag. - #[inline] - pub const fn from_bits(bits: $T) -> $crate::_core::option::Option { - if (bits & !Self::all().bits()) == 0 { - $crate::_core::option::Option::Some(Self { bits }) - } else { - $crate::_core::option::Option::None - } - } - - /// Convert from underlying bit representation, dropping any bits - /// that do not correspond to flags. 
- #[inline] - pub const fn from_bits_truncate(bits: $T) -> Self { - Self { bits: bits & Self::all().bits } - } - - /// Convert from underlying bit representation, preserving all - /// bits (even those not corresponding to a defined flag). - /// - /// # Safety - /// - /// The caller of the `bitflags!` macro can chose to allow or - /// disallow extra bits for their bitflags type. - /// - /// The caller of `from_bits_unchecked()` has to ensure that - /// all bits correspond to a defined flag or that extra bits - /// are valid for this bitflags type. - #[inline] - pub const unsafe fn from_bits_unchecked(bits: $T) -> Self { - Self { bits } - } - - /// Returns `true` if no flags are currently stored. - #[inline] - pub const fn is_empty(&self) -> bool { - self.bits() == Self::empty().bits() - } - - /// Returns `true` if all flags are currently set. - #[inline] - pub const fn is_all(&self) -> bool { - Self::all().bits | self.bits == self.bits - } - - /// Returns `true` if there are flags common to both `self` and `other`. - #[inline] - pub const fn intersects(&self, other: Self) -> bool { - !(Self { bits: self.bits & other.bits}).is_empty() - } - - /// Returns `true` if all of the flags in `other` are contained within `self`. - #[inline] - pub const fn contains(&self, other: Self) -> bool { - (self.bits & other.bits) == other.bits - } - - /// Inserts the specified flags in-place. - #[inline] - pub fn insert(&mut self, other: Self) { - self.bits |= other.bits; - } - - /// Removes the specified flags in-place. - #[inline] - pub fn remove(&mut self, other: Self) { - self.bits &= !other.bits; - } - - /// Toggles the specified flags in-place. - #[inline] - pub fn toggle(&mut self, other: Self) { - self.bits ^= other.bits; - } - - /// Inserts or removes the specified flags depending on the passed value. - #[inline] - pub fn set(&mut self, other: Self, value: bool) { - if value { - self.insert(other); - } else { - self.remove(other); - } - } - - /// Returns the intersection between the flags in `self` and - /// `other`. - /// - /// Specifically, the returned set contains only the flags which are - /// present in *both* `self` *and* `other`. - /// - /// This is equivalent to using the `&` operator (e.g. - /// [`ops::BitAnd`]), as in `flags & other`. - /// - /// [`ops::BitAnd`]: https://doc.rust-lang.org/std/ops/trait.BitAnd.html - #[inline] - #[must_use] - pub const fn intersection(self, other: Self) -> Self { - Self { bits: self.bits & other.bits } - } - - /// Returns the union of between the flags in `self` and `other`. - /// - /// Specifically, the returned set contains all flags which are - /// present in *either* `self` *or* `other`, including any which are - /// present in both (see [`Self::symmetric_difference`] if that - /// is undesirable). - /// - /// This is equivalent to using the `|` operator (e.g. - /// [`ops::BitOr`]), as in `flags | other`. - /// - /// [`ops::BitOr`]: https://doc.rust-lang.org/std/ops/trait.BitOr.html - #[inline] - #[must_use] - pub const fn union(self, other: Self) -> Self { - Self { bits: self.bits | other.bits } - } - - /// Returns the difference between the flags in `self` and `other`. - /// - /// Specifically, the returned set contains all flags present in - /// `self`, except for the ones present in `other`. - /// - /// It is also conceptually equivalent to the "bit-clear" operation: - /// `flags & !other` (and this syntax is also supported). - /// - /// This is equivalent to using the `-` operator (e.g. - /// [`ops::Sub`]), as in `flags - other`. 
- /// - /// [`ops::Sub`]: https://doc.rust-lang.org/std/ops/trait.Sub.html - #[inline] - #[must_use] - pub const fn difference(self, other: Self) -> Self { - Self { bits: self.bits & !other.bits } - } - - /// Returns the [symmetric difference][sym-diff] between the flags - /// in `self` and `other`. - /// - /// Specifically, the returned set contains the flags present which - /// are present in `self` or `other`, but that are not present in - /// both. Equivalently, it contains the flags present in *exactly - /// one* of the sets `self` and `other`. - /// - /// This is equivalent to using the `^` operator (e.g. - /// [`ops::BitXor`]), as in `flags ^ other`. - /// - /// [sym-diff]: https://en.wikipedia.org/wiki/Symmetric_difference - /// [`ops::BitXor`]: https://doc.rust-lang.org/std/ops/trait.BitXor.html - #[inline] - #[must_use] - pub const fn symmetric_difference(self, other: Self) -> Self { - Self { bits: self.bits ^ other.bits } - } - - /// Returns the complement of this set of flags. - /// - /// Specifically, the returned set contains all the flags which are - /// not set in `self`, but which are allowed for this type. - /// - /// Alternatively, it can be thought of as the set difference - /// between [`Self::all()`] and `self` (e.g. `Self::all() - self`) - /// - /// This is equivalent to using the `!` operator (e.g. - /// [`ops::Not`]), as in `!flags`. - /// - /// [`Self::all()`]: Self::all - /// [`ops::Not`]: https://doc.rust-lang.org/std/ops/trait.Not.html - #[inline] - #[must_use] - pub const fn complement(self) -> Self { - Self::from_bits_truncate(!self.bits) - } - - } - - impl $crate::_core::ops::BitOr for $BitFlags { - type Output = Self; - - /// Returns the union of the two sets of flags. - #[inline] - fn bitor(self, other: $BitFlags) -> Self { - Self { bits: self.bits | other.bits } - } - } - - impl $crate::_core::ops::BitOrAssign for $BitFlags { - /// Adds the set of flags. - #[inline] - fn bitor_assign(&mut self, other: Self) { - self.bits |= other.bits; - } - } - - impl $crate::_core::ops::BitXor for $BitFlags { - type Output = Self; - - /// Returns the left flags, but with all the right flags toggled. - #[inline] - fn bitxor(self, other: Self) -> Self { - Self { bits: self.bits ^ other.bits } - } - } - - impl $crate::_core::ops::BitXorAssign for $BitFlags { - /// Toggles the set of flags. - #[inline] - fn bitxor_assign(&mut self, other: Self) { - self.bits ^= other.bits; - } - } - - impl $crate::_core::ops::BitAnd for $BitFlags { - type Output = Self; - - /// Returns the intersection between the two sets of flags. - #[inline] - fn bitand(self, other: Self) -> Self { - Self { bits: self.bits & other.bits } - } - } - - impl $crate::_core::ops::BitAndAssign for $BitFlags { - /// Disables all flags disabled in the set. - #[inline] - fn bitand_assign(&mut self, other: Self) { - self.bits &= other.bits; - } - } - - impl $crate::_core::ops::Sub for $BitFlags { - type Output = Self; - - /// Returns the set difference of the two sets of flags. - #[inline] - fn sub(self, other: Self) -> Self { - Self { bits: self.bits & !other.bits } - } - } - - impl $crate::_core::ops::SubAssign for $BitFlags { - /// Disables all flags enabled in the set. - #[inline] - fn sub_assign(&mut self, other: Self) { - self.bits &= !other.bits; - } - } - - impl $crate::_core::ops::Not for $BitFlags { - type Output = Self; - - /// Returns the complement of this set of flags. 
- #[inline] - fn not(self) -> Self { - Self { bits: !self.bits } & Self::all() - } - } - - impl $crate::_core::iter::Extend<$BitFlags> for $BitFlags { - fn extend>(&mut self, iterator: T) { - for item in iterator { - self.insert(item) - } - } - } - - impl $crate::_core::iter::FromIterator<$BitFlags> for $BitFlags { - fn from_iter>(iterator: T) -> Self { - let mut result = Self::empty(); - result.extend(iterator); - result - } - } - }; - - // Every attribute that the user writes on a const is applied to the - // corresponding const that we generate, but within the implementation of - // Debug and all() we want to ignore everything but #[cfg] attributes. In - // particular, including a #[deprecated] attribute on those items would fail - // to compile. - // https://github.com/bitflags/bitflags/issues/109 - // - // Input: - // - // ? #[cfg(feature = "advanced")] - // ? #[deprecated(note = "Use something else.")] - // ? #[doc = r"High quality documentation."] - // fn f() -> i32 { /* ... */ } - // - // Output: - // - // #[cfg(feature = "advanced")] - // fn f() -> i32 { /* ... */ } - ( - $(#[$filtered:meta])* - ? #[cfg $($cfgargs:tt)*] - $(? #[$rest:ident $($restargs:tt)*])* - fn $($item:tt)* - ) => { - __impl_bitflags! { - $(#[$filtered])* - #[cfg $($cfgargs)*] - $(? #[$rest $($restargs)*])* - fn $($item)* - } - }; - ( - $(#[$filtered:meta])* - // $next != `cfg` - ? #[$next:ident $($nextargs:tt)*] - $(? #[$rest:ident $($restargs:tt)*])* - fn $($item:tt)* - ) => { - __impl_bitflags! { - $(#[$filtered])* - // $next filtered out - $(? #[$rest $($restargs)*])* - fn $($item)* - } - }; - ( - $(#[$filtered:meta])* - fn $($item:tt)* - ) => { - $(#[$filtered])* - fn $($item)* - }; - - // Every attribute that the user writes on a const is applied to the - // corresponding const that we generate, but within the implementation of - // Debug and all() we want to ignore everything but #[cfg] attributes. In - // particular, including a #[deprecated] attribute on those items would fail - // to compile. - // https://github.com/bitflags/bitflags/issues/109 - // - // const version - // - // Input: - // - // ? #[cfg(feature = "advanced")] - // ? #[deprecated(note = "Use something else.")] - // ? #[doc = r"High quality documentation."] - // const f: i32 { /* ... */ } - // - // Output: - // - // #[cfg(feature = "advanced")] - // const f: i32 { /* ... */ } - ( - $(#[$filtered:meta])* - ? #[cfg $($cfgargs:tt)*] - $(? #[$rest:ident $($restargs:tt)*])* - const $($item:tt)* - ) => { - __impl_bitflags! { - $(#[$filtered])* - #[cfg $($cfgargs)*] - $(? #[$rest $($restargs)*])* - const $($item)* - } - }; - ( - $(#[$filtered:meta])* - // $next != `cfg` - ? #[$next:ident $($nextargs:tt)*] - $(? #[$rest:ident $($restargs:tt)*])* - const $($item:tt)* - ) => { - __impl_bitflags! { - $(#[$filtered])* - // $next filtered out - $(? #[$rest $($restargs)*])* - const $($item)* - } - }; - ( - $(#[$filtered:meta])* - const $($item:tt)* - ) => { - $(#[$filtered])* - const $($item)* - }; -} - -#[cfg(feature = "example_generated")] -pub mod example_generated; - -#[cfg(test)] -mod tests { - use std::collections::hash_map::DefaultHasher; - use std::hash::{Hash, Hasher}; - - bitflags! 
{ - #[doc = "> The first principle is that you must not fool yourself — and"] - #[doc = "> you are the easiest person to fool."] - #[doc = "> "] - #[doc = "> - Richard Feynman"] - #[derive(Default)] - struct Flags: u32 { - const A = 0b00000001; - #[doc = " macros are way better at generating code than trans is"] - const B = 0b00000010; - const C = 0b00000100; - #[doc = "* cmr bed"] - #[doc = "* strcat table"] - #[doc = " wait what?"] - const ABC = Self::A.bits | Self::B.bits | Self::C.bits; - } - - struct _CfgFlags: u32 { - #[cfg(unix)] - const _CFG_A = 0b01; - #[cfg(windows)] - const _CFG_B = 0b01; - #[cfg(unix)] - const _CFG_C = Self::_CFG_A.bits | 0b10; - } - - struct AnotherSetOfFlags: i8 { - const ANOTHER_FLAG = -1_i8; - } - - struct LongFlags: u32 { - const LONG_A = 0b1111111111111111; - } - } - - bitflags! { - struct EmptyFlags: u32 { - } - } - - #[test] - fn test_bits() { - assert_eq!(Flags::empty().bits(), 0b00000000); - assert_eq!(Flags::A.bits(), 0b00000001); - assert_eq!(Flags::ABC.bits(), 0b00000111); - - assert_eq!(AnotherSetOfFlags::empty().bits(), 0b00); - assert_eq!(AnotherSetOfFlags::ANOTHER_FLAG.bits(), !0_i8); - - assert_eq!(EmptyFlags::empty().bits(), 0b00000000); - } - - #[test] - fn test_from_bits() { - assert_eq!(Flags::from_bits(0), Some(Flags::empty())); - assert_eq!(Flags::from_bits(0b1), Some(Flags::A)); - assert_eq!(Flags::from_bits(0b10), Some(Flags::B)); - assert_eq!(Flags::from_bits(0b11), Some(Flags::A | Flags::B)); - assert_eq!(Flags::from_bits(0b1000), None); - - assert_eq!( - AnotherSetOfFlags::from_bits(!0_i8), - Some(AnotherSetOfFlags::ANOTHER_FLAG) - ); - - assert_eq!(EmptyFlags::from_bits(0), Some(EmptyFlags::empty())); - assert_eq!(EmptyFlags::from_bits(0b1), None); - } - - #[test] - fn test_from_bits_truncate() { - assert_eq!(Flags::from_bits_truncate(0), Flags::empty()); - assert_eq!(Flags::from_bits_truncate(0b1), Flags::A); - assert_eq!(Flags::from_bits_truncate(0b10), Flags::B); - assert_eq!(Flags::from_bits_truncate(0b11), (Flags::A | Flags::B)); - assert_eq!(Flags::from_bits_truncate(0b1000), Flags::empty()); - assert_eq!(Flags::from_bits_truncate(0b1001), Flags::A); - - assert_eq!( - AnotherSetOfFlags::from_bits_truncate(0_i8), - AnotherSetOfFlags::empty() - ); - - assert_eq!(EmptyFlags::from_bits_truncate(0), EmptyFlags::empty()); - assert_eq!(EmptyFlags::from_bits_truncate(0b1), EmptyFlags::empty()); - } - - #[test] - fn test_from_bits_unchecked() { - let extra = unsafe { Flags::from_bits_unchecked(0b1000) }; - assert_eq!(unsafe { Flags::from_bits_unchecked(0) }, Flags::empty()); - assert_eq!(unsafe { Flags::from_bits_unchecked(0b1) }, Flags::A); - assert_eq!(unsafe { Flags::from_bits_unchecked(0b10) }, Flags::B); - - assert_eq!( - unsafe { Flags::from_bits_unchecked(0b11) }, - (Flags::A | Flags::B) - ); - assert_eq!( - unsafe { Flags::from_bits_unchecked(0b1000) }, - (extra | Flags::empty()) - ); - assert_eq!( - unsafe { Flags::from_bits_unchecked(0b1001) }, - (extra | Flags::A) - ); - - let extra = unsafe { EmptyFlags::from_bits_unchecked(0b1000) }; - assert_eq!( - unsafe { EmptyFlags::from_bits_unchecked(0b1000) }, - (extra | EmptyFlags::empty()) - ); - } - - #[test] - fn test_is_empty() { - assert!(Flags::empty().is_empty()); - assert!(!Flags::A.is_empty()); - assert!(!Flags::ABC.is_empty()); - - assert!(!AnotherSetOfFlags::ANOTHER_FLAG.is_empty()); - - assert!(EmptyFlags::empty().is_empty()); - assert!(EmptyFlags::all().is_empty()); - } - - #[test] - fn test_is_all() { - assert!(Flags::all().is_all()); - assert!(!Flags::A.is_all()); - 
assert!(Flags::ABC.is_all()); - - let extra = unsafe { Flags::from_bits_unchecked(0b1000) }; - assert!(!extra.is_all()); - assert!(!(Flags::A | extra).is_all()); - assert!((Flags::ABC | extra).is_all()); - - assert!(AnotherSetOfFlags::ANOTHER_FLAG.is_all()); - - assert!(EmptyFlags::all().is_all()); - assert!(EmptyFlags::empty().is_all()); - } - - #[test] - fn test_two_empties_do_not_intersect() { - let e1 = Flags::empty(); - let e2 = Flags::empty(); - assert!(!e1.intersects(e2)); - - assert!(AnotherSetOfFlags::ANOTHER_FLAG.intersects(AnotherSetOfFlags::ANOTHER_FLAG)); - } - - #[test] - fn test_empty_does_not_intersect_with_full() { - let e1 = Flags::empty(); - let e2 = Flags::ABC; - assert!(!e1.intersects(e2)); - } - - #[test] - fn test_disjoint_intersects() { - let e1 = Flags::A; - let e2 = Flags::B; - assert!(!e1.intersects(e2)); - } - - #[test] - fn test_overlapping_intersects() { - let e1 = Flags::A; - let e2 = Flags::A | Flags::B; - assert!(e1.intersects(e2)); - } - - #[test] - fn test_contains() { - let e1 = Flags::A; - let e2 = Flags::A | Flags::B; - assert!(!e1.contains(e2)); - assert!(e2.contains(e1)); - assert!(Flags::ABC.contains(e2)); - - assert!(AnotherSetOfFlags::ANOTHER_FLAG.contains(AnotherSetOfFlags::ANOTHER_FLAG)); - - assert!(EmptyFlags::empty().contains(EmptyFlags::empty())); - } - - #[test] - fn test_insert() { - let mut e1 = Flags::A; - let e2 = Flags::A | Flags::B; - e1.insert(e2); - assert_eq!(e1, e2); - - let mut e3 = AnotherSetOfFlags::empty(); - e3.insert(AnotherSetOfFlags::ANOTHER_FLAG); - assert_eq!(e3, AnotherSetOfFlags::ANOTHER_FLAG); - } - - #[test] - fn test_remove() { - let mut e1 = Flags::A | Flags::B; - let e2 = Flags::A | Flags::C; - e1.remove(e2); - assert_eq!(e1, Flags::B); - - let mut e3 = AnotherSetOfFlags::ANOTHER_FLAG; - e3.remove(AnotherSetOfFlags::ANOTHER_FLAG); - assert_eq!(e3, AnotherSetOfFlags::empty()); - } - - #[test] - fn test_operators() { - let e1 = Flags::A | Flags::C; - let e2 = Flags::B | Flags::C; - assert_eq!((e1 | e2), Flags::ABC); // union - assert_eq!((e1 & e2), Flags::C); // intersection - assert_eq!((e1 - e2), Flags::A); // set difference - assert_eq!(!e2, Flags::A); // set complement - assert_eq!(e1 ^ e2, Flags::A | Flags::B); // toggle - let mut e3 = e1; - e3.toggle(e2); - assert_eq!(e3, Flags::A | Flags::B); - - let mut m4 = AnotherSetOfFlags::empty(); - m4.toggle(AnotherSetOfFlags::empty()); - assert_eq!(m4, AnotherSetOfFlags::empty()); - } - - #[test] - fn test_operators_unchecked() { - let extra = unsafe { Flags::from_bits_unchecked(0b1000) }; - let e1 = Flags::A | Flags::C | extra; - let e2 = Flags::B | Flags::C; - assert_eq!((e1 | e2), (Flags::ABC | extra)); // union - assert_eq!((e1 & e2), Flags::C); // intersection - assert_eq!((e1 - e2), (Flags::A | extra)); // set difference - assert_eq!(!e2, Flags::A); // set complement - assert_eq!(!e1, Flags::B); // set complement - assert_eq!(e1 ^ e2, Flags::A | Flags::B | extra); // toggle - let mut e3 = e1; - e3.toggle(e2); - assert_eq!(e3, Flags::A | Flags::B | extra); - } - - #[test] - fn test_set_ops_basic() { - let ab = Flags::A.union(Flags::B); - let ac = Flags::A.union(Flags::C); - let bc = Flags::B.union(Flags::C); - assert_eq!(ab.bits, 0b011); - assert_eq!(bc.bits, 0b110); - assert_eq!(ac.bits, 0b101); - - assert_eq!(ab, Flags::B.union(Flags::A)); - assert_eq!(ac, Flags::C.union(Flags::A)); - assert_eq!(bc, Flags::C.union(Flags::B)); - - assert_eq!(ac, Flags::A | Flags::C); - assert_eq!(bc, Flags::B | Flags::C); - assert_eq!(ab.union(bc), Flags::ABC); - - 
assert_eq!(ac, Flags::A | Flags::C); - assert_eq!(bc, Flags::B | Flags::C); - - assert_eq!(ac.union(bc), ac | bc); - assert_eq!(ac.union(bc), Flags::ABC); - assert_eq!(bc.union(ac), Flags::ABC); - - assert_eq!(ac.intersection(bc), ac & bc); - assert_eq!(ac.intersection(bc), Flags::C); - assert_eq!(bc.intersection(ac), Flags::C); - - assert_eq!(ac.difference(bc), ac - bc); - assert_eq!(bc.difference(ac), bc - ac); - assert_eq!(ac.difference(bc), Flags::A); - assert_eq!(bc.difference(ac), Flags::B); - - assert_eq!(bc.complement(), !bc); - assert_eq!(bc.complement(), Flags::A); - assert_eq!(ac.symmetric_difference(bc), Flags::A.union(Flags::B)); - assert_eq!(bc.symmetric_difference(ac), Flags::A.union(Flags::B)); - } - - #[test] - fn test_set_ops_const() { - // These just test that these compile and don't cause use-site panics - // (would be possible if we had some sort of UB) - const INTERSECT: Flags = Flags::all().intersection(Flags::C); - const UNION: Flags = Flags::A.union(Flags::C); - const DIFFERENCE: Flags = Flags::all().difference(Flags::A); - const COMPLEMENT: Flags = Flags::C.complement(); - const SYM_DIFFERENCE: Flags = UNION.symmetric_difference(DIFFERENCE); - assert_eq!(INTERSECT, Flags::C); - assert_eq!(UNION, Flags::A | Flags::C); - assert_eq!(DIFFERENCE, Flags::all() - Flags::A); - assert_eq!(COMPLEMENT, !Flags::C); - assert_eq!(SYM_DIFFERENCE, (Flags::A | Flags::C) ^ (Flags::all() - Flags::A)); - } - - #[test] - fn test_set_ops_unchecked() { - let extra = unsafe { Flags::from_bits_unchecked(0b1000) }; - let e1 = Flags::A.union(Flags::C).union(extra); - let e2 = Flags::B.union(Flags::C); - assert_eq!(e1.bits, 0b1101); - assert_eq!(e1.union(e2), (Flags::ABC | extra)); - assert_eq!(e1.intersection(e2), Flags::C); - assert_eq!(e1.difference(e2), Flags::A | extra); - assert_eq!(e2.difference(e1), Flags::B); - assert_eq!(e2.complement(), Flags::A); - assert_eq!(e1.complement(), Flags::B); - assert_eq!(e1.symmetric_difference(e2), Flags::A | Flags::B | extra); // toggle - } - - #[test] - fn test_set_ops_exhaustive() { - // Define a flag that contains gaps to help exercise edge-cases, - // especially around "unknown" flags (e.g. ones outside of `all()` - // `from_bits_unchecked`). - // - when lhs and rhs both have different sets of unknown flags. - // - unknown flags at both ends, and in the middle - // - cases with "gaps". - bitflags! { - struct Test: u16 { - // Intentionally no `A` - const B = 0b000000010; - // Intentionally no `C` - const D = 0b000001000; - const E = 0b000010000; - const F = 0b000100000; - const G = 0b001000000; - // Intentionally no `H` - const I = 0b100000000; - } - } - let iter_test_flags = - || (0..=0b111_1111_1111).map(|bits| unsafe { Test::from_bits_unchecked(bits) }); - - for a in iter_test_flags() { - assert_eq!( - a.complement(), - Test::from_bits_truncate(!a.bits), - "wrong result: !({:?})", - a, - ); - assert_eq!(a.complement(), !a, "named != op: !({:?})", a); - for b in iter_test_flags() { - // Check that the named operations produce the expected bitwise - // values. 
- assert_eq!( - a.union(b).bits, - a.bits | b.bits, - "wrong result: `{:?}` | `{:?}`", - a, - b, - ); - assert_eq!( - a.intersection(b).bits, - a.bits & b.bits, - "wrong result: `{:?}` & `{:?}`", - a, - b, - ); - assert_eq!( - a.symmetric_difference(b).bits, - a.bits ^ b.bits, - "wrong result: `{:?}` ^ `{:?}`", - a, - b, - ); - assert_eq!( - a.difference(b).bits, - a.bits & !b.bits, - "wrong result: `{:?}` - `{:?}`", - a, - b, - ); - // Note: Difference is checked as both `a - b` and `b - a` - assert_eq!( - b.difference(a).bits, - b.bits & !a.bits, - "wrong result: `{:?}` - `{:?}`", - b, - a, - ); - // Check that the named set operations are equivalent to the - // bitwise equivalents - assert_eq!(a.union(b), a | b, "named != op: `{:?}` | `{:?}`", a, b,); - assert_eq!( - a.intersection(b), - a & b, - "named != op: `{:?}` & `{:?}`", - a, - b, - ); - assert_eq!( - a.symmetric_difference(b), - a ^ b, - "named != op: `{:?}` ^ `{:?}`", - a, - b, - ); - assert_eq!(a.difference(b), a - b, "named != op: `{:?}` - `{:?}`", a, b,); - // Note: Difference is checked as both `a - b` and `b - a` - assert_eq!(b.difference(a), b - a, "named != op: `{:?}` - `{:?}`", b, a,); - // Verify that the operations which should be symmetric are - // actually symmetric. - assert_eq!(a.union(b), b.union(a), "asymmetry: `{:?}` | `{:?}`", a, b,); - assert_eq!( - a.intersection(b), - b.intersection(a), - "asymmetry: `{:?}` & `{:?}`", - a, - b, - ); - assert_eq!( - a.symmetric_difference(b), - b.symmetric_difference(a), - "asymmetry: `{:?}` ^ `{:?}`", - a, - b, - ); - } - } - } - - #[test] - fn test_set() { - let mut e1 = Flags::A | Flags::C; - e1.set(Flags::B, true); - e1.set(Flags::C, false); - - assert_eq!(e1, Flags::A | Flags::B); - } - - #[test] - fn test_assignment_operators() { - let mut m1 = Flags::empty(); - let e1 = Flags::A | Flags::C; - // union - m1 |= Flags::A; - assert_eq!(m1, Flags::A); - // intersection - m1 &= e1; - assert_eq!(m1, Flags::A); - // set difference - m1 -= m1; - assert_eq!(m1, Flags::empty()); - // toggle - m1 ^= e1; - assert_eq!(m1, e1); - } - - #[test] - fn test_const_fn() { - const _M1: Flags = Flags::empty(); - - const M2: Flags = Flags::A; - assert_eq!(M2, Flags::A); - - const M3: Flags = Flags::C; - assert_eq!(M3, Flags::C); - } - - #[test] - fn test_extend() { - let mut flags; - - flags = Flags::empty(); - flags.extend([].iter().cloned()); - assert_eq!(flags, Flags::empty()); - - flags = Flags::empty(); - flags.extend([Flags::A, Flags::B].iter().cloned()); - assert_eq!(flags, Flags::A | Flags::B); - - flags = Flags::A; - flags.extend([Flags::A, Flags::B].iter().cloned()); - assert_eq!(flags, Flags::A | Flags::B); - - flags = Flags::B; - flags.extend([Flags::A, Flags::ABC].iter().cloned()); - assert_eq!(flags, Flags::ABC); - } - - #[test] - fn test_from_iterator() { - assert_eq!([].iter().cloned().collect::(), Flags::empty()); - assert_eq!( - [Flags::A, Flags::B].iter().cloned().collect::(), - Flags::A | Flags::B - ); - assert_eq!( - [Flags::A, Flags::ABC].iter().cloned().collect::(), - Flags::ABC - ); - } - - #[test] - fn test_lt() { - let mut a = Flags::empty(); - let mut b = Flags::empty(); - - assert!(!(a < b) && !(b < a)); - b = Flags::B; - assert!(a < b); - a = Flags::C; - assert!(!(a < b) && b < a); - b = Flags::C | Flags::B; - assert!(a < b); - } - - #[test] - fn test_ord() { - let mut a = Flags::empty(); - let mut b = Flags::empty(); - - assert!(a <= b && a >= b); - a = Flags::A; - assert!(a > b && a >= b); - assert!(b < a && b <= a); - b = Flags::B; - assert!(b > a && b >= a); 
- assert!(a < b && a <= b); - } - - fn hash(t: &T) -> u64 { - let mut s = DefaultHasher::new(); - t.hash(&mut s); - s.finish() - } - - #[test] - fn test_hash() { - let mut x = Flags::empty(); - let mut y = Flags::empty(); - assert_eq!(hash(&x), hash(&y)); - x = Flags::all(); - y = Flags::ABC; - assert_eq!(hash(&x), hash(&y)); - } - - #[test] - fn test_default() { - assert_eq!(Flags::empty(), Flags::default()); - } - - #[test] - fn test_debug() { - assert_eq!(format!("{:?}", Flags::A | Flags::B), "A | B"); - assert_eq!(format!("{:?}", Flags::empty()), "(empty)"); - assert_eq!(format!("{:?}", Flags::ABC), "A | B | C | ABC"); - let extra = unsafe { Flags::from_bits_unchecked(0xb8) }; - assert_eq!(format!("{:?}", extra), "0xb8"); - assert_eq!(format!("{:?}", Flags::A | extra), "A | 0xb8"); - - assert_eq!( - format!("{:?}", Flags::ABC | extra), - "A | B | C | ABC | 0xb8" - ); - - assert_eq!(format!("{:?}", EmptyFlags::empty()), "(empty)"); - } - - #[test] - fn test_binary() { - assert_eq!(format!("{:b}", Flags::ABC), "111"); - assert_eq!(format!("{:#b}", Flags::ABC), "0b111"); - let extra = unsafe { Flags::from_bits_unchecked(0b1010000) }; - assert_eq!(format!("{:b}", Flags::ABC | extra), "1010111"); - assert_eq!(format!("{:#b}", Flags::ABC | extra), "0b1010111"); - } - - #[test] - fn test_octal() { - assert_eq!(format!("{:o}", LongFlags::LONG_A), "177777"); - assert_eq!(format!("{:#o}", LongFlags::LONG_A), "0o177777"); - let extra = unsafe { LongFlags::from_bits_unchecked(0o5000000) }; - assert_eq!(format!("{:o}", LongFlags::LONG_A | extra), "5177777"); - assert_eq!(format!("{:#o}", LongFlags::LONG_A | extra), "0o5177777"); - } - - #[test] - fn test_lowerhex() { - assert_eq!(format!("{:x}", LongFlags::LONG_A), "ffff"); - assert_eq!(format!("{:#x}", LongFlags::LONG_A), "0xffff"); - let extra = unsafe { LongFlags::from_bits_unchecked(0xe00000) }; - assert_eq!(format!("{:x}", LongFlags::LONG_A | extra), "e0ffff"); - assert_eq!(format!("{:#x}", LongFlags::LONG_A | extra), "0xe0ffff"); - } - - #[test] - fn test_upperhex() { - assert_eq!(format!("{:X}", LongFlags::LONG_A), "FFFF"); - assert_eq!(format!("{:#X}", LongFlags::LONG_A), "0xFFFF"); - let extra = unsafe { LongFlags::from_bits_unchecked(0xe00000) }; - assert_eq!(format!("{:X}", LongFlags::LONG_A | extra), "E0FFFF"); - assert_eq!(format!("{:#X}", LongFlags::LONG_A | extra), "0xE0FFFF"); - } - - mod submodule { - bitflags! { - pub struct PublicFlags: i8 { - const X = 0; - } - - struct PrivateFlags: i8 { - const Y = 0; - } - } - - #[test] - fn test_private() { - let _ = PrivateFlags::Y; - } - } - - #[test] - fn test_public() { - let _ = submodule::PublicFlags::X; - } - - mod t1 { - mod foo { - pub type Bar = i32; - } - - bitflags! { - /// baz - struct Flags: foo::Bar { - const A = 0b00000001; - #[cfg(foo)] - const B = 0b00000010; - #[cfg(foo)] - const C = 0b00000010; - } - } - } - - #[test] - fn test_in_function() { - bitflags! { - struct Flags: u8 { - const A = 1; - #[cfg(any())] // false - const B = 2; - } - } - assert_eq!(Flags::all(), Flags::A); - assert_eq!(format!("{:?}", Flags::A), "A"); - } - - #[test] - fn test_deprecated() { - bitflags! { - pub struct TestFlags: u32 { - #[deprecated(note = "Use something else.")] - const ONE = 1; - } - } - } - - #[test] - fn test_pub_crate() { - mod module { - bitflags! { - pub (crate) struct Test: u8 { - const FOO = 1; - } - } - } - - assert_eq!(module::Test::FOO.bits(), 1); - } - - #[test] - fn test_pub_in_module() { - mod module { - mod submodule { - bitflags! 
{ - // `pub (in super)` means only the module `module` will - // be able to access this. - pub (in super) struct Test: u8 { - const FOO = 1; - } - } - } - - mod test { - // Note: due to `pub (in super)`, - // this cannot be accessed directly by the testing code. - pub(super) fn value() -> u8 { - super::submodule::Test::FOO.bits() - } - } - - pub fn value() -> u8 { - test::value() - } - } - - assert_eq!(module::value(), 1) - } - - #[test] - fn test_zero_value_flags() { - bitflags! { - struct Flags: u32 { - const NONE = 0b0; - const SOME = 0b1; - } - } - - assert!(Flags::empty().contains(Flags::NONE)); - assert!(Flags::SOME.contains(Flags::NONE)); - assert!(Flags::NONE.is_empty()); - - assert_eq!(format!("{:?}", Flags::empty()), "NONE"); - assert_eq!(format!("{:?}", Flags::SOME), "SOME"); - } - - #[test] - fn test_empty_bitflags() { - bitflags! {} - } - - #[test] - fn test_u128_bitflags() { - bitflags! { - struct Flags128: u128 { - const A = 0x0000_0000_0000_0000_0000_0000_0000_0001; - const B = 0x0000_0000_0000_1000_0000_0000_0000_0000; - const C = 0x8000_0000_0000_0000_0000_0000_0000_0000; - const ABC = Self::A.bits | Self::B.bits | Self::C.bits; - } - } - - assert_eq!(Flags128::ABC, Flags128::A | Flags128::B | Flags128::C); - assert_eq!(Flags128::A.bits, 0x0000_0000_0000_0000_0000_0000_0000_0001); - assert_eq!(Flags128::B.bits, 0x0000_0000_0000_1000_0000_0000_0000_0000); - assert_eq!(Flags128::C.bits, 0x8000_0000_0000_0000_0000_0000_0000_0000); - assert_eq!( - Flags128::ABC.bits, - 0x8000_0000_0000_1000_0000_0000_0000_0001 - ); - assert_eq!(format!("{:?}", Flags128::A), "A"); - assert_eq!(format!("{:?}", Flags128::B), "B"); - assert_eq!(format!("{:?}", Flags128::C), "C"); - assert_eq!(format!("{:?}", Flags128::ABC), "A | B | C | ABC"); - } - - #[test] - fn test_serde_bitflags_serialize() { - let flags = SerdeFlags::A | SerdeFlags::B; - - let serialized = serde_json::to_string(&flags).unwrap(); - - assert_eq!(serialized, r#"{"bits":3}"#); - } - - #[test] - fn test_serde_bitflags_deserialize() { - let deserialized: SerdeFlags = serde_json::from_str(r#"{"bits":12}"#).unwrap(); - - let expected = SerdeFlags::C | SerdeFlags::D; - - assert_eq!(deserialized.bits, expected.bits); - } - - #[test] - fn test_serde_bitflags_roundtrip() { - let flags = SerdeFlags::A | SerdeFlags::B; - - let deserialized: SerdeFlags = serde_json::from_str(&serde_json::to_string(&flags).unwrap()).unwrap(); - - assert_eq!(deserialized.bits, flags.bits); - } - - bitflags! { - #[derive(serde::Serialize, serde::Deserialize)] - struct SerdeFlags: u32 { - const A = 1; - const B = 2; - const C = 4; - const D = 8; - } - } -} diff --git a/vendor/bitflags/tests/basic.rs b/vendor/bitflags/tests/basic.rs deleted file mode 100644 index 73a52bec..00000000 --- a/vendor/bitflags/tests/basic.rs +++ /dev/null @@ -1,20 +0,0 @@ -#![no_std] - -use bitflags::bitflags; - -bitflags! { - /// baz - struct Flags: u32 { - const A = 0b00000001; - #[doc = "bar"] - const B = 0b00000010; - const C = 0b00000100; - #[doc = "foo"] - const ABC = Flags::A.bits | Flags::B.bits | Flags::C.bits; - } -} - -#[test] -fn basic() { - assert_eq!(Flags::ABC, Flags::A | Flags::B | Flags::C); -} diff --git a/vendor/bitflags/tests/compile-fail/impls/copy.rs b/vendor/bitflags/tests/compile-fail/impls/copy.rs deleted file mode 100644 index 38f4822f..00000000 --- a/vendor/bitflags/tests/compile-fail/impls/copy.rs +++ /dev/null @@ -1,10 +0,0 @@ -use bitflags::bitflags; - -bitflags! 
{ - #[derive(Clone, Copy)] - struct Flags: u32 { - const A = 0b00000001; - } -} - -fn main() {} diff --git a/vendor/bitflags/tests/compile-fail/impls/copy.stderr.beta b/vendor/bitflags/tests/compile-fail/impls/copy.stderr.beta deleted file mode 100644 index 0c13aa50..00000000 --- a/vendor/bitflags/tests/compile-fail/impls/copy.stderr.beta +++ /dev/null @@ -1,27 +0,0 @@ -error[E0119]: conflicting implementations of trait `std::clone::Clone` for type `Flags` - --> $DIR/copy.rs:3:1 - | -3 | / bitflags! { -4 | | #[derive(Clone, Copy)] - | | ----- first implementation here -5 | | struct Flags: u32 { -6 | | const A = 0b00000001; -7 | | } -8 | | } - | |_^ conflicting implementation for `Flags` - | - = note: this error originates in the derive macro `Clone` (in Nightly builds, run with -Z macro-backtrace for more info) - -error[E0119]: conflicting implementations of trait `std::marker::Copy` for type `Flags` - --> $DIR/copy.rs:3:1 - | -3 | / bitflags! { -4 | | #[derive(Clone, Copy)] - | | ---- first implementation here -5 | | struct Flags: u32 { -6 | | const A = 0b00000001; -7 | | } -8 | | } - | |_^ conflicting implementation for `Flags` - | - = note: this error originates in the derive macro `Copy` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/bitflags/tests/compile-fail/impls/eq.rs b/vendor/bitflags/tests/compile-fail/impls/eq.rs deleted file mode 100644 index 4abbd630..00000000 --- a/vendor/bitflags/tests/compile-fail/impls/eq.rs +++ /dev/null @@ -1,10 +0,0 @@ -use bitflags::bitflags; - -bitflags! { - #[derive(PartialEq, Eq)] - struct Flags: u32 { - const A = 0b00000001; - } -} - -fn main() {} diff --git a/vendor/bitflags/tests/compile-fail/impls/eq.stderr.beta b/vendor/bitflags/tests/compile-fail/impls/eq.stderr.beta deleted file mode 100644 index 8a1a3b41..00000000 --- a/vendor/bitflags/tests/compile-fail/impls/eq.stderr.beta +++ /dev/null @@ -1,55 +0,0 @@ -error[E0119]: conflicting implementations of trait `std::cmp::PartialEq` for type `Flags` - --> $DIR/eq.rs:3:1 - | -3 | / bitflags! { -4 | | #[derive(PartialEq, Eq)] - | | --------- first implementation here -5 | | struct Flags: u32 { -6 | | const A = 0b00000001; -7 | | } -8 | | } - | |_^ conflicting implementation for `Flags` - | - = note: this error originates in the derive macro `PartialEq` (in Nightly builds, run with -Z macro-backtrace for more info) - -error[E0119]: conflicting implementations of trait `std::cmp::Eq` for type `Flags` - --> $DIR/eq.rs:3:1 - | -3 | / bitflags! { -4 | | #[derive(PartialEq, Eq)] - | | -- first implementation here -5 | | struct Flags: u32 { -6 | | const A = 0b00000001; -7 | | } -8 | | } - | |_^ conflicting implementation for `Flags` - | - = note: this error originates in the derive macro `Eq` (in Nightly builds, run with -Z macro-backtrace for more info) - -error[E0119]: conflicting implementations of trait `std::marker::StructuralPartialEq` for type `Flags` - --> $DIR/eq.rs:3:1 - | -3 | / bitflags! { -4 | | #[derive(PartialEq, Eq)] - | | --------- first implementation here -5 | | struct Flags: u32 { -6 | | const A = 0b00000001; -7 | | } -8 | | } - | |_^ conflicting implementation for `Flags` - | - = note: this error originates in the derive macro `PartialEq` (in Nightly builds, run with -Z macro-backtrace for more info) - -error[E0119]: conflicting implementations of trait `std::marker::StructuralEq` for type `Flags` - --> $DIR/eq.rs:3:1 - | -3 | / bitflags! 
{ -4 | | #[derive(PartialEq, Eq)] - | | -- first implementation here -5 | | struct Flags: u32 { -6 | | const A = 0b00000001; -7 | | } -8 | | } - | |_^ conflicting implementation for `Flags` - | - = note: this error originates in the derive macro `Eq` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/bitflags/tests/compile-fail/non_integer_base/all_defined.rs b/vendor/bitflags/tests/compile-fail/non_integer_base/all_defined.rs deleted file mode 100644 index c2856b10..00000000 --- a/vendor/bitflags/tests/compile-fail/non_integer_base/all_defined.rs +++ /dev/null @@ -1,123 +0,0 @@ -use std::{ - fmt::{ - self, - Debug, - Display, - LowerHex, - UpperHex, - Octal, - Binary, - }, - ops::{ - BitAnd, - BitOr, - BitXor, - BitAndAssign, - BitOrAssign, - BitXorAssign, - Not, - }, -}; - -use bitflags::bitflags; - -// Ideally we'd actually want this to work, but currently need something like `num`'s `Zero` -// With some design work it could be made possible -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -struct MyInt(u8); - -impl BitAnd for MyInt { - type Output = Self; - - fn bitand(self, other: Self) -> Self { - MyInt(self.0 & other.0) - } -} - -impl BitOr for MyInt { - type Output = Self; - - fn bitor(self, other: Self) -> Self { - MyInt(self.0 | other.0) - } -} - -impl BitXor for MyInt { - type Output = Self; - - fn bitxor(self, other: Self) -> Self { - MyInt(self.0 ^ other.0) - } -} - -impl BitAndAssign for MyInt { - fn bitand_assign(&mut self, other: Self) { - self.0 &= other.0 - } -} - -impl BitOrAssign for MyInt { - fn bitor_assign(&mut self, other: Self) { - self.0 |= other.0 - } -} - -impl BitXorAssign for MyInt { - fn bitxor_assign(&mut self, other: Self) { - self.0 ^= other.0 - } -} - -impl Debug for MyInt { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Debug::fmt(&self.0, f) - } -} - -impl Display for MyInt { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Display::fmt(&self.0, f) - } -} - -impl LowerHex for MyInt { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - LowerHex::fmt(&self.0, f) - } -} - -impl UpperHex for MyInt { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - UpperHex::fmt(&self.0, f) - } -} - -impl Octal for MyInt { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Octal::fmt(&self.0, f) - } -} - -impl Binary for MyInt { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Binary::fmt(&self.0, f) - } -} - -impl Not for MyInt { - type Output = MyInt; - - fn not(self) -> Self { - MyInt(!self.0) - } -} - -bitflags! { - struct Flags128: MyInt { - const A = MyInt(0b0000_0001u8); - const B = MyInt(0b0000_0010u8); - const C = MyInt(0b0000_0100u8); - } -} - -fn main() {} diff --git a/vendor/bitflags/tests/compile-fail/non_integer_base/all_defined.stderr.beta b/vendor/bitflags/tests/compile-fail/non_integer_base/all_defined.stderr.beta deleted file mode 100644 index 1f0fb5cf..00000000 --- a/vendor/bitflags/tests/compile-fail/non_integer_base/all_defined.stderr.beta +++ /dev/null @@ -1,27 +0,0 @@ -error[E0308]: mismatched types - --> $DIR/all_defined.rs:115:1 - | -115 | / bitflags! 
{ -116 | | struct Flags128: MyInt { -117 | | const A = MyInt(0b0000_0001u8); -118 | | const B = MyInt(0b0000_0010u8); -119 | | const C = MyInt(0b0000_0100u8); -120 | | } -121 | | } - | |_^ expected struct `MyInt`, found integer - | - = note: this error originates in the macro `__impl_all_bitflags` (in Nightly builds, run with -Z macro-backtrace for more info) - -error[E0308]: mismatched types - --> $DIR/all_defined.rs:115:1 - | -115 | / bitflags! { -116 | | struct Flags128: MyInt { -117 | | const A = MyInt(0b0000_0001u8); -118 | | const B = MyInt(0b0000_0010u8); -119 | | const C = MyInt(0b0000_0100u8); -120 | | } -121 | | } - | |_^ expected struct `MyInt`, found integer - | - = note: this error originates in the macro `__impl_bitflags` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/bitflags/tests/compile-fail/non_integer_base/all_missing.rs b/vendor/bitflags/tests/compile-fail/non_integer_base/all_missing.rs deleted file mode 100644 index fff6b2cc..00000000 --- a/vendor/bitflags/tests/compile-fail/non_integer_base/all_missing.rs +++ /dev/null @@ -1,13 +0,0 @@ -use bitflags::bitflags; - -struct MyInt(u8); - -bitflags! { - struct Flags128: MyInt { - const A = MyInt(0b0000_0001); - const B = MyInt(0b0000_0010); - const C = MyInt(0b0000_0100); - } -} - -fn main() {} diff --git a/vendor/bitflags/tests/compile-fail/non_integer_base/all_missing.stderr.beta b/vendor/bitflags/tests/compile-fail/non_integer_base/all_missing.stderr.beta deleted file mode 100644 index ee95f836..00000000 --- a/vendor/bitflags/tests/compile-fail/non_integer_base/all_missing.stderr.beta +++ /dev/null @@ -1,13 +0,0 @@ -error[E0204]: the trait `Copy` may not be implemented for this type - --> $DIR/all_missing.rs:5:1 - | -5 | / bitflags! { -6 | | struct Flags128: MyInt { -7 | | const A = MyInt(0b0000_0001); -8 | | const B = MyInt(0b0000_0010); -9 | | const C = MyInt(0b0000_0100); -10 | | } -11 | | } - | |_^ this field does not implement `Copy` - | - = note: this error originates in the derive macro `Copy` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/bitflags/tests/compile-fail/visibility/private_field.rs b/vendor/bitflags/tests/compile-fail/visibility/private_field.rs deleted file mode 100644 index a6a3912a..00000000 --- a/vendor/bitflags/tests/compile-fail/visibility/private_field.rs +++ /dev/null @@ -1,13 +0,0 @@ -mod example { - use bitflags::bitflags; - - bitflags! { - pub struct Flags1: u32 { - const FLAG_A = 0b00000001; - } - } -} - -fn main() { - let flag1 = example::Flags1::FLAG_A.bits; -} diff --git a/vendor/bitflags/tests/compile-fail/visibility/private_field.stderr.beta b/vendor/bitflags/tests/compile-fail/visibility/private_field.stderr.beta deleted file mode 100644 index 58a04660..00000000 --- a/vendor/bitflags/tests/compile-fail/visibility/private_field.stderr.beta +++ /dev/null @@ -1,10 +0,0 @@ -error[E0616]: field `bits` of struct `Flags1` is private - --> $DIR/private_field.rs:12:41 - | -12 | let flag1 = example::Flags1::FLAG_A.bits; - | ^^^^ private field - | -help: a method `bits` also exists, call it with parentheses - | -12 | let flag1 = example::Flags1::FLAG_A.bits(); - | ^^ diff --git a/vendor/bitflags/tests/compile-fail/visibility/private_flags.rs b/vendor/bitflags/tests/compile-fail/visibility/private_flags.rs deleted file mode 100644 index 85a5b186..00000000 --- a/vendor/bitflags/tests/compile-fail/visibility/private_flags.rs +++ /dev/null @@ -1,18 +0,0 @@ -mod example { - use bitflags::bitflags; - - bitflags! 
{ - pub struct Flags1: u32 { - const FLAG_A = 0b00000001; - } - - struct Flags2: u32 { - const FLAG_B = 0b00000010; - } - } -} - -fn main() { - let flag1 = example::Flags1::FLAG_A; - let flag2 = example::Flags2::FLAG_B; -} diff --git a/vendor/bitflags/tests/compile-fail/visibility/private_flags.stderr.beta b/vendor/bitflags/tests/compile-fail/visibility/private_flags.stderr.beta deleted file mode 100644 index d23f8320..00000000 --- a/vendor/bitflags/tests/compile-fail/visibility/private_flags.stderr.beta +++ /dev/null @@ -1,18 +0,0 @@ -error[E0603]: struct `Flags2` is private - --> $DIR/private_flags.rs:17:26 - | -17 | let flag2 = example::Flags2::FLAG_B; - | ^^^^^^ private struct - | -note: the struct `Flags2` is defined here - --> $DIR/private_flags.rs:4:5 - | -4 | / bitflags! { -5 | | pub struct Flags1: u32 { -6 | | const FLAG_A = 0b00000001; -7 | | } -... | -11 | | } -12 | | } - | |_____^ - = note: this error originates in the macro `bitflags` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/vendor/bitflags/tests/compile-fail/visibility/pub_const.rs b/vendor/bitflags/tests/compile-fail/visibility/pub_const.rs deleted file mode 100644 index b90f0ce9..00000000 --- a/vendor/bitflags/tests/compile-fail/visibility/pub_const.rs +++ /dev/null @@ -1,9 +0,0 @@ -use bitflags::bitflags; - -bitflags! { - pub struct Flags1: u32 { - pub const FLAG_A = 0b00000001; - } -} - -fn main() {} diff --git a/vendor/bitflags/tests/compile-fail/visibility/pub_const.stderr.beta b/vendor/bitflags/tests/compile-fail/visibility/pub_const.stderr.beta deleted file mode 100644 index b01122c7..00000000 --- a/vendor/bitflags/tests/compile-fail/visibility/pub_const.stderr.beta +++ /dev/null @@ -1,5 +0,0 @@ -error: no rules expected the token `pub` - --> $DIR/pub_const.rs:5:9 - | -5 | pub const FLAG_A = 0b00000001; - | ^^^ no rules expected this token in macro call diff --git a/vendor/bitflags/tests/compile-pass/impls/convert.rs b/vendor/bitflags/tests/compile-pass/impls/convert.rs deleted file mode 100644 index 1f02982a..00000000 --- a/vendor/bitflags/tests/compile-pass/impls/convert.rs +++ /dev/null @@ -1,17 +0,0 @@ -use bitflags::bitflags; - -bitflags! { - struct Flags: u32 { - const A = 0b00000001; - } -} - -impl From for Flags { - fn from(v: u32) -> Flags { - Flags::from_bits_truncate(v) - } -} - -fn main() { - -} diff --git a/vendor/bitflags/tests/compile-pass/impls/default.rs b/vendor/bitflags/tests/compile-pass/impls/default.rs deleted file mode 100644 index a97b6536..00000000 --- a/vendor/bitflags/tests/compile-pass/impls/default.rs +++ /dev/null @@ -1,10 +0,0 @@ -use bitflags::bitflags; - -bitflags! { - #[derive(Default)] - struct Flags: u32 { - const A = 0b00000001; - } -} - -fn main() {} diff --git a/vendor/bitflags/tests/compile-pass/impls/inherent_methods.rs b/vendor/bitflags/tests/compile-pass/impls/inherent_methods.rs deleted file mode 100644 index 3052c460..00000000 --- a/vendor/bitflags/tests/compile-pass/impls/inherent_methods.rs +++ /dev/null @@ -1,15 +0,0 @@ -use bitflags::bitflags; - -bitflags! 
{ - struct Flags: u32 { - const A = 0b00000001; - } -} - -impl Flags { - pub fn new() -> Flags { - Flags::A - } -} - -fn main() {} diff --git a/vendor/bitflags/tests/compile-pass/redefinition/core.rs b/vendor/bitflags/tests/compile-pass/redefinition/core.rs deleted file mode 100644 index 47549215..00000000 --- a/vendor/bitflags/tests/compile-pass/redefinition/core.rs +++ /dev/null @@ -1,14 +0,0 @@ -use bitflags::bitflags; - -// Checks for possible errors caused by overriding names used by `bitflags!` internally. - -mod core {} -mod _core {} - -bitflags! { - struct Test: u8 { - const A = 1; - } -} - -fn main() {} diff --git a/vendor/bitflags/tests/compile-pass/redefinition/stringify.rs b/vendor/bitflags/tests/compile-pass/redefinition/stringify.rs deleted file mode 100644 index b04f2f6a..00000000 --- a/vendor/bitflags/tests/compile-pass/redefinition/stringify.rs +++ /dev/null @@ -1,19 +0,0 @@ -use bitflags::bitflags; - -// Checks for possible errors caused by overriding names used by `bitflags!` internally. - -#[allow(unused_macros)] -macro_rules! stringify { - ($($t:tt)*) => { "..." }; -} - -bitflags! { - struct Test: u8 { - const A = 1; - } -} - -fn main() { - // Just make sure we don't call the redefined `stringify` macro - assert_eq!(format!("{:?}", Test::A), "A"); -} diff --git a/vendor/bitflags/tests/compile-pass/repr/c.rs b/vendor/bitflags/tests/compile-pass/repr/c.rs deleted file mode 100644 index 6feba36e..00000000 --- a/vendor/bitflags/tests/compile-pass/repr/c.rs +++ /dev/null @@ -1,10 +0,0 @@ -use bitflags::bitflags; - -bitflags! { - #[repr(C)] - struct Flags: u32 { - const A = 0b00000001; - } -} - -fn main() {} diff --git a/vendor/bitflags/tests/compile-pass/repr/transparent.rs b/vendor/bitflags/tests/compile-pass/repr/transparent.rs deleted file mode 100644 index e38db4dd..00000000 --- a/vendor/bitflags/tests/compile-pass/repr/transparent.rs +++ /dev/null @@ -1,10 +0,0 @@ -use bitflags::bitflags; - -bitflags! { - #[repr(transparent)] - struct Flags: u32 { - const A = 0b00000001; - } -} - -fn main() {} diff --git a/vendor/bitflags/tests/compile-pass/visibility/bits_field.rs b/vendor/bitflags/tests/compile-pass/visibility/bits_field.rs deleted file mode 100644 index 33a7967e..00000000 --- a/vendor/bitflags/tests/compile-pass/visibility/bits_field.rs +++ /dev/null @@ -1,11 +0,0 @@ -use bitflags::bitflags; - -bitflags! { - pub struct Flags1: u32 { - const FLAG_A = 0b00000001; - } -} - -fn main() { - assert_eq!(0b00000001, Flags1::FLAG_A.bits); -} diff --git a/vendor/bitflags/tests/compile-pass/visibility/pub_in.rs b/vendor/bitflags/tests/compile-pass/visibility/pub_in.rs deleted file mode 100644 index c11050e3..00000000 --- a/vendor/bitflags/tests/compile-pass/visibility/pub_in.rs +++ /dev/null @@ -1,19 +0,0 @@ -mod a { - mod b { - use bitflags::bitflags; - - bitflags! 
{ - pub(in crate::a) struct Flags: u32 { - const FLAG_A = 0b00000001; - } - } - } - - pub fn flags() -> u32 { - b::Flags::FLAG_A.bits() - } -} - -fn main() { - assert_eq!(0b00000001, a::flags()); -} diff --git a/vendor/bitflags/tests/compile.rs b/vendor/bitflags/tests/compile.rs deleted file mode 100644 index ed02d01e..00000000 --- a/vendor/bitflags/tests/compile.rs +++ /dev/null @@ -1,63 +0,0 @@ -use std::{ - fs, - ffi::OsStr, - io, - path::Path, -}; - -use walkdir::WalkDir; - -#[test] -fn fail() { - prepare_stderr_files("tests/compile-fail").unwrap(); - - let t = trybuild::TestCases::new(); - t.compile_fail("tests/compile-fail/**/*.rs"); -} - -#[test] -fn pass() { - let t = trybuild::TestCases::new(); - t.pass("tests/compile-pass/**/*.rs"); -} - -// Compiler messages may change between versions -// We don't want to have to track these too closely for `bitflags`, but -// having some message to check makes sure user-facing errors are sensical. -// -// The approach we use is to run the test on all compilers, but only check stderr -// output on beta (which is the next stable release). We do this by default ignoring -// any `.stderr` files in the `compile-fail` directory, and copying `.stderr.beta` files -// when we happen to be running on a beta compiler. -fn prepare_stderr_files(path: impl AsRef) -> io::Result<()> { - for entry in WalkDir::new(path) { - let entry = entry?; - - if entry.path().extension().and_then(OsStr::to_str) == Some("beta") { - let renamed = entry.path().with_extension(""); - - // Unconditionally remove a corresponding `.stderr` file for a `.stderr.beta` - // file if it exists. On `beta` compilers, we'll recreate it. On other compilers, - // we don't want to end up checking it anyways. - if renamed.exists() { - fs::remove_file(&renamed)?; - } - - rename_beta_stderr(entry.path(), renamed)?; - } - } - - Ok(()) -} - -#[rustversion::beta] -fn rename_beta_stderr(from: impl AsRef, to: impl AsRef) -> io::Result<()> { - fs::copy(from, to)?; - - Ok(()) -} - -#[rustversion::not(beta)] -fn rename_beta_stderr(_: impl AsRef, _: impl AsRef) -> io::Result<()> { - Ok(()) -} diff --git a/vendor/bytemuck/.cargo-checksum.json b/vendor/bytemuck/.cargo-checksum.json deleted file mode 100644 index 60d865af..00000000 --- a/vendor/bytemuck/.cargo-checksum.json +++ /dev/null @@ -1 +0,0 @@ 
-{"files":{"Cargo.toml":"7c8549c6fa80cd41ae7621ada77dacbe019621067299848f9db637c20bf61586","LICENSE-APACHE":"870e20c217d15bcfcbe53d7c5867cd8fac44a4ca0b41fc1eb843557e16063eba","LICENSE-MIT":"0b2d108c9c686a74ac312990ee8377902756a2a081a7af3b0f9d68abf0a8f1a1","LICENSE-ZLIB":"682b4c81b85e83ce6cc6e1ace38fdd97aeb4de0e972bd2b44aa0916c54af8c96","README.md":"0adbc44aa99089d69cdc3c9d1608524a1a9b913656ed4b0e637fc201a3ab74fc","changelog.md":"be39168557b84c812934224715184174db97274c58b54dd6b0c5e743bf96eaf0","rustfmt.toml":"f4c215534437936f924c937dbb1677f614761589300d6b389f3b518b3eb551b8","src/allocation.rs":"c5d85bc9697333a83da8dd5ac8e47471b80c7ae7eedb387564953e16024d7031","src/contiguous.rs":"867e162651b435aa0298caad1d81f46877c22c74a2766d9e79be0ab3c615ce46","src/lib.rs":"6efa427faed025a11a6ed4349649586c8773cd6b61c7f034f044ab9e5c1a7716","src/offset_of.rs":"2afd190ef0462b30ade786fe813a91e7bf41cc2fa99a1d79002cbafab5964f37","src/pod.rs":"bee5f9569b82b891d1216c62a2af27eb7df995f6eb9023d9e5119e8d0758e5d5","src/transparent.rs":"04dfa5d947a22fbd955fc0bcb93fd499161bed4bc8264e62f3dfe8a462b960a8","src/zeroable.rs":"e7b321d98d5196cb59c19be099c35ca5a88c9d0f4340328fdf926fa028957a90","tests/array_tests.rs":"98ca7a0dcd93e65f70d4db19643e707cafae5a249561ab151998cedb89b2e036","tests/cast_slice_tests.rs":"567e4de061f9ad1eeba5abac75b3395a75d5cf48b3bd1186e740ece5c8cffd1b","tests/derive.rs":"6843d5278aa8d6a53f4b4907f84e4c1d144818b954b4e69e0f8f683de7a90ec9","tests/doc_tests.rs":"eb4ce9cb167dbc1d6742b7d5c518c0636c77ce020c7cde28802d8dd67961bd15","tests/offset_of_tests.rs":"435a92c321865ddba9f29b81fc27c23f268e1dc12955a3c8b01ff2cc0c4f5615","tests/std_tests.rs":"ba0936ed2508b109bb4d3edda1294f61cda28118f511f9909dc6916be4e59207","tests/transparent.rs":"df39457958906e7708fc59d1bbc100d55701e9777424562aac2c4e70923fe6e6","tests/wrapper_forgets.rs":"c6330546f6aa696245625056e7323b3916e3fb1a9fbecefe9c9e62d3726812d9"},"package":"439989e6b8c38d1b6570a384ef1e49c8848128f5a97f3914baef02920842712f"} \ No newline at end of file diff --git a/vendor/bytemuck/Cargo.toml b/vendor/bytemuck/Cargo.toml deleted file mode 100644 index 9811483f..00000000 --- a/vendor/bytemuck/Cargo.toml +++ /dev/null @@ -1,41 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies. -# -# If you are reading this file be aware that the original Cargo.toml -# will likely look very different (and much more reasonable). -# See Cargo.toml.orig for the original contents. - -[package] -edition = "2018" -name = "bytemuck" -version = "1.7.3" -authors = ["Lokathor "] -exclude = ["/pedantic.bat"] -description = "A crate for mucking around with piles of bytes." 
-readme = "README.md" -keywords = ["transmute", "bytes", "casting"] -categories = ["encoding", "no-std"] -license = "Zlib OR Apache-2.0 OR MIT" -repository = "https://github.com/Lokathor/bytemuck" -[package.metadata.docs.rs] -features = ["derive", "extern_crate_alloc", "extern_crate_std", "zeroable_maybe_uninit", "min_const_generics", "wasm_simd"] - -[package.metadata.playground] -features = ["derive", "extern_crate_alloc", "extern_crate_std", "zeroable_maybe_uninit", "min_const_generics", "wasm_simd"] -[dependencies.bytemuck_derive] -version = "1" -optional = true - -[features] -derive = ["bytemuck_derive"] -extern_crate_alloc = [] -extern_crate_std = ["extern_crate_alloc"] -min_const_generics = [] -nightly_portable_simd = [] -unsound_ptr_pod_impl = [] -wasm_simd = [] -zeroable_maybe_uninit = [] diff --git a/vendor/bytemuck/LICENSE-APACHE b/vendor/bytemuck/LICENSE-APACHE deleted file mode 100644 index 1d02268d..00000000 --- a/vendor/bytemuck/LICENSE-APACHE +++ /dev/null @@ -1,61 +0,0 @@ -Apache License -Version 2.0, January 2004 -http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. - - "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. - 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. - 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. - 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: - (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and - (b) You must cause any modified files to carry prominent notices stating that You changed the files; and - (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and - (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. - - You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. - 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. - 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. - 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. - 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. - 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - -To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. 
(Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/vendor/bytemuck/LICENSE-MIT b/vendor/bytemuck/LICENSE-MIT deleted file mode 100644 index 0aa88160..00000000 --- a/vendor/bytemuck/LICENSE-MIT +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) 2019 Daniel "Lokathor" Gee. - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/bytemuck/LICENSE-ZLIB b/vendor/bytemuck/LICENSE-ZLIB deleted file mode 100644 index aa2dabe6..00000000 --- a/vendor/bytemuck/LICENSE-ZLIB +++ /dev/null @@ -1,11 +0,0 @@ -Copyright (c) 2019 Daniel "Lokathor" Gee. - -This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software. - -Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions: - -1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required. - -2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software. - -3. This notice may not be removed or altered from any source distribution. 
diff --git a/vendor/bytemuck/README.md b/vendor/bytemuck/README.md deleted file mode 100644 index d8026e10..00000000 --- a/vendor/bytemuck/README.md +++ /dev/null @@ -1,19 +0,0 @@ -[![License:Zlib](https://img.shields.io/badge/License-Zlib-brightgreen.svg)](https://opensource.org/licenses/Zlib) -![Minimum Rust Version](https://img.shields.io/badge/Min%20Rust-1.34-green.svg) -[![crates.io](https://img.shields.io/crates/v/bytemuck.svg)](https://crates.io/crates/bytemuck) -[![docs.rs](https://docs.rs/bytemuck/badge.svg)](https://docs.rs/bytemuck/) - -# bytemuck - -A crate for mucking around with piles of bytes. - -## Stability - -The goal is to stay at 1.y.z until _at least_ the next edition of Rust. - -I consider any increase of the Minimum Rust Version to be a semver breaking change, -so `rustc-1.34` will continue to be supported for at least the rest of the -`bytemuck-1.y.z` series of the crate. - -(The secret goal is to get all of this functionality into the standard library -some day so that we don't even need to import a crate to do all this fun stuff.) diff --git a/vendor/bytemuck/changelog.md b/vendor/bytemuck/changelog.md deleted file mode 100644 index a5834f52..00000000 --- a/vendor/bytemuck/changelog.md +++ /dev/null @@ -1,161 +0,0 @@ -# `bytemuck` changelog - -## 1.7.3 - -* Experimental support for the `portable_simd` language extension under the - `nightly_portable_simd` cargo feature. As the name implies, this is an - experimental crate feature and it's **not** part of the semver contract. All - it does is add the appropriate `Zeroable` and `Pod` impls. - -## 1.7.2 - -* Why does this repo keep being hit with publishing problems? What did I do to - deserve this curse, Ferris? This doesn't ever happen with tinyvec or fermium, - only bytemuck. - -## 1.7.1 - -* **Soundness Fix:** The wrap/peel methods for owned value conversion, added to - `TransparentWrapper` in 1.6, can cause a double-drop if used with types that - impl `Drop`. The fix was simply to add a `ManuallyDrop` layer around the value - before doing the `transmute_copy` that is used to wrap/peel. While this fix - could technically be backported to the 1.6 series, since 1.7 is semver - compatible anyway the 1.6 series has simply been yanked. - -## 1.7 - -* In response to [Unsafe Code Guidelines Issue - #286](https://github.com/rust-lang/unsafe-code-guidelines/issues/286), this - version of Bytemuck has a ***Soundness-Required Breaking Change***. This is - "allowed" under Rust's backwards-compatibility guidelines, but it's still - annoying of course so we're trying to keep the damage minimal. - * **The Reason:** It turns out that pointer values should not have been `Pod`. More - specifically, `ptr as usize` is *not* the same operation as calling - `transmute::<_, usize>(ptr)`. - * LLVM has yet to fully sort out their story, but until they do, transmuting - pointers can cause miscompilations. They may fix things up in the future, - but we're not gonna just wait and have broken code in the mean time. - * **The Fix:** The breaking change is that the `Pod` impls for `*const T`, - `*mut T`, and `Option` are now gated behind the - `unsound_ptr_pod_impl` feature, which is off by default. - * You are *strongly discouraged* from using this feature, but if a dependency - of yours doesn't work when you upgrade to 1.7 because it relied on pointer - casting, then you might wish to temporarily enable the feature just to get - that dependency to build. 
Enabled features are global across all users of a - given semver compatible version, so if you enable the feature in your own - crate, your dependency will also end up getting the feature too, and then - it'll be able to compile. - * Please move away from using this feature as soon as you can. Consider it to - *already* be deprecated. - * [PR 65](https://github.com/Lokathor/bytemuck/pull/65) - -## 1.6.3 - -* Small goof with an errant `;`, so [PR 69](https://github.com/Lokathor/bytemuck/pull/69) - *actually* got things working on SPIR-V. - -## 1.6.2 - -cargo upload goof! ignore this one. - -## 1.6.1 - -* [DJMcNab](https://github.com/DJMcNab) did a fix so that the crate can build for SPIR-V - [PR 67](https://github.com/Lokathor/bytemuck/pull/67) - -## 1.6 - -* The `TransparentWrapper` trait now has more methods. More ways to wrap, and - now you can "peel" too! Note that we don't call it "unwrap" because that name - is too strongly associated with the Option/Result methods. - Thanks to [LU15W1R7H](https://github.com/LU15W1R7H) for doing - [PR 58](https://github.com/Lokathor/bytemuck/pull/58) -* Min Const Generics! Now there's Pod and Zeroable for arrays of any size when - you turn on the `min_const_generics` crate feature. - [zakarumych](https://github.com/zakarumych) got the work started in - [PR 59](https://github.com/Lokathor/bytemuck/pull/59), - and [chorman0773](https://github.com/chorman0773) finished off the task in - [PR 63](https://github.com/Lokathor/bytemuck/pull/63) - -## 1.5.1 - -* Fix `bytes_of` failing on zero sized types. - [PR 53](https://github.com/Lokathor/bytemuck/pull/53) - -## 1.5 - -* Added `pod_collect_to_vec`, which will gather a slice into a vec, -allowing you to change the pod type while also safely ignoring alignment. -[PR 50](https://github.com/Lokathor/bytemuck/pull/50) - -## 1.4.2 - -* [Kimundi](https://github.com/Kimundi) fixed an issue that could make `try_zeroed_box` -stack overflow for large values at low optimization levels. -[PR 43](https://github.com/Lokathor/bytemuck/pull/43) - -## 1.4.1 - -* [thomcc](https://github.com/thomcc) fixed up the CI and patched over a soundness hole in `offset_of!`. -[PR 38](https://github.com/Lokathor/bytemuck/pull/38) - -## 1.4 - -* [icewind1991](https://github.com/icewind1991) has contributed the proc-macros - for deriving impls of `Pod`, `TransparentWrapper`, `Zeroable`!! Everyone has - been waiting for this one folks! It's a big deal. Just enable the `derive` - cargo feature and then you'll be able to derive the traits on your types. It - generates all the appropriate tests for you. -* The `zeroable_maybe_uninit` feature now adds a `Zeroable` impl to the - `MaybeUninit` type. This is only behind a feature flag because `MaybeUninit` - didn't exist back in `1.34.0` (the minimum rust version of `bytemuck`). - -## 1.3.1 - -* The entire crate is now available under the `Apache-2.0 OR MIT` license as - well as the previous `Zlib` license - [#24](https://github.com/Lokathor/bytemuck/pull/24). -* [HeroicKatora](https://github.com/HeroicKatora) added the - `try_zeroed_slice_box` function - [#10](https://github.com/Lokathor/bytemuck/pull/17). `zeroed_slice_box` is - also available. -* The `offset_of!` macro now supports a 2-arg version. For types that impl - Default, it'll just make an instance using `default` and then call over to the - 3-arg version. -* The `PodCastError` type now supports `Hash` and `Display`. Also if you enable - the `extern_crate_std` feature then it will support `std::error::Error`. 
-* We now provide a `TransparentWrapper` impl for `core::num::Wrapper`. -* The error type of `try_from_bytes` and `try_from_bytes_mut` when the input - isn't aligned has been corrected from being `AlignmentMismatch` (intended for - allocation casting only) to `TargetAlignmentGreaterAndInputNotAligned`. - -## 1.3.0 - -* Had a bug because the CI was messed up! It wasn't soundness related, because - it prevented the crate from building entirely if the `extern_crate_alloc` - feature was used. Still, this is yanked, sorry. - -## 1.2.0 - -* [thomcc](https://github.com/thomcc) added many things: - * A fully sound `offset_of!` macro - [#10](https://github.com/Lokathor/bytemuck/pull/10) - * A `Contiguous` trait for when you've got enums with declared values - all in a row [#12](https://github.com/Lokathor/bytemuck/pull/12) - * A `TransparentWrapper` marker trait for when you want to more clearly - enable adding and removing a wrapper struct to its inner value - [#15](https://github.com/Lokathor/bytemuck/pull/15) - * Now MIRI is run on CI in every single push! - [#16](https://github.com/Lokathor/bytemuck/pull/16) - -## 1.1.0 - -* [SimonSapin](https://github.com/SimonSapin) added `from_bytes`, - `from_bytes_mut`, `try_from_bytes`, and `try_from_bytes_mut` ([PR - Link](https://github.com/Lokathor/bytemuck/pull/8)) - -## 1.0.1 - -* Changed to the [zlib](https://opensource.org/licenses/Zlib) license. -* Added much more proper documentation. -* Reduced the minimum Rust version to 1.34 diff --git a/vendor/bytemuck/rustfmt.toml b/vendor/bytemuck/rustfmt.toml deleted file mode 100644 index a572164a..00000000 --- a/vendor/bytemuck/rustfmt.toml +++ /dev/null @@ -1,16 +0,0 @@ -# Based on -# https://github.com/rust-lang/rustfmt/blob/rustfmt-1.4.19/Configurations.md - -# Stable -edition = "2018" -fn_args_layout = "Compressed" -max_width = 80 -tab_spaces = 2 -use_field_init_shorthand = true -use_try_shorthand = true -use_small_heuristics = "Max" - -# Unstable -format_code_in_doc_comments = true -imports_granularity = "Crate" -wrap_comments = true diff --git a/vendor/bytemuck/src/allocation.rs b/vendor/bytemuck/src/allocation.rs deleted file mode 100644 index d2c98899..00000000 --- a/vendor/bytemuck/src/allocation.rs +++ /dev/null @@ -1,215 +0,0 @@ -#![cfg(feature = "extern_crate_alloc")] - -//! Stuff to boost things in the `alloc` crate. -//! -//! * You must enable the `extern_crate_alloc` feature of `bytemuck` or you will -//! not be able to use this module! This is generally done by adding the -//! feature to the dependency in Cargo.toml like so: -//! `bytemuck = { version = "VERSION_YOU_ARE_USING", features = ["extern_crate_alloc"]}` - - -use super::*; -use alloc::{ - alloc::{alloc_zeroed, Layout}, - boxed::Box, - vec, - vec::Vec, -}; -use core::convert::TryInto; - -/// As [`try_cast_box`](try_cast_box), but unwraps for you. -#[inline] -pub fn cast_box(input: Box) -> Box { - try_cast_box(input).map_err(|(e, _v)| e).unwrap() -} - -/// Attempts to cast the content type of a [`Box`](alloc::boxed::Box). -/// -/// On failure you get back an error along with the starting `Box`. -/// -/// ## Failure -/// -/// * The start and end content type of the `Box` must have the exact same -/// alignment. -/// * The start and end size of the `Box` must have the exact same size. 
-#[inline] -pub fn try_cast_box( - input: Box, -) -> Result, (PodCastError, Box)> { - if align_of::() != align_of::() { - Err((PodCastError::AlignmentMismatch, input)) - } else if size_of::() != size_of::() { - Err((PodCastError::SizeMismatch, input)) - } else { - // Note(Lokathor): This is much simpler than with the Vec casting! - let ptr: *mut B = Box::into_raw(input) as *mut B; - Ok(unsafe { Box::from_raw(ptr) }) - } -} - -/// Allocates a `Box` with all of the contents being zeroed out. -/// -/// This uses the global allocator to create a zeroed allocation and _then_ -/// turns it into a Box. In other words, it's 100% assured that the zeroed data -/// won't be put temporarily on the stack. You can make a box of any size -/// without fear of a stack overflow. -/// -/// ## Failure -/// -/// This fails if the allocation fails. -#[inline] -pub fn try_zeroed_box() -> Result, ()> { - if size_of::() == 0 { - // This will not allocate but simple create a dangling slice pointer. - // NB: We go the way via a push to `Vec` to ensure the compiler - // does not allocate space for T on the stack even if the branch - // would not be taken. - let mut vec = Vec::with_capacity(1); - vec.resize_with(1, || T::zeroed()); - let ptr: Box<[T; 1]> = vec.into_boxed_slice().try_into().ok().unwrap(); - debug_assert!( - align_of::<[T; 1]>() == align_of::() - && size_of::<[T; 1]>() == size_of::() - ); - // NB: We basically do the same as in try_cast_box here: - let ptr: Box = unsafe { Box::from_raw(Box::into_raw(ptr) as *mut _) }; - return Ok(ptr); - } - let layout = - Layout::from_size_align(size_of::(), align_of::()).unwrap(); - let ptr = unsafe { alloc_zeroed(layout) }; - if ptr.is_null() { - // we don't know what the error is because `alloc_zeroed` is a dumb API - Err(()) - } else { - Ok(unsafe { Box::::from_raw(ptr as *mut T) }) - } -} - -/// As [`try_zeroed_box`], but unwraps for you. -#[inline] -pub fn zeroed_box() -> Box { - try_zeroed_box().unwrap() -} - -/// Allocates a `Box<[T]>` with all contents being zeroed out. -/// -/// This uses the global allocator to create a zeroed allocation and _then_ -/// turns it into a Box. In other words, it's 100% assured that the zeroed data -/// won't be put temporarily on the stack. You can make a box of any size -/// without fear of a stack overflow. -/// -/// ## Failure -/// -/// This fails if the allocation fails. -#[inline] -pub fn try_zeroed_slice_box( - length: usize, -) -> Result, ()> { - if size_of::() == 0 { - // This will not allocate but simple create a dangling slice pointer. - let mut vec = Vec::with_capacity(length); - vec.resize_with(length, || T::zeroed()); - return Ok(vec.into_boxed_slice()); - } - if length == 0 { - // This will also not allocate. - return Ok(Vec::new().into_boxed_slice()); - } - // For Pod types, the layout of the array/slice is equivalent to repeating the - // type. - let layout_length = size_of::().checked_mul(length).ok_or(())?; - assert!(layout_length != 0); - let layout = - Layout::from_size_align(layout_length, align_of::()).map_err(|_| ())?; - let ptr = unsafe { alloc_zeroed(layout) }; - if ptr.is_null() { - // we don't know what the error is because `alloc_zeroed` is a dumb API - Err(()) - } else { - let slice = - unsafe { core::slice::from_raw_parts_mut(ptr as *mut T, length) }; - Ok(unsafe { Box::<[T]>::from_raw(slice) }) - } -} - -/// As [`try_zeroed_slice_box`](try_zeroed_slice_box), but unwraps for you. 
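A minimal usage sketch of the zeroed-allocation helpers documented in this module (hypothetical caller code, assuming the `extern_crate_alloc` cargo feature is enabled; the element types and the 4096 length are illustrative only):

use bytemuck::{zeroed_box, zeroed_slice_box};

// Illustrative only: heap-allocate zeroed storage without first constructing
// the value on the stack, as the doc comments above describe.
fn zeroed_buffers() {
    let word: Box<u64> = zeroed_box();             // single zeroed value on the heap
    let bytes: Box<[u8]> = zeroed_slice_box(4096); // zeroed slice of the requested length
    assert_eq!(*word, 0);
    assert!(bytes.iter().all(|&b| b == 0));
}
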
-pub fn zeroed_slice_box(length: usize) -> Box<[T]> { - try_zeroed_slice_box(length).unwrap() -} - -/// As [`try_cast_vec`](try_cast_vec), but unwraps for you. -#[inline] -pub fn cast_vec(input: Vec) -> Vec { - try_cast_vec(input).map_err(|(e, _v)| e).unwrap() -} - -/// Attempts to cast the content type of a [`Vec`](alloc::vec::Vec). -/// -/// On failure you get back an error along with the starting `Vec`. -/// -/// ## Failure -/// -/// * The start and end content type of the `Vec` must have the exact same -/// alignment. -/// * The start and end size of the `Vec` must have the exact same size. -/// * In the future this second restriction might be lessened by having the -/// capacity and length get adjusted during transmutation, but for now it's -/// absolute. -#[inline] -pub fn try_cast_vec( - input: Vec, -) -> Result, (PodCastError, Vec)> { - if align_of::() != align_of::() { - Err((PodCastError::AlignmentMismatch, input)) - } else if size_of::() != size_of::() { - // Note(Lokathor): Under some conditions it would be possible to cast - // between Vec content types of the same alignment but different sizes by - // changing the capacity and len values in the output Vec. However, we will - // not attempt that for now. - Err((PodCastError::SizeMismatch, input)) - } else { - // Note(Lokathor): First we record the length and capacity, which don't have - // any secret provenance metadata. - let length: usize = input.len(); - let capacity: usize = input.capacity(); - // Note(Lokathor): Next we "pre-forget" the old Vec by wrapping with - // ManuallyDrop, because if we used `core::mem::forget` after taking the - // pointer then that would invalidate our pointer. In nightly there's a - // "into raw parts" method, which we can switch this too eventually. - let mut manual_drop_vec = ManuallyDrop::new(input); - let vec_ptr: *mut A = manual_drop_vec.as_mut_ptr(); - let ptr: *mut B = vec_ptr as *mut B; - Ok(unsafe { Vec::from_raw_parts(ptr, length, capacity) }) - } -} - -/// This "collects" a slice of pod data into a vec of a different pod type. -/// -/// Unlike with [`cast_slice`] and [`cast_slice_mut`], this will always work. -/// -/// The output vec will be of a minimal size/capacity to hold the slice given. -/// -/// ```rust -/// # use bytemuck::*; -/// let halfwords: [u16; 4] = [5, 6, 7, 8]; -/// let vec_of_words: Vec = pod_collect_to_vec(&halfwords); -/// if cfg!(target_endian = "little") { -/// assert_eq!(&vec_of_words[..], &[0x0006_0005, 0x0008_0007][..]) -/// } else { -/// assert_eq!(&vec_of_words[..], &[0x0005_0006, 0x0007_0008][..]) -/// } -/// ``` -pub fn pod_collect_to_vec(src: &[A]) -> Vec { - let src_size = size_of_val(src); - // Note(Lokathor): dst_count is rounded up so that the dest will always be at - // least as many bytes as the src. - let dst_count = src_size / size_of::() - + if src_size % size_of::() != 0 { 1 } else { 0 }; - let mut dst = vec![B::zeroed(); dst_count]; - - let src_bytes: &[u8] = cast_slice(src); - let dst_bytes: &mut [u8] = cast_slice_mut(&mut dst[..]); - dst_bytes[..src_size].copy_from_slice(src_bytes); - dst -} diff --git a/vendor/bytemuck/src/contiguous.rs b/vendor/bytemuck/src/contiguous.rs deleted file mode 100644 index f84a6124..00000000 --- a/vendor/bytemuck/src/contiguous.rs +++ /dev/null @@ -1,202 +0,0 @@ -use super::*; - -/// A trait indicating that: -/// -/// 1. A type has an equivalent representation to some known integral type. -/// 2. All instances of this type fall in a fixed range of values. -/// 3. Within that range, there are no gaps. 
-/// -/// This is generally useful for fieldless enums (aka "c-style" enums), however -/// it's important that it only be used for those with an explicit `#[repr]`, as -/// `#[repr(Rust)]` fieldess enums have an unspecified layout. -/// -/// Additionally, you shouldn't assume that all implementations are enums. Any -/// type which meets the requirements above while following the rules under -/// "Safety" below is valid. -/// -/// # Example -/// -/// ``` -/// # use bytemuck::Contiguous; -/// #[repr(u8)] -/// #[derive(Debug, Copy, Clone, PartialEq)] -/// enum Foo { -/// A = 0, -/// B = 1, -/// C = 2, -/// D = 3, -/// E = 4, -/// } -/// unsafe impl Contiguous for Foo { -/// type Int = u8; -/// const MIN_VALUE: u8 = Foo::A as u8; -/// const MAX_VALUE: u8 = Foo::E as u8; -/// } -/// assert_eq!(Foo::from_integer(3).unwrap(), Foo::D); -/// assert_eq!(Foo::from_integer(8), None); -/// assert_eq!(Foo::C.into_integer(), 2); -/// ``` -/// # Safety -/// -/// This is an unsafe trait, and incorrectly implementing it is undefined -/// behavior. -/// -/// Informally, by implementing it, you're asserting that `C` is identical to -/// the integral type `C::Int`, and that every `C` falls between `C::MIN_VALUE` -/// and `C::MAX_VALUE` exactly once, without any gaps. -/// -/// Precisely, the guarantees you must uphold when implementing `Contiguous` for -/// some type `C` are: -/// -/// 1. The size of `C` and `C::Int` must be the same, and neither may be a ZST. -/// (Note: alignment is explicitly allowed to differ) -/// -/// 2. `C::Int` must be a primitive integer, and not a wrapper type. In the -/// future, this may be lifted to include cases where the behavior is -/// identical for a relevant set of traits (Ord, arithmetic, ...). -/// -/// 3. All `C::Int`s which are in the *inclusive* range between `C::MIN_VALUE` -/// and `C::MAX_VALUE` are bitwise identical to unique valid instances of -/// `C`. -/// -/// 4. There exist no instances of `C` such that their bitpatterns, when -/// interpreted as instances of `C::Int`, fall outside of the `MAX_VALUE` / -/// `MIN_VALUE` range -- It is legal for unsafe code to assume that if it -/// gets a `C` that implements `Contiguous`, it is in the appropriate range. -/// -/// 5. Finally, you promise not to provide overridden implementations of -/// `Contiguous::from_integer` and `Contiguous::into_integer`. -/// -/// For clarity, the following rules could be derived from the above, but are -/// listed explicitly: -/// -/// - `C::MAX_VALUE` must be greater or equal to `C::MIN_VALUE` (therefore, `C` -/// must be an inhabited type). -/// -/// - There exist no two values between `MIN_VALUE` and `MAX_VALUE` such that -/// when interpreted as a `C` they are considered identical (by, say, match). -pub unsafe trait Contiguous: Copy + 'static { - /// The primitive integer type with an identical representation to this - /// type. - /// - /// Contiguous is broadly intended for use with fieldless enums, and for - /// these the correct integer type is easy: The enum should have a - /// `#[repr(Int)]` or `#[repr(C)]` attribute, (if it does not, it is - /// *unsound* to implement `Contiguous`!). - /// - /// - For `#[repr(Int)]`, use the listed `Int`. e.g. `#[repr(u8)]` should use - /// `type Int = u8`. - /// - /// - For `#[repr(C)]`, use whichever type the C compiler will use to - /// represent the given enum. This is usually `c_int` (from `std::os::raw` - /// or `libc`), but it's up to you to make the determination as the - /// implementer of the unsafe trait. 
- /// - /// For precise rules, see the list under "Safety" above. - type Int: Copy + Ord; - - /// The upper *inclusive* bound for valid instances of this type. - const MAX_VALUE: Self::Int; - - /// The lower *inclusive* bound for valid instances of this type. - const MIN_VALUE: Self::Int; - - /// If `value` is within the range for valid instances of this type, - /// returns `Some(converted_value)`, otherwise, returns `None`. - /// - /// This is a trait method so that you can write `value.into_integer()` in - /// your code. It is a contract of this trait that if you implement - /// `Contiguous` on your type you **must not** override this method. - /// - /// # Panics - /// - /// We will not panic for any correct implementation of `Contiguous`, but - /// *may* panic if we detect an incorrect one. - /// - /// This is undefined behavior regardless, so it could have been the nasal - /// demons at that point anyway ;). - #[inline] - fn from_integer(value: Self::Int) -> Option { - // Guard against an illegal implementation of Contiguous. Annoyingly we - // can't rely on `transmute` to do this for us (see below), but - // whatever, this gets compiled into nothing in release. - assert!(size_of::() == size_of::()); - if Self::MIN_VALUE <= value && value <= Self::MAX_VALUE { - // SAFETY: We've checked their bounds (and their size, even though - // they've sworn under the Oath Of Unsafe Rust that that already - // matched) so this is allowed by `Contiguous`'s unsafe contract. - // - // So, the `transmute!`. ideally we'd use transmute here, which - // is more obviously safe. Sadly, we can't, as these types still - // have unspecified sizes. - Some(unsafe { transmute!(value) }) - } else { - None - } - } - - /// Perform the conversion from `C` into the underlying integral type. This - /// mostly exists otherwise generic code would need unsafe for the `value as - /// integer` - /// - /// This is a trait method so that you can write `value.into_integer()` in - /// your code. It is a contract of this trait that if you implement - /// `Contiguous` on your type you **must not** override this method. - /// - /// # Panics - /// - /// We will not panic for any correct implementation of `Contiguous`, but - /// *may* panic if we detect an incorrect one. - /// - /// This is undefined behavior regardless, so it could have been the nasal - /// demons at that point anyway ;). - #[inline] - fn into_integer(self) -> Self::Int { - // Guard against an illegal implementation of Contiguous. Annoyingly we - // can't rely on `transmute` to do the size check for us (see - // `from_integer's comment`), but whatever, this gets compiled into - // nothing in release. Note that we don't check the result of cast - assert!(size_of::() == size_of::()); - - // SAFETY: The unsafe contract requires that these have identical - // representations, and that the range be entirely valid. Using - // transmute! instead of transmute here is annoying, but is required - // as `Self` and `Self::Int` have unspecified sizes still. - unsafe { transmute!(self) } - } -} - -macro_rules! impl_contiguous { - ($($src:ty as $repr:ident in [$min:expr, $max:expr];)*) => {$( - unsafe impl Contiguous for $src { - type Int = $repr; - const MAX_VALUE: $repr = $max; - const MIN_VALUE: $repr = $min; - } - )*}; -} - -impl_contiguous! 
{ - bool as u8 in [0, 1]; - - u8 as u8 in [0, u8::max_value()]; - u16 as u16 in [0, u16::max_value()]; - u32 as u32 in [0, u32::max_value()]; - u64 as u64 in [0, u64::max_value()]; - u128 as u128 in [0, u128::max_value()]; - usize as usize in [0, usize::max_value()]; - - i8 as i8 in [i8::min_value(), i8::max_value()]; - i16 as i16 in [i16::min_value(), i16::max_value()]; - i32 as i32 in [i32::min_value(), i32::max_value()]; - i64 as i64 in [i64::min_value(), i64::max_value()]; - i128 as i128 in [i128::min_value(), i128::max_value()]; - isize as isize in [isize::min_value(), isize::max_value()]; - - NonZeroU8 as u8 in [1, u8::max_value()]; - NonZeroU16 as u16 in [1, u16::max_value()]; - NonZeroU32 as u32 in [1, u32::max_value()]; - NonZeroU64 as u64 in [1, u64::max_value()]; - NonZeroU128 as u128 in [1, u128::max_value()]; - NonZeroUsize as usize in [1, usize::max_value()]; -} diff --git a/vendor/bytemuck/src/lib.rs b/vendor/bytemuck/src/lib.rs deleted file mode 100644 index 42d901e2..00000000 --- a/vendor/bytemuck/src/lib.rs +++ /dev/null @@ -1,471 +0,0 @@ -#![no_std] -#![warn(missing_docs)] -#![cfg_attr(feature = "nightly_portable_simd", feature(portable_simd))] - -//! This crate gives small utilities for casting between plain data types. -//! -//! ## Basics -//! -//! Data comes in five basic forms in Rust, so we have five basic casting -//! functions: -//! -//! * `T` uses [`cast`] -//! * `&T` uses [`cast_ref`] -//! * `&mut T` uses [`cast_mut`] -//! * `&[T]` uses [`cast_slice`] -//! * `&mut [T]` uses [`cast_slice_mut`] -//! -//! Some casts will never fail (eg: `cast::` always works), other -//! casts might fail (eg: `cast_ref::<[u8; 4], u32>` will fail if the reference -//! isn't already aligned to 4). Each casting function has a "try" version which -//! will return a `Result`, and the "normal" version which will simply panic on -//! invalid input. -//! -//! ## Using Your Own Types -//! -//! All the functions here are guarded by the [`Pod`] trait, which is a -//! sub-trait of the [`Zeroable`] trait. -//! -//! If you're very sure that your type is eligible, you can implement those -//! traits for your type and then they'll have full casting support. However, -//! these traits are `unsafe`, and you should carefully read the requirements -//! before adding the them to your own types. -//! -//! ## Features -//! -//! * This crate is core only by default, but if you're using Rust 1.36 or later -//! you can enable the `extern_crate_alloc` cargo feature for some additional -//! methods related to `Box` and `Vec`. Note that the `docs.rs` documentation -//! is always built with `extern_crate_alloc` cargo feature enabled. - -#[cfg(all(target_arch = "wasm32", feature = "wasm_simd"))] -use core::arch::wasm32; -#[cfg(target_arch = "x86")] -use core::arch::x86; -#[cfg(target_arch = "x86_64")] -use core::arch::x86_64; -// -use core::{marker::*, mem::*, num::*, ptr::*}; - -// Used from macros to ensure we aren't using some locally defined name and -// actually are referencing libcore. This also would allow pre-2018 edition -// crates to use our macros, but I'm not sure how important that is. -#[doc(hidden)] -pub use ::core as __core; - -#[cfg(not(feature = "min_const_generics"))] -macro_rules! impl_unsafe_marker_for_array { - ( $marker:ident , $( $n:expr ),* ) => { - $(unsafe impl $marker for [T; $n] where T: $marker {})* - } -} - -/// A macro to transmute between two types without requiring knowing size -/// statically. -macro_rules! 
transmute { - ($val:expr) => { - transmute_copy(&ManuallyDrop::new($val)) - }; -} - -#[cfg(feature = "extern_crate_std")] -extern crate std; - -#[cfg(feature = "extern_crate_alloc")] -extern crate alloc; -#[cfg(feature = "extern_crate_alloc")] -pub mod allocation; -#[cfg(feature = "extern_crate_alloc")] -pub use allocation::*; - -mod zeroable; -pub use zeroable::*; - -mod pod; -pub use pod::*; - -mod contiguous; -pub use contiguous::*; - -mod offset_of; -pub use offset_of::*; - -mod transparent; -pub use transparent::*; - -#[cfg(feature = "derive")] -pub use bytemuck_derive::{Contiguous, Pod, TransparentWrapper, Zeroable}; - -/* - -Note(Lokathor): We've switched all of the `unwrap` to `match` because there is -apparently a bug: https://github.com/rust-lang/rust/issues/68667 -and it doesn't seem to show up in simple godbolt examples but has been reported -as having an impact when there's a cast mixed in with other more complicated -code around it. Rustc/LLVM ends up missing that the `Err` can't ever happen for -particular type combinations, and then it doesn't fully eliminated the panic -possibility code branch. - -*/ - -/// Immediately panics. -#[cold] -#[inline(never)] -fn something_went_wrong(_src: &str, _err: PodCastError) -> ! { - // Note(Lokathor): Keeping the panic here makes the panic _formatting_ go - // here too, which helps assembly readability and also helps keep down - // the inline pressure. - #[cfg(not(target_arch = "spirv"))] - panic!("{src}>{err:?}", src = _src, err = _err); - // Note: On the spirv targets from [rust-gpu](https://github.com/EmbarkStudios/rust-gpu) - // panic formatting cannot be used. We we just give a generic error message - // The chance that the panicking version of these functions will ever get - // called on spir-v targets with invalid inputs is small, but giving a - // simple error message is better than no error message at all. - #[cfg(target_arch = "spirv")] - panic!("Called a panicing helper from bytemuck which paniced"); -} - -/// Re-interprets `&T` as `&[u8]`. -/// -/// Any ZST becomes an empty slice, and in that case the pointer value of that -/// empty slice might not match the pointer value of the input reference. -#[inline] -pub fn bytes_of(t: &T) -> &[u8] { - if size_of::() == 0 { - &[] - } else { - match try_cast_slice::(core::slice::from_ref(t)) { - Ok(s) => s, - Err(_) => unreachable!(), - } - } -} - -/// Re-interprets `&mut T` as `&mut [u8]`. -/// -/// Any ZST becomes an empty slice, and in that case the pointer value of that -/// empty slice might not match the pointer value of the input reference. -#[inline] -pub fn bytes_of_mut(t: &mut T) -> &mut [u8] { - if size_of::() == 0 { - &mut [] - } else { - match try_cast_slice_mut::(core::slice::from_mut(t)) { - Ok(s) => s, - Err(_) => unreachable!(), - } - } -} - -/// Re-interprets `&[u8]` as `&T`. -/// -/// ## Panics -/// -/// This is [`try_from_bytes`] but will panic on error. -#[inline] -pub fn from_bytes(s: &[u8]) -> &T { - match try_from_bytes(s) { - Ok(t) => t, - Err(e) => something_went_wrong("from_bytes", e), - } -} - -/// Re-interprets `&mut [u8]` as `&mut T`. -/// -/// ## Panics -/// -/// This is [`try_from_bytes_mut`] but will panic on error. -#[inline] -pub fn from_bytes_mut(s: &mut [u8]) -> &mut T { - match try_from_bytes_mut(s) { - Ok(t) => t, - Err(e) => something_went_wrong("from_bytes_mut", e), - } -} - -/// Re-interprets `&[u8]` as `&T`. 
-/// -/// ## Failure -/// -/// * If the slice isn't aligned for the new type -/// * If the slice's length isn’t exactly the size of the new type -#[inline] -pub fn try_from_bytes(s: &[u8]) -> Result<&T, PodCastError> { - if s.len() != size_of::() { - Err(PodCastError::SizeMismatch) - } else if (s.as_ptr() as usize) % align_of::() != 0 { - Err(PodCastError::TargetAlignmentGreaterAndInputNotAligned) - } else { - Ok(unsafe { &*(s.as_ptr() as *const T) }) - } -} - -/// Re-interprets `&mut [u8]` as `&mut T`. -/// -/// ## Failure -/// -/// * If the slice isn't aligned for the new type -/// * If the slice's length isn’t exactly the size of the new type -#[inline] -pub fn try_from_bytes_mut( - s: &mut [u8], -) -> Result<&mut T, PodCastError> { - if s.len() != size_of::() { - Err(PodCastError::SizeMismatch) - } else if (s.as_ptr() as usize) % align_of::() != 0 { - Err(PodCastError::TargetAlignmentGreaterAndInputNotAligned) - } else { - Ok(unsafe { &mut *(s.as_mut_ptr() as *mut T) }) - } -} - -/// The things that can go wrong when casting between [`Pod`] data forms. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum PodCastError { - /// You tried to cast a slice to an element type with a higher alignment - /// requirement but the slice wasn't aligned. - TargetAlignmentGreaterAndInputNotAligned, - /// If the element size changes then the output slice changes length - /// accordingly. If the output slice wouldn't be a whole number of elements - /// then the conversion fails. - OutputSliceWouldHaveSlop, - /// When casting a slice you can't convert between ZST elements and non-ZST - /// elements. When casting an individual `T`, `&T`, or `&mut T` value the - /// source size and destination size must be an exact match. - SizeMismatch, - /// For this type of cast the alignments must be exactly the same and they - /// were not so now you're sad. - /// - /// This error is generated **only** by operations that cast allocated types - /// (such as `Box` and `Vec`), because in that case the alignment must stay - /// exact. - AlignmentMismatch, -} -#[cfg(not(target_arch = "spirv"))] -impl core::fmt::Display for PodCastError { - fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { - write!(f, "{:?}", self) - } -} -#[cfg(feature = "extern_crate_std")] -impl std::error::Error for PodCastError {} - -/// Cast `T` into `U` -/// -/// ## Panics -/// -/// * This is like [`try_cast`](try_cast), but will panic on a size mismatch. -#[inline] -pub fn cast(a: A) -> B { - if size_of::() == size_of::() { - unsafe { transmute!(a) } - } else { - something_went_wrong("cast", PodCastError::SizeMismatch) - } -} - -/// Cast `&mut T` into `&mut U`. -/// -/// ## Panics -/// -/// This is [`try_cast_mut`] but will panic on error. -#[inline] -pub fn cast_mut(a: &mut A) -> &mut B { - if size_of::() == size_of::() && align_of::() >= align_of::() { - // Plz mr compiler, just notice that we can't ever hit Err in this case. - match try_cast_mut(a) { - Ok(b) => b, - Err(_) => unreachable!(), - } - } else { - match try_cast_mut(a) { - Ok(b) => b, - Err(e) => something_went_wrong("cast_mut", e), - } - } -} - -/// Cast `&T` into `&U`. -/// -/// ## Panics -/// -/// This is [`try_cast_ref`] but will panic on error. -#[inline] -pub fn cast_ref(a: &A) -> &B { - if size_of::() == size_of::() && align_of::() >= align_of::() { - // Plz mr compiler, just notice that we can't ever hit Err in this case. 
- match try_cast_ref(a) { - Ok(b) => b, - Err(_) => unreachable!(), - } - } else { - match try_cast_ref(a) { - Ok(b) => b, - Err(e) => something_went_wrong("cast_ref", e), - } - } -} - -/// Cast `&[A]` into `&[B]`. -/// -/// ## Panics -/// -/// This is [`try_cast_slice`] but will panic on error. -#[inline] -pub fn cast_slice(a: &[A]) -> &[B] { - match try_cast_slice(a) { - Ok(b) => b, - Err(e) => something_went_wrong("cast_slice", e), - } -} - -/// Cast `&mut [T]` into `&mut [U]`. -/// -/// ## Panics -/// -/// This is [`try_cast_slice_mut`] but will panic on error. -#[inline] -pub fn cast_slice_mut(a: &mut [A]) -> &mut [B] { - match try_cast_slice_mut(a) { - Ok(b) => b, - Err(e) => something_went_wrong("cast_slice_mut", e), - } -} - -/// As `align_to`, but safe because of the [`Pod`] bound. -#[inline] -pub fn pod_align_to(vals: &[T]) -> (&[T], &[U], &[T]) { - unsafe { vals.align_to::() } -} - -/// As `align_to_mut`, but safe because of the [`Pod`] bound. -#[inline] -pub fn pod_align_to_mut( - vals: &mut [T], -) -> (&mut [T], &mut [U], &mut [T]) { - unsafe { vals.align_to_mut::() } -} - -/// Try to cast `T` into `U`. -/// -/// Note that for this particular type of cast, alignment isn't a factor. The -/// input value is semantically copied into the function and then returned to a -/// new memory location which will have whatever the required alignment of the -/// output type is. -/// -/// ## Failure -/// -/// * If the types don't have the same size this fails. -#[inline] -pub fn try_cast(a: A) -> Result { - if size_of::() == size_of::() { - Ok(unsafe { transmute!(a) }) - } else { - Err(PodCastError::SizeMismatch) - } -} - -/// Try to convert a `&T` into `&U`. -/// -/// ## Failure -/// -/// * If the reference isn't aligned in the new type -/// * If the source type and target type aren't the same size. -#[inline] -pub fn try_cast_ref(a: &A) -> Result<&B, PodCastError> { - // Note(Lokathor): everything with `align_of` and `size_of` will optimize away - // after monomorphization. - if align_of::() > align_of::() - && (a as *const A as usize) % align_of::() != 0 - { - Err(PodCastError::TargetAlignmentGreaterAndInputNotAligned) - } else if size_of::() == size_of::() { - Ok(unsafe { &*(a as *const A as *const B) }) - } else { - Err(PodCastError::SizeMismatch) - } -} - -/// Try to convert a `&mut T` into `&mut U`. -/// -/// As [`try_cast_ref`], but `mut`. -#[inline] -pub fn try_cast_mut(a: &mut A) -> Result<&mut B, PodCastError> { - // Note(Lokathor): everything with `align_of` and `size_of` will optimize away - // after monomorphization. - if align_of::() > align_of::() - && (a as *mut A as usize) % align_of::() != 0 - { - Err(PodCastError::TargetAlignmentGreaterAndInputNotAligned) - } else if size_of::() == size_of::() { - Ok(unsafe { &mut *(a as *mut A as *mut B) }) - } else { - Err(PodCastError::SizeMismatch) - } -} - -/// Try to convert `&[A]` into `&[B]` (possibly with a change in length). -/// -/// * `input.as_ptr() as usize == output.as_ptr() as usize` -/// * `input.len() * size_of::() == output.len() * size_of::()` -/// -/// ## Failure -/// -/// * If the target type has a greater alignment requirement and the input slice -/// isn't aligned. -/// * If the target element type is a different size from the current element -/// type, and the output slice wouldn't be a whole number of elements when -/// accounting for the size change (eg: 3 `u16` values is 1.5 `u32` values, so -/// that's a failure). 
-/// * Similarly, you can't convert between a [ZST](https://doc.rust-lang.org/nomicon/exotic-sizes.html#zero-sized-types-zsts) -/// and a non-ZST. -#[inline] -pub fn try_cast_slice(a: &[A]) -> Result<&[B], PodCastError> { - // Note(Lokathor): everything with `align_of` and `size_of` will optimize away - // after monomorphization. - if align_of::() > align_of::() - && (a.as_ptr() as usize) % align_of::() != 0 - { - Err(PodCastError::TargetAlignmentGreaterAndInputNotAligned) - } else if size_of::() == size_of::() { - Ok(unsafe { core::slice::from_raw_parts(a.as_ptr() as *const B, a.len()) }) - } else if size_of::() == 0 || size_of::() == 0 { - Err(PodCastError::SizeMismatch) - } else if core::mem::size_of_val(a) % size_of::() == 0 { - let new_len = core::mem::size_of_val(a) / size_of::(); - Ok(unsafe { core::slice::from_raw_parts(a.as_ptr() as *const B, new_len) }) - } else { - Err(PodCastError::OutputSliceWouldHaveSlop) - } -} - -/// Try to convert `&mut [A]` into `&mut [B]` (possibly with a change in -/// length). -/// -/// As [`try_cast_slice`], but `&mut`. -#[inline] -pub fn try_cast_slice_mut( - a: &mut [A], -) -> Result<&mut [B], PodCastError> { - // Note(Lokathor): everything with `align_of` and `size_of` will optimize away - // after monomorphization. - if align_of::() > align_of::() - && (a.as_mut_ptr() as usize) % align_of::() != 0 - { - Err(PodCastError::TargetAlignmentGreaterAndInputNotAligned) - } else if size_of::() == size_of::() { - Ok(unsafe { - core::slice::from_raw_parts_mut(a.as_mut_ptr() as *mut B, a.len()) - }) - } else if size_of::() == 0 || size_of::() == 0 { - Err(PodCastError::SizeMismatch) - } else if core::mem::size_of_val(a) % size_of::() == 0 { - let new_len = core::mem::size_of_val(a) / size_of::(); - Ok(unsafe { - core::slice::from_raw_parts_mut(a.as_mut_ptr() as *mut B, new_len) - }) - } else { - Err(PodCastError::OutputSliceWouldHaveSlop) - } -} diff --git a/vendor/bytemuck/src/offset_of.rs b/vendor/bytemuck/src/offset_of.rs deleted file mode 100644 index 3de23276..00000000 --- a/vendor/bytemuck/src/offset_of.rs +++ /dev/null @@ -1,135 +0,0 @@ -#![forbid(unsafe_code)] - -/// Find the offset in bytes of the given `$field` of `$Type`. Requires an -/// already initialized `$instance` value to work with. -/// -/// This is similar to the macro from [`memoffset`](https://docs.rs/memoffset), -/// however it uses no `unsafe` code. -/// -/// This macro has a 3-argument and 2-argument version. -/// * In the 3-arg version you specify an instance of the type, the type itself, -/// and the field name. -/// * In the 2-arg version the macro will call the [`default`](Default::default) -/// method to make a temporary instance of the type for you. -/// -/// The output of this macro is the byte offset of the field (as a `usize`). The -/// calculations of the macro are fixed across the entire program, but if the -/// type used is `repr(Rust)` then they're *not* fixed across compilations or -/// compilers. -/// -/// ## Examples -/// -/// ### 3-arg Usage -/// -/// ```rust -/// # use bytemuck::offset_of; -/// // enums can't derive default, and for this example we don't pick one -/// enum MyExampleEnum { -/// A, -/// B, -/// C, -/// } -/// -/// // so now our struct here doesn't have Default -/// #[repr(C)] -/// struct MyNotDefaultType { -/// pub counter: i32, -/// pub some_field: MyExampleEnum, -/// } -/// -/// // but we provide an instance of the type and it's all good. 
-/// let val = MyNotDefaultType { counter: 5, some_field: MyExampleEnum::A }; -/// assert_eq!(offset_of!(val, MyNotDefaultType, some_field), 4); -/// ``` -/// -/// ### 2-arg Usage -/// -/// ```rust -/// # use bytemuck::offset_of; -/// #[derive(Default)] -/// #[repr(C)] -/// struct Vertex { -/// pub loc: [f32; 3], -/// pub color: [f32; 3], -/// } -/// // if the type impls Default the macro can make its own default instance. -/// assert_eq!(offset_of!(Vertex, loc), 0); -/// assert_eq!(offset_of!(Vertex, color), 12); -/// ``` -/// -/// # Usage with `#[repr(packed)]` structs -/// -/// Attempting to compute the offset of a `#[repr(packed)]` struct with -/// `bytemuck::offset_of!` requires an `unsafe` block. We hope to relax this in -/// the future, but currently it is required to work around a soundness hole in -/// Rust (See [rust-lang/rust#27060]). -/// -/// [rust-lang/rust#27060]: https://github.com/rust-lang/rust/issues/27060 -/// -///

-/// **Warning:** This is *only* true for versions of bytemuck > 1.4.0.
-/// Previous versions of `bytemuck::offset_of!` will only emit a warning when
-/// used on the field of a packed struct in safe code, which can lead to
-/// unsoundness.
-/// -/// For example, the following will fail to compile: -/// -/// ```compile_fail -/// #[repr(C, packed)] -/// #[derive(Default)] -/// struct Example { -/// field: u32, -/// } -/// // Doesn't compile: -/// let _offset = bytemuck::offset_of!(Example, field); -/// ``` -/// -/// While the error message this generates will mention the -/// `safe_packed_borrows` lint, the macro will still fail to compile even if -/// that lint is `#[allow]`ed: -/// -/// ```compile_fail -/// # #[repr(C, packed)] #[derive(Default)] struct Example { field: u32 } -/// // Still doesn't compile: -/// #[allow(safe_packed_borrows)] -/// { -/// let _offset = bytemuck::offset_of!(Example, field); -/// } -/// ``` -/// -/// This *can* be worked around by using `unsafe`, but it is only sound to do so -/// if you can guarantee that taking a reference to the field is sound. -/// -/// In practice, this means it only works for fields of align(1) types, or if -/// you know the field's offset in advance (defeating the point of `offset_of`) -/// and can prove that the struct's alignment and the field's offset are enough -/// to prove the field's alignment. -/// -/// Once the `raw_ref` macros are available, a future version of this crate will -/// use them to lift the limitations of packed structs. For the duration of the -/// `1.x` version of this crate that will be behind an on-by-default cargo -/// feature (to maintain minimum rust version support). -#[macro_export] -macro_rules! offset_of { - ($instance:expr, $Type:path, $field:tt) => {{ - #[forbid(safe_packed_borrows)] - { - // This helps us guard against field access going through a Deref impl. - #[allow(clippy::unneeded_field_pattern)] - let $Type { $field: _, .. }; - let reference: &$Type = &$instance; - let address = reference as *const _ as usize; - let field_pointer = &reference.$field as *const _ as usize; - // These asserts/unwraps are compiled away at release, and defend against - // the case where somehow a deref impl is still invoked. - let result = field_pointer.checked_sub(address).unwrap(); - assert!(result <= $crate::__core::mem::size_of::<$Type>()); - result - } - }}; - ($Type:path, $field:tt) => {{ - $crate::offset_of!(<$Type as Default>::default(), $Type, $field) - }}; -} diff --git a/vendor/bytemuck/src/pod.rs b/vendor/bytemuck/src/pod.rs deleted file mode 100644 index 8e044366..00000000 --- a/vendor/bytemuck/src/pod.rs +++ /dev/null @@ -1,119 +0,0 @@ -use super::*; - -/// Marker trait for "plain old data". -/// -/// The point of this trait is that once something is marked "plain old data" -/// you can really go to town with the bit fiddling and bit casting. Therefore, -/// it's a relatively strong claim to make about a type. Do not add this to your -/// type casually. -/// -/// **Reminder:** The results of casting around bytes between data types are -/// _endian dependant_. Little-endian machines are the most common, but -/// big-endian machines do exist (and big-endian is also used for "network -/// order" bytes). -/// -/// ## Safety -/// -/// * The type must be inhabited (eg: no -/// [Infallible](core::convert::Infallible)). -/// * The type must allow any bit pattern (eg: no `bool` or `char`, which have -/// illegal bit patterns). -/// * The type must not contain any padding bytes, either in the middle or on -/// the end (eg: no `#[repr(C)] struct Foo(u8, u16)`, which has padding in the -/// middle, and also no `#[repr(C)] struct Foo(u16, u8)`, which has padding on -/// the end). -/// * The type needs to have all fields also be `Pod`. 
-/// * The type needs to be `repr(C)` or `repr(transparent)`. In the case of -/// `repr(C)`, the `packed` and `align` repr modifiers can be used as long as -/// all other rules end up being followed. -pub unsafe trait Pod: Zeroable + Copy + 'static {} - -unsafe impl Pod for () {} -unsafe impl Pod for u8 {} -unsafe impl Pod for i8 {} -unsafe impl Pod for u16 {} -unsafe impl Pod for i16 {} -unsafe impl Pod for u32 {} -unsafe impl Pod for i32 {} -unsafe impl Pod for u64 {} -unsafe impl Pod for i64 {} -unsafe impl Pod for usize {} -unsafe impl Pod for isize {} -unsafe impl Pod for u128 {} -unsafe impl Pod for i128 {} -unsafe impl Pod for f32 {} -unsafe impl Pod for f64 {} -unsafe impl Pod for Wrapping {} - -unsafe impl Pod for Option {} -unsafe impl Pod for Option {} -unsafe impl Pod for Option {} -unsafe impl Pod for Option {} -unsafe impl Pod for Option {} -unsafe impl Pod for Option {} -unsafe impl Pod for Option {} -unsafe impl Pod for Option {} -unsafe impl Pod for Option {} -unsafe impl Pod for Option {} -unsafe impl Pod for Option {} -unsafe impl Pod for Option {} - -#[cfg(feature = "unsound_ptr_pod_impl")] -unsafe impl Pod for *mut T {} -#[cfg(feature = "unsound_ptr_pod_impl")] -unsafe impl Pod for *const T {} -#[cfg(feature = "unsound_ptr_pod_impl")] -unsafe impl Pod for Option> {} - -unsafe impl Pod for PhantomData {} -unsafe impl Pod for PhantomPinned {} -unsafe impl Pod for ManuallyDrop {} - -// Note(Lokathor): MaybeUninit can NEVER be Pod. - -#[cfg(feature = "min_const_generics")] -unsafe impl Pod for [T; N] where T: Pod {} - -#[cfg(not(feature = "min_const_generics"))] -impl_unsafe_marker_for_array!( - Pod, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, - 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 48, 64, 96, 128, 256, - 512, 1024, 2048, 4096 -); - -#[cfg(all(target_arch = "wasm32", feature = "wasm_simd"))] -unsafe impl Pod for wasm32::v128 {} - -#[cfg(target_arch = "x86")] -unsafe impl Pod for x86::__m128i {} -#[cfg(target_arch = "x86")] -unsafe impl Pod for x86::__m128 {} -#[cfg(target_arch = "x86")] -unsafe impl Pod for x86::__m128d {} -#[cfg(target_arch = "x86")] -unsafe impl Pod for x86::__m256i {} -#[cfg(target_arch = "x86")] -unsafe impl Pod for x86::__m256 {} -#[cfg(target_arch = "x86")] -unsafe impl Pod for x86::__m256d {} - -#[cfg(target_arch = "x86_64")] -unsafe impl Pod for x86_64::__m128i {} -#[cfg(target_arch = "x86_64")] -unsafe impl Pod for x86_64::__m128 {} -#[cfg(target_arch = "x86_64")] -unsafe impl Pod for x86_64::__m128d {} -#[cfg(target_arch = "x86_64")] -unsafe impl Pod for x86_64::__m256i {} -#[cfg(target_arch = "x86_64")] -unsafe impl Pod for x86_64::__m256 {} -#[cfg(target_arch = "x86_64")] -unsafe impl Pod for x86_64::__m256d {} - -#[cfg(feature = "nightly_portable_simd")] -unsafe impl Pod for core::simd::Simd -where - T: core::simd::SimdElement + Pod, - core::simd::LaneCount: core::simd::SupportedLaneCount, -{ -} diff --git a/vendor/bytemuck/src/transparent.rs b/vendor/bytemuck/src/transparent.rs deleted file mode 100644 index 8405ad50..00000000 --- a/vendor/bytemuck/src/transparent.rs +++ /dev/null @@ -1,249 +0,0 @@ -use super::*; - -/// A trait which indicates that a type is a `#[repr(transparent)]` wrapper -/// around the `Inner` value. -/// -/// This allows safely copy transmuting between the `Inner` type and the -/// `TransparentWrapper` type. Functions like `wrap_{}` convert from the inner -/// type to the wrapper type and `peel_{}` functions do the inverse conversion -/// from the wrapper type to the inner type. 
We deliberately do not call the -/// wrapper-removing methods "unwrap" because at this point that word is too -/// strongly tied to the Option/ Result methods. -/// -/// # Safety -/// -/// The safety contract of `TransparentWrapper` is relatively simple: -/// -/// For a given `Wrapper` which implements `TransparentWrapper`: -/// -/// 1. `Wrapper` must be a wrapper around `Inner` with an identical data -/// representations. This either means that it must be a -/// `#[repr(transparent)]` struct which contains a either a field of type -/// `Inner` (or a field of some other transparent wrapper for `Inner`) as -/// the only non-ZST field. -/// -/// 2. Any fields *other* than the `Inner` field must be trivially constructable -/// ZSTs, for example `PhantomData`, `PhantomPinned`, etc. -/// -/// 3. The `Wrapper` may not impose additional alignment requirements over -/// `Inner`. -/// - Note: this is currently guaranteed by `repr(transparent)`, but there -/// have been discussions of lifting it, so it's stated here explicitly. -/// -/// 4. All functions on `TransparentWrapper` **may not** be overridden. -/// -/// ## Caveats -/// -/// If the wrapper imposes additional constraints upon the inner type which are -/// required for safety, it's responsible for ensuring those still hold -- this -/// generally requires preventing access to instances of the inner type, as -/// implementing `TransparentWrapper for T` means anybody can call -/// `T::cast_ref(any_instance_of_u)`. -/// -/// For example, it would be invalid to implement TransparentWrapper for `str` -/// to implement `TransparentWrapper` around `[u8]` because of this. -/// -/// # Examples -/// -/// ## Basic -/// -/// ``` -/// use bytemuck::TransparentWrapper; -/// # #[derive(Default)] -/// # struct SomeStruct(u32); -/// -/// #[repr(transparent)] -/// struct MyWrapper(SomeStruct); -/// -/// unsafe impl TransparentWrapper for MyWrapper {} -/// -/// // interpret a reference to &SomeStruct as a &MyWrapper -/// let thing = SomeStruct::default(); -/// let inner_ref: &MyWrapper = MyWrapper::wrap_ref(&thing); -/// -/// // Works with &mut too. -/// let mut mut_thing = SomeStruct::default(); -/// let inner_mut: &mut MyWrapper = MyWrapper::wrap_mut(&mut mut_thing); -/// -/// # let _ = (inner_ref, inner_mut); // silence warnings -/// ``` -/// -/// ## Use with dynamically sized types -/// -/// ``` -/// use bytemuck::TransparentWrapper; -/// -/// #[repr(transparent)] -/// struct Slice([T]); -/// -/// unsafe impl TransparentWrapper<[T]> for Slice {} -/// -/// let s = Slice::wrap_ref(&[1u32, 2, 3]); -/// assert_eq!(&s.0, &[1, 2, 3]); -/// -/// let mut buf = [1, 2, 3u8]; -/// let sm = Slice::wrap_mut(&mut buf); -/// ``` -pub unsafe trait TransparentWrapper { - /// Convert the inner type into the wrapper type. - #[inline] - fn wrap(s: Inner) -> Self - where - Self: Sized, - Inner: Sized, - { - // SAFETY: The unsafe contract requires that `Self` and `Inner` have - // identical representations. - unsafe { transmute!(s) } - } - - /// Convert a reference to the inner type into a reference to the wrapper - /// type. - #[inline] - fn wrap_ref(s: &Inner) -> &Self { - unsafe { - assert!(size_of::<*const Inner>() == size_of::<*const Self>()); - // A pointer cast doesn't work here because rustc can't tell that - // the vtables match (because of the `?Sized` restriction relaxation). - // A `transmute` doesn't work because the sizes are unspecified. - // - // SAFETY: The unsafe contract requires that these two have - // identical representations. 
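-      // Transmuting the raw pointer (rather than the value it points to) is
-      // what makes this work for `?Sized` inner types: both pointers carry
-      // the same metadata, so only the pointee type is reinterpreted.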
- let inner_ptr = s as *const Inner; - let wrapper_ptr: *const Self = transmute!(inner_ptr); - &*wrapper_ptr - } - } - - /// Convert a mutable reference to the inner type into a mutable reference to - /// the wrapper type. - #[inline] - fn wrap_mut(s: &mut Inner) -> &mut Self { - unsafe { - assert!(size_of::<*mut Inner>() == size_of::<*mut Self>()); - // A pointer cast doesn't work here because rustc can't tell that - // the vtables match (because of the `?Sized` restriction relaxation). - // A `transmute` doesn't work because the sizes are unspecified. - // - // SAFETY: The unsafe contract requires that these two have - // identical representations. - let inner_ptr = s as *mut Inner; - let wrapper_ptr: *mut Self = transmute!(inner_ptr); - &mut *wrapper_ptr - } - } - - /// Convert a slice to the inner type into a slice to the wrapper type. - #[inline] - fn wrap_slice(s: &[Inner]) -> &[Self] - where - Self: Sized, - Inner: Sized, - { - unsafe { - assert!(size_of::<*const Inner>() == size_of::<*const Self>()); - assert!(align_of::<*const Inner>() == align_of::<*const Self>()); - // SAFETY: The unsafe contract requires that these two have - // identical representations (size and alignment). - core::slice::from_raw_parts(s.as_ptr() as *const Self, s.len()) - } - } - - /// Convert a mutable slice to the inner type into a mutable slice to the - /// wrapper type. - #[inline] - fn wrap_slice_mut(s: &mut [Inner]) -> &mut [Self] - where - Self: Sized, - Inner: Sized, - { - unsafe { - assert!(size_of::<*mut Inner>() == size_of::<*mut Self>()); - assert!(align_of::<*mut Inner>() == align_of::<*mut Self>()); - // SAFETY: The unsafe contract requires that these two have - // identical representations (size and alignment). - core::slice::from_raw_parts_mut(s.as_mut_ptr() as *mut Self, s.len()) - } - } - - /// Convert the wrapper type into the inner type. - #[inline] - fn peel(s: Self) -> Inner - where - Self: Sized, - Inner: Sized, - { - unsafe { transmute!(s) } - } - - /// Convert a reference to the wrapper type into a reference to the inner - /// type. - #[inline] - fn peel_ref(s: &Self) -> &Inner { - unsafe { - assert!(size_of::<*const Inner>() == size_of::<*const Self>()); - // A pointer cast doesn't work here because rustc can't tell that - // the vtables match (because of the `?Sized` restriction relaxation). - // A `transmute` doesn't work because the sizes are unspecified. - // - // SAFETY: The unsafe contract requires that these two have - // identical representations. - let wrapper_ptr = s as *const Self; - let inner_ptr: *const Inner = transmute!(wrapper_ptr); - &*inner_ptr - } - } - - /// Convert a mutable reference to the wrapper type into a mutable reference - /// to the inner type. - #[inline] - fn peel_mut(s: &mut Self) -> &mut Inner { - unsafe { - assert!(size_of::<*mut Inner>() == size_of::<*mut Self>()); - // A pointer cast doesn't work here because rustc can't tell that - // the vtables match (because of the `?Sized` restriction relaxation). - // A `transmute` doesn't work because the sizes are unspecified. - // - // SAFETY: The unsafe contract requires that these two have - // identical representations. - let wrapper_ptr = s as *mut Self; - let inner_ptr: *mut Inner = transmute!(wrapper_ptr); - &mut *inner_ptr - } - } - - /// Convert a slice to the wrapped type into a slice to the inner type. 
- #[inline] - fn peel_slice(s: &[Self]) -> &[Inner] - where - Self: Sized, - Inner: Sized, - { - unsafe { - assert!(size_of::<*const Inner>() == size_of::<*const Self>()); - assert!(align_of::<*const Inner>() == align_of::<*const Self>()); - // SAFETY: The unsafe contract requires that these two have - // identical representations (size and alignment). - core::slice::from_raw_parts(s.as_ptr() as *const Inner, s.len()) - } - } - - /// Convert a mutable slice to the wrapped type into a mutable slice to the - /// inner type. - #[inline] - fn peel_slice_mut(s: &mut [Self]) -> &mut [Inner] - where - Self: Sized, - Inner: Sized, - { - unsafe { - assert!(size_of::<*mut Inner>() == size_of::<*mut Self>()); - assert!(align_of::<*mut Inner>() == align_of::<*mut Self>()); - // SAFETY: The unsafe contract requires that these two have - // identical representations (size and alignment). - core::slice::from_raw_parts_mut(s.as_mut_ptr() as *mut Inner, s.len()) - } - } -} - -unsafe impl TransparentWrapper for core::num::Wrapping {} diff --git a/vendor/bytemuck/src/zeroable.rs b/vendor/bytemuck/src/zeroable.rs deleted file mode 100644 index aea9028e..00000000 --- a/vendor/bytemuck/src/zeroable.rs +++ /dev/null @@ -1,158 +0,0 @@ -use super::*; - -/// Trait for types that can be safely created with -/// [`zeroed`](core::mem::zeroed). -/// -/// An all-zeroes value may or may not be the same value as the -/// [Default](core::default::Default) value of the type. -/// -/// ## Safety -/// -/// * Your type must be inhabited (eg: no -/// [Infallible](core::convert::Infallible)). -/// * Your type must be allowed to be an "all zeroes" bit pattern (eg: no -/// [`NonNull`](core::ptr::NonNull)). -pub unsafe trait Zeroable: Sized { - /// Calls [`zeroed`](core::mem::zeroed). - /// - /// This is a trait method so that you can write `MyType::zeroed()` in your - /// code. It is a contract of this trait that if you implement it on your type - /// you **must not** override this method. 
- #[inline] - fn zeroed() -> Self { - unsafe { core::mem::zeroed() } - } -} -unsafe impl Zeroable for () {} -unsafe impl Zeroable for bool {} -unsafe impl Zeroable for char {} -unsafe impl Zeroable for u8 {} -unsafe impl Zeroable for i8 {} -unsafe impl Zeroable for u16 {} -unsafe impl Zeroable for i16 {} -unsafe impl Zeroable for u32 {} -unsafe impl Zeroable for i32 {} -unsafe impl Zeroable for u64 {} -unsafe impl Zeroable for i64 {} -unsafe impl Zeroable for usize {} -unsafe impl Zeroable for isize {} -unsafe impl Zeroable for u128 {} -unsafe impl Zeroable for i128 {} -unsafe impl Zeroable for f32 {} -unsafe impl Zeroable for f64 {} -unsafe impl Zeroable for Wrapping {} - -unsafe impl Zeroable for Option {} -unsafe impl Zeroable for Option {} -unsafe impl Zeroable for Option {} -unsafe impl Zeroable for Option {} -unsafe impl Zeroable for Option {} -unsafe impl Zeroable for Option {} -unsafe impl Zeroable for Option {} -unsafe impl Zeroable for Option {} -unsafe impl Zeroable for Option {} -unsafe impl Zeroable for Option {} -unsafe impl Zeroable for Option {} -unsafe impl Zeroable for Option {} - -unsafe impl Zeroable for *mut T {} -unsafe impl Zeroable for *const T {} -unsafe impl Zeroable for Option> {} -unsafe impl Zeroable for PhantomData {} -unsafe impl Zeroable for PhantomPinned {} -unsafe impl Zeroable for ManuallyDrop {} - -#[cfg(feature = "zeroable_maybe_uninit")] -unsafe impl Zeroable for core::mem::MaybeUninit {} - -unsafe impl Zeroable for (A,) {} -unsafe impl Zeroable for (A, B) {} -unsafe impl Zeroable for (A, B, C) {} -unsafe impl Zeroable - for (A, B, C, D) -{ -} -unsafe impl - Zeroable for (A, B, C, D, E) -{ -} -unsafe impl< - A: Zeroable, - B: Zeroable, - C: Zeroable, - D: Zeroable, - E: Zeroable, - F: Zeroable, - > Zeroable for (A, B, C, D, E, F) -{ -} -unsafe impl< - A: Zeroable, - B: Zeroable, - C: Zeroable, - D: Zeroable, - E: Zeroable, - F: Zeroable, - G: Zeroable, - > Zeroable for (A, B, C, D, E, F, G) -{ -} -unsafe impl< - A: Zeroable, - B: Zeroable, - C: Zeroable, - D: Zeroable, - E: Zeroable, - F: Zeroable, - G: Zeroable, - H: Zeroable, - > Zeroable for (A, B, C, D, E, F, G, H) -{ -} - -#[cfg(feature = "min_const_generics")] -unsafe impl Zeroable for [T; N] where T: Zeroable {} - -#[cfg(not(feature = "min_const_generics"))] -impl_unsafe_marker_for_array!( - Zeroable, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, - 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 48, 64, 96, 128, 256, - 512, 1024, 2048, 4096 -); - -#[cfg(all(target_arch = "wasm32", feature = "wasm_simd"))] -unsafe impl Zeroable for wasm32::v128 {} - -#[cfg(target_arch = "x86")] -unsafe impl Zeroable for x86::__m128i {} -#[cfg(target_arch = "x86")] -unsafe impl Zeroable for x86::__m128 {} -#[cfg(target_arch = "x86")] -unsafe impl Zeroable for x86::__m128d {} -#[cfg(target_arch = "x86")] -unsafe impl Zeroable for x86::__m256i {} -#[cfg(target_arch = "x86")] -unsafe impl Zeroable for x86::__m256 {} -#[cfg(target_arch = "x86")] -unsafe impl Zeroable for x86::__m256d {} - -#[cfg(target_arch = "x86_64")] -unsafe impl Zeroable for x86_64::__m128i {} -#[cfg(target_arch = "x86_64")] -unsafe impl Zeroable for x86_64::__m128 {} -#[cfg(target_arch = "x86_64")] -unsafe impl Zeroable for x86_64::__m128d {} -#[cfg(target_arch = "x86_64")] -unsafe impl Zeroable for x86_64::__m256i {} -#[cfg(target_arch = "x86_64")] -unsafe impl Zeroable for x86_64::__m256 {} -#[cfg(target_arch = "x86_64")] -unsafe impl Zeroable for x86_64::__m256d {} - -#[cfg(feature = "nightly_portable_simd")] 
-unsafe impl Zeroable for core::simd::Simd -where - T: core::simd::SimdElement + Zeroable, - core::simd::LaneCount: core::simd::SupportedLaneCount, -{ -} diff --git a/vendor/bytemuck/tests/array_tests.rs b/vendor/bytemuck/tests/array_tests.rs deleted file mode 100644 index 552de08f..00000000 --- a/vendor/bytemuck/tests/array_tests.rs +++ /dev/null @@ -1,12 +0,0 @@ -#[test] -pub fn test_cast_array() { - let x = [0u32, 1u32, 2u32]; - let _: [u16; 6] = bytemuck::cast(x); -} - -#[cfg(feature = "min_const_generics")] -#[test] -pub fn test_cast_long_array() { - let x = [0u32; 65]; - let _: [u16; 130] = bytemuck::cast(x); -} diff --git a/vendor/bytemuck/tests/cast_slice_tests.rs b/vendor/bytemuck/tests/cast_slice_tests.rs deleted file mode 100644 index fa3b1446..00000000 --- a/vendor/bytemuck/tests/cast_slice_tests.rs +++ /dev/null @@ -1,194 +0,0 @@ -use core::mem::size_of; - -use bytemuck::*; - -#[test] -fn test_try_cast_slice() { - // some align4 data - let u32_slice: &[u32] = &[4, 5, 6]; - // the same data as align1 - let the_bytes: &[u8] = try_cast_slice(u32_slice).unwrap(); - - assert_eq!( - u32_slice.as_ptr() as *const u32 as usize, - the_bytes.as_ptr() as *const u8 as usize - ); - assert_eq!( - u32_slice.len() * size_of::(), - the_bytes.len() * size_of::() - ); - - // by taking one byte off the front, we're definitely mis-aligned for u32. - let mis_aligned_bytes = &the_bytes[1..]; - assert_eq!( - try_cast_slice::(mis_aligned_bytes), - Err(PodCastError::TargetAlignmentGreaterAndInputNotAligned) - ); - - // by taking one byte off the end, we're aligned but would have slop bytes for - // u32 - let the_bytes_len_minus1 = the_bytes.len() - 1; - let slop_bytes = &the_bytes[..the_bytes_len_minus1]; - assert_eq!( - try_cast_slice::(slop_bytes), - Err(PodCastError::OutputSliceWouldHaveSlop) - ); - - // if we don't mess with it we can up-alignment cast - try_cast_slice::(the_bytes).unwrap(); -} - -#[test] -fn test_try_cast_slice_mut() { - // some align4 data - let u32_slice: &mut [u32] = &mut [4, 5, 6]; - let u32_len = u32_slice.len(); - let u32_ptr = u32_slice.as_ptr(); - - // the same data as align1 - let the_bytes: &mut [u8] = try_cast_slice_mut(u32_slice).unwrap(); - let the_bytes_len = the_bytes.len(); - let the_bytes_ptr = the_bytes.as_ptr(); - - assert_eq!( - u32_ptr as *const u32 as usize, - the_bytes_ptr as *const u8 as usize - ); - assert_eq!(u32_len * size_of::(), the_bytes_len * size_of::()); - - // by taking one byte off the front, we're definitely mis-aligned for u32. 
- let mis_aligned_bytes = &mut the_bytes[1..]; - assert_eq!( - try_cast_slice_mut::(mis_aligned_bytes), - Err(PodCastError::TargetAlignmentGreaterAndInputNotAligned) - ); - - // by taking one byte off the end, we're aligned but would have slop bytes for - // u32 - let the_bytes_len_minus1 = the_bytes.len() - 1; - let slop_bytes = &mut the_bytes[..the_bytes_len_minus1]; - assert_eq!( - try_cast_slice_mut::(slop_bytes), - Err(PodCastError::OutputSliceWouldHaveSlop) - ); - - // if we don't mess with it we can up-alignment cast - try_cast_slice_mut::(the_bytes).unwrap(); -} - -#[test] -fn test_types() { - let _: i32 = cast(1.0_f32); - let _: &mut i32 = cast_mut(&mut 1.0_f32); - let _: &i32 = cast_ref(&1.0_f32); - let _: &[i32] = cast_slice(&[1.0_f32]); - let _: &mut [i32] = cast_slice_mut(&mut [1.0_f32]); - // - let _: Result = try_cast(1.0_f32); - let _: Result<&mut i32, PodCastError> = try_cast_mut(&mut 1.0_f32); - let _: Result<&i32, PodCastError> = try_cast_ref(&1.0_f32); - let _: Result<&[i32], PodCastError> = try_cast_slice(&[1.0_f32]); - let _: Result<&mut [i32], PodCastError> = try_cast_slice_mut(&mut [1.0_f32]); -} - -#[test] -fn test_bytes_of() { - assert_eq!(bytes_of(&0xaabbccdd_u32), &0xaabbccdd_u32.to_ne_bytes()); - assert_eq!( - bytes_of_mut(&mut 0xaabbccdd_u32), - &mut 0xaabbccdd_u32.to_ne_bytes() - ); - let mut a = 0xaabbccdd_u32; - let a_addr = &a as *const _ as usize; - // ensure addresses match. - assert_eq!(bytes_of(&a).as_ptr() as usize, a_addr); - assert_eq!(bytes_of_mut(&mut a).as_ptr() as usize, a_addr); -} - -#[test] -fn test_try_from_bytes() { - let u32s = [0xaabbccdd, 0x11223344_u32]; - let bytes = bytemuck::cast_slice::(&u32s); - assert_eq!(try_from_bytes::(&bytes[..4]), Ok(&u32s[0])); - assert_eq!( - try_from_bytes::(&bytes[..5]), - Err(PodCastError::SizeMismatch) - ); - assert_eq!( - try_from_bytes::(&bytes[..3]), - Err(PodCastError::SizeMismatch) - ); - assert_eq!( - try_from_bytes::(&bytes[1..5]), - Err(PodCastError::TargetAlignmentGreaterAndInputNotAligned) - ); -} - -#[test] -fn test_try_from_bytes_mut() { - let mut abcd = 0xaabbccdd; - let mut u32s = [abcd, 0x11223344_u32]; - let bytes = bytemuck::cast_slice_mut::(&mut u32s); - assert_eq!(try_from_bytes_mut::(&mut bytes[..4]), Ok(&mut abcd)); - assert_eq!(try_from_bytes_mut::(&mut bytes[..4]), Ok(&mut abcd)); - assert_eq!( - try_from_bytes_mut::(&mut bytes[..5]), - Err(PodCastError::SizeMismatch) - ); - assert_eq!( - try_from_bytes_mut::(&mut bytes[..3]), - Err(PodCastError::SizeMismatch) - ); - assert_eq!( - try_from_bytes::(&mut bytes[1..5]), - Err(PodCastError::TargetAlignmentGreaterAndInputNotAligned) - ); -} - -#[test] -fn test_from_bytes() { - let abcd = 0xaabbccdd_u32; - let aligned_bytes = bytemuck::bytes_of(&abcd); - assert_eq!(from_bytes::(aligned_bytes), &abcd); - assert!(core::ptr::eq(from_bytes(aligned_bytes), &abcd)); -} - -#[test] -fn test_from_bytes_mut() { - let mut a = 0xaabbccdd_u32; - let a_addr = &a as *const _ as usize; - let aligned_bytes = bytemuck::bytes_of_mut(&mut a); - assert_eq!(*from_bytes_mut::(aligned_bytes), 0xaabbccdd_u32); - assert_eq!( - from_bytes_mut::(aligned_bytes) as *const u32 as usize, - a_addr - ); -} - -// like #[should_panic], but can be a part of another test, instead of requiring -// it to be it's own test. -macro_rules! 
should_panic { - ($ex:expr) => { - assert!( - std::panic::catch_unwind(|| { - let _ = $ex; - }) - .is_err(), - concat!("should have panicked: `", stringify!($ex), "`") - ); - }; -} - -#[test] -fn test_panics() { - should_panic!(cast_slice::(&[1u8, 2u8])); - should_panic!(cast_slice_mut::(&mut [1u8, 2u8])); - should_panic!(from_bytes::(&[1u8, 2])); - should_panic!(from_bytes::(&[1u8, 2, 3, 4, 5])); - should_panic!(from_bytes_mut::(&mut [1u8, 2])); - should_panic!(from_bytes_mut::(&mut [1u8, 2, 3, 4, 5])); - // use cast_slice on some u32s to get some align>=4 bytes, so we can know - // we'll give from_bytes unaligned ones. - let aligned_bytes = bytemuck::cast_slice::(&[0, 0]); - should_panic!(from_bytes::(&aligned_bytes[1..5])); -} diff --git a/vendor/bytemuck/tests/derive.rs b/vendor/bytemuck/tests/derive.rs deleted file mode 100644 index ac30216b..00000000 --- a/vendor/bytemuck/tests/derive.rs +++ /dev/null @@ -1,25 +0,0 @@ -#![cfg(feature = "derive")] -#![allow(dead_code)] - -use bytemuck::{Pod, TransparentWrapper, Zeroable}; - -#[derive(Copy, Clone, Pod, Zeroable)] -#[repr(C)] -struct Test { - a: u16, - b: u16, -} - -#[derive(TransparentWrapper)] -#[repr(transparent)] -struct TransparentSingle { - a: u16, -} - -#[derive(TransparentWrapper)] -#[repr(transparent)] -#[transparent(u16)] -struct TransparentWithZeroSized { - a: u16, - b: (), -} diff --git a/vendor/bytemuck/tests/doc_tests.rs b/vendor/bytemuck/tests/doc_tests.rs deleted file mode 100644 index 091226cf..00000000 --- a/vendor/bytemuck/tests/doc_tests.rs +++ /dev/null @@ -1,121 +0,0 @@ -//! Cargo miri doesn't run doctests yet, so we duplicate these here. It's -//! probably not that important to sweat keeping these perfectly up to date, but -//! we should try to catch the cases where the primary tests are doctests. -use bytemuck::*; - -// Miri doesn't run on doctests, so... copypaste to the rescue. -#[test] -fn test_transparent_slice() { - #[repr(transparent)] - struct Slice([T]); - - unsafe impl TransparentWrapper<[T]> for Slice {} - - let s = Slice::wrap_ref(&[1u32, 2, 3]); - assert_eq!(&s.0, &[1, 2, 3]); - - let mut buf = [1, 2, 3u8]; - let _sm = Slice::wrap_mut(&mut buf); -} - -#[test] -fn test_transparent_basic() { - #[derive(Default)] - struct SomeStruct(u32); - - #[repr(transparent)] - struct MyWrapper(SomeStruct); - - unsafe impl TransparentWrapper for MyWrapper {} - - // interpret a reference to &SomeStruct as a &MyWrapper - let thing = SomeStruct::default(); - let wrapped_ref: &MyWrapper = MyWrapper::wrap_ref(&thing); - - // Works with &mut too. 
- let mut mut_thing = SomeStruct::default(); - let wrapped_mut: &mut MyWrapper = MyWrapper::wrap_mut(&mut mut_thing); - let _ = (wrapped_ref, wrapped_mut); -} - -// Work around miri not running doctests -#[test] -fn test_contiguous_doc() { - #[repr(u8)] - #[derive(Debug, Copy, Clone, PartialEq)] - enum Foo { - A = 0, - B = 1, - C = 2, - D = 3, - E = 4, - } - unsafe impl Contiguous for Foo { - type Int = u8; - const MIN_VALUE: u8 = Foo::A as u8; - const MAX_VALUE: u8 = Foo::E as u8; - } - - assert_eq!(Foo::from_integer(3).unwrap(), Foo::D); - assert_eq!(Foo::from_integer(8), None); - assert_eq!(Foo::C.into_integer(), 2); - assert_eq!(Foo::B.into_integer(), Foo::B as u8); -} - -#[test] -fn test_offsetof_vertex() { - #[repr(C)] - struct Vertex { - pos: [f32; 2], - uv: [u16; 2], - color: [u8; 4], - } - unsafe impl Zeroable for Vertex {} - - let pos = offset_of!(Zeroable::zeroed(), Vertex, pos); - let uv = offset_of!(Zeroable::zeroed(), Vertex, uv); - let color = offset_of!(Zeroable::zeroed(), Vertex, color); - - assert_eq!(pos, 0); - assert_eq!(uv, 8); - assert_eq!(color, 12); -} - -#[test] -fn test_offsetof_nonpod() { - #[derive(Default)] - struct Foo { - a: u8, - b: &'static str, - c: i32, - } - - let a_offset = offset_of!(Default::default(), Foo, a); - let b_offset = offset_of!(Default::default(), Foo, b); - let c_offset = offset_of!(Default::default(), Foo, c); - - assert_ne!(a_offset, b_offset); - assert_ne!(b_offset, c_offset); - // We can't check against hardcoded values for a repr(Rust) type, - // but prove to ourself this way. - - let foo = Foo::default(); - // Note: offsets are in bytes. - let as_bytes = &foo as *const _ as *const u8; - - // we're using wrapping_offset here becasue it's not worth - // the unsafe block, but it would be valid to use `add` instead, - // as it cannot overflow. - assert_eq!( - &foo.a as *const _ as usize, - as_bytes.wrapping_add(a_offset) as usize - ); - assert_eq!( - &foo.b as *const _ as usize, - as_bytes.wrapping_add(b_offset) as usize - ); - assert_eq!( - &foo.c as *const _ as usize, - as_bytes.wrapping_add(c_offset) as usize - ); -} diff --git a/vendor/bytemuck/tests/offset_of_tests.rs b/vendor/bytemuck/tests/offset_of_tests.rs deleted file mode 100644 index d4848457..00000000 --- a/vendor/bytemuck/tests/offset_of_tests.rs +++ /dev/null @@ -1,59 +0,0 @@ -use bytemuck::{offset_of, Zeroable}; - -#[test] -fn test_offset_of_vertex() { - #[repr(C)] - struct Vertex { - pos: [f32; 2], - uv: [u16; 2], - color: [u8; 4], - } - unsafe impl Zeroable for Vertex {} - - let pos = offset_of!(Zeroable::zeroed(), Vertex, pos); - let uv = offset_of!(Zeroable::zeroed(), Vertex, uv); - let color = offset_of!(Zeroable::zeroed(), Vertex, color); - - assert_eq!(pos, 0); - assert_eq!(uv, 8); - assert_eq!(color, 12); -} - -#[test] -fn test_offset_of_foo() { - #[derive(Default)] - struct Foo { - a: u8, - b: &'static str, - c: i32, - } - - let a_offset = offset_of!(Default::default(), Foo, a); - let b_offset = offset_of!(Default::default(), Foo, b); - let c_offset = offset_of!(Default::default(), Foo, c); - - assert_ne!(a_offset, b_offset); - assert_ne!(b_offset, c_offset); - // We can't check against hardcoded values for a repr(Rust) type, - // but prove to ourself this way. - - let foo = Foo::default(); - // Note: offsets are in bytes. - let as_bytes = &foo as *const _ as *const u8; - - // we're using wrapping_offset here because it's not worth - // the unsafe block, but it would be valid to use `add` instead, - // as it cannot overflow. 
- assert_eq!( - &foo.a as *const _ as usize, - as_bytes.wrapping_add(a_offset) as usize - ); - assert_eq!( - &foo.b as *const _ as usize, - as_bytes.wrapping_add(b_offset) as usize - ); - assert_eq!( - &foo.c as *const _ as usize, - as_bytes.wrapping_add(c_offset) as usize - ); -} diff --git a/vendor/bytemuck/tests/std_tests.rs b/vendor/bytemuck/tests/std_tests.rs deleted file mode 100644 index a91ee361..00000000 --- a/vendor/bytemuck/tests/std_tests.rs +++ /dev/null @@ -1,45 +0,0 @@ -//! The integration tests seem to always have `std` linked, so things that would -//! depend on that can go here. - -use bytemuck::*; - -#[test] -fn test_transparent_vtabled() { - use core::fmt::Display; - - #[repr(transparent)] - struct DisplayTraitObj(dyn Display); - - unsafe impl TransparentWrapper for DisplayTraitObj {} - - impl Display for DisplayTraitObj { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - self.0.fmt(f) - } - } - - let v = DisplayTraitObj::wrap_ref(&5i32); - let s = format!("{}", v); - assert_eq!(s, "5"); - - let mut x = 100i32; - let v_mut = DisplayTraitObj::wrap_mut(&mut x); - let s = format!("{}", v_mut); - assert_eq!(s, "100"); -} - -#[test] -#[cfg(feature = "extern_crate_alloc")] -fn test_large_box_alloc() { - type SuperPage = [[u8; 4096]; 4096]; - let _: Box = try_zeroed_box().unwrap(); -} - -#[test] -#[cfg(feature = "extern_crate_alloc")] -fn test_zero_sized_box_alloc() { - #[repr(align(4096))] - struct Empty; - unsafe impl Zeroable for Empty {} - let _: Box = try_zeroed_box().unwrap(); -} diff --git a/vendor/bytemuck/tests/transparent.rs b/vendor/bytemuck/tests/transparent.rs deleted file mode 100644 index 8381ad4f..00000000 --- a/vendor/bytemuck/tests/transparent.rs +++ /dev/null @@ -1,64 +0,0 @@ -// Currently this test doesn't actually check the output of the functions. -// It's only here for miri to check for any potential undefined behaviour. -// TODO: check function results - -#[test] -fn test_transparent_wrapper() { - // An external type defined in a different crate. - #[derive(Copy, Clone, Default)] - struct Foreign(u8); - - use bytemuck::TransparentWrapper; - - #[derive(Copy, Clone)] - #[repr(transparent)] - struct Wrapper(Foreign); - - unsafe impl TransparentWrapper for Wrapper {} - - // Traits can be implemented on crate-local wrapper. 
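-  // The orphan rule prevents implementing bytemuck's traits for the foreign
-  // type directly, so the `#[repr(transparent)]` wrapper is what lets the
-  // casts below treat `Foreign` as plain bytes.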
- unsafe impl bytemuck::Zeroable for Wrapper {} - unsafe impl bytemuck::Pod for Wrapper {} - - let _: u8 = bytemuck::cast(Wrapper::wrap(Foreign::default())); - let _: Foreign = Wrapper::peel(bytemuck::cast(u8::default())); - - let _: &u8 = bytemuck::cast_ref(Wrapper::wrap_ref(&Foreign::default())); - let _: &Foreign = Wrapper::peel_ref(bytemuck::cast_ref(&u8::default())); - - let _: &mut u8 = - bytemuck::cast_mut(Wrapper::wrap_mut(&mut Foreign::default())); - let _: &mut Foreign = - Wrapper::peel_mut(bytemuck::cast_mut(&mut u8::default())); - - let _: &[u8] = - bytemuck::cast_slice(Wrapper::wrap_slice(&[Foreign::default()])); - let _: &[Foreign] = - Wrapper::peel_slice(bytemuck::cast_slice(&[u8::default()])); - - let _: &mut [u8] = - bytemuck::cast_slice_mut(Wrapper::wrap_slice_mut( - &mut [Foreign::default()], - )); - let _: &mut [Foreign] = - Wrapper::peel_slice_mut(bytemuck::cast_slice_mut(&mut [u8::default()])); - - let _: &[u8] = bytemuck::bytes_of(Wrapper::wrap_ref(&Foreign::default())); - let _: &Foreign = Wrapper::peel_ref(bytemuck::from_bytes(&[u8::default()])); - - let _: &mut [u8] = - bytemuck::bytes_of_mut(Wrapper::wrap_mut(&mut Foreign::default())); - let _: &mut Foreign = - Wrapper::peel_mut(bytemuck::from_bytes_mut(&mut [u8::default()])); - - // not sure if this is the right usage - let _ = - bytemuck::pod_align_to::<_, u8>(Wrapper::wrap_slice(&[Foreign::default()])); - // counterpart? - - // not sure if this is the right usage - let _ = bytemuck::pod_align_to_mut::<_, u8>(Wrapper::wrap_slice_mut(&mut [ - Foreign::default(), - ])); - // counterpart? -} diff --git a/vendor/bytemuck/tests/wrapper_forgets.rs b/vendor/bytemuck/tests/wrapper_forgets.rs deleted file mode 100644 index da3404f3..00000000 --- a/vendor/bytemuck/tests/wrapper_forgets.rs +++ /dev/null @@ -1,13 +0,0 @@ -use bytemuck::TransparentWrapper; - -#[repr(transparent)] -struct Wrap(Box); - -// SAFETY: it's #[repr(transparent)] -unsafe impl TransparentWrapper> for Wrap {} - -fn main() { - let value = Box::new(5); - // This used to duplicate the wrapped value, creating a double free :( - Wrap::wrap(value); -} diff --git a/vendor/bytemuck_derive/.cargo-checksum.json b/vendor/bytemuck_derive/.cargo-checksum.json deleted file mode 100644 index ef4dfff2..00000000 --- a/vendor/bytemuck_derive/.cargo-checksum.json +++ /dev/null @@ -1 +0,0 @@ -{"files":{"Cargo.toml":"e3bc406eee45f74e5abc5e0935867cf7cbda2a21c9aae5b9ef5e223827f07445","LICENSE-APACHE":"e3ba223bb1423f0aad8c3dfce0fe3148db48926d41e6fbc3afbbf5ff9e1c89cb","LICENSE-MIT":"9df9ba60a11af705f2e451b53762686e615d86f76b169cf075c3237730dbd7e2","LICENSE-ZLIB":"84b34dd7608f7fb9b17bd588a6bf392bf7de504e2716f024a77d89f1b145a151","README.md":"09d8238fd7fdac39857da88e090667d8327ca9ac240768e216ef2079c2f06846","changelog.md":"5314b666a08dbabbb4a09d0cebfaba9b742e4c39fa45b96cf65638d615ca7272","src/lib.rs":"ba381a12be6aca1a9e8b255e7948d87846f382629316473f6b7cad7640d9e47a","src/traits.rs":"163308df190e889b1b68ca5dab530fbf283998e9d0e47cd9ec0e357a1addfa5a","tests/basic.rs":"dd02611e39aa53c941f39be0d10c1eba319650a31fd8222e459cbcc9f9346219"},"package":"8e215f8c2f9f79cb53c8335e687ffd07d5bfcb6fe5fc80723762d0be46e7cc54"} \ No newline at end of file diff --git a/vendor/bytemuck_derive/Cargo.toml b/vendor/bytemuck_derive/Cargo.toml deleted file mode 100644 index 4365f061..00000000 --- a/vendor/bytemuck_derive/Cargo.toml +++ /dev/null @@ -1,37 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml 
files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies -# -# If you believe there's an error in this file please file an -# issue against the rust-lang/cargo repository. If you're -# editing this file be aware that the upstream Cargo.toml -# will likely look very different (and much more reasonable) - -[package] -edition = "2018" -name = "bytemuck_derive" -version = "1.0.1" -authors = ["Lokathor "] -description = "derive proc-macros for `bytemuck`" -readme = "README.md" -keywords = ["transmute", "bytes", "casting"] -categories = ["encoding", "no-std"] -license = "Zlib OR Apache-2.0 OR MIT" -repository = "https://github.com/Lokathor/bytemuck" - -[lib] -name = "bytemuck_derive" -proc-macro = true -[dependencies.proc-macro2] -version = "1" - -[dependencies.quote] -version = "1" - -[dependencies.syn] -version = "1" -[dev-dependencies.bytemuck] -version = "1.2" diff --git a/vendor/bytemuck_derive/LICENSE-APACHE b/vendor/bytemuck_derive/LICENSE-APACHE deleted file mode 100644 index 136d9004..00000000 --- a/vendor/bytemuck_derive/LICENSE-APACHE +++ /dev/null @@ -1,61 +0,0 @@ -Apache License -Version 2.0, January 2004 -http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. - - "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. - 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. - 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. - 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: - (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and - (b) You must cause any modified files to carry prominent notices stating that You changed the files; and - (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and - (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. - - You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. - 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. - 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. - 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. - 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. - 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - -To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/vendor/bytemuck_derive/LICENSE-MIT b/vendor/bytemuck_derive/LICENSE-MIT deleted file mode 100644 index 164045fa..00000000 --- a/vendor/bytemuck_derive/LICENSE-MIT +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) 2019 Daniel "Lokathor" Gee. - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/bytemuck_derive/LICENSE-ZLIB b/vendor/bytemuck_derive/LICENSE-ZLIB deleted file mode 100644 index d70707c7..00000000 --- a/vendor/bytemuck_derive/LICENSE-ZLIB +++ /dev/null @@ -1,11 +0,0 @@ -Copyright (c) 2019 Daniel "Lokathor" Gee. - -This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software. - -Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions: - -1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required. - -2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software. - -3. This notice may not be removed or altered from any source distribution. diff --git a/vendor/bytemuck_derive/README.md b/vendor/bytemuck_derive/README.md deleted file mode 100644 index 553a40ef..00000000 --- a/vendor/bytemuck_derive/README.md +++ /dev/null @@ -1,4 +0,0 @@ - -# bytemuck_derive - -Derive macros for [bytemuck](https://docs.rs/bytemuck) traits. diff --git a/vendor/bytemuck_derive/changelog.md b/vendor/bytemuck_derive/changelog.md deleted file mode 100644 index eccfaf03..00000000 --- a/vendor/bytemuck_derive/changelog.md +++ /dev/null @@ -1,11 +0,0 @@ - -## `bytemuck_derive` changelog - -## 1.0.1 - -* [yanchith](https://github.com/yanchith) fixed the derive checks code to make clippy more happy. -[PR 45](https://github.com/Lokathor/bytemuck/pull/45) - -## 1.0.0 - -* Initial stable release. diff --git a/vendor/bytemuck_derive/src/lib.rs b/vendor/bytemuck_derive/src/lib.rs deleted file mode 100644 index 924270a0..00000000 --- a/vendor/bytemuck_derive/src/lib.rs +++ /dev/null @@ -1,179 +0,0 @@ -//! Derive macros for [bytemuck](https://docs.rs/bytemuck) traits. - -extern crate proc_macro; - -mod traits; - -use proc_macro2::TokenStream; -use quote::quote; -use syn::{parse_macro_input, DeriveInput}; - -use crate::traits::{Contiguous, Derivable, Pod, TransparentWrapper, Zeroable}; - -/// Derive the `Pod` trait for a struct -/// -/// The macro ensures that the struct follows all the the safety requirements -/// for the `Pod` trait. 
-/// -/// The following constraints need to be satisfied for the macro to succeed -/// -/// - All fields in the struct must implement `Pod` -/// - The struct must be `#[repr(C)]` or `#[repr(transparent)]` -/// - The struct must not contain any padding bytes -/// - The struct contains no generic parameters -/// -/// ## Example -/// -/// ```rust -/// # use bytemuck_derive::{Pod, Zeroable}; -/// -/// #[derive(Copy, Clone, Pod, Zeroable)] -/// #[repr(C)] -/// struct Test { -/// a: u16, -/// b: u16, -/// } -/// ``` -#[proc_macro_derive(Pod)] -pub fn derive_pod(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let expanded = - derive_marker_trait::(parse_macro_input!(input as DeriveInput)); - - proc_macro::TokenStream::from(expanded) -} - -/// Derive the `Zeroable` trait for a struct -/// -/// The macro ensures that the struct follows all the the safety requirements -/// for the `Zeroable` trait. -/// -/// The following constraints need to be satisfied for the macro to succeed -/// -/// - All fields ind the struct must to implement `Zeroable` -/// -/// ## Example -/// -/// ```rust -/// # use bytemuck_derive::{Zeroable}; -/// -/// #[derive(Copy, Clone, Zeroable)] -/// #[repr(C)] -/// struct Test { -/// a: u16, -/// b: u16, -/// } -/// ``` -#[proc_macro_derive(Zeroable)] -pub fn derive_zeroable( - input: proc_macro::TokenStream, -) -> proc_macro::TokenStream { - let expanded = - derive_marker_trait::(parse_macro_input!(input as DeriveInput)); - - proc_macro::TokenStream::from(expanded) -} - -/// Derive the `TransparentWrapper` trait for a struct -/// -/// The macro ensures that the struct follows all the the safety requirements -/// for the `TransparentWrapper` trait. -/// -/// The following constraints need to be satisfied for the macro to succeed -/// -/// - The struct must be `#[repr(transparent)]` -/// - The struct must contain the `Wrapped` type -/// -/// If the struct only contains a single field, the `Wrapped` type will -/// automatically be determined if there is more then one field in the struct, -/// you need to specify the `Wrapped` type using `#[transparent(T)]` -/// -/// ## Example -/// -/// ```rust -/// # use bytemuck_derive::TransparentWrapper; -/// # use std::marker::PhantomData; -/// -/// #[derive(Copy, Clone, TransparentWrapper)] -/// #[repr(transparent)] -/// #[transparent(u16)] -/// struct Test { -/// inner: u16, -/// extra: PhantomData, -/// } -/// ``` -#[proc_macro_derive(TransparentWrapper, attributes(transparent))] -pub fn derive_transparent( - input: proc_macro::TokenStream, -) -> proc_macro::TokenStream { - let expanded = derive_marker_trait::(parse_macro_input!( - input as DeriveInput - )); - - proc_macro::TokenStream::from(expanded) -} - -/// Derive the `Contiguous` trait for an enum -/// -/// The macro ensures that the enum follows all the the safety requirements -/// for the `Contiguous` trait. 
-/// -/// The following constraints need to be satisfied for the macro to succeed -/// -/// - The enum must be `#[repr(Int)]` -/// - The enum must be fieldless -/// - The enum discriminants must form a contiguous range -/// -/// ## Example -/// -/// ```rust -/// # use bytemuck_derive::{Contiguous}; -/// -/// #[derive(Copy, Clone, Contiguous)] -/// #[repr(u8)] -/// enum Test { -/// A = 0, -/// B = 1, -/// C = 2, -/// } -/// ``` -#[proc_macro_derive(Contiguous)] -pub fn derive_contiguous( - input: proc_macro::TokenStream, -) -> proc_macro::TokenStream { - let expanded = - derive_marker_trait::(parse_macro_input!(input as DeriveInput)); - - proc_macro::TokenStream::from(expanded) -} - -/// Basic wrapper for error handling -fn derive_marker_trait(input: DeriveInput) -> TokenStream { - derive_marker_trait_inner::(input).unwrap_or_else(|err| { - quote! { - compile_error!(#err); - } - }) -} - -fn derive_marker_trait_inner( - input: DeriveInput, -) -> Result { - let name = &input.ident; - - let (impl_generics, ty_generics, where_clause) = - input.generics.split_for_impl(); - - let trait_ = Trait::ident(); - Trait::check_attributes(&input.attrs)?; - let asserts = Trait::struct_asserts(&input)?; - let trait_params = Trait::generic_params(&input)?; - let trait_impl = Trait::trait_impl(&input)?; - - Ok(quote! { - #asserts - - unsafe impl #impl_generics #trait_ #trait_params for #name #ty_generics #where_clause { - #trait_impl - } - }) -} diff --git a/vendor/bytemuck_derive/src/traits.rs b/vendor/bytemuck_derive/src/traits.rs deleted file mode 100644 index 03147916..00000000 --- a/vendor/bytemuck_derive/src/traits.rs +++ /dev/null @@ -1,328 +0,0 @@ -use proc_macro2::{Ident, TokenStream, TokenTree}; -use quote::{quote, quote_spanned, ToTokens}; -use syn::{ - spanned::Spanned, AttrStyle, Attribute, Data, DataEnum, DataStruct, - DeriveInput, Expr, ExprLit, ExprUnary, Fields, Lit, LitInt, Type, UnOp, - Variant, -}; - -pub trait Derivable { - fn ident() -> TokenStream; - fn generic_params(_input: &DeriveInput) -> Result { - Ok(quote!()) - } - fn struct_asserts(_input: &DeriveInput) -> Result { - Ok(quote!()) - } - fn check_attributes(_attributes: &[Attribute]) -> Result<(), &'static str> { - Ok(()) - } - fn trait_impl(_input: &DeriveInput) -> Result { - Ok(quote!()) - } -} - -pub struct Pod; - -impl Derivable for Pod { - fn ident() -> TokenStream { - quote!(::bytemuck::Pod) - } - - fn struct_asserts(input: &DeriveInput) -> Result { - if !input.generics.params.is_empty() { - return Err("Pod requires cannot be derived for structs containing generic parameters because the padding requirements can't be verified for generic structs"); - } - - let assert_no_padding = generate_assert_no_padding(input)?; - let assert_fields_are_pod = - generate_fields_are_trait(input, Self::ident())?; - - Ok(quote!( - #assert_no_padding - #assert_fields_are_pod - )) - } - - fn check_attributes(attributes: &[Attribute]) -> Result<(), &'static str> { - let repr = get_repr(attributes); - match repr.as_ref().map(|repr| repr.as_str()) { - Some("C") => Ok(()), - Some("transparent") => Ok(()), - _ => { - Err("Pod requires the struct to be #[repr(C)] or #[repr(transparent)]") - } - } - } -} - -pub struct Zeroable; - -impl Derivable for Zeroable { - fn ident() -> TokenStream { - quote!(::bytemuck::Zeroable) - } - - fn struct_asserts(input: &DeriveInput) -> Result { - generate_fields_are_trait(input, Self::ident()) - } -} - -pub struct TransparentWrapper; - -impl TransparentWrapper { - fn get_wrapper_type( - attributes: &[Attribute], fields: 
&Fields, - ) -> Option { - let transparent_param = get_simple_attr(attributes, "transparent"); - transparent_param.map(|ident| ident.to_token_stream()).or_else(|| { - let mut types = get_field_types(fields); - let first_type = types.next(); - if let Some(_) = types.next() { - // can't guess param type if there is more than one field - return None; - } else { - first_type.map(|ty| ty.to_token_stream()) - } - }) - } -} - -impl Derivable for TransparentWrapper { - fn ident() -> TokenStream { - quote!(::bytemuck::TransparentWrapper) - } - - fn generic_params(input: &DeriveInput) -> Result { - let fields = get_struct_fields(input)?; - - Self::get_wrapper_type(&input.attrs, fields).map(|ty| quote!(<#ty>)) - .ok_or("when deriving TransparentWrapper for a struct with more than one field you need to specify the transparent field using #[transparent(T)]") - } - - fn struct_asserts(input: &DeriveInput) -> Result { - let fields = get_struct_fields(input)?; - let wrapped_type = match Self::get_wrapper_type(&input.attrs, fields) { - Some(wrapped_type) => wrapped_type.to_string(), - None => unreachable!(), /* other code will already reject this derive */ - }; - let mut wrapped_fields = fields - .iter() - .filter(|field| field.ty.to_token_stream().to_string() == wrapped_type); - if let None = wrapped_fields.next() { - return Err("TransparentWrapper must have one field of the wrapped type"); - }; - if let Some(_) = wrapped_fields.next() { - Err("TransparentWrapper can only have one field of the wrapped type") - } else { - Ok(quote!()) - } - } - - fn check_attributes(attributes: &[Attribute]) -> Result<(), &'static str> { - let repr = get_repr(attributes); - - match repr.as_ref().map(|repr| repr.as_str()) { - Some("transparent") => Ok(()), - _ => { - Err("TransparentWrapper requires the struct to be #[repr(transparent)]") - } - } - } -} - -pub struct Contiguous; - -impl Derivable for Contiguous { - fn ident() -> TokenStream { - quote!(::bytemuck::Contiguous) - } - - fn trait_impl(input: &DeriveInput) -> Result { - let repr = get_repr(&input.attrs) - .ok_or("Contiguous requires the enum to be #[repr(Int)]")?; - - if !repr.starts_with('u') && !repr.starts_with('i') { - return Err("Contiguous requires the enum to be #[repr(Int)]"); - } - - let variants = get_enum_variants(input)?; - let mut variants_with_discriminator = - VariantDiscriminantIterator::new(variants); - - let (min, max, count) = variants_with_discriminator.try_fold( - (i64::max_value(), i64::min_value(), 0), - |(min, max, count), res| { - let discriminator = res?; - Ok(( - i64::min(min, discriminator), - i64::max(max, discriminator), - count + 1, - )) - }, - )?; - - if max - min != count - 1 { - return Err( - "Contiguous requires the enum discriminants to be contiguous", - ); - } - - let repr_ident = Ident::new(&repr, input.span()); - let min_lit = LitInt::new(&format!("{}", min), input.span()); - let max_lit = LitInt::new(&format!("{}", max), input.span()); - - Ok(quote! { - type Int = #repr_ident; - const MIN_VALUE: #repr_ident = #min_lit; - const MAX_VALUE: #repr_ident = #max_lit; - }) - } -} - -fn get_struct_fields(input: &DeriveInput) -> Result<&Fields, &'static str> { - if let Data::Struct(DataStruct { fields, .. }) = &input.data { - Ok(fields) - } else { - Err("deriving this trait is only supported for structs") - } -} - -fn get_enum_variants<'a>( - input: &'a DeriveInput, -) -> Result + 'a, &'static str> { - if let Data::Enum(DataEnum { variants, .. 
}) = &input.data { - Ok(variants.iter()) - } else { - Err("deriving this trait is only supported for enums") - } -} - -fn get_field_types<'a>( - fields: &'a Fields, -) -> impl Iterator + 'a { - fields.iter().map(|field| &field.ty) -} - -/// Check that a struct has no padding by asserting that the size of the struct -/// is equal to the sum of the size of it's fields -fn generate_assert_no_padding( - input: &DeriveInput, -) -> Result { - let struct_type = &input.ident; - let span = input.ident.span(); - let fields = get_struct_fields(input)?; - - let mut field_types = get_field_types(&fields); - let size_sum = if let Some(first) = field_types.next() { - let size_first = quote_spanned!(span => ::core::mem::size_of::<#first>()); - let size_rest = - quote_spanned!(span => #( + ::core::mem::size_of::<#field_types>() )*); - - quote_spanned!(span => #size_first#size_rest) - } else { - quote_spanned!(span => 0) - }; - - Ok(quote_spanned! {span => const _: fn() = || { - struct TypeWithoutPadding([u8; #size_sum]); - let _ = ::core::mem::transmute::<#struct_type, TypeWithoutPadding>; - };}) -} - -/// Check that all fields implement a given trait -fn generate_fields_are_trait( - input: &DeriveInput, trait_: TokenStream, -) -> Result { - let (impl_generics, _ty_generics, where_clause) = - input.generics.split_for_impl(); - let fields = get_struct_fields(input)?; - let span = input.span(); - let field_types = get_field_types(&fields); - Ok(quote_spanned! {span => #(const _: fn() = || { - fn check #impl_generics () #where_clause { - fn assert_impl() {} - assert_impl::<#field_types>(); - } - };)* - }) -} - -fn get_ident_from_stream(tokens: TokenStream) -> Option { - match tokens.into_iter().next() { - Some(TokenTree::Group(group)) => get_ident_from_stream(group.stream()), - Some(TokenTree::Ident(ident)) => Some(ident), - _ => None, - } -} - -/// get a simple #[foo(bar)] attribute, returning "bar" -fn get_simple_attr(attributes: &[Attribute], attr_name: &str) -> Option { - for attr in attributes { - if let (AttrStyle::Outer, Some(outer_ident), Some(inner_ident)) = ( - &attr.style, - attr.path.get_ident(), - get_ident_from_stream(attr.tokens.clone()), - ) { - if outer_ident.to_string() == attr_name { - return Some(inner_ident); - } - } - } - - None -} - -fn get_repr(attributes: &[Attribute]) -> Option { - get_simple_attr(attributes, "repr").map(|ident| ident.to_string()) -} - -struct VariantDiscriminantIterator<'a, I: Iterator + 'a> { - inner: I, - last_value: i64, -} - -impl<'a, I: Iterator + 'a> - VariantDiscriminantIterator<'a, I> -{ - fn new(inner: I) -> Self { - VariantDiscriminantIterator { inner, last_value: -1 } - } -} - -impl<'a, I: Iterator + 'a> Iterator - for VariantDiscriminantIterator<'a, I> -{ - type Item = Result; - - fn next(&mut self) -> Option { - let variant = self.inner.next()?; - if !variant.fields.is_empty() { - return Some(Err("Only fieldless enums are supported")); - } - - if let Some((_, discriminant)) = &variant.discriminant { - let discriminant_value = match parse_int_expr(discriminant) { - Ok(value) => value, - Err(e) => return Some(Err(e)), - }; - self.last_value = discriminant_value; - } else { - self.last_value += 1; - } - - Some(Ok(self.last_value)) - } -} - -fn parse_int_expr(expr: &Expr) -> Result { - match expr { - Expr::Unary(ExprUnary { op: UnOp::Neg(_), expr, .. }) => { - parse_int_expr(expr).map(|int| -int) - } - Expr::Lit(ExprLit { lit: Lit::Int(int), .. 
}) => { - int.base10_parse().map_err(|_| "Invalid integer expression") - } - _ => Err("Not an integer expression"), - } -} diff --git a/vendor/bytemuck_derive/tests/basic.rs b/vendor/bytemuck_derive/tests/basic.rs deleted file mode 100644 index 867399a0..00000000 --- a/vendor/bytemuck_derive/tests/basic.rs +++ /dev/null @@ -1,50 +0,0 @@ -#![allow(dead_code)] - -use bytemuck_derive::{Contiguous, Pod, TransparentWrapper, Zeroable}; -use std::marker::PhantomData; - -#[derive(Copy, Clone, Pod, Zeroable)] -#[repr(C)] -struct Test { - a: u16, - b: u16, -} - -#[derive(Zeroable)] -struct ZeroGeneric { - a: T, -} - -#[derive(TransparentWrapper)] -#[repr(transparent)] -struct TransparentSingle { - a: u16, -} - -#[derive(TransparentWrapper)] -#[repr(transparent)] -#[transparent(u16)] -struct TransparentWithZeroSized { - a: u16, - b: PhantomData, -} - -#[repr(u8)] -#[derive(Clone, Copy, Contiguous)] -enum ContiguousWithValues { - A = 0, - B = 1, - C = 2, - D = 3, - E = 4, -} - -#[repr(i8)] -#[derive(Clone, Copy, Contiguous)] -enum ContiguousWithImplicitValues { - A = -10, - B, - C, - D, - E, -} diff --git a/vendor/fake-enum/.cargo-checksum.json b/vendor/fake-enum/.cargo-checksum.json deleted file mode 100644 index 00f45400..00000000 --- a/vendor/fake-enum/.cargo-checksum.json +++ /dev/null @@ -1 +0,0 @@ -{"files":{"Cargo.toml":"d5007319bb050aafdb2d9928048ab8c70c7deae5cab4fb135c6183231a3a8bf7","LICENSE-APACHE":"c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4","LICENSE-MIT":"6294b1a107fec2877a4869ae2fb31088258700fa763bae44be802458adbe126f","Makefile.in":"20c33db75369d0a839d5e96aadf870ab0be9850585a165d421150e1641c5f8a9","README.md":"661076a770da8e07d214958efe9b02b50229b1d4b9b6fb0473eb16eefd70f319","aclocal.m4":"3e49d379f015e7aa565af29b60183aaf7835ef22b56d164a3437da1a059b3966","build-dirs.sh":"81b97d919f0b95f63cd12d1a4876a10ae10640f418b6db080e46259d36190bd6","config.guess":"7d1e3c79b86de601c3a0457855ab854dffd15163f53c91edac54a7be2e9c931b","config.sub":"a55026ac66184483db385cc065dd6cdc1a6788ffc1c3f82eddb5fceee2231a6d","configure":"2ddeb47f60f559829f869aa6f8b0f895bc205cecafeca40657ce1540bd03b5fd","configure.ac":"26595d8dc2caa3f6b962cb20a33dfdf7b8d220932190f6a8a155bd7a6d7d76a6","install-sh":"3d7488bebd0cfc9b5c440c55d5b44f1c6e2e3d3e19894821bae4a27f9307f1d2","m4/ax_prog_cc_for_build.m4":"3480058391c4511a3c2c73b4ddd018bc36de6dabbc58d876c0ce4ffe239c36fc","m4/bindgen.m4":"15de8141b578a8ca2657f61f65a28eacb1ba7cb811bfe159e74aac3c4e833bfc","m4/build_std.m4":"4b2da2c135c431c7ca846326e60cd44fde8747ffe91e45562e252bcb1fa465cd","m4/lcrust_checks.m4":"8bd58fa9ce76f1cb1369e15ce244be2fdd5f421f895f5415e4a9d780791f487f","m4/lcrust_prog_rustc.m4":"cf1e0d65c0758bd9028f40d8b0e3880f181aa8849cdc7ff3e052d6339fd3ddb9","run-tests.sh":"6301cdd717d770d946503076778ccd7fecb712acaadc6379372cf35740a5b5a9","src/lib.rs":"9832edec5ebd97c1ac62d3bb6a3799b10013410db26a3ba0c0204e4563614fda"},"package":"185b4cd26aff3789b8662aa73732b585d46b9756cd3c6b302be72f68302c5eba"} \ No newline at end of file diff --git a/vendor/fake-enum/Cargo.toml b/vendor/fake-enum/Cargo.toml deleted file mode 100644 index e0758aac..00000000 --- a/vendor/fake-enum/Cargo.toml +++ /dev/null @@ -1,22 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies. 
-# -# If you are reading this file be aware that the original Cargo.toml -# will likely look very different (and much more reasonable). -# See Cargo.toml.orig for the original contents. - -[package] -edition = "2018" -name = "fake-enum" -version = "0.1.4" -authors = ["Connor "] -description = " A rust crate for creating FFI safe \"fake\" enum types\n" -readme = "README.md" -license = "MIT AND Apache-2.0" -repository = "https://github.com/chorman0773/rust-fake-enum" - -[dependencies] diff --git a/vendor/fake-enum/LICENSE-APACHE b/vendor/fake-enum/LICENSE-APACHE deleted file mode 100644 index 261eeb9e..00000000 --- a/vendor/fake-enum/LICENSE-APACHE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/vendor/fake-enum/LICENSE-MIT b/vendor/fake-enum/LICENSE-MIT deleted file mode 100644 index a13ecd59..00000000 --- a/vendor/fake-enum/LICENSE-MIT +++ /dev/null @@ -1,8 +0,0 @@ -Copyright 2021 Connor Horman - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - diff --git a/vendor/fake-enum/Makefile.in b/vendor/fake-enum/Makefile.in deleted file mode 100644 index 3ba6ed36..00000000 --- a/vendor/fake-enum/Makefile.in +++ /dev/null @@ -1,179 +0,0 @@ -# Copyright 2021 Connor Horman - -# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -# Makefile.in template for library rust projects - - -# Root Options - -ifeq ($(OS),Windows_NT) -DLLEXT = .dll -else -DLLEXT = .so -endif - -srcdir = @srcdir@ -builddir = @builddir@ -abs_builddir = @abs_builddir@ - -RUSTC = @RUSTC@ -RUSTFLAGS := @RUSTFLAGS@ - -INSTALL := @INSTALL@ - -RUSTC_IS_LCCC = @rustc_is_lccc@ - -## Install directories - -prefix = @prefix@ -exec_prefix = @exec_prefix@ -bindir = @bindir@ -libdir = @libdir@ -includedir = @includedir@ -sbindir = @sbindir@ -libexecdir = @libexecdir@ -datarootdir = @datarootdir@ -datadir = @datadir@ -mandir = @mandir@ -docdir = @docdir@ -infodir = @infodir@ -localedir = @localedir@ -sysconfdir = @sysconfdir@ -localstatedir = @localstatedir@ -runstatedir = @runstatedir@ -sharedstatedir = @sharedstatedir@ - - -# Other Configuration - -features = -binaries = -tests = -# Package specific Configuration - -EXTRA_RUSTFLAGS = - -# The package version -version = 0.1.4 - -# The output file of the library -OUTPUT = fake-enum - -# The crate name of the library -CRATE_NAME = fake_enum - -# The path to find dependencies located in - -DEPENDENCY_DIR = @abs_builddir@/vendor - -# Any dependencies of the library -DEPENDENCIES = - -PROC_MACROS = - -SUBDIRS = - -# Autogenerated variables - -file_to_crate = $(subst -,_,$(1)) - -binary_outputs = $(foreach bin,$(binaries),$(bin)@EXEEXT@) - -CFG = $(foreach feature,$(features),feature="$feature") - -EXTERN_CRATES = $(foreach procmacro,$(PROC_MACROS),$(call file_to_crate,$(notdir $(procmacro)))=$(DEPENDENCY_DIR)/$(build)/$(procmacro)/lib$(procmacro).$(DLLEXT)) $(foreach dep,$(DEPENDENCIES),--extern $(call file_to_crate,$(notdir $(dep)))=$(DEPENDENCY_DIR)/$(dep)/lib$(dep).rlib) - -export CARGO_PKG_VERSION=$(version) -export CARGO_MANIFEST_DIR=@abs_srcdir@ -export OUT_DIR=@abs_builddir@ - -# Targets - -all: - +@srcdir@/build-dirs.sh $(SUBDIRS) - +$(MAKE) stamp - -.PHONY: all clean distclean install install-strip $(binary_outputs:%=install-%) $(binary_outputs:%=install-strip-%) $(foreach subdir,$(SUBDIRS),$(subdir)/install) $(foreach subdir,$(SUBDIRS),$(subdir)/install-strip) $(foreach subdir,$(SUBDIRS),$(subdir)/clean) $(foreach subdir,$(SUBDIRS),$(subdir)/distclean) $(foreach subdir,$(SUBDIRS),$(subdir)/check) - - -Makefile: config.status @srcdir@/Makefile.in - @builddir@/config.status Makefile - - -config.status: @srcdir@/configure - @builddir@/config.status --recheck - - -stamp: $(binaries) $(OUTPUT) - touch stamp - -$(OUTPUT): Makefile - - -include $(OUTPUT).d - -$(OUTPUT).d: $(srcdir)/src/lib.rs $(foreach subdir,$(SUBDIRS),$(subdir)/stamp) - $(RUSTC) $(RUSTFLAGS) $(EXTRA_RUSTFLAGS) $(foreach cfg,$(CFG),--cfg $(cfg)) --crate-name $(CRATE_NAME) --crate-type rlib --emit dep-info=@builddir@/$(OUTPUT).d -o$(OUTPUT) $< $(EXTERN_CRATES) - -$(OUTPUT): $(srcdir)/src/lib.rs $(foreach subdir,$(SUBDIRS),$(subdir)/stamp) - +$(RUSTC) $(RUSTFLAGS) $(EXTRA_RUSTFLAGS) $(foreach cfg,$(CFG),--cfg $(cfg)) --crate-name $(CRATE_NAME) --crate-type rlib --emit dep-info=@builddir@/$(OUTPUT).d --emit link=@abs_builddir@/$(OUTPUT) -o$(OUTPUT) $< $(EXTERN_CRATES) - -include $(foreach bin,$(binary_outputs),$(bin).d) - -$(foreach out,$(binary_outputs),$(out).d): %@EXEEXT@.d: $(srcdir)/src/bin/%.rs $(OUTPUT) - $(RUSTC) $(RUSTFLAGS) $(EXTRA_RUSTFLAGS)$(foreach cfg,$(CFG),--cfg $(cfg)) 
--crate-name $(call file_to_crate,$*) --crate-type bin --emit dep-info=@builddir@/$@ -o$*@EXEEXT@ $< $(EXTERN_CRATES) --extern $(CRATE_NAME)=@abs_builddir@/$(OUTPUT) - -$(binary_outputs): %@EXEEXT@: $(srcdir)/src/bin/%.rs $(OUTPUT) - +$(RUSTC) $(RUSTFLAGS) $(EXTRA_RUSTFLAGS) $(foreach cfg,$(CFG),--cfg $(cfg)) --crate-name $(call file_to_crate,$*) --crate-type bin --emit dep-info=@builddir@/$@.d --emit link=@abs_builddir@/$@ -o$@ $< $(EXTERN_CRATES) --extern $(CRATE_NAME)=@abs_builddir@/$(OUTPUT) - -clean: $(foreach subdir,$(SUBDIRS),$(subdir)/clean) - rm -f $(OUTPUT) $(binary_outputs) test-$(OUTPUT:%.rlib=%@EXEEXT@) $(tests:%.rs=%@EXEEXT@) stamp - -distclean: clean $(foreach subdir,$(SUBDIRS),$(subdir)/distclean) - rm -f config.status config.log $(OUTPUT).d test-$(OUTPUT:%.rlib=%@EXEEXT@).d $(foreach out,$(binary_outputs),$(out).d) Makefile - -$(binary_outputs:%=install-%): install-%: % - $(INSTALL) -m755 $< ${bindir}/ - -$(binary_outputs:%=install-strip-%): install-strip-%: % - $(INSTALL) -s -m755 $< ${bindir}/ - -install: $(binary_outputs:%=install-%) $(foreach subdir,$(SUBDIRS),$(subdir)/install) - -install-strip: $(binary_outputs:%=install-strip-%) $(foreach subdir,$(SUBDIRS),$(subdir)/install-strip) - -include test-$(OUTPUT:%.rlib=%@EXEEXT@).d - -tests/: - mkdir tests/ - -test-$(OUTPUT:%.rlib=%@EXEEXT@).d: $(srcdir)/src/lib.rs - $(RUSTC) $(RUSTFLAGS) $(EXTRA_RUSTFLAGS) $(foreach cfg,$(CFG),--cfg $(cfg)) --cfg test --crate-name $(CRATE_NAME) --test --emit dep-info=@builddir@/test-$(OUTPUT:%.rlib=%@EXEEXT@).d -otest-$(OUTPUT:%.rlib=%@EXEEXT@) $< $(EXTERN_CRATES) - -test-$(OUTPUT:%.rlib=%@EXEEXT@): $(srcdir)/src/lib.rs - +$(RUSTC) $(RUSTFLAGS) $(EXTRA_RUSTFLAGS) $(foreach cfg,$(CFG),--cfg $(cfg)) --cfg test --crate-name $(CRATE_NAME) --test --emit dep-info=@builddir@/test-$(OUTPUT:%.rlib=%@EXEEXT@).d --emit link=@abs_builddir@/test-$(OUTPUT:%.rlib=%@EXEEXT@) -o$(OUTPUT:%.rlib=%@EXEEXT@) $< $(EXTERN_CRATES) - -$(tests:%.rs=%@EXEEXT@): %@EXEEXT@: $(srcdir)/%.rs $(OUTPUT) tests/ - +$(RUSTC) $(RUSTFLAGS) $(EXTRA_RUSTFLAGS) $(foreach cfg,$(CFG),--cfg $(cfg)) --crate-name $(call file_to_crate,$(*F)) --test --emit dep-info=@builddir@/$@.d --emit link=@abs_builddir@/$@ -o$@ $< $(EXTERN_CRATES) --extern $(CRATE_NAME)=@abs_builddir@/$(OUTPUT) - -check: @builddir@/test-$(OUTPUT:%.rlib=%@EXEEXT@) $(tests:%.rs=@builddir@/%@EXEEXT@) $(foreach subdir,$(SUBDIRS),$(subdir)/check) - $(srcdir)/run-tests.sh $(foreach file,$^,@builddir@/$(file)) - -%/stamp: %/Makefile - +$(MAKE) -C $* stamp - -%/clean: - +$(MAKE) -C $* clean - -%/install: - +$(MAKE) -C $* install -%/install: - +$(MAKE) -C $* install-strip - -%/check: - +$(MAKE) -C $* check diff --git a/vendor/fake-enum/README.md b/vendor/fake-enum/README.md deleted file mode 100644 index e2cf23f9..00000000 --- a/vendor/fake-enum/README.md +++ /dev/null @@ -1,13 +0,0 @@ -# Rust Fake Enums - -A small library that allows you to declare "fake" enums, which can be safely used with FFI, - or reinterpreted from bytes sourced from a user-controlled input. - -## License - -Copyright (C) 2021 Connor Horman - -This repository is dual licensed under the terms of the MIT and Apachev2 license. If you deal in the software, you must comply with the terms of both licenses. - -Any contributions intentionally submitted by you for inclusion in this repository must be dual licensed as above. 
- diff --git a/vendor/fake-enum/aclocal.m4 b/vendor/fake-enum/aclocal.m4 deleted file mode 100644 index 2823c249..00000000 --- a/vendor/fake-enum/aclocal.m4 +++ /dev/null @@ -1,16 +0,0 @@ -# generated automatically by aclocal 1.16.3 -*- Autoconf -*- - -# Copyright (C) 1996-2020 Free Software Foundation, Inc. - -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY, to the extent permitted by law; without -# even the implied warranty of MERCHANTABILITY or FITNESS FOR A -# PARTICULAR PURPOSE. - -m4_ifndef([AC_CONFIG_MACRO_DIRS], [m4_defun([_AM_CONFIG_MACRO_DIRS], [])m4_defun([AC_CONFIG_MACRO_DIRS], [_AM_CONFIG_MACRO_DIRS($@)])]) -m4_include([m4/lcrust_checks.m4]) -m4_include([m4/lcrust_prog_rustc.m4]) diff --git a/vendor/fake-enum/build-dirs.sh b/vendor/fake-enum/build-dirs.sh deleted file mode 100755 index e5079d95..00000000 --- a/vendor/fake-enum/build-dirs.sh +++ /dev/null @@ -1,4 +0,0 @@ -for s in $* -do - ${MAKE} -C $s -done \ No newline at end of file diff --git a/vendor/fake-enum/config.guess b/vendor/fake-enum/config.guess deleted file mode 100755 index 1972fda8..00000000 --- a/vendor/fake-enum/config.guess +++ /dev/null @@ -1,1700 +0,0 @@ -#! /bin/sh -# Attempt to guess a canonical system name. -# Copyright 1992-2021 Free Software Foundation, Inc. - -timestamp='2021-01-25' - -# This file is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, see . -# -# As a special exception to the GNU General Public License, if you -# distribute this file as part of a program that contains a -# configuration script generated by Autoconf, you may include it under -# the same distribution terms that you use for the rest of that -# program. This Exception is an additional permission under section 7 -# of the GNU General Public License, version 3 ("GPLv3"). -# -# Originally written by Per Bothner; maintained since 2000 by Ben Elliston. -# -# You can get the latest version of this script from: -# https://git.savannah.gnu.org/cgit/config.git/plain/config.guess -# -# Please send patches to . - - -me=$(echo "$0" | sed -e 's,.*/,,') - -usage="\ -Usage: $0 [OPTION] - -Output the configuration name of the system \`$me' is run on. - -Options: - -h, --help print this help, then exit - -t, --time-stamp print date of last modification, then exit - -v, --version print version number, then exit - -Report bugs and patches to ." - -version="\ -GNU config.guess ($timestamp) - -Originally written by Per Bothner. -Copyright 1992-2021 Free Software Foundation, Inc. - -This is free software; see the source for copying conditions. There is NO -warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." - -help=" -Try \`$me --help' for more information." 
- -# Parse command line -while test $# -gt 0 ; do - case $1 in - --time-stamp | --time* | -t ) - echo "$timestamp" ; exit ;; - --version | -v ) - echo "$version" ; exit ;; - --help | --h* | -h ) - echo "$usage"; exit ;; - -- ) # Stop option processing - shift; break ;; - - ) # Use stdin as input. - break ;; - -* ) - echo "$me: invalid option $1$help" >&2 - exit 1 ;; - * ) - break ;; - esac -done - -if test $# != 0; then - echo "$me: too many arguments$help" >&2 - exit 1 -fi - -# CC_FOR_BUILD -- compiler used by this script. Note that the use of a -# compiler to aid in system detection is discouraged as it requires -# temporary files to be created and, as you can see below, it is a -# headache to deal with in a portable fashion. - -# Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still -# use `HOST_CC' if defined, but it is deprecated. - -# Portable tmp directory creation inspired by the Autoconf team. - -tmp= -# shellcheck disable=SC2172 -trap 'test -z "$tmp" || rm -fr "$tmp"' 0 1 2 13 15 - -set_cc_for_build() { - # prevent multiple calls if $tmp is already set - test "$tmp" && return 0 - : "${TMPDIR=/tmp}" - # shellcheck disable=SC2039 - { tmp=$( (umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null) && test -n "$tmp" && test -d "$tmp" ; } || - { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir "$tmp" 2>/dev/null) ; } || - { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir "$tmp" 2>/dev/null) && echo "Warning: creating insecure temp directory" >&2 ; } || - { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } - dummy=$tmp/dummy - case ${CC_FOR_BUILD-},${HOST_CC-},${CC-} in - ,,) echo "int x;" > "$dummy.c" - for driver in cc gcc c89 c99 ; do - if ($driver -c -o "$dummy.o" "$dummy.c") >/dev/null 2>&1 ; then - CC_FOR_BUILD="$driver" - break - fi - done - if test x"$CC_FOR_BUILD" = x ; then - CC_FOR_BUILD=no_compiler_found - fi - ;; - ,,*) CC_FOR_BUILD=$CC ;; - ,*,*) CC_FOR_BUILD=$HOST_CC ;; - esac -} - -# This is needed to find uname on a Pyramid OSx when run in the BSD universe. -# (ghazi@noc.rutgers.edu 1994-08-24) -if test -f /.attbin/uname ; then - PATH=$PATH:/.attbin ; export PATH -fi - -UNAME_MACHINE=$( (uname -m) 2>/dev/null) || UNAME_MACHINE=unknown -UNAME_RELEASE=$( (uname -r) 2>/dev/null) || UNAME_RELEASE=unknown -UNAME_SYSTEM=$( (uname -s) 2>/dev/null) || UNAME_SYSTEM=unknown -UNAME_VERSION=$( (uname -v) 2>/dev/null) || UNAME_VERSION=unknown - -case "$UNAME_SYSTEM" in -Linux|GNU|GNU/*) - LIBC=unknown - - set_cc_for_build - cat <<-EOF > "$dummy.c" - #include - #if defined(__UCLIBC__) - LIBC=uclibc - #elif defined(__dietlibc__) - LIBC=dietlibc - #elif defined(__GLIBC__) - LIBC=gnu - #else - #include - /* First heuristic to detect musl libc. */ - #ifdef __DEFINED_va_list - LIBC=musl - #endif - #endif - EOF - eval "$($CC_FOR_BUILD -E "$dummy.c" 2>/dev/null | grep '^LIBC' | sed 's, ,,g')" - - # Second heuristic to detect musl libc. - if [ "$LIBC" = unknown ] && - command -v ldd >/dev/null && - ldd --version 2>&1 | grep -q ^musl; then - LIBC=musl - fi - - # If the system lacks a compiler, then just pick glibc. - # We could probably try harder. - if [ "$LIBC" = unknown ]; then - LIBC=gnu - fi - ;; -esac - -# Note: order is significant - the case branches are not exclusive. - -case "$UNAME_MACHINE:$UNAME_SYSTEM:$UNAME_RELEASE:$UNAME_VERSION" in - *:NetBSD:*:*) - # NetBSD (nbsd) targets should (where applicable) match one or - # more of the tuples: *-*-netbsdelf*, *-*-netbsdaout*, - # *-*-netbsdecoff* and *-*-netbsd*. 
For targets that recently - # switched to ELF, *-*-netbsd* would select the old - # object file format. This provides both forward - # compatibility and a consistent mechanism for selecting the - # object file format. - # - # Note: NetBSD doesn't particularly care about the vendor - # portion of the name. We always set it to "unknown". - UNAME_MACHINE_ARCH=$( (uname -p 2>/dev/null || \ - /sbin/sysctl -n hw.machine_arch 2>/dev/null || \ - /usr/sbin/sysctl -n hw.machine_arch 2>/dev/null || \ - echo unknown)) - case "$UNAME_MACHINE_ARCH" in - aarch64eb) machine=aarch64_be-unknown ;; - armeb) machine=armeb-unknown ;; - arm*) machine=arm-unknown ;; - sh3el) machine=shl-unknown ;; - sh3eb) machine=sh-unknown ;; - sh5el) machine=sh5le-unknown ;; - earmv*) - arch=$(echo "$UNAME_MACHINE_ARCH" | sed -e 's,^e\(armv[0-9]\).*$,\1,') - endian=$(echo "$UNAME_MACHINE_ARCH" | sed -ne 's,^.*\(eb\)$,\1,p') - machine="${arch}${endian}"-unknown - ;; - *) machine="$UNAME_MACHINE_ARCH"-unknown ;; - esac - # The Operating System including object format, if it has switched - # to ELF recently (or will in the future) and ABI. - case "$UNAME_MACHINE_ARCH" in - earm*) - os=netbsdelf - ;; - arm*|i386|m68k|ns32k|sh3*|sparc|vax) - set_cc_for_build - if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \ - | grep -q __ELF__ - then - # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout). - # Return netbsd for either. FIX? - os=netbsd - else - os=netbsdelf - fi - ;; - *) - os=netbsd - ;; - esac - # Determine ABI tags. - case "$UNAME_MACHINE_ARCH" in - earm*) - expr='s/^earmv[0-9]/-eabi/;s/eb$//' - abi=$(echo "$UNAME_MACHINE_ARCH" | sed -e "$expr") - ;; - esac - # The OS release - # Debian GNU/NetBSD machines have a different userland, and - # thus, need a distinct triplet. However, they do not need - # kernel version information, so it can be replaced with a - # suitable tag, in the style of linux-gnu. - case "$UNAME_VERSION" in - Debian*) - release='-gnu' - ;; - *) - release=$(echo "$UNAME_RELEASE" | sed -e 's/[-_].*//' | cut -d. -f1,2) - ;; - esac - # Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM: - # contains redundant information, the shorter form: - # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used. 
- echo "$machine-${os}${release}${abi-}" - exit ;; - *:Bitrig:*:*) - UNAME_MACHINE_ARCH=$(arch | sed 's/Bitrig.//') - echo "$UNAME_MACHINE_ARCH"-unknown-bitrig"$UNAME_RELEASE" - exit ;; - *:OpenBSD:*:*) - UNAME_MACHINE_ARCH=$(arch | sed 's/OpenBSD.//') - echo "$UNAME_MACHINE_ARCH"-unknown-openbsd"$UNAME_RELEASE" - exit ;; - *:LibertyBSD:*:*) - UNAME_MACHINE_ARCH=$(arch | sed 's/^.*BSD\.//') - echo "$UNAME_MACHINE_ARCH"-unknown-libertybsd"$UNAME_RELEASE" - exit ;; - *:MidnightBSD:*:*) - echo "$UNAME_MACHINE"-unknown-midnightbsd"$UNAME_RELEASE" - exit ;; - *:ekkoBSD:*:*) - echo "$UNAME_MACHINE"-unknown-ekkobsd"$UNAME_RELEASE" - exit ;; - *:SolidBSD:*:*) - echo "$UNAME_MACHINE"-unknown-solidbsd"$UNAME_RELEASE" - exit ;; - *:OS108:*:*) - echo "$UNAME_MACHINE"-unknown-os108_"$UNAME_RELEASE" - exit ;; - macppc:MirBSD:*:*) - echo powerpc-unknown-mirbsd"$UNAME_RELEASE" - exit ;; - *:MirBSD:*:*) - echo "$UNAME_MACHINE"-unknown-mirbsd"$UNAME_RELEASE" - exit ;; - *:Sortix:*:*) - echo "$UNAME_MACHINE"-unknown-sortix - exit ;; - *:Twizzler:*:*) - echo "$UNAME_MACHINE"-unknown-twizzler - exit ;; - *:Redox:*:*) - echo "$UNAME_MACHINE"-unknown-redox - exit ;; - mips:OSF1:*.*) - echo mips-dec-osf1 - exit ;; - alpha:OSF1:*:*) - case $UNAME_RELEASE in - *4.0) - UNAME_RELEASE=$(/usr/sbin/sizer -v | awk '{print $3}') - ;; - *5.*) - UNAME_RELEASE=$(/usr/sbin/sizer -v | awk '{print $4}') - ;; - esac - # According to Compaq, /usr/sbin/psrinfo has been available on - # OSF/1 and Tru64 systems produced since 1995. I hope that - # covers most systems running today. This code pipes the CPU - # types through head -n 1, so we only detect the type of CPU 0. - ALPHA_CPU_TYPE=$(/usr/sbin/psrinfo -v | sed -n -e 's/^ The alpha \(.*\) processor.*$/\1/p' | head -n 1) - case "$ALPHA_CPU_TYPE" in - "EV4 (21064)") - UNAME_MACHINE=alpha ;; - "EV4.5 (21064)") - UNAME_MACHINE=alpha ;; - "LCA4 (21066/21068)") - UNAME_MACHINE=alpha ;; - "EV5 (21164)") - UNAME_MACHINE=alphaev5 ;; - "EV5.6 (21164A)") - UNAME_MACHINE=alphaev56 ;; - "EV5.6 (21164PC)") - UNAME_MACHINE=alphapca56 ;; - "EV5.7 (21164PC)") - UNAME_MACHINE=alphapca57 ;; - "EV6 (21264)") - UNAME_MACHINE=alphaev6 ;; - "EV6.7 (21264A)") - UNAME_MACHINE=alphaev67 ;; - "EV6.8CB (21264C)") - UNAME_MACHINE=alphaev68 ;; - "EV6.8AL (21264B)") - UNAME_MACHINE=alphaev68 ;; - "EV6.8CX (21264D)") - UNAME_MACHINE=alphaev68 ;; - "EV6.9A (21264/EV69A)") - UNAME_MACHINE=alphaev69 ;; - "EV7 (21364)") - UNAME_MACHINE=alphaev7 ;; - "EV7.9 (21364A)") - UNAME_MACHINE=alphaev79 ;; - esac - # A Pn.n version is a patched version. - # A Vn.n version is a released version. - # A Tn.n version is a released field test version. - # A Xn.n version is an unreleased experimental baselevel. - # 1.2 uses "1.2" for uname -r. - echo "$UNAME_MACHINE"-dec-osf"$(echo "$UNAME_RELEASE" | sed -e 's/^[PVTX]//' | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz)" - # Reset EXIT trap before exiting to avoid spurious non-zero exit code. - exitcode=$? 
- trap '' 0 - exit $exitcode ;; - Amiga*:UNIX_System_V:4.0:*) - echo m68k-unknown-sysv4 - exit ;; - *:[Aa]miga[Oo][Ss]:*:*) - echo "$UNAME_MACHINE"-unknown-amigaos - exit ;; - *:[Mm]orph[Oo][Ss]:*:*) - echo "$UNAME_MACHINE"-unknown-morphos - exit ;; - *:OS/390:*:*) - echo i370-ibm-openedition - exit ;; - *:z/VM:*:*) - echo s390-ibm-zvmoe - exit ;; - *:OS400:*:*) - echo powerpc-ibm-os400 - exit ;; - arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*) - echo arm-acorn-riscix"$UNAME_RELEASE" - exit ;; - arm*:riscos:*:*|arm*:RISCOS:*:*) - echo arm-unknown-riscos - exit ;; - SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*) - echo hppa1.1-hitachi-hiuxmpp - exit ;; - Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*) - # akee@wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE. - if test "$( (/bin/universe) 2>/dev/null)" = att ; then - echo pyramid-pyramid-sysv3 - else - echo pyramid-pyramid-bsd - fi - exit ;; - NILE*:*:*:dcosx) - echo pyramid-pyramid-svr4 - exit ;; - DRS?6000:unix:4.0:6*) - echo sparc-icl-nx6 - exit ;; - DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*) - case $(/usr/bin/uname -p) in - sparc) echo sparc-icl-nx7; exit ;; - esac ;; - s390x:SunOS:*:*) - echo "$UNAME_MACHINE"-ibm-solaris2"$(echo "$UNAME_RELEASE" | sed -e 's/[^.]*//')" - exit ;; - sun4H:SunOS:5.*:*) - echo sparc-hal-solaris2"$(echo "$UNAME_RELEASE"|sed -e 's/[^.]*//')" - exit ;; - sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*) - echo sparc-sun-solaris2"$(echo "$UNAME_RELEASE" | sed -e 's/[^.]*//')" - exit ;; - i86pc:AuroraUX:5.*:* | i86xen:AuroraUX:5.*:*) - echo i386-pc-auroraux"$UNAME_RELEASE" - exit ;; - i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*) - set_cc_for_build - SUN_ARCH=i386 - # If there is a compiler, see if it is configured for 64-bit objects. - # Note that the Sun cc does not turn __LP64__ into 1 like gcc does. - # This test works for both compilers. - if test "$CC_FOR_BUILD" != no_compiler_found; then - if (echo '#ifdef __amd64'; echo IS_64BIT_ARCH; echo '#endif') | \ - (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \ - grep IS_64BIT_ARCH >/dev/null - then - SUN_ARCH=x86_64 - fi - fi - echo "$SUN_ARCH"-pc-solaris2"$(echo "$UNAME_RELEASE"|sed -e 's/[^.]*//')" - exit ;; - sun4*:SunOS:6*:*) - # According to config.sub, this is the proper way to canonicalize - # SunOS6. Hard to guess exactly what SunOS6 will be like, but - # it's likely to be more like Solaris than SunOS4. - echo sparc-sun-solaris3"$(echo "$UNAME_RELEASE"|sed -e 's/[^.]*//')" - exit ;; - sun4*:SunOS:*:*) - case "$(/usr/bin/arch -k)" in - Series*|S4*) - UNAME_RELEASE=$(uname -v) - ;; - esac - # Japanese Language versions have a version number like `4.1.3-JL'. - echo sparc-sun-sunos"$(echo "$UNAME_RELEASE"|sed -e 's/-/_/')" - exit ;; - sun3*:SunOS:*:*) - echo m68k-sun-sunos"$UNAME_RELEASE" - exit ;; - sun*:*:4.2BSD:*) - UNAME_RELEASE=$( (sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null) - test "x$UNAME_RELEASE" = x && UNAME_RELEASE=3 - case "$(/bin/arch)" in - sun3) - echo m68k-sun-sunos"$UNAME_RELEASE" - ;; - sun4) - echo sparc-sun-sunos"$UNAME_RELEASE" - ;; - esac - exit ;; - aushp:SunOS:*:*) - echo sparc-auspex-sunos"$UNAME_RELEASE" - exit ;; - # The situation for MiNT is a little confusing. The machine name - # can be virtually everything (everything which is not - # "atarist" or "atariste" at least should have a processor - # > m68000). The system name ranges from "MiNT" over "FreeMiNT" - # to the lowercase version "mint" (or "freemint"). Finally - # the system name "TOS" denotes a system which is actually not - # MiNT. 
But MiNT is downward compatible to TOS, so this should - # be no problem. - atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*) - echo m68k-atari-mint"$UNAME_RELEASE" - exit ;; - atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*) - echo m68k-atari-mint"$UNAME_RELEASE" - exit ;; - *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*) - echo m68k-atari-mint"$UNAME_RELEASE" - exit ;; - milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*) - echo m68k-milan-mint"$UNAME_RELEASE" - exit ;; - hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*) - echo m68k-hades-mint"$UNAME_RELEASE" - exit ;; - *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*) - echo m68k-unknown-mint"$UNAME_RELEASE" - exit ;; - m68k:machten:*:*) - echo m68k-apple-machten"$UNAME_RELEASE" - exit ;; - powerpc:machten:*:*) - echo powerpc-apple-machten"$UNAME_RELEASE" - exit ;; - RISC*:Mach:*:*) - echo mips-dec-mach_bsd4.3 - exit ;; - RISC*:ULTRIX:*:*) - echo mips-dec-ultrix"$UNAME_RELEASE" - exit ;; - VAX*:ULTRIX*:*:*) - echo vax-dec-ultrix"$UNAME_RELEASE" - exit ;; - 2020:CLIX:*:* | 2430:CLIX:*:*) - echo clipper-intergraph-clix"$UNAME_RELEASE" - exit ;; - mips:*:*:UMIPS | mips:*:*:RISCos) - set_cc_for_build - sed 's/^ //' << EOF > "$dummy.c" -#ifdef __cplusplus -#include /* for printf() prototype */ - int main (int argc, char *argv[]) { -#else - int main (argc, argv) int argc; char *argv[]; { -#endif - #if defined (host_mips) && defined (MIPSEB) - #if defined (SYSTYPE_SYSV) - printf ("mips-mips-riscos%ssysv\\n", argv[1]); exit (0); - #endif - #if defined (SYSTYPE_SVR4) - printf ("mips-mips-riscos%ssvr4\\n", argv[1]); exit (0); - #endif - #if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD) - printf ("mips-mips-riscos%sbsd\\n", argv[1]); exit (0); - #endif - #endif - exit (-1); - } -EOF - $CC_FOR_BUILD -o "$dummy" "$dummy.c" && - dummyarg=$(echo "$UNAME_RELEASE" | sed -n 's/\([0-9]*\).*/\1/p') && - SYSTEM_NAME=$("$dummy" "$dummyarg") && - { echo "$SYSTEM_NAME"; exit; } - echo mips-mips-riscos"$UNAME_RELEASE" - exit ;; - Motorola:PowerMAX_OS:*:*) - echo powerpc-motorola-powermax - exit ;; - Motorola:*:4.3:PL8-*) - echo powerpc-harris-powermax - exit ;; - Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*) - echo powerpc-harris-powermax - exit ;; - Night_Hawk:Power_UNIX:*:*) - echo powerpc-harris-powerunix - exit ;; - m88k:CX/UX:7*:*) - echo m88k-harris-cxux7 - exit ;; - m88k:*:4*:R4*) - echo m88k-motorola-sysv4 - exit ;; - m88k:*:3*:R3*) - echo m88k-motorola-sysv3 - exit ;; - AViiON:dgux:*:*) - # DG/UX returns AViiON for all architectures - UNAME_PROCESSOR=$(/usr/bin/uname -p) - if test "$UNAME_PROCESSOR" = mc88100 || test "$UNAME_PROCESSOR" = mc88110 - then - if test "$TARGET_BINARY_INTERFACE"x = m88kdguxelfx || \ - test "$TARGET_BINARY_INTERFACE"x = x - then - echo m88k-dg-dgux"$UNAME_RELEASE" - else - echo m88k-dg-dguxbcs"$UNAME_RELEASE" - fi - else - echo i586-dg-dgux"$UNAME_RELEASE" - fi - exit ;; - M88*:DolphinOS:*:*) # DolphinOS (SVR3) - echo m88k-dolphin-sysv3 - exit ;; - M88*:*:R3*:*) - # Delta 88k system running SVR3 - echo m88k-motorola-sysv3 - exit ;; - XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3) - echo m88k-tektronix-sysv3 - exit ;; - Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD) - echo m68k-tektronix-bsd - exit ;; - *:IRIX*:*:*) - echo mips-sgi-irix"$(echo "$UNAME_RELEASE"|sed -e 's/-/_/g')" - exit ;; - ????????:AIX?:[12].1:2) # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX. 
- echo romp-ibm-aix # uname -m gives an 8 hex-code CPU id - exit ;; # Note that: echo "'$(uname -s)'" gives 'AIX ' - i*86:AIX:*:*) - echo i386-ibm-aix - exit ;; - ia64:AIX:*:*) - if test -x /usr/bin/oslevel ; then - IBM_REV=$(/usr/bin/oslevel) - else - IBM_REV="$UNAME_VERSION.$UNAME_RELEASE" - fi - echo "$UNAME_MACHINE"-ibm-aix"$IBM_REV" - exit ;; - *:AIX:2:3) - if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then - set_cc_for_build - sed 's/^ //' << EOF > "$dummy.c" - #include - - main() - { - if (!__power_pc()) - exit(1); - puts("powerpc-ibm-aix3.2.5"); - exit(0); - } -EOF - if $CC_FOR_BUILD -o "$dummy" "$dummy.c" && SYSTEM_NAME=$("$dummy") - then - echo "$SYSTEM_NAME" - else - echo rs6000-ibm-aix3.2.5 - fi - elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then - echo rs6000-ibm-aix3.2.4 - else - echo rs6000-ibm-aix3.2 - fi - exit ;; - *:AIX:*:[4567]) - IBM_CPU_ID=$(/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }') - if /usr/sbin/lsattr -El "$IBM_CPU_ID" | grep ' POWER' >/dev/null 2>&1; then - IBM_ARCH=rs6000 - else - IBM_ARCH=powerpc - fi - if test -x /usr/bin/lslpp ; then - IBM_REV=$(/usr/bin/lslpp -Lqc bos.rte.libc | - awk -F: '{ print $3 }' | sed s/[0-9]*$/0/) - else - IBM_REV="$UNAME_VERSION.$UNAME_RELEASE" - fi - echo "$IBM_ARCH"-ibm-aix"$IBM_REV" - exit ;; - *:AIX:*:*) - echo rs6000-ibm-aix - exit ;; - ibmrt:4.4BSD:*|romp-ibm:4.4BSD:*) - echo romp-ibm-bsd4.4 - exit ;; - ibmrt:*BSD:*|romp-ibm:BSD:*) # covers RT/PC BSD and - echo romp-ibm-bsd"$UNAME_RELEASE" # 4.3 with uname added to - exit ;; # report: romp-ibm BSD 4.3 - *:BOSX:*:*) - echo rs6000-bull-bosx - exit ;; - DPX/2?00:B.O.S.:*:*) - echo m68k-bull-sysv3 - exit ;; - 9000/[34]??:4.3bsd:1.*:*) - echo m68k-hp-bsd - exit ;; - hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*) - echo m68k-hp-bsd4.4 - exit ;; - 9000/[34678]??:HP-UX:*:*) - HPUX_REV=$(echo "$UNAME_RELEASE"|sed -e 's/[^.]*.[0B]*//') - case "$UNAME_MACHINE" in - 9000/31?) HP_ARCH=m68000 ;; - 9000/[34]??) HP_ARCH=m68k ;; - 9000/[678][0-9][0-9]) - if test -x /usr/bin/getconf; then - sc_cpu_version=$(/usr/bin/getconf SC_CPU_VERSION 2>/dev/null) - sc_kernel_bits=$(/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null) - case "$sc_cpu_version" in - 523) HP_ARCH=hppa1.0 ;; # CPU_PA_RISC1_0 - 528) HP_ARCH=hppa1.1 ;; # CPU_PA_RISC1_1 - 532) # CPU_PA_RISC2_0 - case "$sc_kernel_bits" in - 32) HP_ARCH=hppa2.0n ;; - 64) HP_ARCH=hppa2.0w ;; - '') HP_ARCH=hppa2.0 ;; # HP-UX 10.20 - esac ;; - esac - fi - if test "$HP_ARCH" = ""; then - set_cc_for_build - sed 's/^ //' << EOF > "$dummy.c" - - #define _HPUX_SOURCE - #include - #include - - int main () - { - #if defined(_SC_KERNEL_BITS) - long bits = sysconf(_SC_KERNEL_BITS); - #endif - long cpu = sysconf (_SC_CPU_VERSION); - - switch (cpu) - { - case CPU_PA_RISC1_0: puts ("hppa1.0"); break; - case CPU_PA_RISC1_1: puts ("hppa1.1"); break; - case CPU_PA_RISC2_0: - #if defined(_SC_KERNEL_BITS) - switch (bits) - { - case 64: puts ("hppa2.0w"); break; - case 32: puts ("hppa2.0n"); break; - default: puts ("hppa2.0"); break; - } break; - #else /* !defined(_SC_KERNEL_BITS) */ - puts ("hppa2.0"); break; - #endif - default: puts ("hppa1.0"); break; - } - exit (0); - } -EOF - (CCOPTS="" $CC_FOR_BUILD -o "$dummy" "$dummy.c" 2>/dev/null) && HP_ARCH=$("$dummy") - test -z "$HP_ARCH" && HP_ARCH=hppa - fi ;; - esac - if test "$HP_ARCH" = hppa2.0w - then - set_cc_for_build - - # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating - # 32-bit code. 
hppa64-hp-hpux* has the same kernel and a compiler - # generating 64-bit code. GNU and HP use different nomenclature: - # - # $ CC_FOR_BUILD=cc ./config.guess - # => hppa2.0w-hp-hpux11.23 - # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess - # => hppa64-hp-hpux11.23 - - if echo __LP64__ | (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | - grep -q __LP64__ - then - HP_ARCH=hppa2.0w - else - HP_ARCH=hppa64 - fi - fi - echo "$HP_ARCH"-hp-hpux"$HPUX_REV" - exit ;; - ia64:HP-UX:*:*) - HPUX_REV=$(echo "$UNAME_RELEASE"|sed -e 's/[^.]*.[0B]*//') - echo ia64-hp-hpux"$HPUX_REV" - exit ;; - 3050*:HI-UX:*:*) - set_cc_for_build - sed 's/^ //' << EOF > "$dummy.c" - #include - int - main () - { - long cpu = sysconf (_SC_CPU_VERSION); - /* The order matters, because CPU_IS_HP_MC68K erroneously returns - true for CPU_PA_RISC1_0. CPU_IS_PA_RISC returns correct - results, however. */ - if (CPU_IS_PA_RISC (cpu)) - { - switch (cpu) - { - case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break; - case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break; - case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break; - default: puts ("hppa-hitachi-hiuxwe2"); break; - } - } - else if (CPU_IS_HP_MC68K (cpu)) - puts ("m68k-hitachi-hiuxwe2"); - else puts ("unknown-hitachi-hiuxwe2"); - exit (0); - } -EOF - $CC_FOR_BUILD -o "$dummy" "$dummy.c" && SYSTEM_NAME=$("$dummy") && - { echo "$SYSTEM_NAME"; exit; } - echo unknown-hitachi-hiuxwe2 - exit ;; - 9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:*) - echo hppa1.1-hp-bsd - exit ;; - 9000/8??:4.3bsd:*:*) - echo hppa1.0-hp-bsd - exit ;; - *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*) - echo hppa1.0-hp-mpeix - exit ;; - hp7??:OSF1:*:* | hp8?[79]:OSF1:*:*) - echo hppa1.1-hp-osf - exit ;; - hp8??:OSF1:*:*) - echo hppa1.0-hp-osf - exit ;; - i*86:OSF1:*:*) - if test -x /usr/sbin/sysversion ; then - echo "$UNAME_MACHINE"-unknown-osf1mk - else - echo "$UNAME_MACHINE"-unknown-osf1 - fi - exit ;; - parisc*:Lites*:*:*) - echo hppa1.1-hp-lites - exit ;; - C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*) - echo c1-convex-bsd - exit ;; - C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*) - if getsysinfo -f scalar_acc - then echo c32-convex-bsd - else echo c2-convex-bsd - fi - exit ;; - C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*) - echo c34-convex-bsd - exit ;; - C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*) - echo c38-convex-bsd - exit ;; - C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*) - echo c4-convex-bsd - exit ;; - CRAY*Y-MP:*:*:*) - echo ymp-cray-unicos"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/' - exit ;; - CRAY*[A-Z]90:*:*:*) - echo "$UNAME_MACHINE"-cray-unicos"$UNAME_RELEASE" \ - | sed -e 's/CRAY.*\([A-Z]90\)/\1/' \ - -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \ - -e 's/\.[^.]*$/.X/' - exit ;; - CRAY*TS:*:*:*) - echo t90-cray-unicos"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/' - exit ;; - CRAY*T3E:*:*:*) - echo alphaev5-cray-unicosmk"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/' - exit ;; - CRAY*SV1:*:*:*) - echo sv1-cray-unicos"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/' - exit ;; - *:UNICOS/mp:*:*) - echo craynv-cray-unicosmp"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/' - exit ;; - F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*) - FUJITSU_PROC=$(uname -m | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz) - FUJITSU_SYS=$(uname -p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///') - FUJITSU_REL=$(echo "$UNAME_RELEASE" | sed -e 's/ /_/') - echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" - exit ;; - 5000:UNIX_System_V:4.*:*) - FUJITSU_SYS=$(uname -p | tr 
ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///') - FUJITSU_REL=$(echo "$UNAME_RELEASE" | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/ /_/') - echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" - exit ;; - i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*) - echo "$UNAME_MACHINE"-pc-bsdi"$UNAME_RELEASE" - exit ;; - sparc*:BSD/OS:*:*) - echo sparc-unknown-bsdi"$UNAME_RELEASE" - exit ;; - *:BSD/OS:*:*) - echo "$UNAME_MACHINE"-unknown-bsdi"$UNAME_RELEASE" - exit ;; - arm:FreeBSD:*:*) - UNAME_PROCESSOR=$(uname -p) - set_cc_for_build - if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \ - | grep -q __ARM_PCS_VFP - then - echo "${UNAME_PROCESSOR}"-unknown-freebsd"$(echo ${UNAME_RELEASE}|sed -e 's/[-(].*//')"-gnueabi - else - echo "${UNAME_PROCESSOR}"-unknown-freebsd"$(echo ${UNAME_RELEASE}|sed -e 's/[-(].*//')"-gnueabihf - fi - exit ;; - *:FreeBSD:*:*) - UNAME_PROCESSOR=$(/usr/bin/uname -p) - case "$UNAME_PROCESSOR" in - amd64) - UNAME_PROCESSOR=x86_64 ;; - i386) - UNAME_PROCESSOR=i586 ;; - esac - echo "$UNAME_PROCESSOR"-unknown-freebsd"$(echo "$UNAME_RELEASE"|sed -e 's/[-(].*//')" - exit ;; - i*:CYGWIN*:*) - echo "$UNAME_MACHINE"-pc-cygwin - exit ;; - *:MINGW64*:*) - echo "$UNAME_MACHINE"-pc-mingw64 - exit ;; - *:MINGW*:*) - echo "$UNAME_MACHINE"-pc-mingw32 - exit ;; - *:MSYS*:*) - echo "$UNAME_MACHINE"-pc-msys - exit ;; - i*:PW*:*) - echo "$UNAME_MACHINE"-pc-pw32 - exit ;; - *:Interix*:*) - case "$UNAME_MACHINE" in - x86) - echo i586-pc-interix"$UNAME_RELEASE" - exit ;; - authenticamd | genuineintel | EM64T) - echo x86_64-unknown-interix"$UNAME_RELEASE" - exit ;; - IA64) - echo ia64-unknown-interix"$UNAME_RELEASE" - exit ;; - esac ;; - i*:UWIN*:*) - echo "$UNAME_MACHINE"-pc-uwin - exit ;; - amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*) - echo x86_64-pc-cygwin - exit ;; - prep*:SunOS:5.*:*) - echo powerpcle-unknown-solaris2"$(echo "$UNAME_RELEASE"|sed -e 's/[^.]*//')" - exit ;; - *:GNU:*:*) - # the GNU system - echo "$(echo "$UNAME_MACHINE"|sed -e 's,[-/].*$,,')-unknown-$LIBC$(echo "$UNAME_RELEASE"|sed -e 's,/.*$,,')" - exit ;; - *:GNU/*:*:*) - # other systems with GNU libc and userland - echo "$UNAME_MACHINE-unknown-$(echo "$UNAME_SYSTEM" | sed 's,^[^/]*/,,' | tr "[:upper:]" "[:lower:]")$(echo "$UNAME_RELEASE"|sed -e 's/[-(].*//')-$LIBC" - exit ;; - *:Minix:*:*) - echo "$UNAME_MACHINE"-unknown-minix - exit ;; - aarch64:Linux:*:*) - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" - exit ;; - aarch64_be:Linux:*:*) - UNAME_MACHINE=aarch64_be - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" - exit ;; - alpha:Linux:*:*) - case $(sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' /proc/cpuinfo 2>/dev/null) in - EV5) UNAME_MACHINE=alphaev5 ;; - EV56) UNAME_MACHINE=alphaev56 ;; - PCA56) UNAME_MACHINE=alphapca56 ;; - PCA57) UNAME_MACHINE=alphapca56 ;; - EV6) UNAME_MACHINE=alphaev6 ;; - EV67) UNAME_MACHINE=alphaev67 ;; - EV68*) UNAME_MACHINE=alphaev68 ;; - esac - objdump --private-headers /bin/sh | grep -q ld.so.1 - if test "$?" 
= 0 ; then LIBC=gnulibc1 ; fi - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" - exit ;; - arc:Linux:*:* | arceb:Linux:*:*) - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" - exit ;; - arm*:Linux:*:*) - set_cc_for_build - if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \ - | grep -q __ARM_EABI__ - then - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" - else - if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \ - | grep -q __ARM_PCS_VFP - then - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"eabi - else - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"eabihf - fi - fi - exit ;; - avr32*:Linux:*:*) - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" - exit ;; - cris:Linux:*:*) - echo "$UNAME_MACHINE"-axis-linux-"$LIBC" - exit ;; - crisv32:Linux:*:*) - echo "$UNAME_MACHINE"-axis-linux-"$LIBC" - exit ;; - e2k:Linux:*:*) - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" - exit ;; - frv:Linux:*:*) - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" - exit ;; - hexagon:Linux:*:*) - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" - exit ;; - i*86:Linux:*:*) - echo "$UNAME_MACHINE"-pc-linux-"$LIBC" - exit ;; - ia64:Linux:*:*) - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" - exit ;; - k1om:Linux:*:*) - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" - exit ;; - loongarch32:Linux:*:* | loongarch64:Linux:*:* | loongarchx32:Linux:*:*) - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" - exit ;; - m32r*:Linux:*:*) - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" - exit ;; - m68*:Linux:*:*) - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" - exit ;; - mips:Linux:*:* | mips64:Linux:*:*) - set_cc_for_build - IS_GLIBC=0 - test x"${LIBC}" = xgnu && IS_GLIBC=1 - sed 's/^ //' << EOF > "$dummy.c" - #undef CPU - #undef mips - #undef mipsel - #undef mips64 - #undef mips64el - #if ${IS_GLIBC} && defined(_ABI64) - LIBCABI=gnuabi64 - #else - #if ${IS_GLIBC} && defined(_ABIN32) - LIBCABI=gnuabin32 - #else - LIBCABI=${LIBC} - #endif - #endif - - #if ${IS_GLIBC} && defined(__mips64) && defined(__mips_isa_rev) && __mips_isa_rev>=6 - CPU=mipsisa64r6 - #else - #if ${IS_GLIBC} && !defined(__mips64) && defined(__mips_isa_rev) && __mips_isa_rev>=6 - CPU=mipsisa32r6 - #else - #if defined(__mips64) - CPU=mips64 - #else - CPU=mips - #endif - #endif - #endif - - #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL) - MIPS_ENDIAN=el - #else - #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB) - MIPS_ENDIAN= - #else - MIPS_ENDIAN= - #endif - #endif -EOF - eval "$($CC_FOR_BUILD -E "$dummy.c" 2>/dev/null | grep '^CPU\|^MIPS_ENDIAN\|^LIBCABI')" - test "x$CPU" != x && { echo "$CPU${MIPS_ENDIAN}-unknown-linux-$LIBCABI"; exit; } - ;; - mips64el:Linux:*:*) - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" - exit ;; - openrisc*:Linux:*:*) - echo or1k-unknown-linux-"$LIBC" - exit ;; - or32:Linux:*:* | or1k*:Linux:*:*) - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" - exit ;; - padre:Linux:*:*) - echo sparc-unknown-linux-"$LIBC" - exit ;; - parisc64:Linux:*:* | hppa64:Linux:*:*) - echo hppa64-unknown-linux-"$LIBC" - exit ;; - parisc:Linux:*:* | hppa:Linux:*:*) - # Look for CPU level - case $(grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2) in - PA7*) echo hppa1.1-unknown-linux-"$LIBC" ;; - PA8*) echo hppa2.0-unknown-linux-"$LIBC" ;; - *) echo hppa-unknown-linux-"$LIBC" ;; - esac - exit ;; - ppc64:Linux:*:*) - echo powerpc64-unknown-linux-"$LIBC" - exit ;; - ppc:Linux:*:*) - echo powerpc-unknown-linux-"$LIBC" - exit ;; - ppc64le:Linux:*:*) - echo powerpc64le-unknown-linux-"$LIBC" - exit ;; - ppcle:Linux:*:*) - 
echo powerpcle-unknown-linux-"$LIBC" - exit ;; - riscv32:Linux:*:* | riscv32be:Linux:*:* | riscv64:Linux:*:* | riscv64be:Linux:*:*) - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" - exit ;; - s390:Linux:*:* | s390x:Linux:*:*) - echo "$UNAME_MACHINE"-ibm-linux-"$LIBC" - exit ;; - sh64*:Linux:*:*) - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" - exit ;; - sh*:Linux:*:*) - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" - exit ;; - sparc:Linux:*:* | sparc64:Linux:*:*) - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" - exit ;; - tile*:Linux:*:*) - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" - exit ;; - vax:Linux:*:*) - echo "$UNAME_MACHINE"-dec-linux-"$LIBC" - exit ;; - x86_64:Linux:*:*) - set_cc_for_build - LIBCABI=$LIBC - if test "$CC_FOR_BUILD" != no_compiler_found; then - if (echo '#ifdef __ILP32__'; echo IS_X32; echo '#endif') | \ - (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \ - grep IS_X32 >/dev/null - then - LIBCABI="$LIBC"x32 - fi - fi - echo "$UNAME_MACHINE"-pc-linux-"$LIBCABI" - exit ;; - xtensa*:Linux:*:*) - echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" - exit ;; - i*86:DYNIX/ptx:4*:*) - # ptx 4.0 does uname -s correctly, with DYNIX/ptx in there. - # earlier versions are messed up and put the nodename in both - # sysname and nodename. - echo i386-sequent-sysv4 - exit ;; - i*86:UNIX_SV:4.2MP:2.*) - # Unixware is an offshoot of SVR4, but it has its own version - # number series starting with 2... - # I am not positive that other SVR4 systems won't match this, - # I just have to hope. -- rms. - # Use sysv4.2uw... so that sysv4* matches it. - echo "$UNAME_MACHINE"-pc-sysv4.2uw"$UNAME_VERSION" - exit ;; - i*86:OS/2:*:*) - # If we were able to find `uname', then EMX Unix compatibility - # is probably installed. - echo "$UNAME_MACHINE"-pc-os2-emx - exit ;; - i*86:XTS-300:*:STOP) - echo "$UNAME_MACHINE"-unknown-stop - exit ;; - i*86:atheos:*:*) - echo "$UNAME_MACHINE"-unknown-atheos - exit ;; - i*86:syllable:*:*) - echo "$UNAME_MACHINE"-pc-syllable - exit ;; - i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.[02]*:*) - echo i386-unknown-lynxos"$UNAME_RELEASE" - exit ;; - i*86:*DOS:*:*) - echo "$UNAME_MACHINE"-pc-msdosdjgpp - exit ;; - i*86:*:4.*:*) - UNAME_REL=$(echo "$UNAME_RELEASE" | sed 's/\/MP$//') - if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then - echo "$UNAME_MACHINE"-univel-sysv"$UNAME_REL" - else - echo "$UNAME_MACHINE"-pc-sysv"$UNAME_REL" - fi - exit ;; - i*86:*:5:[678]*) - # UnixWare 7.x, OpenUNIX and OpenServer 6. - case $(/bin/uname -X | grep "^Machine") in - *486*) UNAME_MACHINE=i486 ;; - *Pentium) UNAME_MACHINE=i586 ;; - *Pent*|*Celeron) UNAME_MACHINE=i686 ;; - esac - echo "$UNAME_MACHINE-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION}" - exit ;; - i*86:*:3.2:*) - if test -f /usr/options/cb.name; then - UNAME_REL=$(sed -n 's/.*Version //p' /dev/null >/dev/null ; then - UNAME_REL=$( (/bin/uname -X|grep Release|sed -e 's/.*= //')) - (/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486 - (/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \ - && UNAME_MACHINE=i586 - (/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \ - && UNAME_MACHINE=i686 - (/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \ - && UNAME_MACHINE=i686 - echo "$UNAME_MACHINE"-pc-sco"$UNAME_REL" - else - echo "$UNAME_MACHINE"-pc-sysv32 - fi - exit ;; - pc:*:*:*) - # Left here for compatibility: - # uname -m prints for DJGPP always 'pc', but it prints nothing about - # the processor, so we play safe by assuming i586. 
- # Note: whatever this is, it MUST be the same as what config.sub - # prints for the "djgpp" host, or else GDB configure will decide that - # this is a cross-build. - echo i586-pc-msdosdjgpp - exit ;; - Intel:Mach:3*:*) - echo i386-pc-mach3 - exit ;; - paragon:*:*:*) - echo i860-intel-osf1 - exit ;; - i860:*:4.*:*) # i860-SVR4 - if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then - echo i860-stardent-sysv"$UNAME_RELEASE" # Stardent Vistra i860-SVR4 - else # Add other i860-SVR4 vendors below as they are discovered. - echo i860-unknown-sysv"$UNAME_RELEASE" # Unknown i860-SVR4 - fi - exit ;; - mini*:CTIX:SYS*5:*) - # "miniframe" - echo m68010-convergent-sysv - exit ;; - mc68k:UNIX:SYSTEM5:3.51m) - echo m68k-convergent-sysv - exit ;; - M680?0:D-NIX:5.3:*) - echo m68k-diab-dnix - exit ;; - M68*:*:R3V[5678]*:*) - test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;; - 3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0) - OS_REL='' - test -r /etc/.relid \ - && OS_REL=.$(sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid) - /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ - && { echo i486-ncr-sysv4.3"$OS_REL"; exit; } - /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ - && { echo i586-ncr-sysv4.3"$OS_REL"; exit; } ;; - 3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*) - /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ - && { echo i486-ncr-sysv4; exit; } ;; - NCR*:*:4.2:* | MPRAS*:*:4.2:*) - OS_REL='.3' - test -r /etc/.relid \ - && OS_REL=.$(sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid) - /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ - && { echo i486-ncr-sysv4.3"$OS_REL"; exit; } - /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ - && { echo i586-ncr-sysv4.3"$OS_REL"; exit; } - /bin/uname -p 2>/dev/null | /bin/grep pteron >/dev/null \ - && { echo i586-ncr-sysv4.3"$OS_REL"; exit; } ;; - m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*) - echo m68k-unknown-lynxos"$UNAME_RELEASE" - exit ;; - mc68030:UNIX_System_V:4.*:*) - echo m68k-atari-sysv4 - exit ;; - TSUNAMI:LynxOS:2.*:*) - echo sparc-unknown-lynxos"$UNAME_RELEASE" - exit ;; - rs6000:LynxOS:2.*:*) - echo rs6000-unknown-lynxos"$UNAME_RELEASE" - exit ;; - PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.[02]*:*) - echo powerpc-unknown-lynxos"$UNAME_RELEASE" - exit ;; - SM[BE]S:UNIX_SV:*:*) - echo mips-dde-sysv"$UNAME_RELEASE" - exit ;; - RM*:ReliantUNIX-*:*:*) - echo mips-sni-sysv4 - exit ;; - RM*:SINIX-*:*:*) - echo mips-sni-sysv4 - exit ;; - *:SINIX-*:*:*) - if uname -p 2>/dev/null >/dev/null ; then - UNAME_MACHINE=$( (uname -p) 2>/dev/null) - echo "$UNAME_MACHINE"-sni-sysv4 - else - echo ns32k-sni-sysv - fi - exit ;; - PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort - # says - echo i586-unisys-sysv4 - exit ;; - *:UNIX_System_V:4*:FTX*) - # From Gerald Hewes . - # How about differentiating between stratus architectures? -djm - echo hppa1.1-stratus-sysv4 - exit ;; - *:*:*:FTX*) - # From seanf@swdc.stratus.com. - echo i860-stratus-sysv4 - exit ;; - i*86:VOS:*:*) - # From Paul.Green@stratus.com. - echo "$UNAME_MACHINE"-stratus-vos - exit ;; - *:VOS:*:*) - # From Paul.Green@stratus.com. 
- echo hppa1.1-stratus-vos - exit ;; - mc68*:A/UX:*:*) - echo m68k-apple-aux"$UNAME_RELEASE" - exit ;; - news*:NEWS-OS:6*:*) - echo mips-sony-newsos6 - exit ;; - R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*) - if test -d /usr/nec; then - echo mips-nec-sysv"$UNAME_RELEASE" - else - echo mips-unknown-sysv"$UNAME_RELEASE" - fi - exit ;; - BeBox:BeOS:*:*) # BeOS running on hardware made by Be, PPC only. - echo powerpc-be-beos - exit ;; - BeMac:BeOS:*:*) # BeOS running on Mac or Mac clone, PPC only. - echo powerpc-apple-beos - exit ;; - BePC:BeOS:*:*) # BeOS running on Intel PC compatible. - echo i586-pc-beos - exit ;; - BePC:Haiku:*:*) # Haiku running on Intel PC compatible. - echo i586-pc-haiku - exit ;; - x86_64:Haiku:*:*) - echo x86_64-unknown-haiku - exit ;; - SX-4:SUPER-UX:*:*) - echo sx4-nec-superux"$UNAME_RELEASE" - exit ;; - SX-5:SUPER-UX:*:*) - echo sx5-nec-superux"$UNAME_RELEASE" - exit ;; - SX-6:SUPER-UX:*:*) - echo sx6-nec-superux"$UNAME_RELEASE" - exit ;; - SX-7:SUPER-UX:*:*) - echo sx7-nec-superux"$UNAME_RELEASE" - exit ;; - SX-8:SUPER-UX:*:*) - echo sx8-nec-superux"$UNAME_RELEASE" - exit ;; - SX-8R:SUPER-UX:*:*) - echo sx8r-nec-superux"$UNAME_RELEASE" - exit ;; - SX-ACE:SUPER-UX:*:*) - echo sxace-nec-superux"$UNAME_RELEASE" - exit ;; - Power*:Rhapsody:*:*) - echo powerpc-apple-rhapsody"$UNAME_RELEASE" - exit ;; - *:Rhapsody:*:*) - echo "$UNAME_MACHINE"-apple-rhapsody"$UNAME_RELEASE" - exit ;; - arm64:Darwin:*:*) - echo aarch64-apple-darwin"$UNAME_RELEASE" - exit ;; - *:Darwin:*:*) - UNAME_PROCESSOR=$(uname -p) - case $UNAME_PROCESSOR in - unknown) UNAME_PROCESSOR=powerpc ;; - esac - if command -v xcode-select > /dev/null 2> /dev/null && \ - ! xcode-select --print-path > /dev/null 2> /dev/null ; then - # Avoid executing cc if there is no toolchain installed as - # cc will be a stub that puts up a graphical alert - # prompting the user to install developer tools. 
- CC_FOR_BUILD=no_compiler_found - else - set_cc_for_build - fi - if test "$CC_FOR_BUILD" != no_compiler_found; then - if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \ - (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \ - grep IS_64BIT_ARCH >/dev/null - then - case $UNAME_PROCESSOR in - i386) UNAME_PROCESSOR=x86_64 ;; - powerpc) UNAME_PROCESSOR=powerpc64 ;; - esac - fi - # On 10.4-10.6 one might compile for PowerPC via gcc -arch ppc - if (echo '#ifdef __POWERPC__'; echo IS_PPC; echo '#endif') | \ - (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \ - grep IS_PPC >/dev/null - then - UNAME_PROCESSOR=powerpc - fi - elif test "$UNAME_PROCESSOR" = i386 ; then - # uname -m returns i386 or x86_64 - UNAME_PROCESSOR=$UNAME_MACHINE - fi - echo "$UNAME_PROCESSOR"-apple-darwin"$UNAME_RELEASE" - exit ;; - *:procnto*:*:* | *:QNX:[0123456789]*:*) - UNAME_PROCESSOR=$(uname -p) - if test "$UNAME_PROCESSOR" = x86; then - UNAME_PROCESSOR=i386 - UNAME_MACHINE=pc - fi - echo "$UNAME_PROCESSOR"-"$UNAME_MACHINE"-nto-qnx"$UNAME_RELEASE" - exit ;; - *:QNX:*:4*) - echo i386-pc-qnx - exit ;; - NEO-*:NONSTOP_KERNEL:*:*) - echo neo-tandem-nsk"$UNAME_RELEASE" - exit ;; - NSE-*:NONSTOP_KERNEL:*:*) - echo nse-tandem-nsk"$UNAME_RELEASE" - exit ;; - NSR-*:NONSTOP_KERNEL:*:*) - echo nsr-tandem-nsk"$UNAME_RELEASE" - exit ;; - NSV-*:NONSTOP_KERNEL:*:*) - echo nsv-tandem-nsk"$UNAME_RELEASE" - exit ;; - NSX-*:NONSTOP_KERNEL:*:*) - echo nsx-tandem-nsk"$UNAME_RELEASE" - exit ;; - *:NonStop-UX:*:*) - echo mips-compaq-nonstopux - exit ;; - BS2000:POSIX*:*:*) - echo bs2000-siemens-sysv - exit ;; - DS/*:UNIX_System_V:*:*) - echo "$UNAME_MACHINE"-"$UNAME_SYSTEM"-"$UNAME_RELEASE" - exit ;; - *:Plan9:*:*) - # "uname -m" is not consistent, so use $cputype instead. 386 - # is converted to i386 for consistency with other x86 - # operating systems. - # shellcheck disable=SC2154 - if test "$cputype" = 386; then - UNAME_MACHINE=i386 - else - UNAME_MACHINE="$cputype" - fi - echo "$UNAME_MACHINE"-unknown-plan9 - exit ;; - *:TOPS-10:*:*) - echo pdp10-unknown-tops10 - exit ;; - *:TENEX:*:*) - echo pdp10-unknown-tenex - exit ;; - KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*) - echo pdp10-dec-tops20 - exit ;; - XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*) - echo pdp10-xkl-tops20 - exit ;; - *:TOPS-20:*:*) - echo pdp10-unknown-tops20 - exit ;; - *:ITS:*:*) - echo pdp10-unknown-its - exit ;; - SEI:*:*:SEIUX) - echo mips-sei-seiux"$UNAME_RELEASE" - exit ;; - *:DragonFly:*:*) - echo "$UNAME_MACHINE"-unknown-dragonfly"$(echo "$UNAME_RELEASE"|sed -e 's/[-(].*//')" - exit ;; - *:*VMS:*:*) - UNAME_MACHINE=$( (uname -p) 2>/dev/null) - case "$UNAME_MACHINE" in - A*) echo alpha-dec-vms ; exit ;; - I*) echo ia64-dec-vms ; exit ;; - V*) echo vax-dec-vms ; exit ;; - esac ;; - *:XENIX:*:SysV) - echo i386-pc-xenix - exit ;; - i*86:skyos:*:*) - echo "$UNAME_MACHINE"-pc-skyos"$(echo "$UNAME_RELEASE" | sed -e 's/ .*$//')" - exit ;; - i*86:rdos:*:*) - echo "$UNAME_MACHINE"-pc-rdos - exit ;; - *:AROS:*:*) - echo "$UNAME_MACHINE"-unknown-aros - exit ;; - x86_64:VMkernel:*:*) - echo "$UNAME_MACHINE"-unknown-esx - exit ;; - amd64:Isilon\ OneFS:*:*) - echo x86_64-unknown-onefs - exit ;; - *:Unleashed:*:*) - echo "$UNAME_MACHINE"-unknown-unleashed"$UNAME_RELEASE" - exit ;; -esac - -# No uname command or uname output not recognized. 
-set_cc_for_build -cat > "$dummy.c" < -#include -#endif -#if defined(ultrix) || defined(_ultrix) || defined(__ultrix) || defined(__ultrix__) -#if defined (vax) || defined (__vax) || defined (__vax__) || defined(mips) || defined(__mips) || defined(__mips__) || defined(MIPS) || defined(__MIPS__) -#include -#if defined(_SIZE_T_) || defined(SIGLOST) -#include -#endif -#endif -#endif -main () -{ -#if defined (sony) -#if defined (MIPSEB) - /* BFD wants "bsd" instead of "newsos". Perhaps BFD should be changed, - I don't know.... */ - printf ("mips-sony-bsd\n"); exit (0); -#else -#include - printf ("m68k-sony-newsos%s\n", -#ifdef NEWSOS4 - "4" -#else - "" -#endif - ); exit (0); -#endif -#endif - -#if defined (NeXT) -#if !defined (__ARCHITECTURE__) -#define __ARCHITECTURE__ "m68k" -#endif - int version; - version=$( (hostinfo | sed -n 's/.*NeXT Mach \([0-9]*\).*/\1/p') 2>/dev/null); - if (version < 4) - printf ("%s-next-nextstep%d\n", __ARCHITECTURE__, version); - else - printf ("%s-next-openstep%d\n", __ARCHITECTURE__, version); - exit (0); -#endif - -#if defined (MULTIMAX) || defined (n16) -#if defined (UMAXV) - printf ("ns32k-encore-sysv\n"); exit (0); -#else -#if defined (CMU) - printf ("ns32k-encore-mach\n"); exit (0); -#else - printf ("ns32k-encore-bsd\n"); exit (0); -#endif -#endif -#endif - -#if defined (__386BSD__) - printf ("i386-pc-bsd\n"); exit (0); -#endif - -#if defined (sequent) -#if defined (i386) - printf ("i386-sequent-dynix\n"); exit (0); -#endif -#if defined (ns32000) - printf ("ns32k-sequent-dynix\n"); exit (0); -#endif -#endif - -#if defined (_SEQUENT_) - struct utsname un; - - uname(&un); - if (strncmp(un.version, "V2", 2) == 0) { - printf ("i386-sequent-ptx2\n"); exit (0); - } - if (strncmp(un.version, "V1", 2) == 0) { /* XXX is V1 correct? */ - printf ("i386-sequent-ptx1\n"); exit (0); - } - printf ("i386-sequent-ptx\n"); exit (0); -#endif - -#if defined (vax) -#if !defined (ultrix) -#include -#if defined (BSD) -#if BSD == 43 - printf ("vax-dec-bsd4.3\n"); exit (0); -#else -#if BSD == 199006 - printf ("vax-dec-bsd4.3reno\n"); exit (0); -#else - printf ("vax-dec-bsd\n"); exit (0); -#endif -#endif -#else - printf ("vax-dec-bsd\n"); exit (0); -#endif -#else -#if defined(_SIZE_T_) || defined(SIGLOST) - struct utsname un; - uname (&un); - printf ("vax-dec-ultrix%s\n", un.release); exit (0); -#else - printf ("vax-dec-ultrix\n"); exit (0); -#endif -#endif -#endif -#if defined(ultrix) || defined(_ultrix) || defined(__ultrix) || defined(__ultrix__) -#if defined(mips) || defined(__mips) || defined(__mips__) || defined(MIPS) || defined(__MIPS__) -#if defined(_SIZE_T_) || defined(SIGLOST) - struct utsname *un; - uname (&un); - printf ("mips-dec-ultrix%s\n", un.release); exit (0); -#else - printf ("mips-dec-ultrix\n"); exit (0); -#endif -#endif -#endif - -#if defined (alliant) && defined (i860) - printf ("i860-alliant-bsd\n"); exit (0); -#endif - - exit (1); -} -EOF - -$CC_FOR_BUILD -o "$dummy" "$dummy.c" 2>/dev/null && SYSTEM_NAME=$($dummy) && - { echo "$SYSTEM_NAME"; exit; } - -# Apollos put the system type in the environment. -test -d /usr/apollo && { echo "$ISP-apollo-$SYSTYPE"; exit; } - -echo "$0: unable to guess system type" >&2 - -case "$UNAME_MACHINE:$UNAME_SYSTEM" in - mips:Linux | mips64:Linux) - # If we got here on MIPS GNU/Linux, output extra information. 
- cat >&2 <&2 <&2 </dev/null || echo unknown) -uname -r = $( (uname -r) 2>/dev/null || echo unknown) -uname -s = $( (uname -s) 2>/dev/null || echo unknown) -uname -v = $( (uname -v) 2>/dev/null || echo unknown) - -/usr/bin/uname -p = $( (/usr/bin/uname -p) 2>/dev/null) -/bin/uname -X = $( (/bin/uname -X) 2>/dev/null) - -hostinfo = $( (hostinfo) 2>/dev/null) -/bin/universe = $( (/bin/universe) 2>/dev/null) -/usr/bin/arch -k = $( (/usr/bin/arch -k) 2>/dev/null) -/bin/arch = $( (/bin/arch) 2>/dev/null) -/usr/bin/oslevel = $( (/usr/bin/oslevel) 2>/dev/null) -/usr/convex/getsysinfo = $( (/usr/convex/getsysinfo) 2>/dev/null) - -UNAME_MACHINE = "$UNAME_MACHINE" -UNAME_RELEASE = "$UNAME_RELEASE" -UNAME_SYSTEM = "$UNAME_SYSTEM" -UNAME_VERSION = "$UNAME_VERSION" -EOF -fi - -exit 1 - -# Local variables: -# eval: (add-hook 'before-save-hook 'time-stamp) -# time-stamp-start: "timestamp='" -# time-stamp-format: "%:y-%02m-%02d" -# time-stamp-end: "'" -# End: diff --git a/vendor/fake-enum/config.sub b/vendor/fake-enum/config.sub deleted file mode 100755 index d52017ea..00000000 --- a/vendor/fake-enum/config.sub +++ /dev/null @@ -1,1879 +0,0 @@ -#! /bin/sh -# Configuration validation subroutine script. -# Copyright 1992-2021 Free Software Foundation, Inc. - -timestamp='2021-05-03' - -# This file is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, see . -# -# As a special exception to the GNU General Public License, if you -# distribute this file as part of a program that contains a -# configuration script generated by Autoconf, you may include it under -# the same distribution terms that you use for the rest of that -# program. This Exception is an additional permission under section 7 -# of the GNU General Public License, version 3 ("GPLv3"). - - -# Please send patches to . -# -# Configuration subroutine to validate and canonicalize a configuration type. -# Supply the specified configuration type as an argument. -# If it is invalid, we print an error message on stderr and exit with code 1. -# Otherwise, we print the canonical config type on stdout and succeed. - -# You can get the latest version of this script from: -# https://git.savannah.gnu.org/cgit/config.git/plain/config.sub - -# This file is supposed to be the same for all GNU packages -# and recognize all the CPU types, system types and aliases -# that are meaningful with *any* GNU software. -# Each package is responsible for reporting which valid configurations -# it does not support. The user should be able to distinguish -# a failure to support a valid configuration from a meaningless -# configuration. - -# The goal of this file is to map all the various variations of a given -# machine specification into a single specification in the form: -# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM -# or in some cases, the newer four-part form: -# CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM -# It is wrong to echo any other type of specification. 
- -me=$(echo "$0" | sed -e 's,.*/,,') - -usage="\ -Usage: $0 [OPTION] CPU-MFR-OPSYS or ALIAS - -Canonicalize a configuration name. - -Options: - -h, --help print this help, then exit - -t, --time-stamp print date of last modification, then exit - -v, --version print version number, then exit - -Report bugs and patches to ." - -version="\ -GNU config.sub ($timestamp) - -Copyright 1992-2021 Free Software Foundation, Inc. - -This is free software; see the source for copying conditions. There is NO -warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." - -help=" -Try \`$me --help' for more information." - -# Parse command line -while test $# -gt 0 ; do - case $1 in - --time-stamp | --time* | -t ) - echo "$timestamp" ; exit ;; - --version | -v ) - echo "$version" ; exit ;; - --help | --h* | -h ) - echo "$usage"; exit ;; - -- ) # Stop option processing - shift; break ;; - - ) # Use stdin as input. - break ;; - -* ) - echo "$me: invalid option $1$help" >&2 - exit 1 ;; - - *local*) - # First pass through any local machine types. - echo "$1" - exit ;; - - * ) - break ;; - esac -done - -case $# in - 0) echo "$me: missing argument$help" >&2 - exit 1;; - 1) ;; - *) echo "$me: too many arguments$help" >&2 - exit 1;; -esac - -# Split fields of configuration type -# shellcheck disable=SC2162 -IFS="-" read field1 field2 field3 field4 <&2 - exit 1 - ;; - *-*-*-*) - basic_machine=$field1-$field2 - basic_os=$field3-$field4 - ;; - *-*-*) - # Ambiguous whether COMPANY is present, or skipped and KERNEL-OS is two - # parts - maybe_os=$field2-$field3 - case $maybe_os in - nto-qnx* | linux-* | uclinux-uclibc* \ - | uclinux-gnu* | kfreebsd*-gnu* | knetbsd*-gnu* | netbsd*-gnu* \ - | netbsd*-eabi* | kopensolaris*-gnu* | cloudabi*-eabi* \ - | storm-chaos* | os2-emx* | rtmk-nova* \ - | phantom-kernel* | phantom-user* \ - | snes*-elf* | snes*-xo65* | nes*-xo65* | snes*-coff*) - basic_machine=$field1 - basic_os=$maybe_os - ;; - android-linux) - basic_machine=$field1-unknown - basic_os=linux-android - ;; - *) - basic_machine=$field1-$field2 - basic_os=$field3 - ;; - esac - ;; - *-*) - # A lone config we happen to match not fitting any pattern - case $field1-$field2 in - decstation-3100) - basic_machine=mips-dec - basic_os= - ;; - *-*) - # Second component is usually, but not always the OS - case $field2 in - # Prevent following clause from handling this valid os - sun*os*) - basic_machine=$field1 - basic_os=$field2 - ;; - # Manufacturers - dec* | mips* | sequent* | encore* | pc533* | sgi* | sony* \ - | att* | 7300* | 3300* | delta* | motorola* | sun[234]* \ - | unicom* | ibm* | next | hp | isi* | apollo | altos* \ - | convergent* | ncr* | news | 32* | 3600* | 3100* \ - | hitachi* | c[123]* | convex* | sun | crds | omron* | dg \ - | ultra | tti* | harris | dolphin | highlevel | gould \ - | cbm | ns | masscomp | apple | axis | knuth | cray \ - | microblaze* | sim | cisco \ - | oki | wec | wrs | winbond ) - basic_machine=$field1-$field2 - basic_os= - ;; - *) - basic_machine=$field1 - basic_os=$field2 - ;; - esac - ;; - esac - ;; - *) - # Convert single-component short-hands not valid as part of - # multi-component configurations. 
- case $field1 in - 386bsd) - basic_machine=i386-pc - basic_os=bsd - ;; - a29khif) - basic_machine=a29k-amd - basic_os=udi - ;; - adobe68k) - basic_machine=m68010-adobe - basic_os=scout - ;; - alliant) - basic_machine=fx80-alliant - basic_os= - ;; - altos | altos3068) - basic_machine=m68k-altos - basic_os= - ;; - am29k) - basic_machine=a29k-none - basic_os=bsd - ;; - amdahl) - basic_machine=580-amdahl - basic_os=sysv - ;; - amiga) - basic_machine=m68k-unknown - basic_os= - ;; - amigaos | amigados) - basic_machine=m68k-unknown - basic_os=amigaos - ;; - amigaunix | amix) - basic_machine=m68k-unknown - basic_os=sysv4 - ;; - apollo68) - basic_machine=m68k-apollo - basic_os=sysv - ;; - apollo68bsd) - basic_machine=m68k-apollo - basic_os=bsd - ;; - aros) - basic_machine=i386-pc - basic_os=aros - ;; - aux) - basic_machine=m68k-apple - basic_os=aux - ;; - balance) - basic_machine=ns32k-sequent - basic_os=dynix - ;; - blackfin) - basic_machine=bfin-unknown - basic_os=linux - ;; - cegcc) - basic_machine=arm-unknown - basic_os=cegcc - ;; - convex-c1) - basic_machine=c1-convex - basic_os=bsd - ;; - convex-c2) - basic_machine=c2-convex - basic_os=bsd - ;; - convex-c32) - basic_machine=c32-convex - basic_os=bsd - ;; - convex-c34) - basic_machine=c34-convex - basic_os=bsd - ;; - convex-c38) - basic_machine=c38-convex - basic_os=bsd - ;; - cray) - basic_machine=j90-cray - basic_os=unicos - ;; - crds | unos) - basic_machine=m68k-crds - basic_os= - ;; - da30) - basic_machine=m68k-da30 - basic_os= - ;; - decstation | pmax | pmin | dec3100 | decstatn) - basic_machine=mips-dec - basic_os= - ;; - delta88) - basic_machine=m88k-motorola - basic_os=sysv3 - ;; - dicos) - basic_machine=i686-pc - basic_os=dicos - ;; - djgpp) - basic_machine=i586-pc - basic_os=msdosdjgpp - ;; - ebmon29k) - basic_machine=a29k-amd - basic_os=ebmon - ;; - es1800 | OSE68k | ose68k | ose | OSE) - basic_machine=m68k-ericsson - basic_os=ose - ;; - gmicro) - basic_machine=tron-gmicro - basic_os=sysv - ;; - go32) - basic_machine=i386-pc - basic_os=go32 - ;; - h8300hms) - basic_machine=h8300-hitachi - basic_os=hms - ;; - h8300xray) - basic_machine=h8300-hitachi - basic_os=xray - ;; - h8500hms) - basic_machine=h8500-hitachi - basic_os=hms - ;; - harris) - basic_machine=m88k-harris - basic_os=sysv3 - ;; - hp300 | hp300hpux) - basic_machine=m68k-hp - basic_os=hpux - ;; - hp300bsd) - basic_machine=m68k-hp - basic_os=bsd - ;; - hppaosf) - basic_machine=hppa1.1-hp - basic_os=osf - ;; - hppro) - basic_machine=hppa1.1-hp - basic_os=proelf - ;; - i386mach) - basic_machine=i386-mach - basic_os=mach - ;; - isi68 | isi) - basic_machine=m68k-isi - basic_os=sysv - ;; - m68knommu) - basic_machine=m68k-unknown - basic_os=linux - ;; - magnum | m3230) - basic_machine=mips-mips - basic_os=sysv - ;; - merlin) - basic_machine=ns32k-utek - basic_os=sysv - ;; - mingw64) - basic_machine=x86_64-pc - basic_os=mingw64 - ;; - mingw32) - basic_machine=i686-pc - basic_os=mingw32 - ;; - mingw32ce) - basic_machine=arm-unknown - basic_os=mingw32ce - ;; - monitor) - basic_machine=m68k-rom68k - basic_os=coff - ;; - morphos) - basic_machine=powerpc-unknown - basic_os=morphos - ;; - moxiebox) - basic_machine=moxie-unknown - basic_os=moxiebox - ;; - msdos) - basic_machine=i386-pc - basic_os=msdos - ;; - msys) - basic_machine=i686-pc - basic_os=msys - ;; - mvs) - basic_machine=i370-ibm - basic_os=mvs - ;; - nacl) - basic_machine=le32-unknown - basic_os=nacl - ;; - ncr3000) - basic_machine=i486-ncr - basic_os=sysv4 - ;; - netbsd386) - basic_machine=i386-pc - basic_os=netbsd - ;; - 
netwinder) - basic_machine=armv4l-rebel - basic_os=linux - ;; - news | news700 | news800 | news900) - basic_machine=m68k-sony - basic_os=newsos - ;; - news1000) - basic_machine=m68030-sony - basic_os=newsos - ;; - necv70) - basic_machine=v70-nec - basic_os=sysv - ;; - nh3000) - basic_machine=m68k-harris - basic_os=cxux - ;; - nh[45]000) - basic_machine=m88k-harris - basic_os=cxux - ;; - nindy960) - basic_machine=i960-intel - basic_os=nindy - ;; - mon960) - basic_machine=i960-intel - basic_os=mon960 - ;; - nonstopux) - basic_machine=mips-compaq - basic_os=nonstopux - ;; - os400) - basic_machine=powerpc-ibm - basic_os=os400 - ;; - OSE68000 | ose68000) - basic_machine=m68000-ericsson - basic_os=ose - ;; - os68k) - basic_machine=m68k-none - basic_os=os68k - ;; - paragon) - basic_machine=i860-intel - basic_os=osf - ;; - parisc) - basic_machine=hppa-unknown - basic_os=linux - ;; - psp) - basic_machine=mipsallegrexel-sony - basic_os=psp - ;; - pw32) - basic_machine=i586-unknown - basic_os=pw32 - ;; - rdos | rdos64) - basic_machine=x86_64-pc - basic_os=rdos - ;; - rdos32) - basic_machine=i386-pc - basic_os=rdos - ;; - rom68k) - basic_machine=m68k-rom68k - basic_os=coff - ;; - sa29200) - basic_machine=a29k-amd - basic_os=udi - ;; - sei) - basic_machine=mips-sei - basic_os=seiux - ;; - sequent) - basic_machine=i386-sequent - basic_os= - ;; - snesdev) - basic_machine=wc65c816-snes - basic_os=elf - ;; - sps7) - basic_machine=m68k-bull - basic_os=sysv2 - ;; - st2000) - basic_machine=m68k-tandem - basic_os= - ;; - stratus) - basic_machine=i860-stratus - basic_os=sysv4 - ;; - sun2) - basic_machine=m68000-sun - basic_os= - ;; - sun2os3) - basic_machine=m68000-sun - basic_os=sunos3 - ;; - sun2os4) - basic_machine=m68000-sun - basic_os=sunos4 - ;; - sun3) - basic_machine=m68k-sun - basic_os= - ;; - sun3os3) - basic_machine=m68k-sun - basic_os=sunos3 - ;; - sun3os4) - basic_machine=m68k-sun - basic_os=sunos4 - ;; - sun4) - basic_machine=sparc-sun - basic_os= - ;; - sun4os3) - basic_machine=sparc-sun - basic_os=sunos3 - ;; - sun4os4) - basic_machine=sparc-sun - basic_os=sunos4 - ;; - sun4sol2) - basic_machine=sparc-sun - basic_os=solaris2 - ;; - sun386 | sun386i | roadrunner) - basic_machine=i386-sun - basic_os= - ;; - sv1) - basic_machine=sv1-cray - basic_os=unicos - ;; - symmetry) - basic_machine=i386-sequent - basic_os=dynix - ;; - t3e) - basic_machine=alphaev5-cray - basic_os=unicos - ;; - t90) - basic_machine=t90-cray - basic_os=unicos - ;; - toad1) - basic_machine=pdp10-xkl - basic_os=tops20 - ;; - tpf) - basic_machine=s390x-ibm - basic_os=tpf - ;; - udi29k) - basic_machine=a29k-amd - basic_os=udi - ;; - ultra3) - basic_machine=a29k-nyu - basic_os=sym1 - ;; - v810 | necv810) - basic_machine=v810-nec - basic_os=none - ;; - vaxv) - basic_machine=vax-dec - basic_os=sysv - ;; - vms) - basic_machine=vax-dec - basic_os=vms - ;; - vsta) - basic_machine=i386-pc - basic_os=vsta - ;; - vxworks960) - basic_machine=i960-wrs - basic_os=vxworks - ;; - vxworks68) - basic_machine=m68k-wrs - basic_os=vxworks - ;; - vxworks29k) - basic_machine=a29k-wrs - basic_os=vxworks - ;; - xbox) - basic_machine=i686-pc - basic_os=mingw32 - ;; - ymp) - basic_machine=ymp-cray - basic_os=unicos - ;; - *) - basic_machine=$1 - basic_os= - ;; - esac - ;; -esac - -# Decode 1-component or ad-hoc basic machines -case $basic_machine in - # Here we handle the default manufacturer of certain CPU types. It is in - # some cases the only manufacturer, in others, it is the most popular. 
- w89k) - cpu=hppa1.1 - vendor=winbond - ;; - op50n) - cpu=hppa1.1 - vendor=oki - ;; - op60c) - cpu=hppa1.1 - vendor=oki - ;; - ibm*) - cpu=i370 - vendor=ibm - ;; - orion105) - cpu=clipper - vendor=highlevel - ;; - mac | mpw | mac-mpw) - cpu=m68k - vendor=apple - ;; - pmac | pmac-mpw) - cpu=powerpc - vendor=apple - ;; - - # Recognize the various machine names and aliases which stand - # for a CPU type and a company and sometimes even an OS. - 3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc) - cpu=m68000 - vendor=att - ;; - 3b*) - cpu=we32k - vendor=att - ;; - bluegene*) - cpu=powerpc - vendor=ibm - basic_os=cnk - ;; - decsystem10* | dec10*) - cpu=pdp10 - vendor=dec - basic_os=tops10 - ;; - decsystem20* | dec20*) - cpu=pdp10 - vendor=dec - basic_os=tops20 - ;; - delta | 3300 | motorola-3300 | motorola-delta \ - | 3300-motorola | delta-motorola) - cpu=m68k - vendor=motorola - ;; - dpx2*) - cpu=m68k - vendor=bull - basic_os=sysv3 - ;; - encore | umax | mmax) - cpu=ns32k - vendor=encore - ;; - elxsi) - cpu=elxsi - vendor=elxsi - basic_os=${basic_os:-bsd} - ;; - fx2800) - cpu=i860 - vendor=alliant - ;; - genix) - cpu=ns32k - vendor=ns - ;; - h3050r* | hiux*) - cpu=hppa1.1 - vendor=hitachi - basic_os=hiuxwe2 - ;; - hp3k9[0-9][0-9] | hp9[0-9][0-9]) - cpu=hppa1.0 - vendor=hp - ;; - hp9k2[0-9][0-9] | hp9k31[0-9]) - cpu=m68000 - vendor=hp - ;; - hp9k3[2-9][0-9]) - cpu=m68k - vendor=hp - ;; - hp9k6[0-9][0-9] | hp6[0-9][0-9]) - cpu=hppa1.0 - vendor=hp - ;; - hp9k7[0-79][0-9] | hp7[0-79][0-9]) - cpu=hppa1.1 - vendor=hp - ;; - hp9k78[0-9] | hp78[0-9]) - # FIXME: really hppa2.0-hp - cpu=hppa1.1 - vendor=hp - ;; - hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893) - # FIXME: really hppa2.0-hp - cpu=hppa1.1 - vendor=hp - ;; - hp9k8[0-9][13679] | hp8[0-9][13679]) - cpu=hppa1.1 - vendor=hp - ;; - hp9k8[0-9][0-9] | hp8[0-9][0-9]) - cpu=hppa1.0 - vendor=hp - ;; - i*86v32) - cpu=$(echo "$1" | sed -e 's/86.*/86/') - vendor=pc - basic_os=sysv32 - ;; - i*86v4*) - cpu=$(echo "$1" | sed -e 's/86.*/86/') - vendor=pc - basic_os=sysv4 - ;; - i*86v) - cpu=$(echo "$1" | sed -e 's/86.*/86/') - vendor=pc - basic_os=sysv - ;; - i*86sol2) - cpu=$(echo "$1" | sed -e 's/86.*/86/') - vendor=pc - basic_os=solaris2 - ;; - j90 | j90-cray) - cpu=j90 - vendor=cray - basic_os=${basic_os:-unicos} - ;; - iris | iris4d) - cpu=mips - vendor=sgi - case $basic_os in - irix*) - ;; - *) - basic_os=irix4 - ;; - esac - ;; - miniframe) - cpu=m68000 - vendor=convergent - ;; - *mint | mint[0-9]* | *MiNT | *MiNT[0-9]*) - cpu=m68k - vendor=atari - basic_os=mint - ;; - news-3600 | risc-news) - cpu=mips - vendor=sony - basic_os=newsos - ;; - next | m*-next) - cpu=m68k - vendor=next - case $basic_os in - openstep*) - ;; - nextstep*) - ;; - ns2*) - basic_os=nextstep2 - ;; - *) - basic_os=nextstep3 - ;; - esac - ;; - np1) - cpu=np1 - vendor=gould - ;; - op50n-* | op60c-*) - cpu=hppa1.1 - vendor=oki - basic_os=proelf - ;; - pa-hitachi) - cpu=hppa1.1 - vendor=hitachi - basic_os=hiuxwe2 - ;; - pbd) - cpu=sparc - vendor=tti - ;; - pbb) - cpu=m68k - vendor=tti - ;; - pc532) - cpu=ns32k - vendor=pc532 - ;; - pn) - cpu=pn - vendor=gould - ;; - power) - cpu=power - vendor=ibm - ;; - ps2) - cpu=i386 - vendor=ibm - ;; - rm[46]00) - cpu=mips - vendor=siemens - ;; - rtpc | rtpc-*) - cpu=romp - vendor=ibm - ;; - sde) - cpu=mipsisa32 - vendor=sde - basic_os=${basic_os:-elf} - ;; - simso-wrs) - cpu=sparclite - vendor=wrs - basic_os=vxworks - ;; - tower | tower-32) - cpu=m68k - vendor=ncr - ;; - vpp*|vx|vx-*) - cpu=f301 
- vendor=fujitsu - ;; - w65 | 65816 | 65c816 | wdc65c816 | wc65c816 | wdc65816 | wc65816) - cpu=w65 - vendor=wdc - ;; - w89k-*) - cpu=hppa1.1 - vendor=winbond - basic_os=proelf - ;; - none) - cpu=none - vendor=none - ;; - leon|leon[3-9]) - cpu=sparc - vendor=$basic_machine - ;; - leon-*|leon[3-9]-*) - cpu=sparc - vendor=$(echo "$basic_machine" | sed 's/-.*//') - ;; - *-*) - # shellcheck disable=SC2162 - IFS="-" read cpu vendor <&2 - exit 1 - ;; - esac - ;; -esac - -# Here we canonicalize certain aliases for manufacturers. -case $vendor in - digital*) - vendor=dec - ;; - commodore*) - vendor=cbm - ;; - *) - ;; -esac - -# Decode manufacturer-specific aliases for certain operating systems. - -if test x$basic_os != x -then - -# First recognize some ad-hoc caes, or perhaps split kernel-os, or else just -# set os. -case $basic_os in - gnu/linux*) - kernel=linux - os=$(echo $basic_os | sed -e 's|gnu/linux|gnu|') - ;; - os2-emx) - kernel=os2 - os=$(echo $basic_os | sed -e 's|os2-emx|emx|') - ;; - nto-qnx*) - kernel=nto - os=$(echo $basic_os | sed -e 's|nto-qnx|qnx|') - ;; - *-*) - # shellcheck disable=SC2162 - IFS="-" read kernel os <&2 - exit 1 - ;; -esac - -# As a final step for OS-related things, validate the OS-kernel combination -# (given a valid OS), if there is a kernel. -case $kernel-$os in - linux-gnu* | linux-dietlibc* | linux-android* | linux-newlib* | linux-musl* | linux-uclibc* ) - ;; - uclinux-uclibc* ) - ;; - -dietlibc* | -newlib* | -musl* | -uclibc* ) - # These are just libc implementations, not actual OSes, and thus - # require a kernel. - echo "Invalid configuration \`$1': libc \`$os' needs explicit kernel." 1>&2 - exit 1 - ;; - kfreebsd*-gnu* | kopensolaris*-gnu*) - ;; - vxworks-simlinux | vxworks-simwindows | vxworks-spe) - ;; - nto-qnx*) - ;; - os2-emx) - ;; - phantom-kernel* | phantom-user*) - ;; - snes*-elf* | snes*-xo65* | nes*-xo65* | snes*-coff* ) - ;; - *-eabi* | *-gnueabi*) - ;; - -*) - # Blank kernel with real OS is always fine. - ;; - *-*) - echo "Invalid configuration \`$1': Kernel \`$kernel' not known to work with OS \`$os'." 1>&2 - exit 1 - ;; -esac - -# Here we handle the case where we know the os, and the CPU type, but not the -# manufacturer. We pick the logical manufacturer. -case $vendor in - unknown) - case $cpu-$os in - *-riscix*) - vendor=acorn - ;; - *-sunos*) - vendor=sun - ;; - *-cnk* | *-aix*) - vendor=ibm - ;; - *-beos*) - vendor=be - ;; - *-hpux*) - vendor=hp - ;; - *-mpeix*) - vendor=hp - ;; - *-hiux*) - vendor=hitachi - ;; - *-unos*) - vendor=crds - ;; - *-dgux*) - vendor=dg - ;; - *-luna*) - vendor=omron - ;; - *-genix*) - vendor=ns - ;; - *-clix*) - vendor=intergraph - ;; - *-mvs* | *-opened*) - vendor=ibm - ;; - *-os400*) - vendor=ibm - ;; - s390-* | s390x-*) - vendor=ibm - ;; - *-ptx*) - vendor=sequent - ;; - *-tpf*) - vendor=ibm - ;; - *-vxsim* | *-vxworks* | *-windiss*) - vendor=wrs - ;; - *-aux*) - vendor=apple - ;; - *-hms*) - vendor=hitachi - ;; - *-mpw* | *-macos*) - vendor=apple - ;; - *-*mint | *-mint[0-9]* | *-*MiNT | *-MiNT[0-9]*) - vendor=atari - ;; - *-vos*) - vendor=stratus - ;; - esac - ;; -esac - -echo "$cpu-$vendor-${kernel:+$kernel-}$os" -exit - -# Local variables: -# eval: (add-hook 'before-save-hook 'time-stamp) -# time-stamp-start: "timestamp='" -# time-stamp-format: "%:y-%02m-%02d" -# time-stamp-end: "'" -# End: diff --git a/vendor/fake-enum/configure b/vendor/fake-enum/configure deleted file mode 100755 index 9cd7e198..00000000 --- a/vendor/fake-enum/configure +++ /dev/null @@ -1,4207 +0,0 @@ -#! 
= $ac_status" >&5 - test $ac_status = 0; } - if { ac_try='./conftest$ac_cv_exeext' - { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; }; then - cross_compiling=no - else - if test "$cross_compiling" = maybe; then - cross_compiling=yes - else - { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "cannot run C compiled programs. -If you meant to cross compile, use \`--host'. -See \`config.log' for more details" "$LINENO" 5; } - fi - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5 -$as_echo "$cross_compiling" >&6; } - -rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out -ac_clean_files=$ac_clean_files_save -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5 -$as_echo_n "checking for suffix of object files... " >&6; } -if ${ac_cv_objext+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -rm -f conftest.o conftest.obj -if { { ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compile") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then : - for ac_file in conftest.o conftest.obj conftest.*; do - test -f "$ac_file" || continue; - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;; - *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'` - break;; - esac -done -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - -{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "cannot compute suffix of object files: cannot compile -See \`config.log' for more details" "$LINENO" 5; } -fi -rm -f conftest.$ac_cv_objext conftest.$ac_ext -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5 -$as_echo "$ac_cv_objext" >&6; } -OBJEXT=$ac_cv_objext -ac_objext=$OBJEXT -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 -$as_echo_n "checking whether we are using the GNU C compiler... " >&6; } -if ${ac_cv_c_compiler_gnu+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ -#ifndef __GNUC__ - choke me -#endif - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_compiler_gnu=yes -else - ac_compiler_gnu=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -ac_cv_c_compiler_gnu=$ac_compiler_gnu - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 -$as_echo "$ac_cv_c_compiler_gnu" >&6; } -if test $ac_compiler_gnu = yes; then - GCC=yes -else - GCC= -fi -ac_test_CFLAGS=${CFLAGS+set} -ac_save_CFLAGS=$CFLAGS -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 -$as_echo_n "checking whether $CC accepts -g... 
" >&6; } -if ${ac_cv_prog_cc_g+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_save_c_werror_flag=$ac_c_werror_flag - ac_c_werror_flag=yes - ac_cv_prog_cc_g=no - CFLAGS="-g" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_prog_cc_g=yes -else - CFLAGS="" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - -else - ac_c_werror_flag=$ac_save_c_werror_flag - CFLAGS="-g" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_prog_cc_g=yes -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - ac_c_werror_flag=$ac_save_c_werror_flag -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 -$as_echo "$ac_cv_prog_cc_g" >&6; } -if test "$ac_test_CFLAGS" = set; then - CFLAGS=$ac_save_CFLAGS -elif test $ac_cv_prog_cc_g = yes; then - if test "$GCC" = yes; then - CFLAGS="-g -O2" - else - CFLAGS="-g" - fi -else - if test "$GCC" = yes; then - CFLAGS="-O2" - else - CFLAGS= - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 -$as_echo_n "checking for $CC option to accept ISO C89... " >&6; } -if ${ac_cv_prog_cc_c89+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_cv_prog_cc_c89=no -ac_save_CC=$CC -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -#include -struct stat; -/* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */ -struct buf { int x; }; -FILE * (*rcsopen) (struct buf *, struct stat *, int); -static char *e (p, i) - char **p; - int i; -{ - return p[i]; -} -static char *f (char * (*g) (char **, int), char **p, ...) -{ - char *s; - va_list v; - va_start (v,p); - s = g (p, va_arg (v,int)); - va_end (v); - return s; -} - -/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has - function prototypes and stuff, but not '\xHH' hex character constants. - These don't provoke an error unfortunately, instead are silently treated - as 'x'. The following induces an error, until -std is added to get - proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an - array size at least. It's necessary to write '\x00'==0 to get something - that's true only with -std. */ -int osf4_cc_array ['\x00' == 0 ? 1 : -1]; - -/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters - inside strings and character constants. */ -#define FOO(x) 'x' -int xlc6_cc_array[FOO(a) == 'x' ? 
1 : -1]; - -int test (int i, double x); -struct s1 {int (*f) (int a);}; -struct s2 {int (*f) (double a);}; -int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); -int argc; -char **argv; -int -main () -{ -return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; - ; - return 0; -} -_ACEOF -for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \ - -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" -do - CC="$ac_save_CC $ac_arg" - if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_prog_cc_c89=$ac_arg -fi -rm -f core conftest.err conftest.$ac_objext - test "x$ac_cv_prog_cc_c89" != "xno" && break -done -rm -f conftest.$ac_ext -CC=$ac_save_CC - -fi -# AC_CACHE_VAL -case "x$ac_cv_prog_cc_c89" in - x) - { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 -$as_echo "none needed" >&6; } ;; - xno) - { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 -$as_echo "unsupported" >&6; } ;; - *) - CC="$CC $ac_cv_prog_cc_c89" - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 -$as_echo "$ac_cv_prog_cc_c89" >&6; } ;; -esac -if test "x$ac_cv_prog_cc_c89" != xno; then : - -fi - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - -# Find a good install program. We prefer a C program (faster), -# so one script is as good as another. But avoid the broken or -# incompatible versions: -# SysV /etc/install, /usr/sbin/install -# SunOS /usr/etc/install -# IRIX /sbin/install -# AIX /bin/install -# AmigaOS /C/install, which installs bootblocks on floppy discs -# AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag -# AFS /usr/afsws/bin/install, which mishandles nonexistent args -# SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff" -# OS/2's system install, which has a completely different semantic -# ./install, which can be erroneously created by make from ./install.sh. -# Reject install programs that cannot install multiple files. -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5 -$as_echo_n "checking for a BSD-compatible install... " >&6; } -if test -z "$INSTALL"; then -if ${ac_cv_path_install+:} false; then : - $as_echo_n "(cached) " >&6 -else - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - # Account for people who put trailing slashes in PATH elements. -case $as_dir/ in #(( - ./ | .// | /[cC]/* | \ - /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \ - ?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \ - /usr/ucb/* ) ;; - *) - # OSF1 and SCO ODT 3.0 have their own names for install. - # Don't use installbsd from OSF since it installs stuff as root - # by default. - for ac_prog in ginstall scoinst install; do - for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext"; then - if test $ac_prog = install && - grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then - # AIX install. It has an incompatible calling convention. - : - elif test $ac_prog = install && - grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then - # program-specific install script used by HP pwplus--don't use. 
- : - else - rm -rf conftest.one conftest.two conftest.dir - echo one > conftest.one - echo two > conftest.two - mkdir conftest.dir - if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" && - test -s conftest.one && test -s conftest.two && - test -s conftest.dir/conftest.one && - test -s conftest.dir/conftest.two - then - ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c" - break 3 - fi - fi - fi - done - done - ;; -esac - - done -IFS=$as_save_IFS - -rm -rf conftest.one conftest.two conftest.dir - -fi - if test "${ac_cv_path_install+set}" = set; then - INSTALL=$ac_cv_path_install - else - # As a last resort, use the slow shell script. Don't cache a - # value for INSTALL within a source directory, because that will - # break other packages using the cache if that directory is - # removed, or if the value is a relative name. - INSTALL=$ac_install_sh - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5 -$as_echo "$INSTALL" >&6; } - -# Use test -z because SunOS4 sh mishandles braces in ${var-val}. -# It thinks the first close brace ends the variable substitution. -test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}' - -test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}' - -test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644' - - - - - - - - - if test "$RUSTFLAGS" \= "" - then - RUSTFLAGS="-C opt-level=2 -g" - fi - - if test x$host_alias != x - then - for ac_prog in rustc lcrustc $host-gccrs -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_path_RUSTC+:} false; then : - $as_echo_n "(cached) " >&6 -else - case $RUSTC in - [\\/]* | ?:[\\/]*) - ac_cv_path_RUSTC="$RUSTC" # Let the user override the test with a path. - ;; - *) - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_path_RUSTC="$as_dir/$ac_word$ac_exec_ext" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - - ;; -esac -fi -RUSTC=$ac_cv_path_RUSTC -if test -n "$RUSTC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $RUSTC" >&5 -$as_echo "$RUSTC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$RUSTC" && break -done - - else - for ac_prog in rustc lcrustc $host-gccrs gccrs -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_path_RUSTC+:} false; then : - $as_echo_n "(cached) " >&6 -else - case $RUSTC in - [\\/]* | ?:[\\/]*) - ac_cv_path_RUSTC="$RUSTC" # Let the user override the test with a path. - ;; - *) - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_path_RUSTC="$as_dir/$ac_word$ac_exec_ext" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - - ;; -esac -fi -RUSTC=$ac_cv_path_RUSTC -if test -n "$RUSTC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $RUSTC" >&5 -$as_echo "$RUSTC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$RUSTC" && break -done - - fi - - if test "$RUSTC" \= "" - then - as_fn_error $? "Failed to find a rust compiler. Install rustc in PATH, or set RUSTC to a suitable compiler" "$LINENO" 5 - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to compile for $host with $RUSTC" >&5 -$as_echo_n "checking how to compile for $host with $RUSTC... " >&6; } - case x$RUSTC in - x${host_alias}-* | x*\\/${host_alias}-* ) - rustc_host_target=${host_alias} - { $as_echo "$as_me:${as_lineno-$LINENO}: result: Not needed" >&5 -$as_echo "Not needed" >&6; } - ;; - x${host}-* | x*\\/${host}-* ) - rustc_host_target=${host} - { $as_echo "$as_me:${as_lineno-$LINENO}: result: Not needed" >&5 -$as_echo "Not needed" >&6; } - ;; - - x* ) - SAVE_RUSTFLAGS="$RUSTFLAGS" - if test x$host_alias != x - then - RUSTFLAGS="$RUSTFLAGS --target $host_alias" - echo '#![no_std]' > comptest.rs - echo Trying target $host_alias >> config.log - echo "$RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs" >> config.log - $RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs 2>> config.log > /dev/null - - if test $? -eq 0 - then - rustc_host_target=$host_alias - else - echo Using target $host_alias failed >> config.log - fi - fi - - if test x$rustc_host_target \= x - then - RUSTFLAGS="$SAVE_RUSTFLAGS --target $host" - echo '#![no_std]' > comptest.rs - echo Trying target $host >> config.log - echo "$RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs" >> config.log - $RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs 2>> config.log > /dev/null - - if test $? -eq 0 - then - rustc_host_target=$host - else - echo Using target $host failed >> config.log - fi - fi - - if test x$rustc_host_target \= x - then - case $host in - x86_64-*-mingw32 | i*86-*-mingw32 ) - IFS="-" read rustc_host_arch rustc_host_vendor rustc_host_sys <<< "$host" - RUSTFLAGS="$SAVE_RUSTFLAGS --target ${rustc_host_arch}-pc-windows-gnu" - echo '#![no_std]' > comptest.rs - echo "$RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs" >> config.log - $RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs 2>> config.log > /dev/null - - if test $? -eq 0 - then - rustc_host_target=${rustc_host_arch}-pc-windows-gnu - fi - ;; - x86_64-*-*-* | i*86-*-*-* ) - IFS="-" read rustc_host_arch rustc_host_vendor rustc_host_os rustc_host_env <<< "$host" - RUSTFLAGS="$SAVE_RUSTFLAGS --target ${rustc_host_arch}-unknown-${rustc_host_os}-${rustc_host_env}" - echo '#![no_std]' > comptest.rs - echo "$RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs" >> config.log - $RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs 2>> config.log > /dev/null - - if test $? 
-eq 0 - then - rustc_host_target=${rustc_host_arch}-unknown-${rustc_host_os}-${rustc_host_env} - fi - ;; - x86_64-*-* | i*86-*-* ) - IFS="-" read rustc_host_arch rustc_host_vendor rustc_host_sys <<< "$host" - RUSTFLAGS="$SAVE_RUSTFLAGS --target ${rustc_host_arch}-unknown-${rustc_host_sys}" - echo '#![no_std]' > comptest.rs - - echo "$RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs" >> config.log - $RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs 2>> config.log > /dev/null - - if test $? -eq 0 - then - rustc_host_target=${rustc_host_arch}-unknown-${rustc_host_sys} - fi - ;; - *-*-* ) - IFS="-" read rustc_host_arch rustc_host_vendor rustc_host_sys <<< "$host" - RUSTFLAGS="$SAVE_RUSTFLAGS --target ${rustc_host_arch}-${rustc_host_sys}" - echo '#![no_std]' > comptest.rs - - echo "$RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs" >> config.log - $RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs 2>> config.log > /dev/null - - if test $? -eq 0 - then - rustc_host_target=${rustc_host_arch}-${rustc_host_sys} - fi - ;; - esac - fi - if test x$rustc_host_target \= x - then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: not found" >&5 -$as_echo "not found" >&6; } - as_fn_error $? "Cannot compile to $host with $RUSTC" "$LINENO" 5 - else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: --target $rustc_host_target" >&5 -$as_echo "--target $rustc_host_target" >&6; } - fi - ;; - esac - rm -f comptest.rs libcomptest.rlib - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $RUSTC works" >&5 -$as_echo_n "checking whether $RUSTC works... " >&6; } - echo 'fn main(){}' > comptest.rs - $RUSTC $RUSTFLAGS --crate-type bin --crate-name comptest comptest.rs 2>> config.log > /dev/null - if test $? -ne 0 - then - echo '#![no_std]' > comptest.rs - $RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest --emit link=libcomptest.rlib comptest.rs 2>> config.log > /dev/null - if test $? -ne 0 - then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } - as_fn_error $? "Cannot compile a simple program with $RUSTC" "$LINENO" 5 - else - rustc_has_std=no - fi - else - rustc_has_std=yes - fi - - if test x$host_alias \= x - then - ./comptest${EXEEXT} - if test $? -ne 0 - then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } - as_fn_error $? "Cannot run executables compiled by $RUSTC" "$LINENO" 5 - fi - fi - - rm -f comptest.rs comptest${EXEEXT} - - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } - - - - - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $RUSTC supports edition 2018" >&5 -$as_echo_n "checking if $RUSTC supports edition 2018... " >&6; } - - SAVE_RUSTFLAGS="$RUSTFLAGS" - RUSTFLAGS="$RUSTFLAGS --edition 2018" - - echo "" >> test.rs - ${RUSTC} ${RUSTFLAGS} --crate-type rlib --crate-name test --emit link=libtest.rlib test.rs - - if test $? -eq 0 - then - rm -f test.rs libtest.rlib - - rust_edition=2018 - - else - rm -f test.rs libtest.rlib - RUSTFLAGS="$SAVE_RUSTFLAGS" - rust_edition=no - - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } - as_fn_error $? 
"Cannot target --edition 2018 with $RUSTC" "$LINENO" 5 - - - fi - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } - - -ac_config_files="$ac_config_files Makefile" - - -cat >confcache <<\_ACEOF -# This file is a shell script that caches the results of configure -# tests run on this system so they can be shared between configure -# scripts and configure runs, see configure's option --config-cache. -# It is not useful on other systems. If it contains results you don't -# want to keep, you may remove or edit it. -# -# config.status only pays attention to the cache file if you give it -# the --recheck option to rerun configure. -# -# `ac_cv_env_foo' variables (set or unset) will be overridden when -# loading this file, other *unset* `ac_cv_foo' will be assigned the -# following values. - -_ACEOF - -# The following way of writing the cache mishandles newlines in values, -# but we know of no workaround that is simple, portable, and efficient. -# So, we kill variables containing newlines. -# Ultrix sh set writes to stderr and can't be redirected directly, -# and sets the high bit in the cache file unless we assign to the vars. -( - for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do - eval ac_val=\$$ac_var - case $ac_val in #( - *${as_nl}*) - case $ac_var in #( - *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 -$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; - esac - case $ac_var in #( - _ | IFS | as_nl) ;; #( - BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( - *) { eval $ac_var=; unset $ac_var;} ;; - esac ;; - esac - done - - (set) 2>&1 | - case $as_nl`(ac_space=' '; set) 2>&1` in #( - *${as_nl}ac_space=\ *) - # `set' does not quote correctly, so add quotes: double-quote - # substitution turns \\\\ into \\, and sed turns \\ into \. - sed -n \ - "s/'/'\\\\''/g; - s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" - ;; #( - *) - # `set' quotes correctly as required by POSIX, so do not add quotes. - sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" - ;; - esac | - sort -) | - sed ' - /^ac_cv_env_/b end - t clear - :clear - s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/ - t end - s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ - :end' >>confcache -if diff "$cache_file" confcache >/dev/null 2>&1; then :; else - if test -w "$cache_file"; then - if test "x$cache_file" != "x/dev/null"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 -$as_echo "$as_me: updating cache $cache_file" >&6;} - if test ! -f "$cache_file" || test -h "$cache_file"; then - cat confcache >"$cache_file" - else - case $cache_file in #( - */* | ?:*) - mv -f confcache "$cache_file"$$ && - mv -f "$cache_file"$$ "$cache_file" ;; #( - *) - mv -f confcache "$cache_file" ;; - esac - fi - fi - else - { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 -$as_echo "$as_me: not updating unwritable cache $cache_file" >&6;} - fi -fi -rm -f confcache - -test "x$prefix" = xNONE && prefix=$ac_default_prefix -# Let make expand exec_prefix. -test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' - -# Transform confdefs.h into DEFS. -# Protect against shell expansion while executing Makefile rules. -# Protect against Makefile macro expansion. -# -# If the first sed substitution is executed (which looks for macros that -# take arguments), then branch to the quote section. 
Otherwise, -# look for a macro that doesn't take arguments. -ac_script=' -:mline -/\\$/{ - N - s,\\\n,, - b mline -} -t clear -:clear -s/^[ ]*#[ ]*define[ ][ ]*\([^ (][^ (]*([^)]*)\)[ ]*\(.*\)/-D\1=\2/g -t quote -s/^[ ]*#[ ]*define[ ][ ]*\([^ ][^ ]*\)[ ]*\(.*\)/-D\1=\2/g -t quote -b any -:quote -s/[ `~#$^&*(){}\\|;'\''"<>?]/\\&/g -s/\[/\\&/g -s/\]/\\&/g -s/\$/$$/g -H -:any -${ - g - s/^\n// - s/\n/ /g - p -} -' -DEFS=`sed -n "$ac_script" confdefs.h` - - -ac_libobjs= -ac_ltlibobjs= -U= -for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue - # 1. Remove the extension, and $U if already installed. - ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' - ac_i=`$as_echo "$ac_i" | sed "$ac_script"` - # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR - # will be set to the directory where LIBOBJS objects are built. - as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" - as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' -done -LIBOBJS=$ac_libobjs - -LTLIBOBJS=$ac_ltlibobjs - - - -: "${CONFIG_STATUS=./config.status}" -ac_write_fail=0 -ac_clean_files_save=$ac_clean_files -ac_clean_files="$ac_clean_files $CONFIG_STATUS" -{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 -$as_echo "$as_me: creating $CONFIG_STATUS" >&6;} -as_write_fail=0 -cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 -#! $SHELL -# Generated by $as_me. -# Run this file to recreate the current configuration. -# Compiler output produced by configure, useful for debugging -# configure, is in config.log if it exists. - -debug=false -ac_cs_recheck=false -ac_cs_silent=false - -SHELL=\${CONFIG_SHELL-$SHELL} -export SHELL -_ASEOF -cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 -## -------------------- ## -## M4sh Initialization. ## -## -------------------- ## - -# Be more Bourne compatible -DUALCASE=1; export DUALCASE # for MKS sh -if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : - emulate sh - NULLCMD=: - # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which - # is contrary to our usage. Disable this feature. - alias -g '${1+"$@"}'='"$@"' - setopt NO_GLOB_SUBST -else - case `(set -o) 2>/dev/null` in #( - *posix*) : - set -o posix ;; #( - *) : - ;; -esac -fi - - -as_nl=' -' -export as_nl -# Printing a long string crashes Solaris 7 /usr/bin/printf. -as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo -# Prefer a ksh shell builtin over an external printf program on Solaris, -# but without wasting forks for bash or zsh. -if test -z "$BASH_VERSION$ZSH_VERSION" \ - && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='print -r --' - as_echo_n='print -rn --' -elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='printf %s\n' - as_echo_n='printf %s' -else - if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then - as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' - as_echo_n='/usr/ucb/echo -n' - else - as_echo_body='eval expr "X$1" : "X\\(.*\\)"' - as_echo_n_body='eval - arg=$1; - case $arg in #( - *"$as_nl"*) - expr "X$arg" : "X\\(.*\\)$as_nl"; - arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; - esac; - expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" - ' - export as_echo_n_body - as_echo_n='sh -c $as_echo_n_body as_echo' - fi - export as_echo_body - as_echo='sh -c $as_echo_body as_echo' -fi - -# The user is always right. 
-if test "${PATH_SEPARATOR+set}" != set; then - PATH_SEPARATOR=: - (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { - (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || - PATH_SEPARATOR=';' - } -fi - - -# IFS -# We need space, tab and new line, in precisely that order. Quoting is -# there to prevent editors from complaining about space-tab. -# (If _AS_PATH_WALK were called with IFS unset, it would disable word -# splitting by setting IFS to empty value.) -IFS=" "" $as_nl" - -# Find who we are. Look in the path if we contain no directory separator. -as_myself= -case $0 in #(( - *[\\/]* ) as_myself=$0 ;; - *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break - done -IFS=$as_save_IFS - - ;; -esac -# We did not find ourselves, most probably we were run as `sh COMMAND' -# in which case we are not to be found in the path. -if test "x$as_myself" = x; then - as_myself=$0 -fi -if test ! -f "$as_myself"; then - $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 - exit 1 -fi - -# Unset variables that we do not need and which cause bugs (e.g. in -# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" -# suppresses any "Segmentation fault" message there. '((' could -# trigger a bug in pdksh 5.2.14. -for as_var in BASH_ENV ENV MAIL MAILPATH -do eval test x\${$as_var+set} = xset \ - && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : -done -PS1='$ ' -PS2='> ' -PS4='+ ' - -# NLS nuisances. -LC_ALL=C -export LC_ALL -LANGUAGE=C -export LANGUAGE - -# CDPATH. -(unset CDPATH) >/dev/null 2>&1 && unset CDPATH - - -# as_fn_error STATUS ERROR [LINENO LOG_FD] -# ---------------------------------------- -# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are -# provided, also output the error to LOG_FD, referencing LINENO. Then exit the -# script with STATUS, using 1 if that was 0. -as_fn_error () -{ - as_status=$1; test $as_status -eq 0 && as_status=1 - if test "$4"; then - as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 - fi - $as_echo "$as_me: error: $2" >&2 - as_fn_exit $as_status -} # as_fn_error - - -# as_fn_set_status STATUS -# ----------------------- -# Set $? to STATUS, without forking. -as_fn_set_status () -{ - return $1 -} # as_fn_set_status - -# as_fn_exit STATUS -# ----------------- -# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. -as_fn_exit () -{ - set +e - as_fn_set_status $1 - exit $1 -} # as_fn_exit - -# as_fn_unset VAR -# --------------- -# Portably unset VAR. -as_fn_unset () -{ - { eval $1=; unset $1;} -} -as_unset=as_fn_unset -# as_fn_append VAR VALUE -# ---------------------- -# Append the text in VALUE to the end of the definition contained in VAR. Take -# advantage of any shell optimizations that allow amortized linear growth over -# repeated appends, instead of the typical quadratic growth present in naive -# implementations. -if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : - eval 'as_fn_append () - { - eval $1+=\$2 - }' -else - as_fn_append () - { - eval $1=\$$1\$2 - } -fi # as_fn_append - -# as_fn_arith ARG... -# ------------------ -# Perform arithmetic evaluation on the ARGs, and store the result in the -# global $as_val. Take advantage of shells that can avoid forks. The arguments -# must be portable across $(()) and expr. 
-if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : - eval 'as_fn_arith () - { - as_val=$(( $* )) - }' -else - as_fn_arith () - { - as_val=`expr "$@" || test $? -eq 1` - } -fi # as_fn_arith - - -if expr a : '\(a\)' >/dev/null 2>&1 && - test "X`expr 00001 : '.*\(...\)'`" = X001; then - as_expr=expr -else - as_expr=false -fi - -if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then - as_basename=basename -else - as_basename=false -fi - -if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then - as_dirname=dirname -else - as_dirname=false -fi - -as_me=`$as_basename -- "$0" || -$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ - X"$0" : 'X\(//\)$' \| \ - X"$0" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X/"$0" | - sed '/^.*\/\([^/][^/]*\)\/*$/{ - s//\1/ - q - } - /^X\/\(\/\/\)$/{ - s//\1/ - q - } - /^X\/\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - -# Avoid depending upon Character Ranges. -as_cr_letters='abcdefghijklmnopqrstuvwxyz' -as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' -as_cr_Letters=$as_cr_letters$as_cr_LETTERS -as_cr_digits='0123456789' -as_cr_alnum=$as_cr_Letters$as_cr_digits - -ECHO_C= ECHO_N= ECHO_T= -case `echo -n x` in #((((( --n*) - case `echo 'xy\c'` in - *c*) ECHO_T=' ';; # ECHO_T is single tab character. - xy) ECHO_C='\c';; - *) echo `echo ksh88 bug on AIX 6.1` > /dev/null - ECHO_T=' ';; - esac;; -*) - ECHO_N='-n';; -esac - -rm -f conf$$ conf$$.exe conf$$.file -if test -d conf$$.dir; then - rm -f conf$$.dir/conf$$.file -else - rm -f conf$$.dir - mkdir conf$$.dir 2>/dev/null -fi -if (echo >conf$$.file) 2>/dev/null; then - if ln -s conf$$.file conf$$ 2>/dev/null; then - as_ln_s='ln -s' - # ... but there are two gotchas: - # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. - # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. - # In both cases, we have to default to `cp -pR'. - ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || - as_ln_s='cp -pR' - elif ln conf$$.file conf$$ 2>/dev/null; then - as_ln_s=ln - else - as_ln_s='cp -pR' - fi -else - as_ln_s='cp -pR' -fi -rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file -rmdir conf$$.dir 2>/dev/null - - -# as_fn_mkdir_p -# ------------- -# Create "$as_dir" as a directory, including parents if necessary. -as_fn_mkdir_p () -{ - - case $as_dir in #( - -*) as_dir=./$as_dir;; - esac - test -d "$as_dir" || eval $as_mkdir_p || { - as_dirs= - while :; do - case $as_dir in #( - *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( - *) as_qdir=$as_dir;; - esac - as_dirs="'$as_qdir' $as_dirs" - as_dir=`$as_dirname -- "$as_dir" || -$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$as_dir" : 'X\(//\)[^/]' \| \ - X"$as_dir" : 'X\(//\)$' \| \ - X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$as_dir" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - test -d "$as_dir" && break - done - test -z "$as_dirs" || eval "mkdir $as_dirs" - } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" - - -} # as_fn_mkdir_p -if mkdir -p . 2>/dev/null; then - as_mkdir_p='mkdir -p "$as_dir"' -else - test -d ./-p && rmdir ./-p - as_mkdir_p=false -fi - - -# as_fn_executable_p FILE -# ----------------------- -# Test if FILE is an executable regular file. 
-as_fn_executable_p () -{ - test -f "$1" && test -x "$1" -} # as_fn_executable_p -as_test_x='test -x' -as_executable_p=as_fn_executable_p - -# Sed expression to map a string onto a valid CPP name. -as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" - -# Sed expression to map a string onto a valid variable name. -as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" - - -exec 6>&1 -## ----------------------------------- ## -## Main body of $CONFIG_STATUS script. ## -## ----------------------------------- ## -_ASEOF -test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -# Save the log message, to keep $0 and so on meaningful, and to -# report actual input values of CONFIG_FILES etc. instead of their -# values after options handling. -ac_log=" -This file was extended by $as_me, which was -generated by GNU Autoconf 2.69. Invocation command line was - - CONFIG_FILES = $CONFIG_FILES - CONFIG_HEADERS = $CONFIG_HEADERS - CONFIG_LINKS = $CONFIG_LINKS - CONFIG_COMMANDS = $CONFIG_COMMANDS - $ $0 $@ - -on `(hostname || uname -n) 2>/dev/null | sed 1q` -" - -_ACEOF - -case $ac_config_files in *" -"*) set x $ac_config_files; shift; ac_config_files=$*;; -esac - - - -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -# Files that config.status was made for. -config_files="$ac_config_files" - -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -ac_cs_usage="\ -\`$as_me' instantiates files and other configuration actions -from templates according to the current configuration. Unless the files -and actions are specified as TAGs, all are instantiated by default. - -Usage: $0 [OPTION]... [TAG]... - - -h, --help print this help, then exit - -V, --version print version number and configuration settings, then exit - --config print configuration, then exit - -q, --quiet, --silent - do not print progress messages - -d, --debug don't remove temporary files - --recheck update $as_me by reconfiguring in the same conditions - --file=FILE[:TEMPLATE] - instantiate the configuration file FILE - -Configuration files: -$config_files - -Report bugs to the package provider." - -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" -ac_cs_version="\\ -config.status -configured by $0, generated by GNU Autoconf 2.69, - with options \\"\$ac_cs_config\\" - -Copyright (C) 2012 Free Software Foundation, Inc. -This config.status script is free software; the Free Software Foundation -gives unlimited permission to copy, distribute and modify it." - -ac_pwd='$ac_pwd' -srcdir='$srcdir' -INSTALL='$INSTALL' -test -n "\$AWK" || AWK=awk -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -# The default lists apply if the user does not specify any file. -ac_need_defaults=: -while test $# != 0 -do - case $1 in - --*=?*) - ac_option=`expr "X$1" : 'X\([^=]*\)='` - ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` - ac_shift=: - ;; - --*=) - ac_option=`expr "X$1" : 'X\([^=]*\)='` - ac_optarg= - ac_shift=: - ;; - *) - ac_option=$1 - ac_optarg=$2 - ac_shift=shift - ;; - esac - - case $ac_option in - # Handling of the options. 
- -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) - ac_cs_recheck=: ;; - --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) - $as_echo "$ac_cs_version"; exit ;; - --config | --confi | --conf | --con | --co | --c ) - $as_echo "$ac_cs_config"; exit ;; - --debug | --debu | --deb | --de | --d | -d ) - debug=: ;; - --file | --fil | --fi | --f ) - $ac_shift - case $ac_optarg in - *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; - '') as_fn_error $? "missing file argument" ;; - esac - as_fn_append CONFIG_FILES " '$ac_optarg'" - ac_need_defaults=false;; - --he | --h | --help | --hel | -h ) - $as_echo "$ac_cs_usage"; exit ;; - -q | -quiet | --quiet | --quie | --qui | --qu | --q \ - | -silent | --silent | --silen | --sile | --sil | --si | --s) - ac_cs_silent=: ;; - - # This is an error. - -*) as_fn_error $? "unrecognized option: \`$1' -Try \`$0 --help' for more information." ;; - - *) as_fn_append ac_config_targets " $1" - ac_need_defaults=false ;; - - esac - shift -done - -ac_configure_extra_args= - -if $ac_cs_silent; then - exec 6>/dev/null - ac_configure_extra_args="$ac_configure_extra_args --silent" -fi - -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -if \$ac_cs_recheck; then - set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion - shift - \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 - CONFIG_SHELL='$SHELL' - export CONFIG_SHELL - exec "\$@" -fi - -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -exec 5>>config.log -{ - echo - sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX -## Running $as_me. ## -_ASBOX - $as_echo "$ac_log" -} >&5 - -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 - -# Handling of arguments. -for ac_config_target in $ac_config_targets -do - case $ac_config_target in - "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;; - - *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;; - esac -done - - -# If the user did not use the arguments to specify the items to instantiate, -# then the envvar interface is used. Set only those that are not. -# We use the long form for the default assignment because of an extremely -# bizarre bug on SunOS 4.1.3. -if $ac_need_defaults; then - test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files -fi - -# Have a temporary directory for convenience. Make it in the build tree -# simply because there is no reason against having it here, and in addition, -# creating and moving files from /tmp can sometimes cause problems. -# Hook for its removal unless debugging. -# Note that there is a small window in which the directory will not be cleaned: -# after its creation but before its name has been assigned to `$tmp'. -$debug || -{ - tmp= ac_tmp= - trap 'exit_status=$? - : "${ac_tmp:=$tmp}" - { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status -' 0 - trap 'as_fn_exit 1' 1 2 13 15 -} -# Create a (secure) tmp directory for tmp files. - -{ - tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && - test -d "$tmp" -} || -{ - tmp=./conf$$-$RANDOM - (umask 077 && mkdir "$tmp") -} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5 -ac_tmp=$tmp - -# Set up the scripts for CONFIG_FILES section. -# No need to generate them if there are no CONFIG_FILES. -# This happens for instance with `./config.status config.h'. 
-if test -n "$CONFIG_FILES"; then - - -ac_cr=`echo X | tr X '\015'` -# On cygwin, bash can eat \r inside `` if the user requested igncr. -# But we know of no other shell where ac_cr would be empty at this -# point, so we can use a bashism as a fallback. -if test "x$ac_cr" = x; then - eval ac_cr=\$\'\\r\' -fi -ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` -if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then - ac_cs_awk_cr='\\r' -else - ac_cs_awk_cr=$ac_cr -fi - -echo 'BEGIN {' >"$ac_tmp/subs1.awk" && -_ACEOF - - -{ - echo "cat >conf$$subs.awk <<_ACEOF" && - echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && - echo "_ACEOF" -} >conf$$subs.sh || - as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 -ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'` -ac_delim='%!_!# ' -for ac_last_try in false false false false false :; do - . ./conf$$subs.sh || - as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 - - ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` - if test $ac_delim_n = $ac_delim_num; then - break - elif $ac_last_try; then - as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 - else - ac_delim="$ac_delim!$ac_delim _$ac_delim!! " - fi -done -rm -f conf$$subs.sh - -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK && -_ACEOF -sed -n ' -h -s/^/S["/; s/!.*/"]=/ -p -g -s/^[^!]*!// -:repl -t repl -s/'"$ac_delim"'$// -t delim -:nl -h -s/\(.\{148\}\)..*/\1/ -t more1 -s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ -p -n -b repl -:more1 -s/["\\]/\\&/g; s/^/"/; s/$/"\\/ -p -g -s/.\{148\}// -t nl -:delim -h -s/\(.\{148\}\)..*/\1/ -t more2 -s/["\\]/\\&/g; s/^/"/; s/$/"/ -p -b -:more2 -s/["\\]/\\&/g; s/^/"/; s/$/"\\/ -p -g -s/.\{148\}// -t delim -' >$CONFIG_STATUS || ac_write_fail=1 -rm -f conf$$subs.awk -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -_ACAWK -cat >>"\$ac_tmp/subs1.awk" <<_ACAWK && - for (key in S) S_is_set[key] = 1 - FS = "" - -} -{ - line = $ 0 - nfields = split(line, field, "@") - substed = 0 - len = length(field[1]) - for (i = 2; i < nfields; i++) { - key = field[i] - keylen = length(key) - if (S_is_set[key]) { - value = S[key] - line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) - len += length(value) + length(field[++i]) - substed = 1 - } else - len += 1 + keylen - } - - print line -} - -_ACAWK -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then - sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" -else - cat -fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \ - || as_fn_error $? "could not setup config files machinery" "$LINENO" 5 -_ACEOF - -# VPATH may cause trouble with some makes, so we remove sole $(srcdir), -# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and -# trailing colons and then remove the whole line if VPATH becomes empty -# (actually we leave an empty line to preserve line numbers). -if test "x$srcdir" = x.; then - ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{ -h -s/// -s/^/:/ -s/[ ]*$/:/ -s/:\$(srcdir):/:/g -s/:\${srcdir}:/:/g -s/:@srcdir@:/:/g -s/^:*// -s/:*$// -x -s/\(=[ ]*\).*/\1/ -G -s/\n// -s/^[^=]*=[ ]*$// -}' -fi - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -fi # test -n "$CONFIG_FILES" - - -eval set X " :F $CONFIG_FILES " -shift -for ac_tag -do - case $ac_tag in - :[FHLC]) ac_mode=$ac_tag; continue;; - esac - case $ac_mode$ac_tag in - :[FHL]*:*);; - :L* | :C*:*) as_fn_error $? 
"invalid tag \`$ac_tag'" "$LINENO" 5;; - :[FH]-) ac_tag=-:-;; - :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; - esac - ac_save_IFS=$IFS - IFS=: - set x $ac_tag - IFS=$ac_save_IFS - shift - ac_file=$1 - shift - - case $ac_mode in - :L) ac_source=$1;; - :[FH]) - ac_file_inputs= - for ac_f - do - case $ac_f in - -) ac_f="$ac_tmp/stdin";; - *) # Look for the file first in the build tree, then in the source tree - # (if the path is not absolute). The absolute path cannot be DOS-style, - # because $ac_f cannot contain `:'. - test -f "$ac_f" || - case $ac_f in - [\\/$]*) false;; - *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; - esac || - as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;; - esac - case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac - as_fn_append ac_file_inputs " '$ac_f'" - done - - # Let's still pretend it is `configure' which instantiates (i.e., don't - # use $as_me), people would be surprised to read: - # /* config.h. Generated by config.status. */ - configure_input='Generated from '` - $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' - `' by configure.' - if test x"$ac_file" != x-; then - configure_input="$ac_file. $configure_input" - { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 -$as_echo "$as_me: creating $ac_file" >&6;} - fi - # Neutralize special characters interpreted by sed in replacement strings. - case $configure_input in #( - *\&* | *\|* | *\\* ) - ac_sed_conf_input=`$as_echo "$configure_input" | - sed 's/[\\\\&|]/\\\\&/g'`;; #( - *) ac_sed_conf_input=$configure_input;; - esac - - case $ac_tag in - *:-:* | *:-) cat >"$ac_tmp/stdin" \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; - esac - ;; - esac - - ac_dir=`$as_dirname -- "$ac_file" || -$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$ac_file" : 'X\(//\)[^/]' \| \ - X"$ac_file" : 'X\(//\)$' \| \ - X"$ac_file" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$ac_file" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - as_dir="$ac_dir"; as_fn_mkdir_p - ac_builddir=. - -case "$ac_dir" in -.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; -*) - ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` - # A ".." for each directory in $ac_dir_suffix. - ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` - case $ac_top_builddir_sub in - "") ac_top_builddir_sub=. ac_top_build_prefix= ;; - *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; - esac ;; -esac -ac_abs_top_builddir=$ac_pwd -ac_abs_builddir=$ac_pwd$ac_dir_suffix -# for backward compatibility: -ac_top_builddir=$ac_top_build_prefix - -case $srcdir in - .) # We are building in place. - ac_srcdir=. - ac_top_srcdir=$ac_top_builddir_sub - ac_abs_top_srcdir=$ac_pwd ;; - [\\/]* | ?:[\\/]* ) # Absolute name. - ac_srcdir=$srcdir$ac_dir_suffix; - ac_top_srcdir=$srcdir - ac_abs_top_srcdir=$srcdir ;; - *) # Relative name. - ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix - ac_top_srcdir=$ac_top_build_prefix$srcdir - ac_abs_top_srcdir=$ac_pwd/$srcdir ;; -esac -ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix - - - case $ac_mode in - :F) - # - # CONFIG_FILE - # - - case $INSTALL in - [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;; - *) ac_INSTALL=$ac_top_build_prefix$INSTALL ;; - esac -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -# If the template does not know about datarootdir, expand it. 
-# FIXME: This hack should be removed a few years after 2.60. -ac_datarootdir_hack=; ac_datarootdir_seen= -ac_sed_dataroot=' -/datarootdir/ { - p - q -} -/@datadir@/p -/@docdir@/p -/@infodir@/p -/@localedir@/p -/@mandir@/p' -case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in -*datarootdir*) ac_datarootdir_seen=yes;; -*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 -$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 - ac_datarootdir_hack=' - s&@datadir@&$datadir&g - s&@docdir@&$docdir&g - s&@infodir@&$infodir&g - s&@localedir@&$localedir&g - s&@mandir@&$mandir&g - s&\\\${datarootdir}&$datarootdir&g' ;; -esac -_ACEOF - -# Neutralize VPATH when `$srcdir' = `.'. -# Shell code in configure.ac might set extrasub. -# FIXME: do we really want to maintain this feature? -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -ac_sed_extra="$ac_vpsub -$extrasub -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -:t -/@[a-zA-Z_][a-zA-Z_0-9]*@/!b -s|@configure_input@|$ac_sed_conf_input|;t t -s&@top_builddir@&$ac_top_builddir_sub&;t t -s&@top_build_prefix@&$ac_top_build_prefix&;t t -s&@srcdir@&$ac_srcdir&;t t -s&@abs_srcdir@&$ac_abs_srcdir&;t t -s&@top_srcdir@&$ac_top_srcdir&;t t -s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t -s&@builddir@&$ac_builddir&;t t -s&@abs_builddir@&$ac_abs_builddir&;t t -s&@abs_top_builddir@&$ac_abs_top_builddir&;t t -s&@INSTALL@&$ac_INSTALL&;t t -$ac_datarootdir_hack -" -eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \ - >$ac_tmp/out || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - -test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && - { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } && - { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \ - "$ac_tmp/out"`; test -z "$ac_out"; } && - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' -which seems to be undefined. Please make sure it is defined" >&5 -$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' -which seems to be undefined. Please make sure it is defined" >&2;} - - rm -f "$ac_tmp/stdin" - case $ac_file in - -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";; - *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";; - esac \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - ;; - - - - esac - -done # for ac_tag - - -as_fn_exit 0 -_ACEOF -ac_clean_files=$ac_clean_files_save - -test $ac_write_fail = 0 || - as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5 - - -# configure is writing to config.log, and then calls config.status. -# config.status does its own redirection, appending to config.log. -# Unfortunately, on DOS this fails, as config.log is still kept open -# by configure, so config.status won't be able to write to it; its -# output is simply discarded. So we exec the FD to /dev/null, -# effectively closing config.log, so it can be properly (re)opened and -# appended to by config.status. When coming back to configure, we -# need to make the FD available again. 
-if test "$no_create" != yes; then - ac_cs_success=: - ac_config_status_args= - test "$silent" = yes && - ac_config_status_args="$ac_config_status_args --quiet" - exec 5>/dev/null - $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false - exec 5>>config.log - # Use ||, not &&, to avoid exiting from the if with $? = 1, which - # would make configure fail if this is the last instruction. - $ac_cs_success || as_fn_exit 1 -fi -if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 -$as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} -fi diff --git a/vendor/fake-enum/configure.ac b/vendor/fake-enum/configure.ac deleted file mode 100644 index 714b1749..00000000 --- a/vendor/fake-enum/configure.ac +++ /dev/null @@ -1,23 +0,0 @@ -AC_INIT() - -AC_CONFIG_MACRO_DIRS([m4/]) - -AC_CANONICAL_HOST - -AC_PROG_CC -AC_PROG_INSTALL -LCRUST_PROG_RUSTC - - -AC_MSG_CHECKING([if $RUSTC supports edition 2018]) -LCRUST_CHECK_EDITION([2018],[ - AC_MSG_RESULT([no]) - AC_MSG_ERROR([Cannot target --edition 2018 with $RUSTC]) -]) - -AC_MSG_RESULT([yes]) - - -AC_CONFIG_FILES([Makefile]) - -AC_OUTPUT \ No newline at end of file diff --git a/vendor/fake-enum/install-sh b/vendor/fake-enum/install-sh deleted file mode 100755 index ec298b53..00000000 --- a/vendor/fake-enum/install-sh +++ /dev/null @@ -1,541 +0,0 @@ -#!/bin/sh -# install - install a program, script, or datafile - -scriptversion=2020-11-14.01; # UTC - -# This originates from X11R5 (mit/util/scripts/install.sh), which was -# later released in X11R6 (xc/config/util/install.sh) with the -# following copyright and license. -# -# Copyright (C) 1994 X Consortium -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN -# AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC- -# TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -# Except as contained in this notice, the name of the X Consortium shall not -# be used in advertising or otherwise to promote the sale, use or other deal- -# ings in this Software without prior written authorization from the X Consor- -# tium. -# -# -# FSF changes to this file are in the public domain. -# -# Calling this script install-sh is preferred over install.sh, to prevent -# 'make' implicit rules from creating a file called install from it -# when there is no Makefile. -# -# This script is compatible with the BSD install script, but was written -# from scratch. - -tab=' ' -nl=' -' -IFS=" $tab$nl" - -# Set DOITPROG to "echo" to test this script. 
- -doit=${DOITPROG-} -doit_exec=${doit:-exec} - -# Put in absolute file names if you don't have them in your path; -# or use environment vars. - -chgrpprog=${CHGRPPROG-chgrp} -chmodprog=${CHMODPROG-chmod} -chownprog=${CHOWNPROG-chown} -cmpprog=${CMPPROG-cmp} -cpprog=${CPPROG-cp} -mkdirprog=${MKDIRPROG-mkdir} -mvprog=${MVPROG-mv} -rmprog=${RMPROG-rm} -stripprog=${STRIPPROG-strip} - -posix_mkdir= - -# Desired mode of installed file. -mode=0755 - -# Create dirs (including intermediate dirs) using mode 755. -# This is like GNU 'install' as of coreutils 8.32 (2020). -mkdir_umask=22 - -backupsuffix= -chgrpcmd= -chmodcmd=$chmodprog -chowncmd= -mvcmd=$mvprog -rmcmd="$rmprog -f" -stripcmd= - -src= -dst= -dir_arg= -dst_arg= - -copy_on_change=false -is_target_a_directory=possibly - -usage="\ -Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE - or: $0 [OPTION]... SRCFILES... DIRECTORY - or: $0 [OPTION]... -t DIRECTORY SRCFILES... - or: $0 [OPTION]... -d DIRECTORIES... - -In the 1st form, copy SRCFILE to DSTFILE. -In the 2nd and 3rd, copy all SRCFILES to DIRECTORY. -In the 4th, create DIRECTORIES. - -Options: - --help display this help and exit. - --version display version info and exit. - - -c (ignored) - -C install only if different (preserve data modification time) - -d create directories instead of installing files. - -g GROUP $chgrpprog installed files to GROUP. - -m MODE $chmodprog installed files to MODE. - -o USER $chownprog installed files to USER. - -p pass -p to $cpprog. - -s $stripprog installed files. - -S SUFFIX attempt to back up existing files, with suffix SUFFIX. - -t DIRECTORY install into DIRECTORY. - -T report an error if DSTFILE is a directory. - -Environment variables override the default commands: - CHGRPPROG CHMODPROG CHOWNPROG CMPPROG CPPROG MKDIRPROG MVPROG - RMPROG STRIPPROG - -By default, rm is invoked with -f; when overridden with RMPROG, -it's up to you to specify -f if you want it. - -If -S is not specified, no backups are attempted. - -Email bug reports to bug-automake@gnu.org. -Automake home page: https://www.gnu.org/software/automake/ -" - -while test $# -ne 0; do - case $1 in - -c) ;; - - -C) copy_on_change=true;; - - -d) dir_arg=true;; - - -g) chgrpcmd="$chgrpprog $2" - shift;; - - --help) echo "$usage"; exit $?;; - - -m) mode=$2 - case $mode in - *' '* | *"$tab"* | *"$nl"* | *'*'* | *'?'* | *'['*) - echo "$0: invalid mode: $mode" >&2 - exit 1;; - esac - shift;; - - -o) chowncmd="$chownprog $2" - shift;; - - -p) cpprog="$cpprog -p";; - - -s) stripcmd=$stripprog;; - - -S) backupsuffix="$2" - shift;; - - -t) - is_target_a_directory=always - dst_arg=$2 - # Protect names problematic for 'test' and other utilities. - case $dst_arg in - -* | [=\(\)!]) dst_arg=./$dst_arg;; - esac - shift;; - - -T) is_target_a_directory=never;; - - --version) echo "$0 $scriptversion"; exit $?;; - - --) shift - break;; - - -*) echo "$0: invalid option: $1" >&2 - exit 1;; - - *) break;; - esac - shift -done - -# We allow the use of options -d and -T together, by making -d -# take the precedence; this is for compatibility with GNU install. - -if test -n "$dir_arg"; then - if test -n "$dst_arg"; then - echo "$0: target directory not allowed when installing a directory." >&2 - exit 1 - fi -fi - -if test $# -ne 0 && test -z "$dir_arg$dst_arg"; then - # When -d is used, all remaining arguments are directories to create. - # When -t is used, the destination is already specified. - # Otherwise, the last argument is the destination. Remove it from $@. 
- for arg - do - if test -n "$dst_arg"; then - # $@ is not empty: it contains at least $arg. - set fnord "$@" "$dst_arg" - shift # fnord - fi - shift # arg - dst_arg=$arg - # Protect names problematic for 'test' and other utilities. - case $dst_arg in - -* | [=\(\)!]) dst_arg=./$dst_arg;; - esac - done -fi - -if test $# -eq 0; then - if test -z "$dir_arg"; then - echo "$0: no input file specified." >&2 - exit 1 - fi - # It's OK to call 'install-sh -d' without argument. - # This can happen when creating conditional directories. - exit 0 -fi - -if test -z "$dir_arg"; then - if test $# -gt 1 || test "$is_target_a_directory" = always; then - if test ! -d "$dst_arg"; then - echo "$0: $dst_arg: Is not a directory." >&2 - exit 1 - fi - fi -fi - -if test -z "$dir_arg"; then - do_exit='(exit $ret); exit $ret' - trap "ret=129; $do_exit" 1 - trap "ret=130; $do_exit" 2 - trap "ret=141; $do_exit" 13 - trap "ret=143; $do_exit" 15 - - # Set umask so as not to create temps with too-generous modes. - # However, 'strip' requires both read and write access to temps. - case $mode in - # Optimize common cases. - *644) cp_umask=133;; - *755) cp_umask=22;; - - *[0-7]) - if test -z "$stripcmd"; then - u_plus_rw= - else - u_plus_rw='% 200' - fi - cp_umask=`expr '(' 777 - $mode % 1000 ')' $u_plus_rw`;; - *) - if test -z "$stripcmd"; then - u_plus_rw= - else - u_plus_rw=,u+rw - fi - cp_umask=$mode$u_plus_rw;; - esac -fi - -for src -do - # Protect names problematic for 'test' and other utilities. - case $src in - -* | [=\(\)!]) src=./$src;; - esac - - if test -n "$dir_arg"; then - dst=$src - dstdir=$dst - test -d "$dstdir" - dstdir_status=$? - # Don't chown directories that already exist. - if test $dstdir_status = 0; then - chowncmd="" - fi - else - - # Waiting for this to be detected by the "$cpprog $src $dsttmp" command - # might cause directories to be created, which would be especially bad - # if $src (and thus $dsttmp) contains '*'. - if test ! -f "$src" && test ! -d "$src"; then - echo "$0: $src does not exist." >&2 - exit 1 - fi - - if test -z "$dst_arg"; then - echo "$0: no destination specified." >&2 - exit 1 - fi - dst=$dst_arg - - # If destination is a directory, append the input filename. - if test -d "$dst"; then - if test "$is_target_a_directory" = never; then - echo "$0: $dst_arg: Is a directory" >&2 - exit 1 - fi - dstdir=$dst - dstbase=`basename "$src"` - case $dst in - */) dst=$dst$dstbase;; - *) dst=$dst/$dstbase;; - esac - dstdir_status=0 - else - dstdir=`dirname "$dst"` - test -d "$dstdir" - dstdir_status=$? - fi - fi - - case $dstdir in - */) dstdirslash=$dstdir;; - *) dstdirslash=$dstdir/;; - esac - - obsolete_mkdir_used=false - - if test $dstdir_status != 0; then - case $posix_mkdir in - '') - # With -d, create the new directory with the user-specified mode. - # Otherwise, rely on $mkdir_umask. - if test -n "$dir_arg"; then - mkdir_mode=-m$mode - else - mkdir_mode= - fi - - posix_mkdir=false - # The $RANDOM variable is not portable (e.g., dash). Use it - # here however when possible just to lower collision chance. - tmpdir=${TMPDIR-/tmp}/ins$RANDOM-$$ - - trap ' - ret=$? - rmdir "$tmpdir/a/b" "$tmpdir/a" "$tmpdir" 2>/dev/null - exit $ret - ' 0 - - # Because "mkdir -p" follows existing symlinks and we likely work - # directly in world-writeable /tmp, make sure that the '$tmpdir' - # directory is successfully created first before we actually test - # 'mkdir -p'. 
- if (umask $mkdir_umask && - $mkdirprog $mkdir_mode "$tmpdir" && - exec $mkdirprog $mkdir_mode -p -- "$tmpdir/a/b") >/dev/null 2>&1 - then - if test -z "$dir_arg" || { - # Check for POSIX incompatibilities with -m. - # HP-UX 11.23 and IRIX 6.5 mkdir -m -p sets group- or - # other-writable bit of parent directory when it shouldn't. - # FreeBSD 6.1 mkdir -m -p sets mode of existing directory. - test_tmpdir="$tmpdir/a" - ls_ld_tmpdir=`ls -ld "$test_tmpdir"` - case $ls_ld_tmpdir in - d????-?r-*) different_mode=700;; - d????-?--*) different_mode=755;; - *) false;; - esac && - $mkdirprog -m$different_mode -p -- "$test_tmpdir" && { - ls_ld_tmpdir_1=`ls -ld "$test_tmpdir"` - test "$ls_ld_tmpdir" = "$ls_ld_tmpdir_1" - } - } - then posix_mkdir=: - fi - rmdir "$tmpdir/a/b" "$tmpdir/a" "$tmpdir" - else - # Remove any dirs left behind by ancient mkdir implementations. - rmdir ./$mkdir_mode ./-p ./-- "$tmpdir" 2>/dev/null - fi - trap '' 0;; - esac - - if - $posix_mkdir && ( - umask $mkdir_umask && - $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir" - ) - then : - else - - # mkdir does not conform to POSIX, - # or it failed possibly due to a race condition. Create the - # directory the slow way, step by step, checking for races as we go. - - case $dstdir in - /*) prefix='/';; - [-=\(\)!]*) prefix='./';; - *) prefix='';; - esac - - oIFS=$IFS - IFS=/ - set -f - set fnord $dstdir - shift - set +f - IFS=$oIFS - - prefixes= - - for d - do - test X"$d" = X && continue - - prefix=$prefix$d - if test -d "$prefix"; then - prefixes= - else - if $posix_mkdir; then - (umask $mkdir_umask && - $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break - # Don't fail if two instances are running concurrently. - test -d "$prefix" || exit 1 - else - case $prefix in - *\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;; - *) qprefix=$prefix;; - esac - prefixes="$prefixes '$qprefix'" - fi - fi - prefix=$prefix/ - done - - if test -n "$prefixes"; then - # Don't fail if two instances are running concurrently. - (umask $mkdir_umask && - eval "\$doit_exec \$mkdirprog $prefixes") || - test -d "$dstdir" || exit 1 - obsolete_mkdir_used=true - fi - fi - fi - - if test -n "$dir_arg"; then - { test -z "$chowncmd" || $doit $chowncmd "$dst"; } && - { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } && - { test "$obsolete_mkdir_used$chowncmd$chgrpcmd" = false || - test -z "$chmodcmd" || $doit $chmodcmd $mode "$dst"; } || exit 1 - else - - # Make a couple of temp file names in the proper directory. - dsttmp=${dstdirslash}_inst.$$_ - rmtmp=${dstdirslash}_rm.$$_ - - # Trap to clean up those temp files at exit. - trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0 - - # Copy the file name to the temp name. - (umask $cp_umask && - { test -z "$stripcmd" || { - # Create $dsttmp read-write so that cp doesn't create it read-only, - # which would cause strip to fail. - if test -z "$doit"; then - : >"$dsttmp" # No need to fork-exec 'touch'. - else - $doit touch "$dsttmp" - fi - } - } && - $doit_exec $cpprog "$src" "$dsttmp") && - - # and set any options; do chmod last to preserve setuid bits. - # - # If any of these fail, we abort the whole thing. If we want to - # ignore errors from any of these, just make sure not to ignore - # errors from the above "$doit $cpprog $src $dsttmp" command. 
- # - { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } && - { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } && - { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } && - { test -z "$chmodcmd" || $doit $chmodcmd $mode "$dsttmp"; } && - - # If -C, don't bother to copy if it wouldn't change the file. - if $copy_on_change && - old=`LC_ALL=C ls -dlL "$dst" 2>/dev/null` && - new=`LC_ALL=C ls -dlL "$dsttmp" 2>/dev/null` && - set -f && - set X $old && old=:$2:$4:$5:$6 && - set X $new && new=:$2:$4:$5:$6 && - set +f && - test "$old" = "$new" && - $cmpprog "$dst" "$dsttmp" >/dev/null 2>&1 - then - rm -f "$dsttmp" - else - # If $backupsuffix is set, and the file being installed - # already exists, attempt a backup. Don't worry if it fails, - # e.g., if mv doesn't support -f. - if test -n "$backupsuffix" && test -f "$dst"; then - $doit $mvcmd -f "$dst" "$dst$backupsuffix" 2>/dev/null - fi - - # Rename the file to the real destination. - $doit $mvcmd -f "$dsttmp" "$dst" 2>/dev/null || - - # The rename failed, perhaps because mv can't rename something else - # to itself, or perhaps because mv is so ancient that it does not - # support -f. - { - # Now remove or move aside any old file at destination location. - # We try this two ways since rm can't unlink itself on some - # systems and the destination file might be busy for other - # reasons. In this case, the final cleanup might fail but the new - # file should still install successfully. - { - test ! -f "$dst" || - $doit $rmcmd "$dst" 2>/dev/null || - { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null && - { $doit $rmcmd "$rmtmp" 2>/dev/null; :; } - } || - { echo "$0: cannot unlink or rename $dst" >&2 - (exit 1); exit 1 - } - } && - - # Now rename the file to the real destination. - $doit $mvcmd "$dsttmp" "$dst" - } - fi || exit 1 - - trap '' 0 - fi -done - -# Local variables: -# eval: (add-hook 'before-save-hook 'time-stamp) -# time-stamp-start: "scriptversion=" -# time-stamp-format: "%:y-%02m-%02d.%02H" -# time-stamp-time-zone: "UTC0" -# time-stamp-end: "; # UTC" -# End: diff --git a/vendor/fake-enum/m4/ax_prog_cc_for_build.m4 b/vendor/fake-enum/m4/ax_prog_cc_for_build.m4 deleted file mode 100644 index d53e4f94..00000000 --- a/vendor/fake-enum/m4/ax_prog_cc_for_build.m4 +++ /dev/null @@ -1,139 +0,0 @@ -# =========================================================================== -# https://www.gnu.org/software/autoconf-archive/ax_prog_cc_for_build.html -# =========================================================================== -# -# SYNOPSIS -# -# AX_PROG_CC_FOR_BUILD -# -# DESCRIPTION -# -# This macro searches for a C compiler that generates native executables, -# that is a C compiler that surely is not a cross-compiler. This can be -# useful if you have to generate source code at compile-time like for -# example GCC does. -# -# The macro sets the CC_FOR_BUILD and CPP_FOR_BUILD macros to anything -# needed to compile or link (CC_FOR_BUILD) and preprocess (CPP_FOR_BUILD). -# The value of these variables can be overridden by the user by specifying -# a compiler with an environment variable (like you do for standard CC). -# -# It also sets BUILD_EXEEXT and BUILD_OBJEXT to the executable and object -# file extensions for the build platform, and GCC_FOR_BUILD to `yes' if -# the compiler we found is GCC. All these variables but GCC_FOR_BUILD are -# substituted in the Makefile. 
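A hedged sketch of how the macro described above is typically consumed (project, file, and target names are illustrative, not from this repository); after the macro runs, the substituted CC_FOR_BUILD, CFLAGS_FOR_BUILD and BUILD_EXEEXT values let a Makefile build a helper that must run on the build machine even during a cross-compile.

    dnl configure.ac (illustrative)
    AC_INIT([mytool], [0.1])
    AC_CONFIG_MACRO_DIRS([m4/])
    AC_PROG_CC
    AX_PROG_CC_FOR_BUILD
    AC_CONFIG_FILES([Makefile])
    AC_OUTPUT

    # Makefile fragment (illustrative), assuming it pulls in the usual
    # substitutions: CC_FOR_BUILD = @CC_FOR_BUILD@,
    # CFLAGS_FOR_BUILD = @CFLAGS_FOR_BUILD@, BUILD_EXEEXT = @BUILD_EXEEXT@.
    codegen$(BUILD_EXEEXT): codegen.c
		$(CC_FOR_BUILD) $(CFLAGS_FOR_BUILD) -o $@ codegen.c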
-# -# LICENSE -# -# Copyright (c) 2008 Paolo Bonzini -# -# Copying and distribution of this file, with or without modification, are -# permitted in any medium without royalty provided the copyright notice -# and this notice are preserved. This file is offered as-is, without any -# warranty. - -#serial 18 - -AU_ALIAS([AC_PROG_CC_FOR_BUILD], [AX_PROG_CC_FOR_BUILD]) -AC_DEFUN([AX_PROG_CC_FOR_BUILD], [dnl -AC_REQUIRE([AC_PROG_CC])dnl -AC_REQUIRE([AC_PROG_CPP])dnl -AC_REQUIRE([AC_CANONICAL_BUILD])dnl - -dnl Use the standard macros, but make them use other variable names -dnl -pushdef([ac_cv_prog_CPP], ac_cv_build_prog_CPP)dnl -pushdef([ac_cv_prog_cc_c89], ac_cv_build_prog_cc_c89)dnl -pushdef([ac_cv_prog_gcc], ac_cv_build_prog_gcc)dnl -pushdef([ac_cv_prog_cc_works], ac_cv_build_prog_cc_works)dnl -pushdef([ac_cv_prog_cc_cross], ac_cv_build_prog_cc_cross)dnl -pushdef([ac_cv_prog_cc_g], ac_cv_build_prog_cc_g)dnl -pushdef([ac_cv_c_compiler_gnu], ac_cv_build_c_compiler_gnu)dnl -pushdef([ac_cv_exeext], ac_cv_build_exeext)dnl -pushdef([ac_cv_objext], ac_cv_build_objext)dnl -pushdef([ac_exeext], ac_build_exeext)dnl -pushdef([ac_objext], ac_build_objext)dnl -pushdef([CC], CC_FOR_BUILD)dnl -pushdef([CPP], CPP_FOR_BUILD)dnl -pushdef([GCC], GCC_FOR_BUILD)dnl -pushdef([CFLAGS], CFLAGS_FOR_BUILD)dnl -pushdef([CPPFLAGS], CPPFLAGS_FOR_BUILD)dnl -pushdef([EXEEXT], BUILD_EXEEXT)dnl -pushdef([LDFLAGS], LDFLAGS_FOR_BUILD)dnl -pushdef([OBJEXT], BUILD_OBJEXT)dnl -pushdef([host], build)dnl -pushdef([host_alias], build_alias)dnl -pushdef([host_cpu], build_cpu)dnl -pushdef([host_vendor], build_vendor)dnl -pushdef([host_os], build_os)dnl -pushdef([ac_cv_host], ac_cv_build)dnl -pushdef([ac_cv_host_alias], ac_cv_build_alias)dnl -pushdef([ac_cv_host_cpu], ac_cv_build_cpu)dnl -pushdef([ac_cv_host_vendor], ac_cv_build_vendor)dnl -pushdef([ac_cv_host_os], ac_cv_build_os)dnl -pushdef([ac_tool_prefix], ac_build_tool_prefix)dnl -pushdef([am_cv_CC_dependencies_compiler_type], am_cv_build_CC_dependencies_compiler_type)dnl -pushdef([am_cv_prog_cc_c_o], am_cv_build_prog_cc_c_o)dnl -pushdef([cross_compiling], cross_compiling_build)dnl - -cross_compiling_build=no - -ac_build_tool_prefix= -AS_IF([test -n "$build"], [ac_build_tool_prefix="$build-"], - [test -n "$build_alias"],[ac_build_tool_prefix="$build_alias-"]) - -AC_LANG_PUSH([C]) -AC_PROG_CC -_AC_COMPILER_EXEEXT -_AC_COMPILER_OBJEXT -AC_PROG_CPP - -dnl Restore the old definitions -dnl -popdef([cross_compiling])dnl -popdef([am_cv_prog_cc_c_o])dnl -popdef([am_cv_CC_dependencies_compiler_type])dnl -popdef([ac_tool_prefix])dnl -popdef([ac_cv_host_os])dnl -popdef([ac_cv_host_vendor])dnl -popdef([ac_cv_host_cpu])dnl -popdef([ac_cv_host_alias])dnl -popdef([ac_cv_host])dnl -popdef([host_os])dnl -popdef([host_vendor])dnl -popdef([host_cpu])dnl -popdef([host_alias])dnl -popdef([host])dnl -popdef([OBJEXT])dnl -popdef([LDFLAGS])dnl -popdef([EXEEXT])dnl -popdef([CPPFLAGS])dnl -popdef([CFLAGS])dnl -popdef([GCC])dnl -popdef([CPP])dnl -popdef([CC])dnl -popdef([ac_objext])dnl -popdef([ac_exeext])dnl -popdef([ac_cv_objext])dnl -popdef([ac_cv_exeext])dnl -popdef([ac_cv_c_compiler_gnu])dnl -popdef([ac_cv_prog_cc_g])dnl -popdef([ac_cv_prog_cc_cross])dnl -popdef([ac_cv_prog_cc_works])dnl -popdef([ac_cv_prog_cc_c89])dnl -popdef([ac_cv_prog_gcc])dnl -popdef([ac_cv_prog_CPP])dnl - -dnl restore global variables ac_ext, ac_cpp, ac_compile, -dnl ac_link, ac_compiler_gnu (dependant on the current -dnl language after popping): -AC_LANG_POP([C]) - -dnl Finally, set Makefile variables -dnl 
-AC_SUBST(BUILD_EXEEXT)dnl -AC_SUBST(BUILD_OBJEXT)dnl -AC_SUBST([CFLAGS_FOR_BUILD])dnl -AC_SUBST([CPPFLAGS_FOR_BUILD])dnl -AC_SUBST([LDFLAGS_FOR_BUILD])dnl -]) \ No newline at end of file diff --git a/vendor/fake-enum/m4/bindgen.m4 b/vendor/fake-enum/m4/bindgen.m4 deleted file mode 100644 index c9b43fbe..00000000 --- a/vendor/fake-enum/m4/bindgen.m4 +++ /dev/null @@ -1,5 +0,0 @@ -AC_DEFUN([LCRUST_PROG_BINDGEN],[ - AC_REQUIRE([AC_PROG_CC]) - AC_ARG_VAR([BINDGEN]) - AC_PATH_PROG([BINDGEN],[bindgen]) -]) \ No newline at end of file diff --git a/vendor/fake-enum/m4/build_std.m4 b/vendor/fake-enum/m4/build_std.m4 deleted file mode 100644 index c75b6e41..00000000 --- a/vendor/fake-enum/m4/build_std.m4 +++ /dev/null @@ -1,38 +0,0 @@ - -AC_DEFUN([LCRUST_ENABLE_BUILD_STD],[ - AC_REQUIRE([LCRUST_PROG_RUSTC]) - AC_REQUIRE([LCRUST_RUSTC_VERSION]) - - AC_ARG_ENABLE([build-std],[ - build_std=$enableval - ],[ - build_std=no - ]) - - if test x$build_std != xno - then - AC_MSG_NOTICE([Checking how to build-std with $RUSTC... ]) - if x$rustc_is_lccc != xno - then - # $RUSTC is lccc, do what we want. - _buildstd_workspace_path="$($RUSTC -Z autotools-hacks --print build-std-workspace-path)" - _buildstd_features="" - _buildstd_env="" - else - _rustc_sysroot="$($RUSTC --print sysroot || echo no)" - if test "$_rustc_sysroot" != "no" - then - _buildstd_workspace_path="$_rustc_sysroot/lib/rustlib/src/rust" - if test -d "$_buildstd_workspace_path" - then - _buildstd_features="backtrace" - _buildstd_env="RUSTC_BOOTSTRAP=1" - else - _buildstd_output="$($RUSTC --print build-std-info || echo no)" - - fi - fi - fi - fi - -]) \ No newline at end of file diff --git a/vendor/fake-enum/m4/lcrust_checks.m4 b/vendor/fake-enum/m4/lcrust_checks.m4 deleted file mode 100644 index 25e46850..00000000 --- a/vendor/fake-enum/m4/lcrust_checks.m4 +++ /dev/null @@ -1,48 +0,0 @@ - -AC_DEFUN([LCRUST_CHECK_EDITION],[ - SAVE_RUSTFLAGS="$RUSTFLAGS" - RUSTFLAGS="$RUSTFLAGS --edition $1" - LCRUST_TRY_COMPILE([],[ - rust_edition=$1 - ], - RUSTFLAGS="$SAVE_RUSTFLAGS" - rust_edition=no - $2 - ) -]) - -AC_DEFUN([LCRUST_TRY_COMPILE_WITH_FEATURES],[ - LCRUST_TRY_COMPILE([$1], - $3 - , - [for feature in $2 do - do - LCRUST_TRY_COMPILE([#![feature($feature)] $1],[ - try_compile_result=$feature - feature_$feature=yes - RUSTFLAGS="$RUSTFLAGS --cfg feature_$feature" - $3 - ]) - if test x$try_compile_result != xno - then - break - fi - done - if test x$try_compile_result \= xno - then - $4 - fi - ] - ) -]) - -AC_DEFUN([LCRUST_CHECK_TOOL],[ - LCRUST_TRY_COMPILE([#![$1 :: $2]],[ - tool_$1=yes - $4 - ],[ - LCRUST_TRY_COMPILE([#![register_tool($1)] $![$1 :: $2]],[ - tool_$1=register - ]) - ]) -]) \ No newline at end of file diff --git a/vendor/fake-enum/m4/lcrust_prog_rustc.m4 b/vendor/fake-enum/m4/lcrust_prog_rustc.m4 deleted file mode 100644 index cca98645..00000000 --- a/vendor/fake-enum/m4/lcrust_prog_rustc.m4 +++ /dev/null @@ -1,585 +0,0 @@ -# Copyright 2021 Connor Horman - -# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
- -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -AC_DEFUN([LCRUST_PROG_RUSTC],[ - AC_REQUIRE([AC_PROG_CC]) - AC_REQUIRE([AC_CANONICAL_HOST]) - AC_ARG_VAR(RUSTC,[Rust compiler to use]) - AC_ARG_VAR(RUSTFLAGS,[Flags to pass to the rust compiler]) - - - if test "$RUSTFLAGS" \= "" - then - RUSTFLAGS="-C opt-level=2 -g" - fi - - if test x$host_alias != x - then - AC_PATH_PROGS(RUSTC,[rustc lcrustc $host-gccrs]) - else - AC_PATH_PROGS(RUSTC,[rustc lcrustc $host-gccrs gccrs]) - fi - - if test "$RUSTC" \= "" - then - AC_MSG_ERROR([Failed to find a rust compiler. Install rustc in PATH, or set RUSTC to a suitable compiler]) - fi - AC_MSG_CHECKING([how to compile for $host with $RUSTC]) - case x$RUSTC in - x${host_alias}-* | x*[\\/]${host_alias}-* ) - rustc_host_target=${host_alias} - AC_MSG_RESULT([Not needed]) - ;; - x${host}-* | x*[\\/]${host}-* ) - rustc_host_target=${host} - AC_MSG_RESULT([Not needed]) - ;; - - x* ) - SAVE_RUSTFLAGS="$RUSTFLAGS" - if test x$host_alias != x - then - RUSTFLAGS="$RUSTFLAGS --target $host_alias" - echo '#![no_std]' > comptest.rs - echo Trying target $host_alias >> config.log - echo "$RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs" >> config.log - $RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs 2>> config.log > /dev/null - - if test $? -eq 0 - then - rustc_host_target=$host_alias - else - echo Using target $host_alias failed >> config.log - fi - fi - - if test x$rustc_host_target \= x - then - RUSTFLAGS="$SAVE_RUSTFLAGS --target $host" - echo '#![no_std]' > comptest.rs - echo Trying target $host >> config.log - echo "$RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs" >> config.log - $RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs 2>> config.log > /dev/null - - if test $? -eq 0 - then - rustc_host_target=$host - else - echo Using target $host failed >> config.log - fi - fi - - if test x$rustc_host_target \= x - then - case $host in - x86_64-*-mingw32 | i*86-*-mingw32 ) - IFS="-" read rustc_host_arch rustc_host_vendor rustc_host_sys <<< "$host" - RUSTFLAGS="$SAVE_RUSTFLAGS --target ${rustc_host_arch}-pc-windows-gnu" - echo '#![no_std]' > comptest.rs - echo "$RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs" >> config.log - $RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs 2>> config.log > /dev/null - - if test $? -eq 0 - then - rustc_host_target=${rustc_host_arch}-pc-windows-gnu - fi - ;; - x86_64-*-*-* | i*86-*-*-* ) - IFS="-" read rustc_host_arch rustc_host_vendor rustc_host_os rustc_host_env <<< "$host" - RUSTFLAGS="$SAVE_RUSTFLAGS --target ${rustc_host_arch}-unknown-${rustc_host_os}-${rustc_host_env}" - echo '#![no_std]' > comptest.rs - echo "$RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs" >> config.log - $RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs 2>> config.log > /dev/null - - if test $? 
-eq 0 - then - rustc_host_target=${rustc_host_arch}-unknown-${rustc_host_os}-${rustc_host_env} - fi - ;; - x86_64-*-* | i*86-*-* ) - IFS="-" read rustc_host_arch rustc_host_vendor rustc_host_sys <<< "$host" - RUSTFLAGS="$SAVE_RUSTFLAGS --target ${rustc_host_arch}-unknown-${rustc_host_sys}" - echo '#![no_std]' > comptest.rs - - echo "$RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs" >> config.log - $RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs 2>> config.log > /dev/null - - if test $? -eq 0 - then - rustc_host_target=${rustc_host_arch}-unknown-${rustc_host_sys} - fi - ;; - *-*-* ) - IFS="-" read rustc_host_arch rustc_host_vendor rustc_host_sys <<< "$host" - RUSTFLAGS="$SAVE_RUSTFLAGS --target ${rustc_host_arch}-${rustc_host_sys}" - echo '#![no_std]' > comptest.rs - - echo "$RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs" >> config.log - $RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs 2>> config.log > /dev/null - - if test $? -eq 0 - then - rustc_host_target=${rustc_host_arch}-${rustc_host_sys} - fi - ;; - esac - fi - if test x$rustc_host_target \= x - then - AC_MSG_RESULT([not found]) - AC_MSG_ERROR([Cannot compile to $host with $RUSTC]) - else - AC_MSG_RESULT([--target $rustc_host_target]) - fi - ;; - esac - rm -f comptest.rs libcomptest.rlib - AC_MSG_CHECKING([whether $RUSTC works]) - echo 'fn main(){}' > comptest.rs - $RUSTC $RUSTFLAGS --crate-type bin --crate-name comptest comptest.rs 2>> config.log > /dev/null - if test $? -ne 0 - then - echo '#![no_std]' > comptest.rs - $RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest --emit link=libcomptest.rlib comptest.rs 2>> config.log > /dev/null - if test $? -ne 0 - then - AC_MSG_RESULT([no]) - AC_MSG_ERROR([Cannot compile a simple program with $RUSTC]) - else - rustc_has_std=no - fi - else - rustc_has_std=yes - fi - - if test x$host_alias \= x - then - ./comptest${EXEEXT} - if test $? -ne 0 - then - AC_MSG_RESULT([no]) - AC_MSG_ERROR([Cannot run executables compiled by $RUSTC]) - fi - fi - - rm -f comptest.rs comptest${EXEEXT} - - AC_MSG_RESULT([yes]) - - AC_SUBST(rustc_has_std) - AC_SUBST(RUSTC) - AC_SUBST(RUSTFLAGS) -]) - -AC_DEFUN([LCRUST_RUSTC_VERSION],[ - AC_REQUIRE([LCRUST_PROG_RUSTC]) - - version_output="`${RUSTC} --version`" - - AC_MSG_CHECKING(the rust version supported by ${RUSTC}) - - read rustc_name rust_version <<< ${version_output} - - AC_MSG_RESULT(${rust_version}) - - case $rust_version in - *.*.*-beta.* ) - rust_channel=beta - IFS="." read rust_major rust_minor _lcrust_rest <<< ${rust_version} - IFS="-" read rust_patch <<< ${_lcrust_rest} - ;; - *.*.*-* ) - IFS="." read rust_major rust_minor _lcrust_rest <<< ${rust_version} - IFS="-" read rust_patch rust_channel <<< ${_lcrust_rest} - ;; - *.*.* ) - rust_channel=stable - IFS="." 
read rust_major rust_minor rust_patch <<< ${rust_version} - ;; - esac - AC_MSG_CHECKING(whether $RUSTC is lccc) - case $rustc_name in - lcrust* | lccc* ) dnl lccc doesn't distinguish between stable and unstable compiler, - rustc_is_lccc=yes - ;; - * ) - rustc_is_lccc=no - ;; - esac - AC_MSG_RESULT([$rustc_is_lccc]) - - AC_SUBST(rustc_name) - AC_SUBST(rust_version) - AC_SUBST(rust_channel) - AC_SUBST(rust_major) - AC_SUBST(rust_minor) - AC_SUBST(rust_patch) -]) - -AC_DEFUN([LCRUST_PROG_RUSTC_FOR_BUILD],[ - AC_REQUIRE([AX_PROG_CC_FOR_BUILD]) - AC_REQUIRE([AC_CANONICAL_BUILD]) - AC_ARG_VAR(RUSTC_FOR_BUILD,[Rust compiler to use on the build system]) - AC_ARG_VAR(RUSTFLAGS_FOR_BUILD,[Flags to pass to the rust compiler for the build system]) - - AC_MSG_NOTICE([checking for the compiler to use for $build...]) - - AC_PATH_PROGS(RUSTC_FOR_BUILD,[rustc lcrustc $build-gccrs gccrs]) - - if test "$RUSTC_FOR_BUILD" \= "" - then - AC_MSG_NOTICE([checking for the compiler to use for $build... not found]) - AC_MSG_ERROR([Failed to find a rust compiler. Install rustc in PATH, or set RUSTC_FOR_BUILD to a suitable compiler]) - fi - - AC_MSG_NOTICE([checking for the compiler to use for $build... $RUSTC_FOR_BUILD]) - - AC_MSG_CHECKING([how to compile for $build with $RUSTC_FOR_BUILD]) - case x$RUSTC_FOR_BUILD in - x${build_alias}-* | x*[\\/]${build_alias}-* ) - rustc_build_target=${build_alias} - AC_MSG_RESULT([Not needed]) - ;; - x${build}-* | x*[\\/]${build}-* ) - rustc_build_target=${build} - AC_MSG_RESULT([Not needed]) - ;; - - x* ) - SAVE_RUSTFLAGS_FOR_BUILD="$RUSTFLAGS_FOR_BUILD" - if test x$build_alias != x - then - RUSTFLAGS_FOR_BUILD="$RUSTFLAGS_FOR_BUILD --target $build_alias" - echo 'fn main(){}' > comptest.rs - echo Trying target $build_alias >> config.log - echo "$RUSTC_FOR_BUILD $RUSTFLAGS_FOR_BUILD --crate-type rlib --crate-name comptest comptest.rs" >> config.log - $RUSTC_FOR_BUILD $RUSTFLAGS_FOR_BUILD --crate-type rlib --crate-name comptest comptest.rs 2>> config.log > /dev/null - - if test $? -eq 0 - then - rustc_host_target=$build_alias - else - echo Using target $build_alias failed >> config.log - fi - fi - - if test x$rustc_build_target \= x - then - RUSTFLAGS_FOR_BUILD="$SAVE_RUSTFLAGS_FOR_BUILD --target $build" - echo 'fn main(){}' > comptest.rs - echo Trying target $build >> config.log - echo "$RUSTC_FOR_BUILD $RUSTFLAGS_FOR_BUILD --crate-type rlib --crate-name comptest comptest.rs" >> config.log - $RUSTC_FOR_BUILD $RUSTFLAGS_FOR_BUILD --crate-type rlib --crate-name comptest comptest.rs 2>> config.log > /dev/null - - if test $? -eq 0 - then - rustc_host_target=$build - else - echo Using target $build failed >> config.log - fi - fi - - if test x$rustc_build_target \= x - then - case $build in - x86_64-*-mingw32 | i*86-*-mingw32 ) - IFS="-" read rustc_host_arch rustc_host_vendor rustc_host_sys <<< "$host" - RUSTFLAGS="$SAVE_RUSTFLAGS --target ${rustc_host_arch}-pc-windows-gnu" - echo '#![no_std]' > comptest.rs - echo "$RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs" >> config.log - $RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs 2>> config.log > /dev/null - - if test $? 
-eq 0 - then - rustc_host_target=${rustc_host_arch}-pc-windows-gnu - fi - ;; - x86_64-*-*-* | i*86-*-*-* ) - IFS="-" read rustc_build_arch rustc_build_vendor rustc_build_os rustc_build_env <<< "$build" - RUSTFLAGS_FOR_BUILD="$SAVE_RUSTFLAGS_FOR_BUILD --target ${rustc_build_arch}-unknown-${rustc_build_os}-${rustc_build_env}" - echo 'fn main(){}' > comptest.rs - echo "$RUSTC_FOR_BUILD $RUSTFLAGS_FOR_BUILD --crate-type rlib --crate-name comptest comptest.rs" >> config.log - $RUSTC_FOR_BUILD $RUSTFLAGS_FOR_BUILD --crate-type rlib --crate-name comptest comptest.rs 2>> config.log > /dev/null - - if test $? -eq 0 - then - rustc_build_target=${rustc_build_arch}-unknown-${rustc_build_os}-${rustc_build_env} - fi - ;; - x86_64-*-* | i*86-*-* ) - IFS="-" read rustc_build_arch rustc_build_vendor rustc_build_sys <<< "$build" - RUSTFLAGS_FOR_BUILD="$SAVE_RUSTFLAGS_FOR_BUILD --target ${rustc_build_arch}-unknown-${rustc_build_sys}" - echo 'fn main(){}' > comptest.rs - echo "$RUSTC_FOR_BUILD $RUSTFLAGS_FOR_BUILD --crate-type rlib --crate-name comptest comptest.rs" >> config.log - $RUSTC_FOR_BUILD $RUSTFLAGS_FOR_BUILD --crate-type rlib --crate-name comptest comptest.rs 2>> config.log > /dev/null - - if test $? -eq 0 - then - rustc_host_target=${rustc_build_arch}-unknown-${rustc_build_sys} - fi - ;; - - *-*-* ) - IFS="-" read rustc_host_arch rustc_host_vendor rustc_host_sys <<< "$host" - RUSTFLAGS="$SAVE_RUSTFLAGS --target ${rustc_host_arch}-${rustc_host_sys}" - echo 'fn main(){}' > comptest.rs - - echo "$RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs" >> config.log - $RUSTC $RUSTFLAGS --crate-type rlib --crate-name comptest comptest.rs 2>> config.log > /dev/null - - if test $? -eq 0 - then - rustc_host_target=${rustc_host_arch}-${rustc_host_sys} - fi - ;; - esac - fi - if test x$rustc_build_target \= x - then - AC_MSG_RESULT([not found]) - AC_MSG_ERROR([Cannot compile to $build with $RUSTC]) - else - AC_MSG_RESULT([--target $rustc_build_target]) - fi - ;; - esac - - rm -f comptest.rs libcomptest.rlib - AC_MSG_CHECKING([whether $RUSTC_FOR_BUILD works]) - echo 'fn main(){}' > test.rs - $RUSTC_FOR_BUILD $RUSTFLAGS_FOR_BUILD --crate-type bin --crate-name test test.rs 2>> config.log > /dev/null - if test $? -ne 0 - then - AC_MSG_RESULT([no]) - AC_MSG_ERROR([Cannot compile a simple program with $RUSTC_FOR_BUILD]) - fi - - ./test${EXEEXT_FOR_BUILD} - if test $? -ne 0 - then - AC_MSG_RESULT([no]) - AC_MSG_ERROR([Cannot run executables compiled by $RUSTC_FOR_BUILD]) - fi - - rm -rf test.rs test${EXEEXT_FOR_BUILD} - - AC_MSG_RESULT([yes]) - - AC_SUBST(RUSTC_FOR_BUILD) - AC_SUBST(RUSTFLAGS_FOR_BUILD) -]) - -AC_DEFUN([LCRUST_RUSTC_VERSION_FOR_BUILD],[ - AC_REQUIRE([LCRUST_PROG_RUSTC_FOR_BUILD]) - - version_output="`${RUSTC_FOR_BUILD} --version`" - - AC_MSG_CHECKING(the rust version supported by ${RUSTC_FOR_BUILD}) - - read build_rustc_name build_rust_version <<< ${version_output} - - AC_MSG_RESULT(${build_rust_version}) - - case $build_rust_version in - *.*.*-beta.* ) - rust_channel=beta - IFS="." read build_rust_major build_rust_minor _lcrust_rest <<< ${build_rust_version} - IFS="-" read build_rust_patch <<< ${_lcrust_rest} - ;; - *.*.*-* ) - IFS="." read build_rust_major build_rust_minor _lcrust_rest <<< ${build_rust_version} - IFS="-" read build_rust_patch build_rust_channel <<< ${_lcrust_rest} - ;; - *.*.* ) - rust_channel=stable - IFS="." 
read build_rust_major build_rust_minor build_rust_patch <<< ${build_rust_version} - ;; - esac - AC_MSG_CHECKING(whether $RUSTC_FOR_BUILD is lccc) - case $build_rustc_name in - lcrust* | lccc* ) dnl lccc doesn't distinguish between stable and unstable compiler, - build_rustc_is_lccc=yes - ;; - * ) - build_rustc_is_lccc=no - ;; - esac - AC_MSG_RESULT([$build_rustc_is_lccc]) - - AC_SUBST(build_rustc_name) - AC_SUBST(build_rust_version) - AC_SUBST(build_rust_channel) - AC_SUBST(build_rust_major) - AC_SUBST(build_rust_minor) - AC_SUBST(build_rust_patch) -]) - - -AC_DEFUN([LCRUST_TRY_COMPILE],[ - echo ["$1"] >> test.rs - ${RUSTC} ${RUSTFLAGS} --crate-type rlib --crate-name test --emit link=libtest.rlib test.rs - - if test $? -eq 0 - then - rm -f test.rs libtest.rlib - $2 - else - rm -f test.rs libtest.rlib - $3 - fi -]) - -AC_DEFUN([LCRUST_TRY_COMPILE_FOR_BUILD],[ - echo ["$1"] >> test.rs - ${RUSTC_FOR_BUILD} ${RUSTFLAGS_FOR_BUILD} --crate-type rlib --crate-name test --emit link=libtest.rlib test.rs - - if test $? -eq 0 - then - rm -f test.rs libtest.rlib - try_compile_result=yes - $2 - else - rm -f test.rs libtest.rlib - try_compile_result=no - $3 - fi -]) - -AC_DEFUN([LCRUST_PROG_CARGO],[ - AC_REQUIRE([LCRUST_PROG_RUSTC]) - AC_ARG_VAR([CARGO]) - AC_ARG_VAR([CARGOFLAGS]) - - AC_PATH_PROG([CARGO],[cargo]) - - - CARGOFLAGS="$CARGOFLAGS --target $rustc_host_target" - - AC_MSG_CHECKING([whether $CARGO works]) - mkdir -m700 tmp - cat > tmp/Cargo.toml << "EOF" -[package] -name = "cargotest" -version = "0.1.0" -edition = "2018" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] - -EOF - mkdir tmp/src - echo '#![no_std]' > tmp/src/lib.rs - CARGO_RUSTFLAGS="`sed -e 's/--target [[[:graph:]]]*//'<<<"$RUSTFLAGS"`" - AC_MSG_CHECKING([whether $CARGO works]) - echo "RUSTC=\"$RUSTC\" RUSTFLAGS=\"$CARGO_RUSTFLAGS\" $CARGO build $CARGOFLAGS --lib --manifest-path tmp/Cargo.toml --target-dir tmp/target/" >> config.log - RUSTC="$RUSTC" RUSTFLAGS="$CARGO_RUSTFLAGS" $CARGO build $CARGOFLAGS --lib --manifest-path tmp/Cargo.toml --target-dir tmp/target/ 2>> config.log > /dev/null - if test $? -ne 0 - then - echo "RUSTC=\"$RUSTC\" RUSTFLAGS=\"$CARGO_RUSTFLAGS\" $CARGO gccrs $CARGOFLAGS --lib --manifest-path tmp/Cargo.toml --target-dir tmp/target/" >> config.log - RUSTC="$RUSTC" RUSTFLAGS="$CARGO_RUSTFLAGS" $CARGO gccrs $CARGOFLAGS --lib --manifest-path tmp/Cargo.toml --target-dir tmp/target/ 2>> config.log > /dev/null - if test $? -ne 0 - then - AC_MSG_RESULT([no]) - rm -rf tmp/ - AC_MSG_ERROR([Cannot build a simple workspace with $CARGO]) - fi - cargo_build_command=gccrs - else - cargo_build_command=build - fi - rm -rf tmp/ - AC_MSG_RESULT([yes]) - - AC_SUBST([CARGO]) - AC_SUBST([CARGOFLAGS]) - AC_SUBST([CARGO_RUSTFLAGS]) - AC_SUBST([cargo_build_command]) -]) - - -AC_DEFUN([LCRUST_PROG_RUSTDOC],[ - AC_REQUIRE([LCRUST_PROG_RUSTC]) - AC_ARG_VAR([RUSTDOC]) - AC_ARG_VAR([RUSTDOCFLAGS]) - - AC_PATH_PROG([RUSTDOC],[rustdoc]) - - RUSTDOCFLAGS="$RUSTDOCFLAGS --target $rustc_host_target" - - AC_MSG_CHECKING([whether $RUSTDOC works]) - - cat > comptest.rs << EOF -#![no_std] -#![doc = r"# -Lorem ipsum dolor sit amet, consectetur adipiscing elit. -Vivamus quis porttitor tortor, gravida pharetra mi. -Cras eu est nec massa faucibus efficitur. -Cras congue ultrices efficitur. -Cras non auctor augue. -Mauris faucibus purus ac dui dictum fermentum. -Suspendisse dapibus elementum justo non consequat. -Ut sit amet massa vel justo auctor euismod non rutrum justo. 
-Fusce sed porttitor lectus. Sed semper enim eu nunc cursus elementum. -#"] -EOF - - echo "$RUSTDOC $RUSTDOCFLAGS --crate-type rlib --crate-name comptest --output tmp/ comptest.rs" >> config.log - $RUSTDOC $RUSTDOCFLAGS --crate-type rlib --crate-name comptest --output tmp/ comptest.rs 2> config.log > /dev/null - - if test $? -ne 0 - then - rm -rf tmp/ - AC_MSG_RESULT([no]) - AC_MSG_ERROR([$RUSTDOC cannot build documentation for a simple program]) - fi - - if test ! -f tmp/comptest/index.html - then - rm -rf tmp/ - AC_MSG_RESULT([no]) - AC_MSG_ERROR([$RUSTDOC did not produce output in the expected format]) - fi - - if test "`grep 'Lorem ipsum dolor sit amet' tmp/comptest/index.html`" \= "" - then - rm -rf tmp/ - AC_MSG_RESULT([no]) - AC_MSG_ERROR([$RUSTDOC did not produce the expected output]) - fi - rm -rf tmp/ - AC_MSG_RESULT([yes]) -]) - -# Separate macro because `--test-builder` is unstable -AC_DEFUN([LCRUST_RUSTDOC_USE_RUSTC],[ - AC_REQUIRE([LCRUST_PROG_RUSTDOC]) - AC_REQUIRE([LCRUST_PROG_RUSTC]) - - AC_MSG_CHECKING([how to pass --test-builder to $RUSTDOC]) - echo "$RUSTDOC --test-builder \"$RUSTC\" $RUSTDOCFLAGS --crate-type rlib --crate-name comptest --output tmp/ comptest.rs" >> config.log - $RUSTDOC --test-builder "$RUSTC" $RUSTDOCFLAGS --crate-type rlib --crate-name comptest --output tmp/ comptest.rs 2> config.log > /dev/null - - if test $? -eq 0 - then - rustdoc_use_rustc=yes - RUSTDOCFLAGS="--test-builder \"$RUSTC\" $RUSTDOCFLAGS" - AC_MSG_RESULT([--test-builder \"$RUSTC\"]) - else - echo "$RUSTDOC -Z unstable-options --test-builder \"$RUSTC\" $RUSTDOCFLAGS --crate-type rlib --crate-name comptest --output tmp/ comptest.rs" >> config.log - $RUSTDOC -Z unstable-options --test-builder "$RUSTC" $RUSTDOCFLAGS --crate-type rlib --crate-name comptest --output tmp/ comptest.rs 2> config.log > /dev/null - if test $? -eq 0 - rustdoc_use_rustc=unstable - RUSTDOCFLAGS="-Z unstable-options --test-builder \"$RUSTC\" $RUSTDOCFLAGS" - AC_MSG_RESULT([-Z unstable-options --test-builder \"$RUSTC\"]) - else - rustdoc_use_rustc=no - AC_MSG_RESULT([not found]) - fi - fi -]) diff --git a/vendor/fake-enum/run-tests.sh b/vendor/fake-enum/run-tests.sh deleted file mode 100755 index 16d10aff..00000000 --- a/vendor/fake-enum/run-tests.sh +++ /dev/null @@ -1,4 +0,0 @@ -for s in $* -do - $s -done \ No newline at end of file diff --git a/vendor/fake-enum/src/lib.rs b/vendor/fake-enum/src/lib.rs deleted file mode 100644 index 18ac5032..00000000 --- a/vendor/fake-enum/src/lib.rs +++ /dev/null @@ -1,182 +0,0 @@ -#![cfg_attr(not(test), no_std)] - -//! -//! Support for fake enum types, that act like rust enums, but all values of the underlying type -//! are accepted as values. -//! See the macro [`fake_enum`] for details. - -/// -/// Constructs a "Fake Enum", that acts like a rust enum with unit variants, -/// but can accept invalid (undefined) variants without undefined behaviour. -/// The enum derives Copy, Clone, Eq, and PartialEq. Additionally, it implements Debug, where all valid variants are printed as defined, -/// and invalid variants are formatted as name(value). -/// Any other derives can be added following the repr. -/// Two forms of this macro is provided. `enum name` declares an enum named "name". All the variants are declared with the same visibility as the type in the enclosing module. -/// `enum struct name` declares an scoped enum named "name". The variants are declared `pub` within "name". 
-/// -/// In Both cases, it is valid to transmute the declared type to and from the repr type (note that no from implementation is provided) -/// -/// ```rust -/// use fake_enum::fake_enum; -/// fake_enum!{ -/// #[repr(u8)] pub enum Foo{ -/// Bar = 0, -/// Baz = 1, -/// } -/// }; -/// let x = Bar; -/// assert_eq!(format!("{:?}",x),"Bar"); -/// assert_eq!(unsafe{std::mem::transmute::<_,Foo>(1u8)},Baz) -/// ``` -/// -/// The underlying type may be given - -#[macro_export] -macro_rules! fake_enum{ - {#[repr($tvis:vis $t:ty)] $(#[$meta:meta])* $vis:vis enum $name:ident { - $(#![$meta1:meta])* - $($(#[$r:meta])* $item:ident = $expr:literal),*$(,)? - }} => { - - #[derive(Copy,Clone,Eq,PartialEq)] - #[repr(transparent)] - $(#[$meta])* - $(#[$meta1])* - $vis struct $name($tvis $t); - - $(#[allow(non_upper_case_globals)] #[allow(dead_code)] $(#[$r])* $vis const $item: $name = $name($expr as $t);)* - - impl ::core::fmt::Debug for $name{ - #[allow(unreachable_patterns)] - fn fmt(&self,f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result{ - match self{ - $(Self($expr) => f.write_str(::core::stringify!($item)),)* - e => f.write_fmt(::core::format_args!("{}({})",::core::stringify!($name),e.0)) - } - } - } - }; - {#[repr($tvis:vis $t:ty)] $(#[$meta:meta])* $vis:vis enum struct $name:ident { - $(#![$meta1:meta])* - $($(#[$r:meta])* $item:ident = $expr:literal),*$(,)? - }} => { - #[derive(Copy,Clone,Eq,PartialEq)] - #[repr(transparent)] - $(#[$meta])* - $(#[$meta1])* - $vis struct $name($tvis $t); - impl $name{ - $(#[allow(non_upper_case_globals)] #[allow(dead_code)] $(#[$r])* pub const $item: $name = $name($expr as $t);)* - } - impl ::core::fmt::Debug for $name{ - #[allow(unreachable_patterns)] - fn fmt(&self,f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result{ - match self{ - $(Self($expr) => f.write_str(::core::stringify!($item)),)* - e => f.write_fmt(::core::format_args!("{}({})",::core::stringify!($name),e.0)) - } - } - } - } -} - -#[cfg(test)] -mod test { - fake_enum! { - #[repr(u16)] pub enum ElfType{ - //! The type of an elf file - - /// No Elf Type/Invalid Elf File - ET_NONE = 0, - /// Relocatable file - ET_REL = 1, - /// Executable file - ET_EXEC = 2, - /// Dynamic Library/Shared Object - ET_DYN = 3, - /// Core Dump - ET_CORE = 4 - } - } - - #[test] - pub fn fake_enum_elf_type_name() { - assert_eq!(format!("{:?}", ET_NONE), "ET_NONE"); - assert_eq!(format!("{:?}", ET_REL), "ET_REL"); - assert_eq!(format!("{:?}", ET_EXEC), "ET_EXEC"); - assert_eq!(format!("{:?}", ET_DYN), "ET_DYN"); - assert_eq!(format!("{:?}", ET_CORE), "ET_CORE"); - } - - #[test] - pub fn fake_enum_partial_eq_impl() { - assert_eq!(ET_NONE, ET_NONE); - assert_ne!(ET_NONE, ET_REL); - assert_ne!(ET_NONE, ET_EXEC); - assert_ne!(ET_NONE, ET_DYN); - assert_ne!(ET_NONE, ET_CORE); - assert_eq!(ET_REL, ET_REL); - assert_ne!(ET_REL, ET_EXEC); - assert_ne!(ET_REL, ET_DYN); - assert_ne!(ET_REL, ET_CORE); - assert_eq!(ET_EXEC, ET_EXEC); - assert_ne!(ET_EXEC, ET_DYN); - assert_ne!(ET_EXEC, ET_CORE); - assert_eq!(ET_DYN, ET_DYN); - assert_ne!(ET_DYN, ET_CORE); - assert_eq!(ET_CORE, ET_CORE); - } - - #[test] - pub fn fake_enum_transmute_test() { - assert_eq!(unsafe { std::mem::transmute::(0) }, ET_NONE); - assert_eq!(unsafe { std::mem::transmute::(1) }, ET_REL); - assert_eq!(unsafe { std::mem::transmute::(2) }, ET_EXEC); - assert_eq!(unsafe { std::mem::transmute::(3) }, ET_DYN); - assert_eq!(unsafe { std::mem::transmute::(4) }, ET_CORE); - } - - fake_enum! 
{ - #[repr(u8)] - #[derive(Hash,Default)] - pub enum struct NbtTagType{ - //! The type of an Nbt Tag - - /// An End Tag - End = 0, - /// A byte - Byte = 1, - /// A Short - Short = 2, - /// An Int - Int = 3, - /// A Long - Long = 4, - /// A Float - Float = 5, - Double = 6, - ByteArray = 7, - String = 8, - List = 9, - Compound = 10, - IntArray = 11, - LongArray = 12, - FloatArray = 13, - DoubleArray = 14, - Uuid = 15 - } - } - - fake_enum! { - #[repr(pub u8)] - pub enum struct Test{ - Foo = 0 - } - } - - #[test] - fn pub_repr_test() { - let foo = Test(0); - assert_eq!(foo, Test::Foo); - } -} diff --git a/vendor/install-dirs/.cargo-checksum.json b/vendor/install-dirs/.cargo-checksum.json deleted file mode 100644 index 64050e6c..00000000 --- a/vendor/install-dirs/.cargo-checksum.json +++ /dev/null @@ -1 +0,0 @@ -{"files":{"Cargo.toml":"8ac7cacaf3d7694b4a13007540feb371f08b190a498e559656623e06695b30cc","README.md":"26343ad61987a46f73f3d04661ba54f22e058b0eb64a25ac4b16dcf1c474272c","src/dirs.rs":"d0a2297e819302ece4be5392cd5942b73b42e9bf4adbdc6645c14d121a01e3f1","src/lib.rs":"c56af2f365dd169526767020eaf925873ac8c0237b73364c7076bd80e9ca406f"},"package":"41efed366b6d617c339c0920deaf114b9df08febf66df9810fc97f46e1bb36c4"} \ No newline at end of file diff --git a/vendor/install-dirs/Cargo.toml b/vendor/install-dirs/Cargo.toml deleted file mode 100644 index f96f08dd..00000000 --- a/vendor/install-dirs/Cargo.toml +++ /dev/null @@ -1,24 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies -# -# If you believe there's an error in this file please file an -# issue against the rust-lang/cargo repository. If you're -# editing this file be aware that the upstream Cargo.toml -# will likely look very different (and much more reasonable) - -[package] -edition = "2018" -name = "install-dirs" -version = "0.2.1" -authors = ["Connor Horman "] -description = "Library for handling gnu install directories in rust\n" -license = "MIT OR Apache-2.0" -repository = "https://github.com/chorman0773/Install-Dirs.git" -[dependencies.serde] -version = "1.0.123" -features = ["derive"] -optional = true diff --git a/vendor/install-dirs/README.md b/vendor/install-dirs/README.md deleted file mode 100644 index a89f6f04..00000000 --- a/vendor/install-dirs/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Rust Install Dirs - -A library for handling install directories, similar to gnu autoconf and cmake. 
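Before the crate's sources are removed below, a minimal Rust sketch of its intended call pattern, reconstructed from those sources rather than from any published documentation (the printenv child process mirrors the crate's own as_env doc example; error handling is simplified):

    use install_dirs::dirs::InstallDirs;

    fn main() {
        // Start from the built-in defaults (prefix /usr/local, bindir "bin", ...).
        let mut dirs = InstallDirs::defaults();

        // Override individual directories from environment variables such as
        // `prefix` or `bindir`, following GNU configure conventions.
        dirs.read_env();

        // Resolve the relative entries against prefix/exec_prefix; this fails
        // if `prefix` is not an absolute path.
        let dirs = dirs.canonicalize().expect("prefix must be absolute");

        // Export every directory to a child process, e.g. a build script.
        let status = std::process::Command::new("printenv")
            .env_clear()
            .envs(dirs.as_env())
            .status()
            .expect("failed to run printenv");
        assert!(status.success());
    }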
diff --git a/vendor/install-dirs/src/dirs.rs b/vendor/install-dirs/src/dirs.rs deleted file mode 100644 index ad44fcd1..00000000 --- a/vendor/install-dirs/src/dirs.rs +++ /dev/null @@ -1,632 +0,0 @@ -use std::{ - collections::HashMap, - error::Error, - ffi::OsStr, - fmt::Display, - path::{Path, PathBuf}, -}; - -#[cfg(feature = "serde")] -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug)] -#[non_exhaustive] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "serde", serde(default = "InstallDirs::defaults"))] -pub struct InstallDirs { - pub prefix: PathBuf, - pub exec_prefix: PathBuf, - pub bindir: PathBuf, - pub sbindir: PathBuf, - pub libdir: PathBuf, - pub libexecdir: PathBuf, - pub includedir: PathBuf, - pub datarootdir: PathBuf, - pub datadir: PathBuf, - pub mandir: PathBuf, - pub docdir: PathBuf, - pub infodir: PathBuf, - pub localedir: PathBuf, - pub localstatedir: PathBuf, - pub runstatedir: PathBuf, - pub sharedstatedir: PathBuf, - pub sysconfdir: PathBuf, -} - -#[derive(Debug)] -pub struct CanonicalizationError { - prefix: PathBuf, -} - -impl Display for CanonicalizationError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.write_str("Failed to canonicalize Install Dirs ")?; - f.write_fmt(format_args!( - "(prefix {} is not an absolute path)", - self.prefix.to_str().unwrap_or("()") - )) - } -} - -impl Error for CanonicalizationError {} - -impl InstallDirs { - /// - /// - pub fn defaults() -> Self { - Self { - prefix: if cfg!(windows) { - "C:\\Program Files\\" - } else { - "/usr/local" - } - .into(), - exec_prefix: "".into(), - bindir: "bin".into(), - sbindir: "sbin".into(), - libdir: "lib".into(), - libexecdir: "libexec".into(), - includedir: "include".into(), - datarootdir: "share".into(), - datadir: "".into(), - mandir: "man".into(), - docdir: "doc".into(), - infodir: "info".into(), - localedir: "locale".into(), - localstatedir: "var".into(), - runstatedir: "run".into(), - sharedstatedir: "com".into(), - sysconfdir: "var".into(), - } - } - - pub fn with_project_name+?Sized>(name: &S) -> Self { - Self { - prefix: if cfg!(windows) { - let mut buf = PathBuf::new(); - buf.push("C:\\Program Files"); - buf.push(name.as_ref()); - buf - } else { - "/usr/local".into() - }, - exec_prefix: "".into(), - bindir: "bin".into(), - sbindir: "sbin".into(), - libdir: "lib".into(), - libexecdir: "libexec".into(), - includedir: "include".into(), - datarootdir: "share".into(), - datadir: "".into(), - mandir: "man".into(), - docdir: { - let mut path = PathBuf::new(); - path.push("doc"); - path.push(name.as_ref()); - path - }, - infodir: "info".into(), - localedir: "locale".into(), - localstatedir: "var".into(), - runstatedir: "run".into(), - sharedstatedir: "com".into(), - sysconfdir: "var".into(), - } - } - - pub fn with_exec_target>(target: &S) -> Self { - Self { - prefix: if cfg!(windows) { - "C:\\Program Files\\" - } else { - "/usr/local" - } - .into(), - exec_prefix: target.as_ref().into(), - bindir: "bin".into(), - sbindir: "sbin".into(), - libdir: "lib".into(), - libexecdir: "libexec".into(), - includedir: "include".into(), - datarootdir: "share".into(), - datadir: "".into(), - mandir: "man".into(), - docdir: "doc".into(), - infodir: "info".into(), - localedir: "locale".into(), - localstatedir: "var".into(), - runstatedir: "run".into(), - sharedstatedir: "com".into(), - sysconfdir: "var".into(), - } - } - - pub fn with_project_name_and_target, T: AsRef>( - name: &S, - target: &T, - ) -> Self { - Self { - prefix: if 
cfg!(windows) { - let mut buf = PathBuf::new(); - buf.push("C:\\Program Files"); - buf.push(name.as_ref()); - buf - } else { - "/usr/local".into() - }, - exec_prefix: target.as_ref().into(), - bindir: "bin".into(), - sbindir: "sbin".into(), - libdir: "lib".into(), - libexecdir: "libexec".into(), - includedir: "include".into(), - datarootdir: "share".into(), - datadir: "".into(), - mandir: "man".into(), - docdir: { - let mut path = PathBuf::new(); - path.push("doc"); - path.push(name.as_ref()); - path - }, - infodir: "info".into(), - localedir: "locale".into(), - localstatedir: "var".into(), - runstatedir: "run".into(), - sharedstatedir: "com".into(), - sysconfdir: "var".into(), - } - } - - pub fn canonicalize(mut self) -> Result { - if !self.prefix.has_root() { - Err(CanonicalizationError { - prefix: self.prefix, - }) - } else { - if !self.exec_prefix.has_root() { - self.exec_prefix = { - let mut path = PathBuf::new(); - path.push(self.prefix.clone()); - path.push(self.exec_prefix); - path - } - } - - let exec_prefix = if (&*self.exec_prefix) == Path::new("/") { - let mut exec_prefix = PathBuf::new(); - exec_prefix.push("/usr"); - exec_prefix - } else { - self.exec_prefix.clone() - }; - let data_prefix = if (&*self.prefix) == Path::new("/") { - let mut exec_prefix = PathBuf::new(); - exec_prefix.push("/usr"); - exec_prefix - } else { - self.prefix.clone() - }; - let state_prefix = if self.prefix.starts_with("/usr") { - let mut prefix = PathBuf::new(); - prefix.push("/"); - prefix - } else { - self.prefix.clone() - }; - if !self.bindir.has_root() { - self.bindir = { - let mut path = exec_prefix.clone(); - path.push(self.bindir); - path - }; - } - - if !self.sbindir.has_root() { - self.sbindir = { - let mut path = exec_prefix.clone(); - path.push(self.sbindir); - path - }; - } - - if !self.libdir.has_root() { - self.libdir = { - let mut path = exec_prefix.clone(); - path.push(self.libdir); - path - }; - } - - if !self.libexecdir.has_root() { - self.libexecdir = { - let mut path = exec_prefix.clone(); - path.push(self.libexecdir); - path - }; - } - - if !self.includedir.has_root() { - self.includedir = { - let mut path = exec_prefix.clone(); - path.push(self.includedir); - path - }; - } - - if !self.datarootdir.has_root() { - self.datarootdir = { - let mut path = data_prefix.clone(); - path.push(self.datarootdir); - path - }; - } - - if !self.datadir.has_root() { - self.datadir = { - let mut path = self.datarootdir.clone(); - path.push(self.datadir); - path - }; - } - - if !self.mandir.has_root() { - self.mandir = { - let mut path = self.datarootdir.clone(); - path.push(self.mandir); - path - }; - } - - if !self.infodir.has_root() { - self.infodir = { - let mut path = self.datarootdir.clone(); - path.push(self.infodir); - path - }; - } - if !self.docdir.has_root() { - self.docdir = { - let mut path = self.datarootdir.clone(); - path.push(self.docdir); - path - }; - } - - if !self.localedir.has_root() { - self.localedir = { - let mut path = self.datarootdir.clone(); - path.push(self.localedir); - path - }; - } - - if !self.sharedstatedir.has_root() { - self.sharedstatedir = { - let mut path = data_prefix.clone(); - path.push(self.sharedstatedir); - path - }; - } - - if !self.sysconfdir.has_root() { - self.sysconfdir = if state_prefix.starts_with("/opt") { - let mut path = PathBuf::new(); - path.push("/"); - path.push(self.sysconfdir); - path.push(state_prefix.clone()); - path - } else { - let mut path = state_prefix.clone(); - path.push(self.sysconfdir); - path - } - } - - if 
!self.localstatedir.has_root() { - self.localstatedir = if state_prefix.starts_with("/opt") { - let mut path = PathBuf::new(); - path.push("/"); - path.push(self.localstatedir); - path.push(state_prefix.clone()); - path - } else { - let mut path = state_prefix.clone(); - path.push(self.localstatedir); - path - } - } - - if !self.sharedstatedir.has_root() { - self.sharedstatedir = { - let mut path = self.localstatedir.clone(); - path.push(self.sharedstatedir); - path - }; - } - - Ok(self) - } - } - - pub fn canonicalize_dir+?Sized,T: Into>(base: &S,dir: T)->PathBuf{ - let mut dir = dir.into(); - if !dir.has_root(){ - dir = { - let mut path = PathBuf::from(base); - path.push(dir); - path - } - } - dir - } - - pub fn read_env(&mut self) { - if let Ok(dir) = std::env::var("prefix") { - self.prefix = dir.into() - } - - if let Ok(dir) = std::env::var("exec_prefix") { - self.exec_prefix = dir.into() - } - - if let Ok(dir) = std::env::var("bindir") { - self.bindir = dir.into() - } - - if let Ok(dir) = std::env::var("libdir") { - self.libdir = dir.into() - } - - if let Ok(dir) = std::env::var("sbindir") { - self.sbindir = dir.into() - } - if let Ok(dir) = std::env::var("libexecdir") { - self.libexecdir = dir.into() - } - if let Ok(dir) = std::env::var("includedir") { - self.includedir = dir.into() - } - - if let Ok(dir) = std::env::var("datarootdir") { - self.datarootdir = dir.into() - } - - if let Ok(dir) = std::env::var("datadir") { - self.datadir = dir.into() - } - - if let Ok(dir) = std::env::var("mandir") { - self.mandir = dir.into() - } - - if let Ok(dir) = std::env::var("docdir") { - self.docdir = dir.into() - } - - if let Ok(dir) = std::env::var("infodir") { - self.infodir = dir.into() - } - - if let Ok(dir) = std::env::var("localedir") { - self.localedir = dir.into() - } - - if let Ok(dir) = std::env::var("sharedstatedir") { - self.sharedstatedir = dir.into() - } - - if let Ok(dir) = std::env::var("localstatedir") { - self.localstatedir = dir.into() - } - - if let Ok(dir) = std::env::var("runstatedir") { - self.runstatedir = dir.into() - } - - if let Ok(dir) = std::env::var("sysconfdir") { - self.sysconfdir = dir.into() - } - } - - /// - /// Obtains an iterator suitable for passing to [`std::process::Command::envs`]. - /// The resulting iterator contains each field and the value of that field. 
- /// The order which the Items are encounted is unspecified - /// - /// ## Example - /// - /// ``` - /// use install_dirs::dirs::InstallDirs; - /// use std::process::{Command, Stdio}; - /// let dirs = InstallDirs::defaults(); - /// let cmd = Command::new("printenv") - /// .stdin(Stdio::null()) - /// .stdout(Stdio::inherit()) - /// .stderr(Stdio::null()) - /// .env_clear() - /// .envs(dirs.as_env()) - /// .spawn() - /// .expect("printenv failed to start"); - /// ``` - pub fn as_env(&self) -> impl IntoIterator { - let mut map = HashMap::new(); - map.insert("prefix", &*self.prefix); - map.insert("exec_prefix", &*self.exec_prefix); - map.insert("bindir", &*self.bindir); - map.insert("sbindir", &*self.sbindir); - map.insert("libdir", &*self.libdir); - map.insert("libexecdir", &*self.libexecdir); - map.insert("datarootdir", &*self.datarootdir); - map.insert("datadir", &*self.datadir); - map.insert("docdir", &*self.docdir); - map.insert("mandir", &*self.mandir); - map.insert("infodir", &*self.infodir); - map.insert("localedir", &*self.localedir); - map.insert("sharedstatedir", &*self.sharedstatedir); - map.insert("localstatedir", &*self.localstatedir); - map.insert("runstatedir", &*self.runstatedir); - map.insert("sysconfdir", &*self.sysconfdir); - map - } -} - -/// -/// Parses the compile-time environment into an instance of InstallDirs. -/// Note: This returns an owning structure and is not const. -/// Likely you will want to either store this, or it's canonical representation, -/// Inside a lazy_static!. -/// -/// This uses the default installation configuration, see [`InstallDirs::defaults()`] -/// If a package name is specified as an expression, it uses the defaults for that package name, [`InstallDirs::with_project_name()`]. -#[macro_export] -macro_rules! 
parse_env { - () => {{ - let mut dirs = InstallDirs::defaults(); - if let Some(dir) = std::option_env!("prefix") { - dirs.prefix = dir.into(); - } - - if let Some(dir) = std::option_env!("exec_prefix") { - dirs.exec_prefix = dir.into(); - } - - if let Some(dir) = std::option_env!("bindir") { - dirs.bindir = dir.into(); - } - - if let Some(dir) = std::option_env!("sbindir") { - dirs.sbindir = dir.into(); - } - if let Some(dir) = std::option_env!("libdir") { - dirs.libdir = dir.into(); - } - - if let Some(dir) = std::option_env!("libexecdir") { - dirs.libexecdir = dir.into(); - } - - if let Some(dir) = std::option_env!("includedir") { - dirs.includedir = dir.into(); - } - - if let Some(dir) = std::option_env!("datarootdir") { - dirs.datarootdir = dir.into(); - } - - if let Some(dir) = std::option_env!("datadir") { - dirs.datadir = dir.into(); - } - - if let Some(dir) = std::option_env!("mandir") { - dirs.mandir = dir.into(); - } - - if let Some(dir) = std::option_env!("docdir") { - dirs.docdir = dir.into(); - } - - if let Some(dir) = std::option_env!("infodir") { - dirs.infodir = dir.into(); - } - - if let Some(dir) = std::option_env!("localedir") { - dirs.localedir = dir.into(); - } - - if let Some(dir) = std::option_env!("sharedstatedir") { - dirs.sharedstatedir = dir.into(); - } - - if let Some(dir) = std::option_env!("localstatedir") { - dirs.localstatedir = dir.into(); - } - - if let Some(dir) = std::option_env!("runstatedir") { - dirs.runstatedir = dir.into(); - } - - if let Some(dir) = std::option_env!("sysconfdir") { - dirs.sysconfdir = dir.into(); - } - - dirs - }}; - ($project:expr) => {{ - let mut dirs = InstallDirs::with_project_name($project); - if let Some(dir) = std::option_env!("prefix") { - dirs.prefix = dir.into(); - } - - if let Some(dir) = std::option_env!("exec_prefix") { - dirs.exec_prefix = dir.into(); - } - - if let Some(dir) = std::option_env!("bindir") { - dirs.bindir = dir.into(); - } - - if let Some(dir) = std::option_env!("sbindir") { - dirs.sbindir = dir.into(); - } - if let Some(dir) = std::option_env!("libdir") { - dirs.libdir = dir.into(); - } - - if let Some(dir) = std::option_env!("libexecdir") { - dirs.libexecdir = dir.into(); - } - - if let Some(dir) = std::option_env!("includedir") { - dirs.includedir = dir.into(); - } - - if let Some(dir) = std::option_env!("datarootdir") { - dirs.datarootdir = dir.into(); - } - - if let Some(dir) = std::option_env!("datadir") { - dirs.datadir = dir.into(); - } - - if let Some(dir) = std::option_env!("mandir") { - dirs.mandir = dir.into(); - } - - if let Some(dir) = std::option_env!("docdir") { - dirs.docdir = dir.into(); - } - - if let Some(dir) = std::option_env!("infodir") { - dirs.infodir = dir.into(); - } - - if let Some(dir) = std::option_env!("localedir") { - dirs.localedir = dir.into(); - } - - if let Some(dir) = std::option_env!("sharedstatedir") { - dirs.sharedstatedir = dir.into(); - } - - if let Some(dir) = std::option_env!("localstatedir") { - dirs.localstatedir = dir.into(); - } - - if let Some(dir) = std::option_env!("runstatedir") { - dirs.runstatedir = dir.into(); - } - - if let Some(dir) = std::option_env!("sysconfdir") { - dirs.sysconfdir = dir.into(); - } - - dirs - }}; -} - -pub fn from_env() -> InstallDirs { - let mut dirs = InstallDirs::defaults(); - dirs.read_env(); - dirs -} diff --git a/vendor/install-dirs/src/lib.rs b/vendor/install-dirs/src/lib.rs deleted file mode 100644 index 16ec3672..00000000 --- a/vendor/install-dirs/src/lib.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod dirs; diff --git 
a/vendor/lazy_static/.cargo-checksum.json b/vendor/lazy_static/.cargo-checksum.json deleted file mode 100644 index fa241ed8..00000000 --- a/vendor/lazy_static/.cargo-checksum.json +++ /dev/null @@ -1 +0,0 @@ -{"files":{"Cargo.toml":"05e37a4e63dc4a495998bb5133252a51d671c4e99061a6342089ed6eab43978a","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0621878e61f0d0fda054bcbe02df75192c28bde1ecc8289cbd86aeba2dd72720","README.md":"e2effacb5bbd7c01523f9a9e4a6a59c0f9b8698753b210fec5742408498197df","src/core_lazy.rs":"6b9fb6a4f553058e240756125b6b9ca43a83ed1fb72964343038ea0ea2e1af10","src/inline_lazy.rs":"f6184afbca4b477616f270790edc180263be806aa92ef0a9de681b4aac9e88c4","src/lib.rs":"99096a5d3089c0d86646f0805d1455befe2cb09683704af29c5c9d99ecab2683","tests/no_std.rs":"d68b149ee51ef5ae2b2906c0c94f5a9812d3b02811b13365c5a35e2ef90d25cf","tests/test.rs":"b3f7d805375dc5af7a2aa4b869944ad2ab4fc982b35ad718ea58f6914dc0a698"},"package":"e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"} \ No newline at end of file diff --git a/vendor/lazy_static/Cargo.toml b/vendor/lazy_static/Cargo.toml deleted file mode 100644 index 7f930c5e..00000000 --- a/vendor/lazy_static/Cargo.toml +++ /dev/null @@ -1,46 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies -# -# If you believe there's an error in this file please file an -# issue against the rust-lang/cargo repository. If you're -# editing this file be aware that the upstream Cargo.toml -# will likely look very different (and much more reasonable) - -[package] -name = "lazy_static" -version = "1.4.0" -authors = ["Marvin Löbel "] -exclude = ["/.travis.yml", "/appveyor.yml"] -description = "A macro for declaring lazily evaluated statics in Rust." -documentation = "https://docs.rs/lazy_static" -readme = "README.md" -keywords = ["macro", "lazy", "static"] -categories = ["no-std", "rust-patterns", "memory-management"] -license = "MIT/Apache-2.0" -repository = "https://github.com/rust-lang-nursery/lazy-static.rs" -[dependencies.spin] -version = "0.5.0" -optional = true -[dev-dependencies.doc-comment] -version = "0.3.1" - -[features] -spin_no_std = ["spin"] -[badges.appveyor] -repository = "rust-lang-nursery/lazy-static.rs" - -[badges.is-it-maintained-issue-resolution] -repository = "rust-lang-nursery/lazy-static.rs" - -[badges.is-it-maintained-open-issues] -repository = "rust-lang-nursery/lazy-static.rs" - -[badges.maintenance] -status = "passively-maintained" - -[badges.travis-ci] -repository = "rust-lang-nursery/lazy-static.rs" diff --git a/vendor/lazy_static/LICENSE-APACHE b/vendor/lazy_static/LICENSE-APACHE deleted file mode 100644 index 16fe87b0..00000000 --- a/vendor/lazy_static/LICENSE-APACHE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
diff --git a/vendor/lazy_static/LICENSE-MIT b/vendor/lazy_static/LICENSE-MIT deleted file mode 100644 index 25597d58..00000000 --- a/vendor/lazy_static/LICENSE-MIT +++ /dev/null @@ -1,25 +0,0 @@ -Copyright (c) 2010 The Rust Project Developers - -Permission is hereby granted, free of charge, to any -person obtaining a copy of this software and associated -documentation files (the "Software"), to deal in the -Software without restriction, including without -limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software -is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice -shall be included in all copies or substantial portions -of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. diff --git a/vendor/lazy_static/README.md b/vendor/lazy_static/README.md deleted file mode 100644 index aa9f8283..00000000 --- a/vendor/lazy_static/README.md +++ /dev/null @@ -1,79 +0,0 @@ -lazy-static.rs -============== - -A macro for declaring lazily evaluated statics in Rust. - -Using this macro, it is possible to have `static`s that require code to be -executed at runtime in order to be initialized. -This includes anything requiring heap allocations, like vectors or hash maps, -as well as anything that requires non-const function calls to be computed. - -[![Travis-CI Status](https://travis-ci.com/rust-lang-nursery/lazy-static.rs.svg?branch=master)](https://travis-ci.com/rust-lang-nursery/lazy-static.rs) -[![Latest version](https://img.shields.io/crates/v/lazy_static.svg)](https://crates.io/crates/lazy_static) -[![Documentation](https://docs.rs/lazy_static/badge.svg)](https://docs.rs/lazy_static) -[![License](https://img.shields.io/crates/l/lazy_static.svg)](https://github.com/rust-lang-nursery/lazy-static.rs#license) - -## Minimum supported `rustc` - -`1.27.2+` - -This version is explicitly tested in CI and may only be bumped in new minor versions. Any changes to the supported minimum version will be called out in the release notes. - - -# Getting Started - -[lazy-static.rs is available on crates.io](https://crates.io/crates/lazy_static). -It is recommended to look there for the newest released version, as well as links to the newest builds of the docs. - -At the point of the last update of this README, the latest published version could be used like this: - -Add the following dependency to your Cargo manifest... - -```toml -[dependencies] -lazy_static = "1.4.0" -``` - -...and see the [docs](https://docs.rs/lazy_static) for how to use it. - -# Example - -```rust -#[macro_use] -extern crate lazy_static; - -use std::collections::HashMap; - -lazy_static! 
{ - static ref HASHMAP: HashMap = { - let mut m = HashMap::new(); - m.insert(0, "foo"); - m.insert(1, "bar"); - m.insert(2, "baz"); - m - }; -} - -fn main() { - // First access to `HASHMAP` initializes it - println!("The entry for `0` is \"{}\".", HASHMAP.get(&0).unwrap()); - - // Any further access to `HASHMAP` just returns the computed value - println!("The entry for `1` is \"{}\".", HASHMAP.get(&1).unwrap()); -} -``` - -## License - -Licensed under either of - - * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) - * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) - -at your option. - -### Contribution - -Unless you explicitly state otherwise, any contribution intentionally submitted -for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any -additional terms or conditions. diff --git a/vendor/lazy_static/src/core_lazy.rs b/vendor/lazy_static/src/core_lazy.rs deleted file mode 100644 index b66c3e0d..00000000 --- a/vendor/lazy_static/src/core_lazy.rs +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2016 lazy-static.rs Developers -// -// Licensed under the Apache License, Version 2.0, or the MIT license , at your option. This file may not be -// copied, modified, or distributed except according to those terms. - -extern crate spin; - -use self::spin::Once; - -pub struct Lazy(Once); - -impl Lazy { - pub const INIT: Self = Lazy(Once::INIT); - - #[inline(always)] - pub fn get(&'static self, builder: F) -> &T - where F: FnOnce() -> T - { - self.0.call_once(builder) - } -} - -#[macro_export] -#[doc(hidden)] -macro_rules! __lazy_static_create { - ($NAME:ident, $T:ty) => { - static $NAME: $crate::lazy::Lazy<$T> = $crate::lazy::Lazy::INIT; - } -} diff --git a/vendor/lazy_static/src/inline_lazy.rs b/vendor/lazy_static/src/inline_lazy.rs deleted file mode 100644 index 219ce9c6..00000000 --- a/vendor/lazy_static/src/inline_lazy.rs +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright 2016 lazy-static.rs Developers -// -// Licensed under the Apache License, Version 2.0, or the MIT license , at your option. This file may not be -// copied, modified, or distributed except according to those terms. - -extern crate core; -extern crate std; - -use self::std::prelude::v1::*; -use self::std::cell::Cell; -use self::std::hint::unreachable_unchecked; -use self::std::sync::Once; -#[allow(deprecated)] -pub use self::std::sync::ONCE_INIT; - -// FIXME: Replace Option with MaybeUninit (stable since 1.36.0) -pub struct Lazy(Cell>, Once); - -impl Lazy { - #[allow(deprecated)] - pub const INIT: Self = Lazy(Cell::new(None), ONCE_INIT); - - #[inline(always)] - pub fn get(&'static self, f: F) -> &T - where - F: FnOnce() -> T, - { - self.1.call_once(|| { - self.0.set(Some(f())); - }); - - // `self.0` is guaranteed to be `Some` by this point - // The `Once` will catch and propagate panics - unsafe { - match *self.0.as_ptr() { - Some(ref x) => x, - None => { - debug_assert!(false, "attempted to derefence an uninitialized lazy static. This is a bug"); - - unreachable_unchecked() - }, - } - } - } -} - -unsafe impl Sync for Lazy {} - -#[macro_export] -#[doc(hidden)] -macro_rules! 
__lazy_static_create { - ($NAME:ident, $T:ty) => { - static $NAME: $crate::lazy::Lazy<$T> = $crate::lazy::Lazy::INIT; - }; -} diff --git a/vendor/lazy_static/src/lib.rs b/vendor/lazy_static/src/lib.rs deleted file mode 100644 index cada0dc6..00000000 --- a/vendor/lazy_static/src/lib.rs +++ /dev/null @@ -1,215 +0,0 @@ -// Copyright 2016 lazy-static.rs Developers -// -// Licensed under the Apache License, Version 2.0, or the MIT license , at your option. This file may not be -// copied, modified, or distributed except according to those terms. - -/*! -A macro for declaring lazily evaluated statics. - -Using this macro, it is possible to have `static`s that require code to be -executed at runtime in order to be initialized. -This includes anything requiring heap allocations, like vectors or hash maps, -as well as anything that requires function calls to be computed. - -# Syntax - -```ignore -lazy_static! { - [pub] static ref NAME_1: TYPE_1 = EXPR_1; - [pub] static ref NAME_2: TYPE_2 = EXPR_2; - ... - [pub] static ref NAME_N: TYPE_N = EXPR_N; -} -``` - -Attributes (including doc comments) are supported as well: - -```rust -# #[macro_use] -# extern crate lazy_static; -# fn main() { -lazy_static! { - /// This is an example for using doc comment attributes - static ref EXAMPLE: u8 = 42; -} -# } -``` - -# Semantics - -For a given `static ref NAME: TYPE = EXPR;`, the macro generates a unique type that -implements `Deref` and stores it in a static with name `NAME`. (Attributes end up -attaching to this type.) - -On first deref, `EXPR` gets evaluated and stored internally, such that all further derefs -can return a reference to the same object. Note that this can lead to deadlocks -if you have multiple lazy statics that depend on each other in their initialization. - -Apart from the lazy initialization, the resulting "static ref" variables -have generally the same properties as regular "static" variables: - -- Any type in them needs to fulfill the `Sync` trait. -- If the type has a destructor, then it will not run when the process exits. - -# Example - -Using the macro: - -```rust -#[macro_use] -extern crate lazy_static; - -use std::collections::HashMap; - -lazy_static! { - static ref HASHMAP: HashMap = { - let mut m = HashMap::new(); - m.insert(0, "foo"); - m.insert(1, "bar"); - m.insert(2, "baz"); - m - }; - static ref COUNT: usize = HASHMAP.len(); - static ref NUMBER: u32 = times_two(21); -} - -fn times_two(n: u32) -> u32 { n * 2 } - -fn main() { - println!("The map has {} entries.", *COUNT); - println!("The entry for `0` is \"{}\".", HASHMAP.get(&0).unwrap()); - println!("A expensive calculation on a static results in: {}.", *NUMBER); -} -``` - -# Implementation details - -The `Deref` implementation uses a hidden static variable that is guarded by an atomic check on each access. - -# Cargo features - -This crate provides one cargo feature: - -- `spin_no_std`: This allows using this crate in a no-std environment, by depending on the standalone `spin` crate. - -*/ - -#![doc(html_root_url = "https://docs.rs/lazy_static/1.4.0")] -#![no_std] - -#[cfg(not(feature = "spin_no_std"))] -#[path="inline_lazy.rs"] -#[doc(hidden)] -pub mod lazy; - -#[cfg(test)] -#[macro_use] -extern crate doc_comment; - -#[cfg(test)] -doctest!("../README.md"); - -#[cfg(feature = "spin_no_std")] -#[path="core_lazy.rs"] -#[doc(hidden)] -pub mod lazy; - -#[doc(hidden)] -pub use core::ops::Deref as __Deref; - -#[macro_export(local_inner_macros)] -#[doc(hidden)] -macro_rules! 
__lazy_static_internal { - // optional visibility restrictions are wrapped in `()` to allow for - // explicitly passing otherwise implicit information about private items - ($(#[$attr:meta])* ($($vis:tt)*) static ref $N:ident : $T:ty = $e:expr; $($t:tt)*) => { - __lazy_static_internal!(@MAKE TY, $(#[$attr])*, ($($vis)*), $N); - __lazy_static_internal!(@TAIL, $N : $T = $e); - lazy_static!($($t)*); - }; - (@TAIL, $N:ident : $T:ty = $e:expr) => { - impl $crate::__Deref for $N { - type Target = $T; - fn deref(&self) -> &$T { - #[inline(always)] - fn __static_ref_initialize() -> $T { $e } - - #[inline(always)] - fn __stability() -> &'static $T { - __lazy_static_create!(LAZY, $T); - LAZY.get(__static_ref_initialize) - } - __stability() - } - } - impl $crate::LazyStatic for $N { - fn initialize(lazy: &Self) { - let _ = &**lazy; - } - } - }; - // `vis` is wrapped in `()` to prevent parsing ambiguity - (@MAKE TY, $(#[$attr:meta])*, ($($vis:tt)*), $N:ident) => { - #[allow(missing_copy_implementations)] - #[allow(non_camel_case_types)] - #[allow(dead_code)] - $(#[$attr])* - $($vis)* struct $N {__private_field: ()} - #[doc(hidden)] - $($vis)* static $N: $N = $N {__private_field: ()}; - }; - () => () -} - -#[macro_export(local_inner_macros)] -macro_rules! lazy_static { - ($(#[$attr:meta])* static ref $N:ident : $T:ty = $e:expr; $($t:tt)*) => { - // use `()` to explicitly forward the information about private items - __lazy_static_internal!($(#[$attr])* () static ref $N : $T = $e; $($t)*); - }; - ($(#[$attr:meta])* pub static ref $N:ident : $T:ty = $e:expr; $($t:tt)*) => { - __lazy_static_internal!($(#[$attr])* (pub) static ref $N : $T = $e; $($t)*); - }; - ($(#[$attr:meta])* pub ($($vis:tt)+) static ref $N:ident : $T:ty = $e:expr; $($t:tt)*) => { - __lazy_static_internal!($(#[$attr])* (pub ($($vis)+)) static ref $N : $T = $e; $($t)*); - }; - () => () -} - -/// Support trait for enabling a few common operation on lazy static values. -/// -/// This is implemented by each defined lazy static, and -/// used by the free functions in this crate. -pub trait LazyStatic { - #[doc(hidden)] - fn initialize(lazy: &Self); -} - -/// Takes a shared reference to a lazy static and initializes -/// it if it has not been already. -/// -/// This can be used to control the initialization point of a lazy static. -/// -/// Example: -/// -/// ```rust -/// #[macro_use] -/// extern crate lazy_static; -/// -/// lazy_static! { -/// static ref BUFFER: Vec = (0..255).collect(); -/// } -/// -/// fn main() { -/// lazy_static::initialize(&BUFFER); -/// -/// // ... -/// work_with_initialized_data(&BUFFER); -/// } -/// # fn work_with_initialized_data(_: &[u8]) {} -/// ``` -pub fn initialize(lazy: &T) { - LazyStatic::initialize(lazy); -} diff --git a/vendor/lazy_static/tests/no_std.rs b/vendor/lazy_static/tests/no_std.rs deleted file mode 100644 index f94a1aaa..00000000 --- a/vendor/lazy_static/tests/no_std.rs +++ /dev/null @@ -1,20 +0,0 @@ -#![cfg(feature="spin_no_std")] - -#![no_std] - -#[macro_use] -extern crate lazy_static; - -lazy_static! { - /// Documentation! - pub static ref NUMBER: u32 = times_two(3); -} - -fn times_two(n: u32) -> u32 { - n * 2 -} - -#[test] -fn test_basic() { - assert_eq!(*NUMBER, 6); -} diff --git a/vendor/lazy_static/tests/test.rs b/vendor/lazy_static/tests/test.rs deleted file mode 100644 index 03d0ab68..00000000 --- a/vendor/lazy_static/tests/test.rs +++ /dev/null @@ -1,164 +0,0 @@ -#[macro_use] -extern crate lazy_static; -use std::collections::HashMap; - -lazy_static! { - /// Documentation! 
- pub static ref NUMBER: u32 = times_two(3); - - static ref ARRAY_BOXES: [Box; 3] = [Box::new(1), Box::new(2), Box::new(3)]; - - /// More documentation! - #[allow(unused_variables)] - #[derive(Copy, Clone, Debug)] - pub static ref STRING: String = "hello".to_string(); - - static ref HASHMAP: HashMap = { - let mut m = HashMap::new(); - m.insert(0, "abc"); - m.insert(1, "def"); - m.insert(2, "ghi"); - m - }; - - // This should not compile if the unsafe is removed. - static ref UNSAFE: u32 = unsafe { - std::mem::transmute::(-1) - }; -} - -lazy_static! { - static ref S1: &'static str = "a"; - static ref S2: &'static str = "b"; -} -lazy_static! { - static ref S3: String = [*S1, *S2].join(""); -} - -#[test] -fn s3() { - assert_eq!(&*S3, "ab"); -} - -fn times_two(n: u32) -> u32 { - n * 2 -} - -#[test] -fn test_basic() { - assert_eq!(&**STRING, "hello"); - assert_eq!(*NUMBER, 6); - assert!(HASHMAP.get(&1).is_some()); - assert!(HASHMAP.get(&3).is_none()); - assert_eq!(&*ARRAY_BOXES, &[Box::new(1), Box::new(2), Box::new(3)]); - assert_eq!(*UNSAFE, std::u32::MAX); -} - -#[test] -fn test_repeat() { - assert_eq!(*NUMBER, 6); - assert_eq!(*NUMBER, 6); - assert_eq!(*NUMBER, 6); -} - -#[test] -fn test_meta() { - // this would not compile if STRING were not marked #[derive(Copy, Clone)] - let copy_of_string = STRING; - // just to make sure it was copied - assert!(&STRING as *const _ != ©_of_string as *const _); - - // this would not compile if STRING were not marked #[derive(Debug)] - assert_eq!(format!("{:?}", STRING), "STRING { __private_field: () }".to_string()); -} - -mod visibility { - lazy_static! { - pub static ref FOO: Box = Box::new(0); - static ref BAR: Box = Box::new(98); - } - - pub mod inner { - lazy_static! { - pub(in visibility) static ref BAZ: Box = Box::new(42); - pub(crate) static ref BAG: Box = Box::new(37); - } - } - - #[test] - fn sub_test() { - assert_eq!(**FOO, 0); - assert_eq!(**BAR, 98); - assert_eq!(**inner::BAZ, 42); - assert_eq!(**inner::BAG, 37); - } -} - -#[test] -fn test_visibility() { - assert_eq!(*visibility::FOO, Box::new(0)); - assert_eq!(*visibility::inner::BAG, Box::new(37)); -} - -// This should not cause a warning about a missing Copy implementation -lazy_static! { - pub static ref VAR: i32 = { 0 }; -} - -#[derive(Copy, Clone, Debug, PartialEq)] -struct X; -struct Once(X); -const ONCE_INIT: Once = Once(X); -static DATA: X = X; -static ONCE: X = X; -fn require_sync() -> X { X } -fn transmute() -> X { X } -fn __static_ref_initialize() -> X { X } -fn test(_: Vec) -> X { X } - -// All these names should not be shadowed -lazy_static! { - static ref ITEM_NAME_TEST: X = { - test(vec![X, Once(X).0, ONCE_INIT.0, DATA, ONCE, - require_sync(), transmute(), - // Except this, which will sadly be shadowed by internals: - // __static_ref_initialize() - ]) - }; -} - -#[test] -fn item_name_shadowing() { - assert_eq!(*ITEM_NAME_TEST, X); -} - -use std::sync::atomic::AtomicBool; -#[allow(deprecated)] -use std::sync::atomic::ATOMIC_BOOL_INIT; -use std::sync::atomic::Ordering::SeqCst; - -#[allow(deprecated)] -static PRE_INIT_FLAG: AtomicBool = ATOMIC_BOOL_INIT; - -lazy_static! { - static ref PRE_INIT: () = { - PRE_INIT_FLAG.store(true, SeqCst); - () - }; -} - -#[test] -fn pre_init() { - assert_eq!(PRE_INIT_FLAG.load(SeqCst), false); - lazy_static::initialize(&PRE_INIT); - assert_eq!(PRE_INIT_FLAG.load(SeqCst), true); -} - -lazy_static! 
{ - static ref LIFETIME_NAME: for<'a> fn(&'a u8) = { fn f(_: &u8) {} f }; -} - -#[test] -fn lifetime_name() { - let _ = LIFETIME_NAME; -} diff --git a/vendor/proc-macro2/.cargo-checksum.json b/vendor/proc-macro2/.cargo-checksum.json deleted file mode 100644 index b88de6c1..00000000 --- a/vendor/proc-macro2/.cargo-checksum.json +++ /dev/null @@ -1 +0,0 @@ -{"files":{"Cargo.toml":"0a5984c855f13db167b90022183c267afcebbafa42a5a66bbcba04953d23b7d7","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"e1f9d4fc22cff2c049f166a403b41458632a94357890d31cf0e3ad83807fb430","build.rs":"4a6ea112a069e927e40dd79c4b2eaf054521c49eb381c1b24cd5a8a035ee06b5","src/detection.rs":"9d25d896889e65330858f2d6f6223c1b98cd1dad189813ad4161ff189fbda2b8","src/fallback.rs":"c161f65f18d7d19bcbd568f5c0bea1cfc1ce3bd9c66427b1fdb4944ad7966ce0","src/lib.rs":"993e046f5777ea09500775b6323eadd9c48cb48e9f818ec0d39faf50479f019e","src/marker.rs":"87fce2d0357f5b7998b6d9dfb064f4a0cbc9dabb19e33d4b514a446243ebe2e8","src/parse.rs":"b82101576fdc493793f040aa17c11267e2747964729df1f88c81fe413aed407e","src/wrapper.rs":"624527eaa49c272824ab5f7286e6ecbdd80b6eac54e99be11b6e8b29dcb0015d","tests/comments.rs":"ea6cbe6f4c8852e6a0612893c7d4f2c144a2e6a134a6c3db641a320cbfc3c800","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"652db9f25c69ffc65baa60cdca8f195aa2e254d4de0a9ddc85de4dc2470544b6","tests/test.rs":"597186c00ebf51191934c88ff970b5457ca0fb8e608bf896be1ebf8d74c17f4d","tests/test_fmt.rs":"745dfdc41d09c5308c221395eb43f2041f0a1413d2927a813bc2ad4554438fe2"},"package":"b9f5105d4fdaab20335ca9565e106a5d9b82b6219b5ba735731124ac6711d23d"} \ No newline at end of file diff --git a/vendor/proc-macro2/Cargo.toml b/vendor/proc-macro2/Cargo.toml deleted file mode 100644 index 901c787f..00000000 --- a/vendor/proc-macro2/Cargo.toml +++ /dev/null @@ -1,41 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies. -# -# If you are reading this file be aware that the original Cargo.toml -# will likely look very different (and much more reasonable). -# See Cargo.toml.orig for the original contents. 
- -[package] -edition = "2018" -name = "proc-macro2" -version = "1.0.29" -authors = ["Alex Crichton ", "David Tolnay "] -description = "A substitute implementation of the compiler's `proc_macro` API to decouple\ntoken-based libraries from the procedural macro use case.\n" -documentation = "https://docs.rs/proc-macro2" -readme = "README.md" -keywords = ["macros"] -categories = ["development-tools::procedural-macro-helpers"] -license = "MIT OR Apache-2.0" -repository = "https://github.com/alexcrichton/proc-macro2" -[package.metadata.docs.rs] -rustc-args = ["--cfg", "procmacro2_semver_exempt"] -rustdoc-args = ["--cfg", "procmacro2_semver_exempt", "--cfg", "doc_cfg"] -targets = ["x86_64-unknown-linux-gnu"] - -[package.metadata.playground] -features = ["span-locations"] -[dependencies.unicode-xid] -version = "0.2" -[dev-dependencies.quote] -version = "1.0" -default_features = false - -[features] -default = ["proc-macro"] -nightly = [] -proc-macro = [] -span-locations = [] diff --git a/vendor/proc-macro2/LICENSE-APACHE b/vendor/proc-macro2/LICENSE-APACHE deleted file mode 100644 index 16fe87b0..00000000 --- a/vendor/proc-macro2/LICENSE-APACHE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/vendor/proc-macro2/LICENSE-MIT b/vendor/proc-macro2/LICENSE-MIT deleted file mode 100644 index 39e0ed66..00000000 --- a/vendor/proc-macro2/LICENSE-MIT +++ /dev/null @@ -1,25 +0,0 @@ -Copyright (c) 2014 Alex Crichton - -Permission is hereby granted, free of charge, to any -person obtaining a copy of this software and associated -documentation files (the "Software"), to deal in the -Software without restriction, including without -limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software -is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice -shall be included in all copies or substantial portions -of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. diff --git a/vendor/proc-macro2/README.md b/vendor/proc-macro2/README.md deleted file mode 100644 index 3d05e871..00000000 --- a/vendor/proc-macro2/README.md +++ /dev/null @@ -1,93 +0,0 @@ -# proc-macro2 - -[![Build Status](https://img.shields.io/github/workflow/status/alexcrichton/proc-macro2/build%20and%20test)](https://github.com/alexcrichton/proc-macro2/actions) -[![Latest Version](https://img.shields.io/crates/v/proc-macro2.svg)](https://crates.io/crates/proc-macro2) -[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2) - -A wrapper around the procedural macro API of the compiler's `proc_macro` crate. -This library serves two purposes: - -- **Bring proc-macro-like functionality to other contexts like build.rs and - main.rs.** Types from `proc_macro` are entirely specific to procedural macros - and cannot ever exist in code outside of a procedural macro. Meanwhile - `proc_macro2` types may exist anywhere including non-macro code. By developing - foundational libraries like [syn] and [quote] against `proc_macro2` rather - than `proc_macro`, the procedural macro ecosystem becomes easily applicable to - many other use cases and we avoid reimplementing non-macro equivalents of - those libraries. - -- **Make procedural macros unit testable.** As a consequence of being specific - to procedural macros, nothing that uses `proc_macro` can be executed from a - unit test. In order for helper libraries or components of a macro to be - testable in isolation, they must be implemented using `proc_macro2`. - -[syn]: https://github.com/dtolnay/syn -[quote]: https://github.com/dtolnay/quote - -## Usage - -```toml -[dependencies] -proc-macro2 = "1.0" -``` - -The skeleton of a typical procedural macro typically looks like this: - -```rust -extern crate proc_macro; - -#[proc_macro_derive(MyDerive)] -pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input = proc_macro2::TokenStream::from(input); - - let output: proc_macro2::TokenStream = { - /* transform input */ - }; - - proc_macro::TokenStream::from(output) -} -``` - -If parsing with [Syn], you'll use [`parse_macro_input!`] instead to propagate -parse errors correctly back to the compiler when parsing fails. - -[`parse_macro_input!`]: https://docs.rs/syn/1.0/syn/macro.parse_macro_input.html - -## Unstable features - -The default feature set of proc-macro2 tracks the most recent stable compiler -API. Functionality in `proc_macro` that is not yet stable is not exposed by -proc-macro2 by default. - -To opt into the additional APIs available in the most recent nightly compiler, -the `procmacro2_semver_exempt` config flag must be passed to rustc. We will -polyfill those nightly-only APIs back to Rust 1.31.0. As these are unstable APIs -that track the nightly compiler, minor versions of proc-macro2 may make breaking -changes to them at any time. - -``` -RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build -``` - -Note that this must not only be done for your crate, but for any crate that -depends on your crate. This infectious nature is intentional, as it serves as a -reminder that you are outside of the normal semver guarantees. - -Semver exempt methods are marked as such in the proc-macro2 documentation. - -
- -#### License - - -Licensed under either of
Apache License, Version -2.0 or MIT license at your option. - - -
- - -Unless you explicitly state otherwise, any contribution intentionally submitted -for inclusion in this crate by you, as defined in the Apache-2.0 license, shall -be dual licensed as above, without any additional terms or conditions. - diff --git a/vendor/proc-macro2/build.rs b/vendor/proc-macro2/build.rs deleted file mode 100644 index f32d5c8f..00000000 --- a/vendor/proc-macro2/build.rs +++ /dev/null @@ -1,172 +0,0 @@ -// rustc-cfg emitted by the build script: -// -// "use_proc_macro" -// Link to extern crate proc_macro. Available on any compiler and any target -// except wasm32. Requires "proc-macro" Cargo cfg to be enabled (default is -// enabled). On wasm32 we never link to proc_macro even if "proc-macro" cfg -// is enabled. -// -// "wrap_proc_macro" -// Wrap types from libproc_macro rather than polyfilling the whole API. -// Enabled on rustc 1.29+ as long as procmacro2_semver_exempt is not set, -// because we can't emulate the unstable API without emulating everything -// else. Also enabled unconditionally on nightly, in which case the -// procmacro2_semver_exempt surface area is implemented by using the -// nightly-only proc_macro API. -// -// "hygiene" -// Enable Span::mixed_site() and non-dummy behavior of Span::resolved_at -// and Span::located_at. Enabled on Rust 1.45+. -// -// "proc_macro_span" -// Enable non-dummy behavior of Span::start and Span::end methods which -// requires an unstable compiler feature. Enabled when building with -// nightly, unless `-Z allow-feature` in RUSTFLAGS disallows unstable -// features. -// -// "super_unstable" -// Implement the semver exempt API in terms of the nightly-only proc_macro -// API. Enabled when using procmacro2_semver_exempt on a nightly compiler. -// -// "span_locations" -// Provide methods Span::start and Span::end which give the line/column -// location of a token. Enabled by procmacro2_semver_exempt or the -// "span-locations" Cargo cfg. This is behind a cfg because tracking -// location inside spans is a performance hit. 
- -use std::env; -use std::iter; -use std::process::{self, Command}; -use std::str; - -fn main() { - println!("cargo:rerun-if-changed=build.rs"); - - let version = match rustc_version() { - Some(version) => version, - None => return, - }; - - if version.minor < 31 { - eprintln!("Minimum supported rustc version is 1.31"); - process::exit(1); - } - - let semver_exempt = cfg!(procmacro2_semver_exempt); - if semver_exempt { - // https://github.com/alexcrichton/proc-macro2/issues/147 - println!("cargo:rustc-cfg=procmacro2_semver_exempt"); - } - - if semver_exempt || cfg!(feature = "span-locations") { - println!("cargo:rustc-cfg=span_locations"); - } - - if version.minor < 32 { - println!("cargo:rustc-cfg=no_libprocmacro_unwind_safe"); - } - - if version.minor < 39 { - println!("cargo:rustc-cfg=no_bind_by_move_pattern_guard"); - } - - if version.minor >= 44 { - println!("cargo:rustc-cfg=lexerror_display"); - } - - if version.minor >= 45 { - println!("cargo:rustc-cfg=hygiene"); - } - - if version.minor >= 54 { - println!("cargo:rustc-cfg=literal_from_str"); - } - - let target = env::var("TARGET").unwrap(); - if !enable_use_proc_macro(&target) { - return; - } - - println!("cargo:rustc-cfg=use_proc_macro"); - - if version.nightly || !semver_exempt { - println!("cargo:rustc-cfg=wrap_proc_macro"); - } - - if version.nightly && feature_allowed("proc_macro_span") { - println!("cargo:rustc-cfg=proc_macro_span"); - } - - if semver_exempt && version.nightly { - println!("cargo:rustc-cfg=super_unstable"); - } -} - -fn enable_use_proc_macro(target: &str) -> bool { - // wasm targets don't have the `proc_macro` crate, disable this feature. - if target.contains("wasm32") { - return false; - } - - // Otherwise, only enable it if our feature is actually enabled. - cfg!(feature = "proc-macro") -} - -struct RustcVersion { - minor: u32, - nightly: bool, -} - -fn rustc_version() -> Option { - let rustc = env::var_os("RUSTC")?; - let output = Command::new(rustc).arg("--version").output().ok()?; - let version = str::from_utf8(&output.stdout).ok()?; - let nightly = version.contains("nightly") || version.contains("dev"); - let mut pieces = version.split('.'); - if pieces.next() != Some("rustc 1") { - return None; - } - let minor = pieces.next()?.parse().ok()?; - Some(RustcVersion { minor, nightly }) -} - -fn feature_allowed(feature: &str) -> bool { - // Recognized formats: - // - // -Z allow-features=feature1,feature2 - // - // -Zallow-features=feature1,feature2 - - let flags_var; - let flags_var_string; - let mut flags_var_split; - let mut flags_none; - let flags: &mut dyn Iterator = - if let Some(encoded_rustflags) = env::var_os("CARGO_ENCODED_RUSTFLAGS") { - flags_var = encoded_rustflags; - flags_var_string = flags_var.to_string_lossy(); - flags_var_split = flags_var_string.split('\x1f'); - &mut flags_var_split - } else if let Some(rustflags) = env::var_os("RUSTFLAGS") { - flags_var = rustflags; - flags_var_string = flags_var.to_string_lossy(); - flags_var_split = flags_var_string.split(' '); - &mut flags_var_split - } else { - flags_none = iter::empty(); - &mut flags_none - }; - - for mut flag in flags { - if flag.starts_with("-Z") { - flag = &flag["-Z".len()..]; - } - if flag.starts_with("allow-features=") { - flag = &flag["allow-features=".len()..]; - return flag.split(',').any(|allowed| allowed == feature); - } - } - - // No allow-features= flag, allowed by default. 
- true -} diff --git a/vendor/proc-macro2/src/detection.rs b/vendor/proc-macro2/src/detection.rs deleted file mode 100644 index c597bc99..00000000 --- a/vendor/proc-macro2/src/detection.rs +++ /dev/null @@ -1,67 +0,0 @@ -use std::panic::{self, PanicInfo}; -use std::sync::atomic::*; -use std::sync::Once; - -static WORKS: AtomicUsize = AtomicUsize::new(0); -static INIT: Once = Once::new(); - -pub(crate) fn inside_proc_macro() -> bool { - match WORKS.load(Ordering::SeqCst) { - 1 => return false, - 2 => return true, - _ => {} - } - - INIT.call_once(initialize); - inside_proc_macro() -} - -pub(crate) fn force_fallback() { - WORKS.store(1, Ordering::SeqCst); -} - -pub(crate) fn unforce_fallback() { - initialize(); -} - -// Swap in a null panic hook to avoid printing "thread panicked" to stderr, -// then use catch_unwind to determine whether the compiler's proc_macro is -// working. When proc-macro2 is used from outside of a procedural macro all -// of the proc_macro crate's APIs currently panic. -// -// The Once is to prevent the possibility of this ordering: -// -// thread 1 calls take_hook, gets the user's original hook -// thread 1 calls set_hook with the null hook -// thread 2 calls take_hook, thinks null hook is the original hook -// thread 2 calls set_hook with the null hook -// thread 1 calls set_hook with the actual original hook -// thread 2 calls set_hook with what it thinks is the original hook -// -// in which the user's hook has been lost. -// -// There is still a race condition where a panic in a different thread can -// happen during the interval that the user's original panic hook is -// unregistered such that their hook is incorrectly not called. This is -// sufficiently unlikely and less bad than printing panic messages to stderr -// on correct use of this crate. Maybe there is a libstd feature request -// here. For now, if a user needs to guarantee that this failure mode does -// not occur, they need to call e.g. `proc_macro2::Span::call_site()` from -// the main thread before launching any other threads. -fn initialize() { - type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static; - - let null_hook: Box = Box::new(|_panic_info| { /* ignore */ }); - let sanity_check = &*null_hook as *const PanicHook; - let original_hook = panic::take_hook(); - panic::set_hook(null_hook); - - let works = panic::catch_unwind(proc_macro::Span::call_site).is_ok(); - WORKS.store(works as usize + 1, Ordering::SeqCst); - - let hopefully_null_hook = panic::take_hook(); - panic::set_hook(original_hook); - if sanity_check != &*hopefully_null_hook { - panic!("observed race condition in proc_macro2::inside_proc_macro"); - } -} diff --git a/vendor/proc-macro2/src/fallback.rs b/vendor/proc-macro2/src/fallback.rs deleted file mode 100644 index 3d2feae1..00000000 --- a/vendor/proc-macro2/src/fallback.rs +++ /dev/null @@ -1,923 +0,0 @@ -use crate::parse::{self, Cursor}; -use crate::{Delimiter, Spacing, TokenTree}; -#[cfg(span_locations)] -use std::cell::RefCell; -#[cfg(span_locations)] -use std::cmp; -use std::fmt::{self, Debug, Display}; -use std::iter::FromIterator; -use std::mem; -use std::ops::RangeBounds; -#[cfg(procmacro2_semver_exempt)] -use std::path::Path; -use std::path::PathBuf; -use std::str::FromStr; -use std::vec; -use unicode_xid::UnicodeXID; - -/// Force use of proc-macro2's fallback implementation of the API for now, even -/// if the compiler's implementation is available. 
-pub fn force() { - #[cfg(wrap_proc_macro)] - crate::detection::force_fallback(); -} - -/// Resume using the compiler's implementation of the proc macro API if it is -/// available. -pub fn unforce() { - #[cfg(wrap_proc_macro)] - crate::detection::unforce_fallback(); -} - -#[derive(Clone)] -pub(crate) struct TokenStream { - pub(crate) inner: Vec, -} - -#[derive(Debug)] -pub(crate) struct LexError { - pub(crate) span: Span, -} - -impl LexError { - pub(crate) fn span(&self) -> Span { - self.span - } - - fn call_site() -> Self { - LexError { - span: Span::call_site(), - } - } -} - -impl TokenStream { - pub fn new() -> TokenStream { - TokenStream { inner: Vec::new() } - } - - pub fn is_empty(&self) -> bool { - self.inner.len() == 0 - } - - fn take_inner(&mut self) -> Vec { - mem::replace(&mut self.inner, Vec::new()) - } - - fn push_token(&mut self, token: TokenTree) { - // https://github.com/alexcrichton/proc-macro2/issues/235 - match token { - #[cfg(not(no_bind_by_move_pattern_guard))] - TokenTree::Literal(crate::Literal { - #[cfg(wrap_proc_macro)] - inner: crate::imp::Literal::Fallback(literal), - #[cfg(not(wrap_proc_macro))] - inner: literal, - .. - }) if literal.text.starts_with('-') => { - push_negative_literal(self, literal); - } - #[cfg(no_bind_by_move_pattern_guard)] - TokenTree::Literal(crate::Literal { - #[cfg(wrap_proc_macro)] - inner: crate::imp::Literal::Fallback(literal), - #[cfg(not(wrap_proc_macro))] - inner: literal, - .. - }) => { - if literal.text.starts_with('-') { - push_negative_literal(self, literal); - } else { - self.inner - .push(TokenTree::Literal(crate::Literal::_new_stable(literal))); - } - } - _ => self.inner.push(token), - } - - #[cold] - fn push_negative_literal(stream: &mut TokenStream, mut literal: Literal) { - literal.text.remove(0); - let mut punct = crate::Punct::new('-', Spacing::Alone); - punct.set_span(crate::Span::_new_stable(literal.span)); - stream.inner.push(TokenTree::Punct(punct)); - stream - .inner - .push(TokenTree::Literal(crate::Literal::_new_stable(literal))); - } - } -} - -// Nonrecursive to prevent stack overflow. 
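//
// Without this manual `Drop`, dropping a deeply nested stream such as
// `(((((...)))))` would recurse once per `Group` level and could overflow the
// stack. The loop below instead pops tokens and splices any nested group's
// stream back into `self.inner`, so the drop depth stays constant.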
-impl Drop for TokenStream { - fn drop(&mut self) { - while let Some(token) = self.inner.pop() { - let group = match token { - TokenTree::Group(group) => group.inner, - _ => continue, - }; - #[cfg(wrap_proc_macro)] - let group = match group { - crate::imp::Group::Fallback(group) => group, - _ => continue, - }; - let mut group = group; - self.inner.extend(group.stream.take_inner()); - } - } -} - -#[cfg(span_locations)] -fn get_cursor(src: &str) -> Cursor { - // Create a dummy file & add it to the source map - SOURCE_MAP.with(|cm| { - let mut cm = cm.borrow_mut(); - let name = format!("", cm.files.len()); - let span = cm.add_file(&name, src); - Cursor { - rest: src, - off: span.lo, - } - }) -} - -#[cfg(not(span_locations))] -fn get_cursor(src: &str) -> Cursor { - Cursor { rest: src } -} - -impl FromStr for TokenStream { - type Err = LexError; - - fn from_str(src: &str) -> Result { - // Create a dummy file & add it to the source map - let cursor = get_cursor(src); - - parse::token_stream(cursor) - } -} - -impl Display for LexError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str("cannot parse string into token stream") - } -} - -impl Display for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let mut joint = false; - for (i, tt) in self.inner.iter().enumerate() { - if i != 0 && !joint { - write!(f, " ")?; - } - joint = false; - match tt { - TokenTree::Group(tt) => Display::fmt(tt, f), - TokenTree::Ident(tt) => Display::fmt(tt, f), - TokenTree::Punct(tt) => { - joint = tt.spacing() == Spacing::Joint; - Display::fmt(tt, f) - } - TokenTree::Literal(tt) => Display::fmt(tt, f), - }? - } - - Ok(()) - } -} - -impl Debug for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str("TokenStream ")?; - f.debug_list().entries(self.clone()).finish() - } -} - -#[cfg(use_proc_macro)] -impl From for TokenStream { - fn from(inner: proc_macro::TokenStream) -> TokenStream { - inner - .to_string() - .parse() - .expect("compiler token stream parse failed") - } -} - -#[cfg(use_proc_macro)] -impl From for proc_macro::TokenStream { - fn from(inner: TokenStream) -> proc_macro::TokenStream { - inner - .to_string() - .parse() - .expect("failed to parse to compiler tokens") - } -} - -impl From for TokenStream { - fn from(tree: TokenTree) -> TokenStream { - let mut stream = TokenStream::new(); - stream.push_token(tree); - stream - } -} - -impl FromIterator for TokenStream { - fn from_iter>(tokens: I) -> Self { - let mut stream = TokenStream::new(); - stream.extend(tokens); - stream - } -} - -impl FromIterator for TokenStream { - fn from_iter>(streams: I) -> Self { - let mut v = Vec::new(); - - for mut stream in streams { - v.extend(stream.take_inner()); - } - - TokenStream { inner: v } - } -} - -impl Extend for TokenStream { - fn extend>(&mut self, tokens: I) { - tokens.into_iter().for_each(|token| self.push_token(token)); - } -} - -impl Extend for TokenStream { - fn extend>(&mut self, streams: I) { - self.inner.extend(streams.into_iter().flatten()); - } -} - -pub(crate) type TokenTreeIter = vec::IntoIter; - -impl IntoIterator for TokenStream { - type Item = TokenTree; - type IntoIter = TokenTreeIter; - - fn into_iter(mut self) -> TokenTreeIter { - self.take_inner().into_iter() - } -} - -#[derive(Clone, PartialEq, Eq)] -pub(crate) struct SourceFile { - path: PathBuf, -} - -impl SourceFile { - /// Get the path to this source file as a string. 
- pub fn path(&self) -> PathBuf { - self.path.clone() - } - - pub fn is_real(&self) -> bool { - // XXX(nika): Support real files in the future? - false - } -} - -impl Debug for SourceFile { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("SourceFile") - .field("path", &self.path()) - .field("is_real", &self.is_real()) - .finish() - } -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq)] -pub(crate) struct LineColumn { - pub line: usize, - pub column: usize, -} - -#[cfg(span_locations)] -thread_local! { - static SOURCE_MAP: RefCell = RefCell::new(SourceMap { - // NOTE: We start with a single dummy file which all call_site() and - // def_site() spans reference. - files: vec![FileInfo { - #[cfg(procmacro2_semver_exempt)] - name: "".to_owned(), - span: Span { lo: 0, hi: 0 }, - lines: vec![0], - }], - }); -} - -#[cfg(span_locations)] -struct FileInfo { - #[cfg(procmacro2_semver_exempt)] - name: String, - span: Span, - lines: Vec, -} - -#[cfg(span_locations)] -impl FileInfo { - fn offset_line_column(&self, offset: usize) -> LineColumn { - assert!(self.span_within(Span { - lo: offset as u32, - hi: offset as u32 - })); - let offset = offset - self.span.lo as usize; - match self.lines.binary_search(&offset) { - Ok(found) => LineColumn { - line: found + 1, - column: 0, - }, - Err(idx) => LineColumn { - line: idx, - column: offset - self.lines[idx - 1], - }, - } - } - - fn span_within(&self, span: Span) -> bool { - span.lo >= self.span.lo && span.hi <= self.span.hi - } -} - -/// Computes the offsets of each line in the given source string -/// and the total number of characters -#[cfg(span_locations)] -fn lines_offsets(s: &str) -> (usize, Vec) { - let mut lines = vec![0]; - let mut total = 0; - - for ch in s.chars() { - total += 1; - if ch == '\n' { - lines.push(total); - } - } - - (total, lines) -} - -#[cfg(span_locations)] -struct SourceMap { - files: Vec, -} - -#[cfg(span_locations)] -impl SourceMap { - fn next_start_pos(&self) -> u32 { - // Add 1 so there's always space between files. - // - // We'll always have at least 1 file, as we initialize our files list - // with a dummy file. - self.files.last().unwrap().span.hi + 1 - } - - fn add_file(&mut self, name: &str, src: &str) -> Span { - let (len, lines) = lines_offsets(src); - let lo = self.next_start_pos(); - // XXX(nika): Shouild we bother doing a checked cast or checked add here? 
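        // (A checked variant would look roughly like
        //     let hi = lo.checked_add(len as u32).expect("source map overflowed u32");
        // but offsets are assumed to stay well below u32::MAX in practice.)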
- let span = Span { - lo, - hi: lo + (len as u32), - }; - - self.files.push(FileInfo { - #[cfg(procmacro2_semver_exempt)] - name: name.to_owned(), - span, - lines, - }); - - #[cfg(not(procmacro2_semver_exempt))] - let _ = name; - - span - } - - fn fileinfo(&self, span: Span) -> &FileInfo { - for file in &self.files { - if file.span_within(span) { - return file; - } - } - panic!("Invalid span with no related FileInfo!"); - } -} - -#[derive(Clone, Copy, PartialEq, Eq)] -pub(crate) struct Span { - #[cfg(span_locations)] - pub(crate) lo: u32, - #[cfg(span_locations)] - pub(crate) hi: u32, -} - -impl Span { - #[cfg(not(span_locations))] - pub fn call_site() -> Span { - Span {} - } - - #[cfg(span_locations)] - pub fn call_site() -> Span { - Span { lo: 0, hi: 0 } - } - - #[cfg(hygiene)] - pub fn mixed_site() -> Span { - Span::call_site() - } - - #[cfg(procmacro2_semver_exempt)] - pub fn def_site() -> Span { - Span::call_site() - } - - pub fn resolved_at(&self, _other: Span) -> Span { - // Stable spans consist only of line/column information, so - // `resolved_at` and `located_at` only select which span the - // caller wants line/column information from. - *self - } - - pub fn located_at(&self, other: Span) -> Span { - other - } - - #[cfg(procmacro2_semver_exempt)] - pub fn source_file(&self) -> SourceFile { - SOURCE_MAP.with(|cm| { - let cm = cm.borrow(); - let fi = cm.fileinfo(*self); - SourceFile { - path: Path::new(&fi.name).to_owned(), - } - }) - } - - #[cfg(span_locations)] - pub fn start(&self) -> LineColumn { - SOURCE_MAP.with(|cm| { - let cm = cm.borrow(); - let fi = cm.fileinfo(*self); - fi.offset_line_column(self.lo as usize) - }) - } - - #[cfg(span_locations)] - pub fn end(&self) -> LineColumn { - SOURCE_MAP.with(|cm| { - let cm = cm.borrow(); - let fi = cm.fileinfo(*self); - fi.offset_line_column(self.hi as usize) - }) - } - - #[cfg(not(span_locations))] - pub fn join(&self, _other: Span) -> Option { - Some(Span {}) - } - - #[cfg(span_locations)] - pub fn join(&self, other: Span) -> Option { - SOURCE_MAP.with(|cm| { - let cm = cm.borrow(); - // If `other` is not within the same FileInfo as us, return None. 
- if !cm.fileinfo(*self).span_within(other) { - return None; - } - Some(Span { - lo: cmp::min(self.lo, other.lo), - hi: cmp::max(self.hi, other.hi), - }) - }) - } - - #[cfg(not(span_locations))] - fn first_byte(self) -> Self { - self - } - - #[cfg(span_locations)] - fn first_byte(self) -> Self { - Span { - lo: self.lo, - hi: cmp::min(self.lo.saturating_add(1), self.hi), - } - } - - #[cfg(not(span_locations))] - fn last_byte(self) -> Self { - self - } - - #[cfg(span_locations)] - fn last_byte(self) -> Self { - Span { - lo: cmp::max(self.hi.saturating_sub(1), self.lo), - hi: self.hi, - } - } -} - -impl Debug for Span { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - #[cfg(span_locations)] - return write!(f, "bytes({}..{})", self.lo, self.hi); - - #[cfg(not(span_locations))] - write!(f, "Span") - } -} - -pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) { - #[cfg(span_locations)] - { - if span.lo == 0 && span.hi == 0 { - return; - } - } - - if cfg!(span_locations) { - debug.field("span", &span); - } -} - -#[derive(Clone)] -pub(crate) struct Group { - delimiter: Delimiter, - stream: TokenStream, - span: Span, -} - -impl Group { - pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group { - Group { - delimiter, - stream, - span: Span::call_site(), - } - } - - pub fn delimiter(&self) -> Delimiter { - self.delimiter - } - - pub fn stream(&self) -> TokenStream { - self.stream.clone() - } - - pub fn span(&self) -> Span { - self.span - } - - pub fn span_open(&self) -> Span { - self.span.first_byte() - } - - pub fn span_close(&self) -> Span { - self.span.last_byte() - } - - pub fn set_span(&mut self, span: Span) { - self.span = span; - } -} - -impl Display for Group { - // We attempt to match libproc_macro's formatting. - // Empty parens: () - // Nonempty parens: (...) - // Empty brackets: [] - // Nonempty brackets: [...] - // Empty braces: { } - // Nonempty braces: { ... 
} - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let (open, close) = match self.delimiter { - Delimiter::Parenthesis => ("(", ")"), - Delimiter::Brace => ("{ ", "}"), - Delimiter::Bracket => ("[", "]"), - Delimiter::None => ("", ""), - }; - - f.write_str(open)?; - Display::fmt(&self.stream, f)?; - if self.delimiter == Delimiter::Brace && !self.stream.inner.is_empty() { - f.write_str(" ")?; - } - f.write_str(close)?; - - Ok(()) - } -} - -impl Debug for Group { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - let mut debug = fmt.debug_struct("Group"); - debug.field("delimiter", &self.delimiter); - debug.field("stream", &self.stream); - debug_span_field_if_nontrivial(&mut debug, self.span); - debug.finish() - } -} - -#[derive(Clone)] -pub(crate) struct Ident { - sym: String, - span: Span, - raw: bool, -} - -impl Ident { - fn _new(string: &str, raw: bool, span: Span) -> Ident { - validate_ident(string); - - Ident { - sym: string.to_owned(), - span, - raw, - } - } - - pub fn new(string: &str, span: Span) -> Ident { - Ident::_new(string, false, span) - } - - pub fn new_raw(string: &str, span: Span) -> Ident { - Ident::_new(string, true, span) - } - - pub fn span(&self) -> Span { - self.span - } - - pub fn set_span(&mut self, span: Span) { - self.span = span; - } -} - -pub(crate) fn is_ident_start(c: char) -> bool { - ('a' <= c && c <= 'z') - || ('A' <= c && c <= 'Z') - || c == '_' - || (c > '\x7f' && UnicodeXID::is_xid_start(c)) -} - -pub(crate) fn is_ident_continue(c: char) -> bool { - ('a' <= c && c <= 'z') - || ('A' <= c && c <= 'Z') - || c == '_' - || ('0' <= c && c <= '9') - || (c > '\x7f' && UnicodeXID::is_xid_continue(c)) -} - -fn validate_ident(string: &str) { - let validate = string; - if validate.is_empty() { - panic!("Ident is not allowed to be empty; use Option"); - } - - if validate.bytes().all(|digit| digit >= b'0' && digit <= b'9') { - panic!("Ident cannot be a number; use Literal instead"); - } - - fn ident_ok(string: &str) -> bool { - let mut chars = string.chars(); - let first = chars.next().unwrap(); - if !is_ident_start(first) { - return false; - } - for ch in chars { - if !is_ident_continue(ch) { - return false; - } - } - true - } - - if !ident_ok(validate) { - panic!("{:?} is not a valid Ident", string); - } -} - -impl PartialEq for Ident { - fn eq(&self, other: &Ident) -> bool { - self.sym == other.sym && self.raw == other.raw - } -} - -impl PartialEq for Ident -where - T: ?Sized + AsRef, -{ - fn eq(&self, other: &T) -> bool { - let other = other.as_ref(); - if self.raw { - other.starts_with("r#") && self.sym == other[2..] - } else { - self.sym == other - } - } -} - -impl Display for Ident { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - if self.raw { - f.write_str("r#")?; - } - Display::fmt(&self.sym, f) - } -} - -impl Debug for Ident { - // Ident(proc_macro), Ident(r#union) - #[cfg(not(span_locations))] - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let mut debug = f.debug_tuple("Ident"); - debug.field(&format_args!("{}", self)); - debug.finish() - } - - // Ident { - // sym: proc_macro, - // span: bytes(128..138) - // } - #[cfg(span_locations)] - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let mut debug = f.debug_struct("Ident"); - debug.field("sym", &format_args!("{}", self)); - debug_span_field_if_nontrivial(&mut debug, self.span); - debug.finish() - } -} - -#[derive(Clone)] -pub(crate) struct Literal { - text: String, - span: Span, -} - -macro_rules! 
suffixed_numbers { - ($($name:ident => $kind:ident,)*) => ($( - pub fn $name(n: $kind) -> Literal { - Literal::_new(format!(concat!("{}", stringify!($kind)), n)) - } - )*) -} - -macro_rules! unsuffixed_numbers { - ($($name:ident => $kind:ident,)*) => ($( - pub fn $name(n: $kind) -> Literal { - Literal::_new(n.to_string()) - } - )*) -} - -impl Literal { - pub(crate) fn _new(text: String) -> Literal { - Literal { - text, - span: Span::call_site(), - } - } - - suffixed_numbers! { - u8_suffixed => u8, - u16_suffixed => u16, - u32_suffixed => u32, - u64_suffixed => u64, - u128_suffixed => u128, - usize_suffixed => usize, - i8_suffixed => i8, - i16_suffixed => i16, - i32_suffixed => i32, - i64_suffixed => i64, - i128_suffixed => i128, - isize_suffixed => isize, - - f32_suffixed => f32, - f64_suffixed => f64, - } - - unsuffixed_numbers! { - u8_unsuffixed => u8, - u16_unsuffixed => u16, - u32_unsuffixed => u32, - u64_unsuffixed => u64, - u128_unsuffixed => u128, - usize_unsuffixed => usize, - i8_unsuffixed => i8, - i16_unsuffixed => i16, - i32_unsuffixed => i32, - i64_unsuffixed => i64, - i128_unsuffixed => i128, - isize_unsuffixed => isize, - } - - pub fn f32_unsuffixed(f: f32) -> Literal { - let mut s = f.to_string(); - if !s.contains('.') { - s.push_str(".0"); - } - Literal::_new(s) - } - - pub fn f64_unsuffixed(f: f64) -> Literal { - let mut s = f.to_string(); - if !s.contains('.') { - s.push_str(".0"); - } - Literal::_new(s) - } - - pub fn string(t: &str) -> Literal { - let mut text = String::with_capacity(t.len() + 2); - text.push('"'); - for c in t.chars() { - if c == '\'' { - // escape_debug turns this into "\'" which is unnecessary. - text.push(c); - } else { - text.extend(c.escape_debug()); - } - } - text.push('"'); - Literal::_new(text) - } - - pub fn character(t: char) -> Literal { - let mut text = String::new(); - text.push('\''); - if t == '"' { - // escape_debug turns this into '\"' which is unnecessary. 
- text.push(t); - } else { - text.extend(t.escape_debug()); - } - text.push('\''); - Literal::_new(text) - } - - pub fn byte_string(bytes: &[u8]) -> Literal { - let mut escaped = "b\"".to_string(); - for b in bytes { - #[allow(clippy::match_overlapping_arm)] - match *b { - b'\0' => escaped.push_str(r"\0"), - b'\t' => escaped.push_str(r"\t"), - b'\n' => escaped.push_str(r"\n"), - b'\r' => escaped.push_str(r"\r"), - b'"' => escaped.push_str("\\\""), - b'\\' => escaped.push_str("\\\\"), - b'\x20'..=b'\x7E' => escaped.push(*b as char), - _ => escaped.push_str(&format!("\\x{:02X}", b)), - } - } - escaped.push('"'); - Literal::_new(escaped) - } - - pub fn span(&self) -> Span { - self.span - } - - pub fn set_span(&mut self, span: Span) { - self.span = span; - } - - pub fn subspan>(&self, _range: R) -> Option { - None - } -} - -impl FromStr for Literal { - type Err = LexError; - - fn from_str(repr: &str) -> Result { - let cursor = get_cursor(repr); - if let Ok((_rest, literal)) = parse::literal(cursor) { - if literal.text.len() == repr.len() { - return Ok(literal); - } - } - Err(LexError::call_site()) - } -} - -impl Display for Literal { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Display::fmt(&self.text, f) - } -} - -impl Debug for Literal { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - let mut debug = fmt.debug_struct("Literal"); - debug.field("lit", &format_args!("{}", self.text)); - debug_span_field_if_nontrivial(&mut debug, self.span); - debug.finish() - } -} diff --git a/vendor/proc-macro2/src/lib.rs b/vendor/proc-macro2/src/lib.rs deleted file mode 100644 index 1d35bfd2..00000000 --- a/vendor/proc-macro2/src/lib.rs +++ /dev/null @@ -1,1272 +0,0 @@ -//! A wrapper around the procedural macro API of the compiler's [`proc_macro`] -//! crate. This library serves two purposes: -//! -//! [`proc_macro`]: https://doc.rust-lang.org/proc_macro/ -//! -//! - **Bring proc-macro-like functionality to other contexts like build.rs and -//! main.rs.** Types from `proc_macro` are entirely specific to procedural -//! macros and cannot ever exist in code outside of a procedural macro. -//! Meanwhile `proc_macro2` types may exist anywhere including non-macro code. -//! By developing foundational libraries like [syn] and [quote] against -//! `proc_macro2` rather than `proc_macro`, the procedural macro ecosystem -//! becomes easily applicable to many other use cases and we avoid -//! reimplementing non-macro equivalents of those libraries. -//! -//! - **Make procedural macros unit testable.** As a consequence of being -//! specific to procedural macros, nothing that uses `proc_macro` can be -//! executed from a unit test. In order for helper libraries or components of -//! a macro to be testable in isolation, they must be implemented using -//! `proc_macro2`. -//! -//! [syn]: https://github.com/dtolnay/syn -//! [quote]: https://github.com/dtolnay/quote -//! -//! # Usage -//! -//! The skeleton of a typical procedural macro typically looks like this: -//! -//! ``` -//! extern crate proc_macro; -//! -//! # const IGNORE: &str = stringify! { -//! #[proc_macro_derive(MyDerive)] -//! # }; -//! # #[cfg(wrap_proc_macro)] -//! pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream { -//! let input = proc_macro2::TokenStream::from(input); -//! -//! let output: proc_macro2::TokenStream = { -//! /* transform input */ -//! # input -//! }; -//! -//! proc_macro::TokenStream::from(output) -//! } -//! ``` -//! -//! 
If parsing with [Syn], you'll use [`parse_macro_input!`] instead to -//! propagate parse errors correctly back to the compiler when parsing fails. -//! -//! [`parse_macro_input!`]: https://docs.rs/syn/1.0/syn/macro.parse_macro_input.html -//! -//! # Unstable features -//! -//! The default feature set of proc-macro2 tracks the most recent stable -//! compiler API. Functionality in `proc_macro` that is not yet stable is not -//! exposed by proc-macro2 by default. -//! -//! To opt into the additional APIs available in the most recent nightly -//! compiler, the `procmacro2_semver_exempt` config flag must be passed to -//! rustc. We will polyfill those nightly-only APIs back to Rust 1.31.0. As -//! these are unstable APIs that track the nightly compiler, minor versions of -//! proc-macro2 may make breaking changes to them at any time. -//! -//! ```sh -//! RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build -//! ``` -//! -//! Note that this must not only be done for your crate, but for any crate that -//! depends on your crate. This infectious nature is intentional, as it serves -//! as a reminder that you are outside of the normal semver guarantees. -//! -//! Semver exempt methods are marked as such in the proc-macro2 documentation. -//! -//! # Thread-Safety -//! -//! Most types in this crate are `!Sync` because the underlying compiler -//! types make use of thread-local memory, meaning they cannot be accessed from -//! a different thread. - -// Proc-macro2 types in rustdoc of other crates get linked to here. -#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.29")] -#![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))] -#![cfg_attr(super_unstable, feature(proc_macro_raw_ident, proc_macro_def_site))] -#![cfg_attr(doc_cfg, feature(doc_cfg))] -#![allow(clippy::needless_doctest_main, clippy::vec_init_then_push)] - -#[cfg(use_proc_macro)] -extern crate proc_macro; - -mod marker; -mod parse; - -#[cfg(wrap_proc_macro)] -mod detection; - -// Public for proc_macro2::fallback::force() and unforce(), but those are quite -// a niche use case so we omit it from rustdoc. -#[doc(hidden)] -pub mod fallback; - -#[cfg(not(wrap_proc_macro))] -use crate::fallback as imp; -#[path = "wrapper.rs"] -#[cfg(wrap_proc_macro)] -mod imp; - -use crate::marker::Marker; -use std::cmp::Ordering; -use std::error::Error; -use std::fmt::{self, Debug, Display}; -use std::hash::{Hash, Hasher}; -use std::iter::FromIterator; -use std::ops::RangeBounds; -#[cfg(procmacro2_semver_exempt)] -use std::path::PathBuf; -use std::str::FromStr; - -/// An abstract stream of tokens, or more concretely a sequence of token trees. -/// -/// This type provides interfaces for iterating over token trees and for -/// collecting token trees into one stream. -/// -/// Token stream is both the input and output of `#[proc_macro]`, -/// `#[proc_macro_attribute]` and `#[proc_macro_derive]` definitions. -#[derive(Clone)] -pub struct TokenStream { - inner: imp::TokenStream, - _marker: Marker, -} - -/// Error returned from `TokenStream::from_str`. -pub struct LexError { - inner: imp::LexError, - _marker: Marker, -} - -impl TokenStream { - fn _new(inner: imp::TokenStream) -> TokenStream { - TokenStream { - inner, - _marker: Marker, - } - } - - fn _new_stable(inner: fallback::TokenStream) -> TokenStream { - TokenStream { - inner: inner.into(), - _marker: Marker, - } - } - - /// Returns an empty `TokenStream` containing no token trees. 
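    ///
    /// A small usage sketch (using only the constructors defined in this impl):
    ///
    /// ```
    /// let tokens = proc_macro2::TokenStream::new();
    /// assert!(tokens.is_empty());
    /// ```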
- pub fn new() -> TokenStream { - TokenStream::_new(imp::TokenStream::new()) - } - - /// Checks if this `TokenStream` is empty. - pub fn is_empty(&self) -> bool { - self.inner.is_empty() - } -} - -/// `TokenStream::default()` returns an empty stream, -/// i.e. this is equivalent with `TokenStream::new()`. -impl Default for TokenStream { - fn default() -> Self { - TokenStream::new() - } -} - -/// Attempts to break the string into tokens and parse those tokens into a token -/// stream. -/// -/// May fail for a number of reasons, for example, if the string contains -/// unbalanced delimiters or characters not existing in the language. -/// -/// NOTE: Some errors may cause panics instead of returning `LexError`. We -/// reserve the right to change these errors into `LexError`s later. -impl FromStr for TokenStream { - type Err = LexError; - - fn from_str(src: &str) -> Result { - let e = src.parse().map_err(|e| LexError { - inner: e, - _marker: Marker, - })?; - Ok(TokenStream::_new(e)) - } -} - -#[cfg(use_proc_macro)] -impl From for TokenStream { - fn from(inner: proc_macro::TokenStream) -> TokenStream { - TokenStream::_new(inner.into()) - } -} - -#[cfg(use_proc_macro)] -impl From for proc_macro::TokenStream { - fn from(inner: TokenStream) -> proc_macro::TokenStream { - inner.inner.into() - } -} - -impl From for TokenStream { - fn from(token: TokenTree) -> Self { - TokenStream::_new(imp::TokenStream::from(token)) - } -} - -impl Extend for TokenStream { - fn extend>(&mut self, streams: I) { - self.inner.extend(streams) - } -} - -impl Extend for TokenStream { - fn extend>(&mut self, streams: I) { - self.inner - .extend(streams.into_iter().map(|stream| stream.inner)) - } -} - -/// Collects a number of token trees into a single stream. -impl FromIterator for TokenStream { - fn from_iter>(streams: I) -> Self { - TokenStream::_new(streams.into_iter().collect()) - } -} -impl FromIterator for TokenStream { - fn from_iter>(streams: I) -> Self { - TokenStream::_new(streams.into_iter().map(|i| i.inner).collect()) - } -} - -/// Prints the token stream as a string that is supposed to be losslessly -/// convertible back into the same token stream (modulo spans), except for -/// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative -/// numeric literals. -impl Display for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Display::fmt(&self.inner, f) - } -} - -/// Prints token in a form convenient for debugging. -impl Debug for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Debug::fmt(&self.inner, f) - } -} - -impl LexError { - pub fn span(&self) -> Span { - Span::_new(self.inner.span()) - } -} - -impl Debug for LexError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Debug::fmt(&self.inner, f) - } -} - -impl Display for LexError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Display::fmt(&self.inner, f) - } -} - -impl Error for LexError {} - -/// The source file of a given `Span`. -/// -/// This type is semver exempt and not exposed by default. -#[cfg(procmacro2_semver_exempt)] -#[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))] -#[derive(Clone, PartialEq, Eq)] -pub struct SourceFile { - inner: imp::SourceFile, - _marker: Marker, -} - -#[cfg(procmacro2_semver_exempt)] -impl SourceFile { - fn _new(inner: imp::SourceFile) -> Self { - SourceFile { - inner, - _marker: Marker, - } - } - - /// Get the path to this source file. 
- /// - /// ### Note - /// - /// If the code span associated with this `SourceFile` was generated by an - /// external macro, this may not be an actual path on the filesystem. Use - /// [`is_real`] to check. - /// - /// Also note that even if `is_real` returns `true`, if - /// `--remap-path-prefix` was passed on the command line, the path as given - /// may not actually be valid. - /// - /// [`is_real`]: #method.is_real - pub fn path(&self) -> PathBuf { - self.inner.path() - } - - /// Returns `true` if this source file is a real source file, and not - /// generated by an external macro's expansion. - pub fn is_real(&self) -> bool { - self.inner.is_real() - } -} - -#[cfg(procmacro2_semver_exempt)] -impl Debug for SourceFile { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Debug::fmt(&self.inner, f) - } -} - -/// A line-column pair representing the start or end of a `Span`. -/// -/// This type is semver exempt and not exposed by default. -#[cfg(span_locations)] -#[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))] -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct LineColumn { - /// The 1-indexed line in the source file on which the span starts or ends - /// (inclusive). - pub line: usize, - /// The 0-indexed column (in UTF-8 characters) in the source file on which - /// the span starts or ends (inclusive). - pub column: usize, -} - -#[cfg(span_locations)] -impl Ord for LineColumn { - fn cmp(&self, other: &Self) -> Ordering { - self.line - .cmp(&other.line) - .then(self.column.cmp(&other.column)) - } -} - -#[cfg(span_locations)] -impl PartialOrd for LineColumn { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -/// A region of source code, along with macro expansion information. -#[derive(Copy, Clone)] -pub struct Span { - inner: imp::Span, - _marker: Marker, -} - -impl Span { - fn _new(inner: imp::Span) -> Span { - Span { - inner, - _marker: Marker, - } - } - - fn _new_stable(inner: fallback::Span) -> Span { - Span { - inner: inner.into(), - _marker: Marker, - } - } - - /// The span of the invocation of the current procedural macro. - /// - /// Identifiers created with this span will be resolved as if they were - /// written directly at the macro call location (call-site hygiene) and - /// other code at the macro call site will be able to refer to them as well. - pub fn call_site() -> Span { - Span::_new(imp::Span::call_site()) - } - - /// The span located at the invocation of the procedural macro, but with - /// local variables, labels, and `$crate` resolved at the definition site - /// of the macro. This is the same hygiene behavior as `macro_rules`. - /// - /// This function requires Rust 1.45 or later. - #[cfg(hygiene)] - pub fn mixed_site() -> Span { - Span::_new(imp::Span::mixed_site()) - } - - /// A span that resolves at the macro definition site. - /// - /// This method is semver exempt and not exposed by default. - #[cfg(procmacro2_semver_exempt)] - #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))] - pub fn def_site() -> Span { - Span::_new(imp::Span::def_site()) - } - - /// Creates a new span with the same line/column information as `self` but - /// that resolves symbols as though it were at `other`. - pub fn resolved_at(&self, other: Span) -> Span { - Span::_new(self.inner.resolved_at(other.inner)) - } - - /// Creates a new span with the same name resolution behavior as `self` but - /// with the line/column information of `other`. 
- pub fn located_at(&self, other: Span) -> Span { - Span::_new(self.inner.located_at(other.inner)) - } - - /// Convert `proc_macro2::Span` to `proc_macro::Span`. - /// - /// This method is available when building with a nightly compiler, or when - /// building with rustc 1.29+ *without* semver exempt features. - /// - /// # Panics - /// - /// Panics if called from outside of a procedural macro. Unlike - /// `proc_macro2::Span`, the `proc_macro::Span` type can only exist within - /// the context of a procedural macro invocation. - #[cfg(wrap_proc_macro)] - pub fn unwrap(self) -> proc_macro::Span { - self.inner.unwrap() - } - - // Soft deprecated. Please use Span::unwrap. - #[cfg(wrap_proc_macro)] - #[doc(hidden)] - pub fn unstable(self) -> proc_macro::Span { - self.unwrap() - } - - /// The original source file into which this span points. - /// - /// This method is semver exempt and not exposed by default. - #[cfg(procmacro2_semver_exempt)] - #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))] - pub fn source_file(&self) -> SourceFile { - SourceFile::_new(self.inner.source_file()) - } - - /// Get the starting line/column in the source file for this span. - /// - /// This method requires the `"span-locations"` feature to be enabled. - /// - /// When executing in a procedural macro context, the returned line/column - /// are only meaningful if compiled with a nightly toolchain. The stable - /// toolchain does not have this information available. When executing - /// outside of a procedural macro, such as main.rs or build.rs, the - /// line/column are always meaningful regardless of toolchain. - #[cfg(span_locations)] - #[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))] - pub fn start(&self) -> LineColumn { - let imp::LineColumn { line, column } = self.inner.start(); - LineColumn { line, column } - } - - /// Get the ending line/column in the source file for this span. - /// - /// This method requires the `"span-locations"` feature to be enabled. - /// - /// When executing in a procedural macro context, the returned line/column - /// are only meaningful if compiled with a nightly toolchain. The stable - /// toolchain does not have this information available. When executing - /// outside of a procedural macro, such as main.rs or build.rs, the - /// line/column are always meaningful regardless of toolchain. - #[cfg(span_locations)] - #[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))] - pub fn end(&self) -> LineColumn { - let imp::LineColumn { line, column } = self.inner.end(); - LineColumn { line, column } - } - - /// Create a new span encompassing `self` and `other`. - /// - /// Returns `None` if `self` and `other` are from different files. - /// - /// Warning: the underlying [`proc_macro::Span::join`] method is - /// nightly-only. When called from within a procedural macro not using a - /// nightly compiler, this method will always return `None`. - /// - /// [`proc_macro::Span::join`]: https://doc.rust-lang.org/proc_macro/struct.Span.html#method.join - pub fn join(&self, other: Span) -> Option { - self.inner.join(other.inner).map(Span::_new) - } - - /// Compares two spans to see if they're equal. - /// - /// This method is semver exempt and not exposed by default. - #[cfg(procmacro2_semver_exempt)] - #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))] - pub fn eq(&self, other: &Span) -> bool { - self.inner.eq(&other.inner) - } -} - -/// Prints a span in a form convenient for debugging. 
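/// In the fallback implementation (src/fallback.rs above) this shows the byte
/// range the span covers when span locations are tracked, e.g. `bytes(0..5)`,
/// and just `Span` otherwise.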
-impl Debug for Span { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Debug::fmt(&self.inner, f) - } -} - -/// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`). -#[derive(Clone)] -pub enum TokenTree { - /// A token stream surrounded by bracket delimiters. - Group(Group), - /// An identifier. - Ident(Ident), - /// A single punctuation character (`+`, `,`, `$`, etc.). - Punct(Punct), - /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc. - Literal(Literal), -} - -impl TokenTree { - /// Returns the span of this tree, delegating to the `span` method of - /// the contained token or a delimited stream. - pub fn span(&self) -> Span { - match self { - TokenTree::Group(t) => t.span(), - TokenTree::Ident(t) => t.span(), - TokenTree::Punct(t) => t.span(), - TokenTree::Literal(t) => t.span(), - } - } - - /// Configures the span for *only this token*. - /// - /// Note that if this token is a `Group` then this method will not configure - /// the span of each of the internal tokens, this will simply delegate to - /// the `set_span` method of each variant. - pub fn set_span(&mut self, span: Span) { - match self { - TokenTree::Group(t) => t.set_span(span), - TokenTree::Ident(t) => t.set_span(span), - TokenTree::Punct(t) => t.set_span(span), - TokenTree::Literal(t) => t.set_span(span), - } - } -} - -impl From for TokenTree { - fn from(g: Group) -> TokenTree { - TokenTree::Group(g) - } -} - -impl From for TokenTree { - fn from(g: Ident) -> TokenTree { - TokenTree::Ident(g) - } -} - -impl From for TokenTree { - fn from(g: Punct) -> TokenTree { - TokenTree::Punct(g) - } -} - -impl From for TokenTree { - fn from(g: Literal) -> TokenTree { - TokenTree::Literal(g) - } -} - -/// Prints the token tree as a string that is supposed to be losslessly -/// convertible back into the same token tree (modulo spans), except for -/// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative -/// numeric literals. -impl Display for TokenTree { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - TokenTree::Group(t) => Display::fmt(t, f), - TokenTree::Ident(t) => Display::fmt(t, f), - TokenTree::Punct(t) => Display::fmt(t, f), - TokenTree::Literal(t) => Display::fmt(t, f), - } - } -} - -/// Prints token tree in a form convenient for debugging. -impl Debug for TokenTree { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - // Each of these has the name in the struct type in the derived debug, - // so don't bother with an extra layer of indirection - match self { - TokenTree::Group(t) => Debug::fmt(t, f), - TokenTree::Ident(t) => { - let mut debug = f.debug_struct("Ident"); - debug.field("sym", &format_args!("{}", t)); - imp::debug_span_field_if_nontrivial(&mut debug, t.span().inner); - debug.finish() - } - TokenTree::Punct(t) => Debug::fmt(t, f), - TokenTree::Literal(t) => Debug::fmt(t, f), - } - } -} - -/// A delimited token stream. -/// -/// A `Group` internally contains a `TokenStream` which is surrounded by -/// `Delimiter`s. -#[derive(Clone)] -pub struct Group { - inner: imp::Group, -} - -/// Describes how a sequence of token trees is delimited. -#[derive(Copy, Clone, Debug, Eq, PartialEq)] -pub enum Delimiter { - /// `( ... )` - Parenthesis, - /// `{ ... }` - Brace, - /// `[ ... ]` - Bracket, - /// `Ø ... Ø` - /// - /// An implicit delimiter, that may, for example, appear around tokens - /// coming from a "macro variable" `$var`. 
It is important to preserve - /// operator priorities in cases like `$var * 3` where `$var` is `1 + 2`. - /// Implicit delimiters may not survive roundtrip of a token stream through - /// a string. - None, -} - -impl Group { - fn _new(inner: imp::Group) -> Self { - Group { inner } - } - - fn _new_stable(inner: fallback::Group) -> Self { - Group { - inner: inner.into(), - } - } - - /// Creates a new `Group` with the given delimiter and token stream. - /// - /// This constructor will set the span for this group to - /// `Span::call_site()`. To change the span you can use the `set_span` - /// method below. - pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group { - Group { - inner: imp::Group::new(delimiter, stream.inner), - } - } - - /// Returns the delimiter of this `Group` - pub fn delimiter(&self) -> Delimiter { - self.inner.delimiter() - } - - /// Returns the `TokenStream` of tokens that are delimited in this `Group`. - /// - /// Note that the returned token stream does not include the delimiter - /// returned above. - pub fn stream(&self) -> TokenStream { - TokenStream::_new(self.inner.stream()) - } - - /// Returns the span for the delimiters of this token stream, spanning the - /// entire `Group`. - /// - /// ```text - /// pub fn span(&self) -> Span { - /// ^^^^^^^ - /// ``` - pub fn span(&self) -> Span { - Span::_new(self.inner.span()) - } - - /// Returns the span pointing to the opening delimiter of this group. - /// - /// ```text - /// pub fn span_open(&self) -> Span { - /// ^ - /// ``` - pub fn span_open(&self) -> Span { - Span::_new(self.inner.span_open()) - } - - /// Returns the span pointing to the closing delimiter of this group. - /// - /// ```text - /// pub fn span_close(&self) -> Span { - /// ^ - /// ``` - pub fn span_close(&self) -> Span { - Span::_new(self.inner.span_close()) - } - - /// Configures the span for this `Group`'s delimiters, but not its internal - /// tokens. - /// - /// This method will **not** set the span of all the internal tokens spanned - /// by this group, but rather it will only set the span of the delimiter - /// tokens at the level of the `Group`. - pub fn set_span(&mut self, span: Span) { - self.inner.set_span(span.inner) - } -} - -/// Prints the group as a string that should be losslessly convertible back -/// into the same group (modulo spans), except for possibly `TokenTree::Group`s -/// with `Delimiter::None` delimiters. -impl Display for Group { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - Display::fmt(&self.inner, formatter) - } -} - -impl Debug for Group { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - Debug::fmt(&self.inner, formatter) - } -} - -/// A `Punct` is a single punctuation character like `+`, `-` or `#`. -/// -/// Multicharacter operators like `+=` are represented as two instances of -/// `Punct` with different forms of `Spacing` returned. -#[derive(Clone)] -pub struct Punct { - ch: char, - spacing: Spacing, - span: Span, -} - -/// Whether a `Punct` is followed immediately by another `Punct` or followed by -/// another token or whitespace. -#[derive(Copy, Clone, Debug, Eq, PartialEq)] -pub enum Spacing { - /// E.g. `+` is `Alone` in `+ =`, `+ident` or `+()`. - Alone, - /// E.g. `+` is `Joint` in `+=` or `'` is `Joint` in `'#`. - /// - /// Additionally, single quote `'` can join with identifiers to form - /// lifetimes `'ident`. - Joint, -} - -impl Punct { - /// Creates a new `Punct` from the given character and spacing. 
- /// - /// The `ch` argument must be a valid punctuation character permitted by the - /// language, otherwise the function will panic. - /// - /// The returned `Punct` will have the default span of `Span::call_site()` - /// which can be further configured with the `set_span` method below. - pub fn new(ch: char, spacing: Spacing) -> Punct { - Punct { - ch, - spacing, - span: Span::call_site(), - } - } - - /// Returns the value of this punctuation character as `char`. - pub fn as_char(&self) -> char { - self.ch - } - - /// Returns the spacing of this punctuation character, indicating whether - /// it's immediately followed by another `Punct` in the token stream, so - /// they can potentially be combined into a multicharacter operator - /// (`Joint`), or it's followed by some other token or whitespace (`Alone`) - /// so the operator has certainly ended. - pub fn spacing(&self) -> Spacing { - self.spacing - } - - /// Returns the span for this punctuation character. - pub fn span(&self) -> Span { - self.span - } - - /// Configure the span for this punctuation character. - pub fn set_span(&mut self, span: Span) { - self.span = span; - } -} - -/// Prints the punctuation character as a string that should be losslessly -/// convertible back into the same character. -impl Display for Punct { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Display::fmt(&self.ch, f) - } -} - -impl Debug for Punct { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - let mut debug = fmt.debug_struct("Punct"); - debug.field("char", &self.ch); - debug.field("spacing", &self.spacing); - imp::debug_span_field_if_nontrivial(&mut debug, self.span.inner); - debug.finish() - } -} - -/// A word of Rust code, which may be a keyword or legal variable name. -/// -/// An identifier consists of at least one Unicode code point, the first of -/// which has the XID_Start property and the rest of which have the XID_Continue -/// property. -/// -/// - The empty string is not an identifier. Use `Option`. -/// - A lifetime is not an identifier. Use `syn::Lifetime` instead. -/// -/// An identifier constructed with `Ident::new` is permitted to be a Rust -/// keyword, though parsing one through its [`Parse`] implementation rejects -/// Rust keywords. Use `input.call(Ident::parse_any)` when parsing to match the -/// behaviour of `Ident::new`. -/// -/// [`Parse`]: https://docs.rs/syn/1.0/syn/parse/trait.Parse.html -/// -/// # Examples -/// -/// A new ident can be created from a string using the `Ident::new` function. -/// A span must be provided explicitly which governs the name resolution -/// behavior of the resulting identifier. -/// -/// ``` -/// use proc_macro2::{Ident, Span}; -/// -/// fn main() { -/// let call_ident = Ident::new("calligraphy", Span::call_site()); -/// -/// println!("{}", call_ident); -/// } -/// ``` -/// -/// An ident can be interpolated into a token stream using the `quote!` macro. -/// -/// ``` -/// use proc_macro2::{Ident, Span}; -/// use quote::quote; -/// -/// fn main() { -/// let ident = Ident::new("demo", Span::call_site()); -/// -/// // Create a variable binding whose name is this ident. -/// let expanded = quote! { let #ident = 10; }; -/// -/// // Create a variable binding with a slightly different name. -/// let temp_ident = Ident::new(&format!("new_{}", ident), Span::call_site()); -/// let expanded = quote! { let #temp_ident = 10; }; -/// } -/// ``` -/// -/// A string representation of the ident is available through the `to_string()` -/// method. 
-/// -/// ``` -/// # use proc_macro2::{Ident, Span}; -/// # -/// # let ident = Ident::new("another_identifier", Span::call_site()); -/// # -/// // Examine the ident as a string. -/// let ident_string = ident.to_string(); -/// if ident_string.len() > 60 { -/// println!("Very long identifier: {}", ident_string) -/// } -/// ``` -#[derive(Clone)] -pub struct Ident { - inner: imp::Ident, - _marker: Marker, -} - -impl Ident { - fn _new(inner: imp::Ident) -> Ident { - Ident { - inner, - _marker: Marker, - } - } - - /// Creates a new `Ident` with the given `string` as well as the specified - /// `span`. - /// - /// The `string` argument must be a valid identifier permitted by the - /// language, otherwise the function will panic. - /// - /// Note that `span`, currently in rustc, configures the hygiene information - /// for this identifier. - /// - /// As of this time `Span::call_site()` explicitly opts-in to "call-site" - /// hygiene meaning that identifiers created with this span will be resolved - /// as if they were written directly at the location of the macro call, and - /// other code at the macro call site will be able to refer to them as well. - /// - /// Later spans like `Span::def_site()` will allow to opt-in to - /// "definition-site" hygiene meaning that identifiers created with this - /// span will be resolved at the location of the macro definition and other - /// code at the macro call site will not be able to refer to them. - /// - /// Due to the current importance of hygiene this constructor, unlike other - /// tokens, requires a `Span` to be specified at construction. - /// - /// # Panics - /// - /// Panics if the input string is neither a keyword nor a legal variable - /// name. If you are not sure whether the string contains an identifier and - /// need to handle an error case, use - /// syn::parse_str::<Ident> - /// rather than `Ident::new`. - pub fn new(string: &str, span: Span) -> Ident { - Ident::_new(imp::Ident::new(string, span.inner)) - } - - /// Same as `Ident::new`, but creates a raw identifier (`r#ident`). - /// - /// This method is semver exempt and not exposed by default. - #[cfg(procmacro2_semver_exempt)] - #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))] - pub fn new_raw(string: &str, span: Span) -> Ident { - Ident::_new_raw(string, span) - } - - fn _new_raw(string: &str, span: Span) -> Ident { - Ident::_new(imp::Ident::new_raw(string, span.inner)) - } - - /// Returns the span of this `Ident`. - pub fn span(&self) -> Span { - Span::_new(self.inner.span()) - } - - /// Configures the span of this `Ident`, possibly changing its hygiene - /// context. - pub fn set_span(&mut self, span: Span) { - self.inner.set_span(span.inner); - } -} - -impl PartialEq for Ident { - fn eq(&self, other: &Ident) -> bool { - self.inner == other.inner - } -} - -impl PartialEq for Ident -where - T: ?Sized + AsRef, -{ - fn eq(&self, other: &T) -> bool { - self.inner == other - } -} - -impl Eq for Ident {} - -impl PartialOrd for Ident { - fn partial_cmp(&self, other: &Ident) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for Ident { - fn cmp(&self, other: &Ident) -> Ordering { - self.to_string().cmp(&other.to_string()) - } -} - -impl Hash for Ident { - fn hash(&self, hasher: &mut H) { - self.to_string().hash(hasher) - } -} - -/// Prints the identifier as a string that should be losslessly convertible back -/// into the same identifier. 
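///
/// For example (in the fallback implementation, raw identifiers are printed
/// with their `r#` prefix intact):
///
/// ```
/// use proc_macro2::{Ident, Span};
///
/// let ident = Ident::new("demo", Span::call_site());
/// assert_eq!(ident.to_string(), "demo");
/// ```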
-impl Display for Ident { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Display::fmt(&self.inner, f) - } -} - -impl Debug for Ident { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Debug::fmt(&self.inner, f) - } -} - -/// A literal string (`"hello"`), byte string (`b"hello"`), character (`'a'`), -/// byte character (`b'a'`), an integer or floating point number with or without -/// a suffix (`1`, `1u8`, `2.3`, `2.3f32`). -/// -/// Boolean literals like `true` and `false` do not belong here, they are -/// `Ident`s. -#[derive(Clone)] -pub struct Literal { - inner: imp::Literal, - _marker: Marker, -} - -macro_rules! suffixed_int_literals { - ($($name:ident => $kind:ident,)*) => ($( - /// Creates a new suffixed integer literal with the specified value. - /// - /// This function will create an integer like `1u32` where the integer - /// value specified is the first part of the token and the integral is - /// also suffixed at the end. Literals created from negative numbers may - /// not survive roundtrips through `TokenStream` or strings and may be - /// broken into two tokens (`-` and positive literal). - /// - /// Literals created through this method have the `Span::call_site()` - /// span by default, which can be configured with the `set_span` method - /// below. - pub fn $name(n: $kind) -> Literal { - Literal::_new(imp::Literal::$name(n)) - } - )*) -} - -macro_rules! unsuffixed_int_literals { - ($($name:ident => $kind:ident,)*) => ($( - /// Creates a new unsuffixed integer literal with the specified value. - /// - /// This function will create an integer like `1` where the integer - /// value specified is the first part of the token. No suffix is - /// specified on this token, meaning that invocations like - /// `Literal::i8_unsuffixed(1)` are equivalent to - /// `Literal::u32_unsuffixed(1)`. Literals created from negative numbers - /// may not survive roundtrips through `TokenStream` or strings and may - /// be broken into two tokens (`-` and positive literal). - /// - /// Literals created through this method have the `Span::call_site()` - /// span by default, which can be configured with the `set_span` method - /// below. - pub fn $name(n: $kind) -> Literal { - Literal::_new(imp::Literal::$name(n)) - } - )*) -} - -impl Literal { - fn _new(inner: imp::Literal) -> Literal { - Literal { - inner, - _marker: Marker, - } - } - - fn _new_stable(inner: fallback::Literal) -> Literal { - Literal { - inner: inner.into(), - _marker: Marker, - } - } - - suffixed_int_literals! { - u8_suffixed => u8, - u16_suffixed => u16, - u32_suffixed => u32, - u64_suffixed => u64, - u128_suffixed => u128, - usize_suffixed => usize, - i8_suffixed => i8, - i16_suffixed => i16, - i32_suffixed => i32, - i64_suffixed => i64, - i128_suffixed => i128, - isize_suffixed => isize, - } - - unsuffixed_int_literals! { - u8_unsuffixed => u8, - u16_unsuffixed => u16, - u32_unsuffixed => u32, - u64_unsuffixed => u64, - u128_unsuffixed => u128, - usize_unsuffixed => usize, - i8_unsuffixed => i8, - i16_unsuffixed => i16, - i32_unsuffixed => i32, - i64_unsuffixed => i64, - i128_unsuffixed => i128, - isize_unsuffixed => isize, - } - - /// Creates a new unsuffixed floating-point literal. - /// - /// This constructor is similar to those like `Literal::i8_unsuffixed` where - /// the float's value is emitted directly into the token but no suffix is - /// used, so it may be inferred to be a `f64` later in the compiler. 
- /// Literals created from negative numbers may not survive rountrips through - /// `TokenStream` or strings and may be broken into two tokens (`-` and - /// positive literal). - /// - /// # Panics - /// - /// This function requires that the specified float is finite, for example - /// if it is infinity or NaN this function will panic. - pub fn f64_unsuffixed(f: f64) -> Literal { - assert!(f.is_finite()); - Literal::_new(imp::Literal::f64_unsuffixed(f)) - } - - /// Creates a new suffixed floating-point literal. - /// - /// This constructor will create a literal like `1.0f64` where the value - /// specified is the preceding part of the token and `f64` is the suffix of - /// the token. This token will always be inferred to be an `f64` in the - /// compiler. Literals created from negative numbers may not survive - /// rountrips through `TokenStream` or strings and may be broken into two - /// tokens (`-` and positive literal). - /// - /// # Panics - /// - /// This function requires that the specified float is finite, for example - /// if it is infinity or NaN this function will panic. - pub fn f64_suffixed(f: f64) -> Literal { - assert!(f.is_finite()); - Literal::_new(imp::Literal::f64_suffixed(f)) - } - - /// Creates a new unsuffixed floating-point literal. - /// - /// This constructor is similar to those like `Literal::i8_unsuffixed` where - /// the float's value is emitted directly into the token but no suffix is - /// used, so it may be inferred to be a `f64` later in the compiler. - /// Literals created from negative numbers may not survive rountrips through - /// `TokenStream` or strings and may be broken into two tokens (`-` and - /// positive literal). - /// - /// # Panics - /// - /// This function requires that the specified float is finite, for example - /// if it is infinity or NaN this function will panic. - pub fn f32_unsuffixed(f: f32) -> Literal { - assert!(f.is_finite()); - Literal::_new(imp::Literal::f32_unsuffixed(f)) - } - - /// Creates a new suffixed floating-point literal. - /// - /// This constructor will create a literal like `1.0f32` where the value - /// specified is the preceding part of the token and `f32` is the suffix of - /// the token. This token will always be inferred to be an `f32` in the - /// compiler. Literals created from negative numbers may not survive - /// rountrips through `TokenStream` or strings and may be broken into two - /// tokens (`-` and positive literal). - /// - /// # Panics - /// - /// This function requires that the specified float is finite, for example - /// if it is infinity or NaN this function will panic. - pub fn f32_suffixed(f: f32) -> Literal { - assert!(f.is_finite()); - Literal::_new(imp::Literal::f32_suffixed(f)) - } - - /// String literal. - pub fn string(string: &str) -> Literal { - Literal::_new(imp::Literal::string(string)) - } - - /// Character literal. - pub fn character(ch: char) -> Literal { - Literal::_new(imp::Literal::character(ch)) - } - - /// Byte string literal. - pub fn byte_string(s: &[u8]) -> Literal { - Literal::_new(imp::Literal::byte_string(s)) - } - - /// Returns the span encompassing this literal. - pub fn span(&self) -> Span { - Span::_new(self.inner.span()) - } - - /// Configures the span associated for this literal. - pub fn set_span(&mut self, span: Span) { - self.inner.set_span(span.inner); - } - - /// Returns a `Span` that is a subset of `self.span()` containing only - /// the source bytes in range `range`. Returns `None` if the would-be - /// trimmed span is outside the bounds of `self`. 
- /// - /// Warning: the underlying [`proc_macro::Literal::subspan`] method is - /// nightly-only. When called from within a procedural macro not using a - /// nightly compiler, this method will always return `None`. - /// - /// [`proc_macro::Literal::subspan`]: https://doc.rust-lang.org/proc_macro/struct.Literal.html#method.subspan - pub fn subspan>(&self, range: R) -> Option { - self.inner.subspan(range).map(Span::_new) - } -} - -impl FromStr for Literal { - type Err = LexError; - - fn from_str(repr: &str) -> Result { - repr.parse().map(Literal::_new).map_err(|inner| LexError { - inner, - _marker: Marker, - }) - } -} - -impl Debug for Literal { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Debug::fmt(&self.inner, f) - } -} - -impl Display for Literal { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Display::fmt(&self.inner, f) - } -} - -/// Public implementation details for the `TokenStream` type, such as iterators. -pub mod token_stream { - use crate::marker::Marker; - use crate::{imp, TokenTree}; - use std::fmt::{self, Debug}; - - pub use crate::TokenStream; - - /// An iterator over `TokenStream`'s `TokenTree`s. - /// - /// The iteration is "shallow", e.g. the iterator doesn't recurse into - /// delimited groups, and returns whole groups as token trees. - #[derive(Clone)] - pub struct IntoIter { - inner: imp::TokenTreeIter, - _marker: Marker, - } - - impl Iterator for IntoIter { - type Item = TokenTree; - - fn next(&mut self) -> Option { - self.inner.next() - } - } - - impl Debug for IntoIter { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Debug::fmt(&self.inner, f) - } - } - - impl IntoIterator for TokenStream { - type Item = TokenTree; - type IntoIter = IntoIter; - - fn into_iter(self) -> IntoIter { - IntoIter { - inner: self.inner.into_iter(), - _marker: Marker, - } - } - } -} diff --git a/vendor/proc-macro2/src/marker.rs b/vendor/proc-macro2/src/marker.rs deleted file mode 100644 index 58729baf..00000000 --- a/vendor/proc-macro2/src/marker.rs +++ /dev/null @@ -1,18 +0,0 @@ -use std::marker::PhantomData; -use std::panic::{RefUnwindSafe, UnwindSafe}; -use std::rc::Rc; - -// Zero sized marker with the correct set of autotrait impls we want all proc -// macro types to have. 
-pub(crate) type Marker = PhantomData; - -pub(crate) use self::value::*; - -mod value { - pub(crate) use std::marker::PhantomData as Marker; -} - -pub(crate) struct ProcMacroAutoTraits(Rc<()>); - -impl UnwindSafe for ProcMacroAutoTraits {} -impl RefUnwindSafe for ProcMacroAutoTraits {} diff --git a/vendor/proc-macro2/src/parse.rs b/vendor/proc-macro2/src/parse.rs deleted file mode 100644 index eddb4901..00000000 --- a/vendor/proc-macro2/src/parse.rs +++ /dev/null @@ -1,866 +0,0 @@ -use crate::fallback::{ - is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream, -}; -use crate::{Delimiter, Punct, Spacing, TokenTree}; -use std::char; -use std::str::{Bytes, CharIndices, Chars}; - -#[derive(Copy, Clone, Eq, PartialEq)] -pub(crate) struct Cursor<'a> { - pub rest: &'a str, - #[cfg(span_locations)] - pub off: u32, -} - -impl<'a> Cursor<'a> { - fn advance(&self, bytes: usize) -> Cursor<'a> { - let (_front, rest) = self.rest.split_at(bytes); - Cursor { - rest, - #[cfg(span_locations)] - off: self.off + _front.chars().count() as u32, - } - } - - fn starts_with(&self, s: &str) -> bool { - self.rest.starts_with(s) - } - - fn is_empty(&self) -> bool { - self.rest.is_empty() - } - - fn len(&self) -> usize { - self.rest.len() - } - - fn as_bytes(&self) -> &'a [u8] { - self.rest.as_bytes() - } - - fn bytes(&self) -> Bytes<'a> { - self.rest.bytes() - } - - fn chars(&self) -> Chars<'a> { - self.rest.chars() - } - - fn char_indices(&self) -> CharIndices<'a> { - self.rest.char_indices() - } - - fn parse(&self, tag: &str) -> Result, Reject> { - if self.starts_with(tag) { - Ok(self.advance(tag.len())) - } else { - Err(Reject) - } - } -} - -pub(crate) struct Reject; -type PResult<'a, O> = Result<(Cursor<'a>, O), Reject>; - -fn skip_whitespace(input: Cursor) -> Cursor { - let mut s = input; - - while !s.is_empty() { - let byte = s.as_bytes()[0]; - if byte == b'/' { - if s.starts_with("//") - && (!s.starts_with("///") || s.starts_with("////")) - && !s.starts_with("//!") - { - let (cursor, _) = take_until_newline_or_eof(s); - s = cursor; - continue; - } else if s.starts_with("/**/") { - s = s.advance(4); - continue; - } else if s.starts_with("/*") - && (!s.starts_with("/**") || s.starts_with("/***")) - && !s.starts_with("/*!") - { - match block_comment(s) { - Ok((rest, _)) => { - s = rest; - continue; - } - Err(Reject) => return s, - } - } - } - match byte { - b' ' | 0x09..=0x0d => { - s = s.advance(1); - continue; - } - b if b <= 0x7f => {} - _ => { - let ch = s.chars().next().unwrap(); - if is_whitespace(ch) { - s = s.advance(ch.len_utf8()); - continue; - } - } - } - return s; - } - s -} - -fn block_comment(input: Cursor) -> PResult<&str> { - if !input.starts_with("/*") { - return Err(Reject); - } - - let mut depth = 0; - let bytes = input.as_bytes(); - let mut i = 0; - let upper = bytes.len() - 1; - - while i < upper { - if bytes[i] == b'/' && bytes[i + 1] == b'*' { - depth += 1; - i += 1; // eat '*' - } else if bytes[i] == b'*' && bytes[i + 1] == b'/' { - depth -= 1; - if depth == 0 { - return Ok((input.advance(i + 2), &input.rest[..i + 2])); - } - i += 1; // eat '/' - } - i += 1; - } - - Err(Reject) -} - -fn is_whitespace(ch: char) -> bool { - // Rust treats left-to-right mark and right-to-left mark as whitespace - ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}' -} - -fn word_break(input: Cursor) -> Result { - match input.chars().next() { - Some(ch) if is_ident_continue(ch) => Err(Reject), - Some(_) | None => Ok(input), - } -} - -pub(crate) fn token_stream(mut input: Cursor) 
-> Result { - let mut trees = Vec::new(); - let mut stack = Vec::new(); - - loop { - input = skip_whitespace(input); - - if let Ok((rest, tt)) = doc_comment(input) { - trees.extend(tt); - input = rest; - continue; - } - - #[cfg(span_locations)] - let lo = input.off; - - let first = match input.bytes().next() { - Some(first) => first, - None => match stack.last() { - None => return Ok(TokenStream { inner: trees }), - #[cfg(span_locations)] - Some((lo, _frame)) => { - return Err(LexError { - span: Span { lo: *lo, hi: *lo }, - }) - } - #[cfg(not(span_locations))] - Some(_frame) => return Err(LexError { span: Span {} }), - }, - }; - - if let Some(open_delimiter) = match first { - b'(' => Some(Delimiter::Parenthesis), - b'[' => Some(Delimiter::Bracket), - b'{' => Some(Delimiter::Brace), - _ => None, - } { - input = input.advance(1); - let frame = (open_delimiter, trees); - #[cfg(span_locations)] - let frame = (lo, frame); - stack.push(frame); - trees = Vec::new(); - } else if let Some(close_delimiter) = match first { - b')' => Some(Delimiter::Parenthesis), - b']' => Some(Delimiter::Bracket), - b'}' => Some(Delimiter::Brace), - _ => None, - } { - let frame = match stack.pop() { - Some(frame) => frame, - None => return Err(lex_error(input)), - }; - #[cfg(span_locations)] - let (lo, frame) = frame; - let (open_delimiter, outer) = frame; - if open_delimiter != close_delimiter { - return Err(lex_error(input)); - } - input = input.advance(1); - let mut g = Group::new(open_delimiter, TokenStream { inner: trees }); - g.set_span(Span { - #[cfg(span_locations)] - lo, - #[cfg(span_locations)] - hi: input.off, - }); - trees = outer; - trees.push(TokenTree::Group(crate::Group::_new_stable(g))); - } else { - let (rest, mut tt) = match leaf_token(input) { - Ok((rest, tt)) => (rest, tt), - Err(Reject) => return Err(lex_error(input)), - }; - tt.set_span(crate::Span::_new_stable(Span { - #[cfg(span_locations)] - lo, - #[cfg(span_locations)] - hi: rest.off, - })); - trees.push(tt); - input = rest; - } - } -} - -fn lex_error(cursor: Cursor) -> LexError { - #[cfg(not(span_locations))] - let _ = cursor; - LexError { - span: Span { - #[cfg(span_locations)] - lo: cursor.off, - #[cfg(span_locations)] - hi: cursor.off, - }, - } -} - -fn leaf_token(input: Cursor) -> PResult { - if let Ok((input, l)) = literal(input) { - // must be parsed before ident - Ok((input, TokenTree::Literal(crate::Literal::_new_stable(l)))) - } else if let Ok((input, p)) = punct(input) { - Ok((input, TokenTree::Punct(p))) - } else if let Ok((input, i)) = ident(input) { - Ok((input, TokenTree::Ident(i))) - } else { - Err(Reject) - } -} - -fn ident(input: Cursor) -> PResult { - if ["r\"", "r#\"", "r##", "b\"", "b\'", "br\"", "br#"] - .iter() - .any(|prefix| input.starts_with(prefix)) - { - Err(Reject) - } else { - ident_any(input) - } -} - -fn ident_any(input: Cursor) -> PResult { - let raw = input.starts_with("r#"); - let rest = input.advance((raw as usize) << 1); - - let (rest, sym) = ident_not_raw(rest)?; - - if !raw { - let ident = crate::Ident::new(sym, crate::Span::call_site()); - return Ok((rest, ident)); - } - - if sym == "_" { - return Err(Reject); - } - - let ident = crate::Ident::_new_raw(sym, crate::Span::call_site()); - Ok((rest, ident)) -} - -fn ident_not_raw(input: Cursor) -> PResult<&str> { - let mut chars = input.char_indices(); - - match chars.next() { - Some((_, ch)) if is_ident_start(ch) => {} - _ => return Err(Reject), - } - - let mut end = input.len(); - for (i, ch) in chars { - if !is_ident_continue(ch) { - end = i; - break; 
- } - } - - Ok((input.advance(end), &input.rest[..end])) -} - -pub(crate) fn literal(input: Cursor) -> PResult { - let rest = literal_nocapture(input)?; - let end = input.len() - rest.len(); - Ok((rest, Literal::_new(input.rest[..end].to_string()))) -} - -fn literal_nocapture(input: Cursor) -> Result { - if let Ok(ok) = string(input) { - Ok(ok) - } else if let Ok(ok) = byte_string(input) { - Ok(ok) - } else if let Ok(ok) = byte(input) { - Ok(ok) - } else if let Ok(ok) = character(input) { - Ok(ok) - } else if let Ok(ok) = float(input) { - Ok(ok) - } else if let Ok(ok) = int(input) { - Ok(ok) - } else { - Err(Reject) - } -} - -fn literal_suffix(input: Cursor) -> Cursor { - match ident_not_raw(input) { - Ok((input, _)) => input, - Err(Reject) => input, - } -} - -fn string(input: Cursor) -> Result { - if let Ok(input) = input.parse("\"") { - cooked_string(input) - } else if let Ok(input) = input.parse("r") { - raw_string(input) - } else { - Err(Reject) - } -} - -fn cooked_string(input: Cursor) -> Result { - let mut chars = input.char_indices().peekable(); - - while let Some((i, ch)) = chars.next() { - match ch { - '"' => { - let input = input.advance(i + 1); - return Ok(literal_suffix(input)); - } - '\r' => match chars.next() { - Some((_, '\n')) => {} - _ => break, - }, - '\\' => match chars.next() { - Some((_, 'x')) => { - if !backslash_x_char(&mut chars) { - break; - } - } - Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\')) - | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {} - Some((_, 'u')) => { - if !backslash_u(&mut chars) { - break; - } - } - Some((_, ch @ '\n')) | Some((_, ch @ '\r')) => { - let mut last = ch; - loop { - if last == '\r' && chars.next().map_or(true, |(_, ch)| ch != '\n') { - return Err(Reject); - } - match chars.peek() { - Some((_, ch)) if ch.is_whitespace() => { - last = *ch; - chars.next(); - } - _ => break, - } - } - } - _ => break, - }, - _ch => {} - } - } - Err(Reject) -} - -fn byte_string(input: Cursor) -> Result { - if let Ok(input) = input.parse("b\"") { - cooked_byte_string(input) - } else if let Ok(input) = input.parse("br") { - raw_string(input) - } else { - Err(Reject) - } -} - -fn cooked_byte_string(mut input: Cursor) -> Result { - let mut bytes = input.bytes().enumerate(); - while let Some((offset, b)) = bytes.next() { - match b { - b'"' => { - let input = input.advance(offset + 1); - return Ok(literal_suffix(input)); - } - b'\r' => match bytes.next() { - Some((_, b'\n')) => {} - _ => break, - }, - b'\\' => match bytes.next() { - Some((_, b'x')) => { - if !backslash_x_byte(&mut bytes) { - break; - } - } - Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\')) - | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {} - Some((newline, b @ b'\n')) | Some((newline, b @ b'\r')) => { - let mut last = b as char; - let rest = input.advance(newline + 1); - let mut chars = rest.char_indices(); - loop { - if last == '\r' && chars.next().map_or(true, |(_, ch)| ch != '\n') { - return Err(Reject); - } - match chars.next() { - Some((_, ch)) if ch.is_whitespace() => last = ch, - Some((offset, _)) => { - input = rest.advance(offset); - bytes = input.bytes().enumerate(); - break; - } - None => return Err(Reject), - } - } - } - _ => break, - }, - b if b < 0x80 => {} - _ => break, - } - } - Err(Reject) -} - -fn raw_string(input: Cursor) -> Result { - let mut chars = input.char_indices(); - let mut n = 0; - for (i, ch) in &mut chars { - match ch { - '"' => { - n = i; - break; - } - '#' => {} - _ => return Err(Reject), - } - } - 
while let Some((i, ch)) = chars.next() { - match ch { - '"' if input.rest[i + 1..].starts_with(&input.rest[..n]) => { - let rest = input.advance(i + 1 + n); - return Ok(literal_suffix(rest)); - } - '\r' => match chars.next() { - Some((_, '\n')) => {} - _ => break, - }, - _ => {} - } - } - Err(Reject) -} - -fn byte(input: Cursor) -> Result { - let input = input.parse("b'")?; - let mut bytes = input.bytes().enumerate(); - let ok = match bytes.next().map(|(_, b)| b) { - Some(b'\\') => match bytes.next().map(|(_, b)| b) { - Some(b'x') => backslash_x_byte(&mut bytes), - Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'') - | Some(b'"') => true, - _ => false, - }, - b => b.is_some(), - }; - if !ok { - return Err(Reject); - } - let (offset, _) = bytes.next().ok_or(Reject)?; - if !input.chars().as_str().is_char_boundary(offset) { - return Err(Reject); - } - let input = input.advance(offset).parse("'")?; - Ok(literal_suffix(input)) -} - -fn character(input: Cursor) -> Result { - let input = input.parse("'")?; - let mut chars = input.char_indices(); - let ok = match chars.next().map(|(_, ch)| ch) { - Some('\\') => match chars.next().map(|(_, ch)| ch) { - Some('x') => backslash_x_char(&mut chars), - Some('u') => backslash_u(&mut chars), - Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => { - true - } - _ => false, - }, - ch => ch.is_some(), - }; - if !ok { - return Err(Reject); - } - let (idx, _) = chars.next().ok_or(Reject)?; - let input = input.advance(idx).parse("'")?; - Ok(literal_suffix(input)) -} - -macro_rules! next_ch { - ($chars:ident @ $pat:pat $(| $rest:pat)*) => { - match $chars.next() { - Some((_, ch)) => match ch { - $pat $(| $rest)* => ch, - _ => return false, - }, - None => return false, - } - }; -} - -fn backslash_x_char(chars: &mut I) -> bool -where - I: Iterator, -{ - next_ch!(chars @ '0'..='7'); - next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F'); - true -} - -fn backslash_x_byte(chars: &mut I) -> bool -where - I: Iterator, -{ - next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F'); - next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F'); - true -} - -fn backslash_u(chars: &mut I) -> bool -where - I: Iterator, -{ - next_ch!(chars @ '{'); - let mut value = 0; - let mut len = 0; - for (_, ch) in chars { - let digit = match ch { - '0'..='9' => ch as u8 - b'0', - 'a'..='f' => 10 + ch as u8 - b'a', - 'A'..='F' => 10 + ch as u8 - b'A', - '_' if len > 0 => continue, - '}' if len > 0 => return char::from_u32(value).is_some(), - _ => return false, - }; - if len == 6 { - return false; - } - value *= 0x10; - value += u32::from(digit); - len += 1; - } - false -} - -fn float(input: Cursor) -> Result { - let mut rest = float_digits(input)?; - if let Some(ch) = rest.chars().next() { - if is_ident_start(ch) { - rest = ident_not_raw(rest)?.0; - } - } - word_break(rest) -} - -fn float_digits(input: Cursor) -> Result { - let mut chars = input.chars().peekable(); - match chars.next() { - Some(ch) if ch >= '0' && ch <= '9' => {} - _ => return Err(Reject), - } - - let mut len = 1; - let mut has_dot = false; - let mut has_exp = false; - while let Some(&ch) = chars.peek() { - match ch { - '0'..='9' | '_' => { - chars.next(); - len += 1; - } - '.' => { - if has_dot { - break; - } - chars.next(); - if chars - .peek() - .map(|&ch| ch == '.' 
|| is_ident_start(ch)) - .unwrap_or(false) - { - return Err(Reject); - } - len += 1; - has_dot = true; - } - 'e' | 'E' => { - chars.next(); - len += 1; - has_exp = true; - break; - } - _ => break, - } - } - - if !(has_dot || has_exp) { - return Err(Reject); - } - - if has_exp { - let token_before_exp = if has_dot { - Ok(input.advance(len - 1)) - } else { - Err(Reject) - }; - let mut has_sign = false; - let mut has_exp_value = false; - while let Some(&ch) = chars.peek() { - match ch { - '+' | '-' => { - if has_exp_value { - break; - } - if has_sign { - return token_before_exp; - } - chars.next(); - len += 1; - has_sign = true; - } - '0'..='9' => { - chars.next(); - len += 1; - has_exp_value = true; - } - '_' => { - chars.next(); - len += 1; - } - _ => break, - } - } - if !has_exp_value { - return token_before_exp; - } - } - - Ok(input.advance(len)) -} - -fn int(input: Cursor) -> Result { - let mut rest = digits(input)?; - if let Some(ch) = rest.chars().next() { - if is_ident_start(ch) { - rest = ident_not_raw(rest)?.0; - } - } - word_break(rest) -} - -fn digits(mut input: Cursor) -> Result { - let base = if input.starts_with("0x") { - input = input.advance(2); - 16 - } else if input.starts_with("0o") { - input = input.advance(2); - 8 - } else if input.starts_with("0b") { - input = input.advance(2); - 2 - } else { - 10 - }; - - let mut len = 0; - let mut empty = true; - for b in input.bytes() { - match b { - b'0'..=b'9' => { - let digit = (b - b'0') as u64; - if digit >= base { - return Err(Reject); - } - } - b'a'..=b'f' => { - let digit = 10 + (b - b'a') as u64; - if digit >= base { - break; - } - } - b'A'..=b'F' => { - let digit = 10 + (b - b'A') as u64; - if digit >= base { - break; - } - } - b'_' => { - if empty && base == 10 { - return Err(Reject); - } - len += 1; - continue; - } - _ => break, - }; - len += 1; - empty = false; - } - if empty { - Err(Reject) - } else { - Ok(input.advance(len)) - } -} - -fn punct(input: Cursor) -> PResult { - let (rest, ch) = punct_char(input)?; - if ch == '\'' { - if ident_any(rest)?.0.starts_with("'") { - Err(Reject) - } else { - Ok((rest, Punct::new('\'', Spacing::Joint))) - } - } else { - let kind = match punct_char(rest) { - Ok(_) => Spacing::Joint, - Err(Reject) => Spacing::Alone, - }; - Ok((rest, Punct::new(ch, kind))) - } -} - -fn punct_char(input: Cursor) -> PResult { - if input.starts_with("//") || input.starts_with("/*") { - // Do not accept `/` of a comment as a punct. 
- return Err(Reject); - } - - let mut chars = input.chars(); - let first = match chars.next() { - Some(ch) => ch, - None => { - return Err(Reject); - } - }; - let recognized = "~!@#$%^&*-=+|;:,<.>/?'"; - if recognized.contains(first) { - Ok((input.advance(first.len_utf8()), first)) - } else { - Err(Reject) - } -} - -fn doc_comment(input: Cursor) -> PResult> { - #[cfg(span_locations)] - let lo = input.off; - let (rest, (comment, inner)) = doc_comment_contents(input)?; - let span = crate::Span::_new_stable(Span { - #[cfg(span_locations)] - lo, - #[cfg(span_locations)] - hi: rest.off, - }); - - let mut scan_for_bare_cr = comment; - while let Some(cr) = scan_for_bare_cr.find('\r') { - let rest = &scan_for_bare_cr[cr + 1..]; - if !rest.starts_with('\n') { - return Err(Reject); - } - scan_for_bare_cr = rest; - } - - let mut trees = Vec::new(); - trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone))); - if inner { - trees.push(Punct::new('!', Spacing::Alone).into()); - } - let mut stream = vec![ - TokenTree::Ident(crate::Ident::new("doc", span)), - TokenTree::Punct(Punct::new('=', Spacing::Alone)), - TokenTree::Literal(crate::Literal::string(comment)), - ]; - for tt in stream.iter_mut() { - tt.set_span(span); - } - let group = Group::new(Delimiter::Bracket, stream.into_iter().collect()); - trees.push(crate::Group::_new_stable(group).into()); - for tt in trees.iter_mut() { - tt.set_span(span); - } - Ok((rest, trees)) -} - -fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> { - if input.starts_with("//!") { - let input = input.advance(3); - let (input, s) = take_until_newline_or_eof(input); - Ok((input, (s, true))) - } else if input.starts_with("/*!") { - let (input, s) = block_comment(input)?; - Ok((input, (&s[3..s.len() - 2], true))) - } else if input.starts_with("///") { - let input = input.advance(3); - if input.starts_with("/") { - return Err(Reject); - } - let (input, s) = take_until_newline_or_eof(input); - Ok((input, (s, false))) - } else if input.starts_with("/**") && !input.rest[3..].starts_with('*') { - let (input, s) = block_comment(input)?; - Ok((input, (&s[3..s.len() - 2], false))) - } else { - Err(Reject) - } -} - -fn take_until_newline_or_eof(input: Cursor) -> (Cursor, &str) { - let chars = input.char_indices(); - - for (i, ch) in chars { - if ch == '\n' { - return (input.advance(i), &input.rest[..i]); - } else if ch == '\r' && input.rest[i + 1..].starts_with('\n') { - return (input.advance(i + 1), &input.rest[..i]); - } - } - - (input.advance(input.len()), input.rest) -} diff --git a/vendor/proc-macro2/src/wrapper.rs b/vendor/proc-macro2/src/wrapper.rs deleted file mode 100644 index dc938736..00000000 --- a/vendor/proc-macro2/src/wrapper.rs +++ /dev/null @@ -1,966 +0,0 @@ -use crate::detection::inside_proc_macro; -use crate::{fallback, Delimiter, Punct, Spacing, TokenTree}; -use std::fmt::{self, Debug, Display}; -use std::iter::FromIterator; -use std::ops::RangeBounds; -use std::panic; -#[cfg(super_unstable)] -use std::path::PathBuf; -use std::str::FromStr; - -#[derive(Clone)] -pub(crate) enum TokenStream { - Compiler(DeferredTokenStream), - Fallback(fallback::TokenStream), -} - -// Work around https://github.com/rust-lang/rust/issues/65080. -// In `impl Extend for TokenStream` which is used heavily by quote, -// we hold on to the appended tokens and do proc_macro::TokenStream::extend as -// late as possible to batch together consecutive uses of the Extend impl. 
-#[derive(Clone)] -pub(crate) struct DeferredTokenStream { - stream: proc_macro::TokenStream, - extra: Vec, -} - -pub(crate) enum LexError { - Compiler(proc_macro::LexError), - Fallback(fallback::LexError), -} - -impl LexError { - fn call_site() -> Self { - LexError::Fallback(fallback::LexError { - span: fallback::Span::call_site(), - }) - } -} - -fn mismatch() -> ! { - panic!("stable/nightly mismatch") -} - -impl DeferredTokenStream { - fn new(stream: proc_macro::TokenStream) -> Self { - DeferredTokenStream { - stream, - extra: Vec::new(), - } - } - - fn is_empty(&self) -> bool { - self.stream.is_empty() && self.extra.is_empty() - } - - fn evaluate_now(&mut self) { - // If-check provides a fast short circuit for the common case of `extra` - // being empty, which saves a round trip over the proc macro bridge. - // Improves macro expansion time in winrt by 6% in debug mode. - if !self.extra.is_empty() { - self.stream.extend(self.extra.drain(..)); - } - } - - fn into_token_stream(mut self) -> proc_macro::TokenStream { - self.evaluate_now(); - self.stream - } -} - -impl TokenStream { - pub fn new() -> TokenStream { - if inside_proc_macro() { - TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new())) - } else { - TokenStream::Fallback(fallback::TokenStream::new()) - } - } - - pub fn is_empty(&self) -> bool { - match self { - TokenStream::Compiler(tts) => tts.is_empty(), - TokenStream::Fallback(tts) => tts.is_empty(), - } - } - - fn unwrap_nightly(self) -> proc_macro::TokenStream { - match self { - TokenStream::Compiler(s) => s.into_token_stream(), - TokenStream::Fallback(_) => mismatch(), - } - } - - fn unwrap_stable(self) -> fallback::TokenStream { - match self { - TokenStream::Compiler(_) => mismatch(), - TokenStream::Fallback(s) => s, - } - } -} - -impl FromStr for TokenStream { - type Err = LexError; - - fn from_str(src: &str) -> Result { - if inside_proc_macro() { - Ok(TokenStream::Compiler(DeferredTokenStream::new( - proc_macro_parse(src)?, - ))) - } else { - Ok(TokenStream::Fallback(src.parse()?)) - } - } -} - -// Work around https://github.com/rust-lang/rust/issues/58736. -fn proc_macro_parse(src: &str) -> Result { - let result = panic::catch_unwind(|| src.parse().map_err(LexError::Compiler)); - result.unwrap_or_else(|_| Err(LexError::call_site())) -} - -impl Display for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - TokenStream::Compiler(tts) => Display::fmt(&tts.clone().into_token_stream(), f), - TokenStream::Fallback(tts) => Display::fmt(tts, f), - } - } -} - -impl From for TokenStream { - fn from(inner: proc_macro::TokenStream) -> TokenStream { - TokenStream::Compiler(DeferredTokenStream::new(inner)) - } -} - -impl From for proc_macro::TokenStream { - fn from(inner: TokenStream) -> proc_macro::TokenStream { - match inner { - TokenStream::Compiler(inner) => inner.into_token_stream(), - TokenStream::Fallback(inner) => inner.to_string().parse().unwrap(), - } - } -} - -impl From for TokenStream { - fn from(inner: fallback::TokenStream) -> TokenStream { - TokenStream::Fallback(inner) - } -} - -// Assumes inside_proc_macro(). 
-fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree { - match token { - TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(), - TokenTree::Punct(tt) => { - let spacing = match tt.spacing() { - Spacing::Joint => proc_macro::Spacing::Joint, - Spacing::Alone => proc_macro::Spacing::Alone, - }; - let mut punct = proc_macro::Punct::new(tt.as_char(), spacing); - punct.set_span(tt.span().inner.unwrap_nightly()); - punct.into() - } - TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(), - TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(), - } -} - -impl From for TokenStream { - fn from(token: TokenTree) -> TokenStream { - if inside_proc_macro() { - TokenStream::Compiler(DeferredTokenStream::new(into_compiler_token(token).into())) - } else { - TokenStream::Fallback(token.into()) - } - } -} - -impl FromIterator for TokenStream { - fn from_iter>(trees: I) -> Self { - if inside_proc_macro() { - TokenStream::Compiler(DeferredTokenStream::new( - trees.into_iter().map(into_compiler_token).collect(), - )) - } else { - TokenStream::Fallback(trees.into_iter().collect()) - } - } -} - -impl FromIterator for TokenStream { - fn from_iter>(streams: I) -> Self { - let mut streams = streams.into_iter(); - match streams.next() { - Some(TokenStream::Compiler(mut first)) => { - first.evaluate_now(); - first.stream.extend(streams.map(|s| match s { - TokenStream::Compiler(s) => s.into_token_stream(), - TokenStream::Fallback(_) => mismatch(), - })); - TokenStream::Compiler(first) - } - Some(TokenStream::Fallback(mut first)) => { - first.extend(streams.map(|s| match s { - TokenStream::Fallback(s) => s, - TokenStream::Compiler(_) => mismatch(), - })); - TokenStream::Fallback(first) - } - None => TokenStream::new(), - } - } -} - -impl Extend for TokenStream { - fn extend>(&mut self, stream: I) { - match self { - TokenStream::Compiler(tts) => { - // Here is the reason for DeferredTokenStream. 
- for token in stream { - tts.extra.push(into_compiler_token(token)); - } - } - TokenStream::Fallback(tts) => tts.extend(stream), - } - } -} - -impl Extend for TokenStream { - fn extend>(&mut self, streams: I) { - match self { - TokenStream::Compiler(tts) => { - tts.evaluate_now(); - tts.stream - .extend(streams.into_iter().map(TokenStream::unwrap_nightly)); - } - TokenStream::Fallback(tts) => { - tts.extend(streams.into_iter().map(TokenStream::unwrap_stable)); - } - } - } -} - -impl Debug for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - TokenStream::Compiler(tts) => Debug::fmt(&tts.clone().into_token_stream(), f), - TokenStream::Fallback(tts) => Debug::fmt(tts, f), - } - } -} - -impl LexError { - pub(crate) fn span(&self) -> Span { - match self { - LexError::Compiler(_) => Span::call_site(), - LexError::Fallback(e) => Span::Fallback(e.span()), - } - } -} - -impl From for LexError { - fn from(e: proc_macro::LexError) -> LexError { - LexError::Compiler(e) - } -} - -impl From for LexError { - fn from(e: fallback::LexError) -> LexError { - LexError::Fallback(e) - } -} - -impl Debug for LexError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - LexError::Compiler(e) => Debug::fmt(e, f), - LexError::Fallback(e) => Debug::fmt(e, f), - } - } -} - -impl Display for LexError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - #[cfg(lexerror_display)] - LexError::Compiler(e) => Display::fmt(e, f), - #[cfg(not(lexerror_display))] - LexError::Compiler(_e) => Display::fmt( - &fallback::LexError { - span: fallback::Span::call_site(), - }, - f, - ), - LexError::Fallback(e) => Display::fmt(e, f), - } - } -} - -#[derive(Clone)] -pub(crate) enum TokenTreeIter { - Compiler(proc_macro::token_stream::IntoIter), - Fallback(fallback::TokenTreeIter), -} - -impl IntoIterator for TokenStream { - type Item = TokenTree; - type IntoIter = TokenTreeIter; - - fn into_iter(self) -> TokenTreeIter { - match self { - TokenStream::Compiler(tts) => { - TokenTreeIter::Compiler(tts.into_token_stream().into_iter()) - } - TokenStream::Fallback(tts) => TokenTreeIter::Fallback(tts.into_iter()), - } - } -} - -impl Iterator for TokenTreeIter { - type Item = TokenTree; - - fn next(&mut self) -> Option { - let token = match self { - TokenTreeIter::Compiler(iter) => iter.next()?, - TokenTreeIter::Fallback(iter) => return iter.next(), - }; - Some(match token { - proc_macro::TokenTree::Group(tt) => crate::Group::_new(Group::Compiler(tt)).into(), - proc_macro::TokenTree::Punct(tt) => { - let spacing = match tt.spacing() { - proc_macro::Spacing::Joint => Spacing::Joint, - proc_macro::Spacing::Alone => Spacing::Alone, - }; - let mut o = Punct::new(tt.as_char(), spacing); - o.set_span(crate::Span::_new(Span::Compiler(tt.span()))); - o.into() - } - proc_macro::TokenTree::Ident(s) => crate::Ident::_new(Ident::Compiler(s)).into(), - proc_macro::TokenTree::Literal(l) => crate::Literal::_new(Literal::Compiler(l)).into(), - }) - } - - fn size_hint(&self) -> (usize, Option) { - match self { - TokenTreeIter::Compiler(tts) => tts.size_hint(), - TokenTreeIter::Fallback(tts) => tts.size_hint(), - } - } -} - -impl Debug for TokenTreeIter { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("TokenTreeIter").finish() - } -} - -#[derive(Clone, PartialEq, Eq)] -#[cfg(super_unstable)] -pub(crate) enum SourceFile { - Compiler(proc_macro::SourceFile), - Fallback(fallback::SourceFile), -} - -#[cfg(super_unstable)] -impl SourceFile { - fn 
nightly(sf: proc_macro::SourceFile) -> Self { - SourceFile::Compiler(sf) - } - - /// Get the path to this source file as a string. - pub fn path(&self) -> PathBuf { - match self { - SourceFile::Compiler(a) => a.path(), - SourceFile::Fallback(a) => a.path(), - } - } - - pub fn is_real(&self) -> bool { - match self { - SourceFile::Compiler(a) => a.is_real(), - SourceFile::Fallback(a) => a.is_real(), - } - } -} - -#[cfg(super_unstable)] -impl Debug for SourceFile { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - SourceFile::Compiler(a) => Debug::fmt(a, f), - SourceFile::Fallback(a) => Debug::fmt(a, f), - } - } -} - -#[cfg(any(super_unstable, feature = "span-locations"))] -pub(crate) struct LineColumn { - pub line: usize, - pub column: usize, -} - -#[derive(Copy, Clone)] -pub(crate) enum Span { - Compiler(proc_macro::Span), - Fallback(fallback::Span), -} - -impl Span { - pub fn call_site() -> Span { - if inside_proc_macro() { - Span::Compiler(proc_macro::Span::call_site()) - } else { - Span::Fallback(fallback::Span::call_site()) - } - } - - #[cfg(hygiene)] - pub fn mixed_site() -> Span { - if inside_proc_macro() { - Span::Compiler(proc_macro::Span::mixed_site()) - } else { - Span::Fallback(fallback::Span::mixed_site()) - } - } - - #[cfg(super_unstable)] - pub fn def_site() -> Span { - if inside_proc_macro() { - Span::Compiler(proc_macro::Span::def_site()) - } else { - Span::Fallback(fallback::Span::def_site()) - } - } - - pub fn resolved_at(&self, other: Span) -> Span { - match (self, other) { - #[cfg(hygiene)] - (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)), - - // Name resolution affects semantics, but location is only cosmetic - #[cfg(not(hygiene))] - (Span::Compiler(_), Span::Compiler(_)) => other, - - (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)), - _ => mismatch(), - } - } - - pub fn located_at(&self, other: Span) -> Span { - match (self, other) { - #[cfg(hygiene)] - (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)), - - // Name resolution affects semantics, but location is only cosmetic - #[cfg(not(hygiene))] - (Span::Compiler(_), Span::Compiler(_)) => *self, - - (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)), - _ => mismatch(), - } - } - - pub fn unwrap(self) -> proc_macro::Span { - match self { - Span::Compiler(s) => s, - Span::Fallback(_) => panic!("proc_macro::Span is only available in procedural macros"), - } - } - - #[cfg(super_unstable)] - pub fn source_file(&self) -> SourceFile { - match self { - Span::Compiler(s) => SourceFile::nightly(s.source_file()), - Span::Fallback(s) => SourceFile::Fallback(s.source_file()), - } - } - - #[cfg(any(super_unstable, feature = "span-locations"))] - pub fn start(&self) -> LineColumn { - match self { - #[cfg(proc_macro_span)] - Span::Compiler(s) => { - let proc_macro::LineColumn { line, column } = s.start(); - LineColumn { line, column } - } - #[cfg(not(proc_macro_span))] - Span::Compiler(_) => LineColumn { line: 0, column: 0 }, - Span::Fallback(s) => { - let fallback::LineColumn { line, column } = s.start(); - LineColumn { line, column } - } - } - } - - #[cfg(any(super_unstable, feature = "span-locations"))] - pub fn end(&self) -> LineColumn { - match self { - #[cfg(proc_macro_span)] - Span::Compiler(s) => { - let proc_macro::LineColumn { line, column } = s.end(); - LineColumn { line, column } - } - #[cfg(not(proc_macro_span))] - Span::Compiler(_) => LineColumn { line: 0, column: 0 }, - Span::Fallback(s) => { - 
let fallback::LineColumn { line, column } = s.end(); - LineColumn { line, column } - } - } - } - - pub fn join(&self, other: Span) -> Option { - let ret = match (self, other) { - #[cfg(proc_macro_span)] - (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.join(b)?), - (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.join(b)?), - _ => return None, - }; - Some(ret) - } - - #[cfg(super_unstable)] - pub fn eq(&self, other: &Span) -> bool { - match (self, other) { - (Span::Compiler(a), Span::Compiler(b)) => a.eq(b), - (Span::Fallback(a), Span::Fallback(b)) => a.eq(b), - _ => false, - } - } - - fn unwrap_nightly(self) -> proc_macro::Span { - match self { - Span::Compiler(s) => s, - Span::Fallback(_) => mismatch(), - } - } -} - -impl From for crate::Span { - fn from(proc_span: proc_macro::Span) -> crate::Span { - crate::Span::_new(Span::Compiler(proc_span)) - } -} - -impl From for Span { - fn from(inner: fallback::Span) -> Span { - Span::Fallback(inner) - } -} - -impl Debug for Span { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - Span::Compiler(s) => Debug::fmt(s, f), - Span::Fallback(s) => Debug::fmt(s, f), - } - } -} - -pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) { - match span { - Span::Compiler(s) => { - debug.field("span", &s); - } - Span::Fallback(s) => fallback::debug_span_field_if_nontrivial(debug, s), - } -} - -#[derive(Clone)] -pub(crate) enum Group { - Compiler(proc_macro::Group), - Fallback(fallback::Group), -} - -impl Group { - pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group { - match stream { - TokenStream::Compiler(tts) => { - let delimiter = match delimiter { - Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis, - Delimiter::Bracket => proc_macro::Delimiter::Bracket, - Delimiter::Brace => proc_macro::Delimiter::Brace, - Delimiter::None => proc_macro::Delimiter::None, - }; - Group::Compiler(proc_macro::Group::new(delimiter, tts.into_token_stream())) - } - TokenStream::Fallback(stream) => { - Group::Fallback(fallback::Group::new(delimiter, stream)) - } - } - } - - pub fn delimiter(&self) -> Delimiter { - match self { - Group::Compiler(g) => match g.delimiter() { - proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis, - proc_macro::Delimiter::Bracket => Delimiter::Bracket, - proc_macro::Delimiter::Brace => Delimiter::Brace, - proc_macro::Delimiter::None => Delimiter::None, - }, - Group::Fallback(g) => g.delimiter(), - } - } - - pub fn stream(&self) -> TokenStream { - match self { - Group::Compiler(g) => TokenStream::Compiler(DeferredTokenStream::new(g.stream())), - Group::Fallback(g) => TokenStream::Fallback(g.stream()), - } - } - - pub fn span(&self) -> Span { - match self { - Group::Compiler(g) => Span::Compiler(g.span()), - Group::Fallback(g) => Span::Fallback(g.span()), - } - } - - pub fn span_open(&self) -> Span { - match self { - #[cfg(proc_macro_span)] - Group::Compiler(g) => Span::Compiler(g.span_open()), - #[cfg(not(proc_macro_span))] - Group::Compiler(g) => Span::Compiler(g.span()), - Group::Fallback(g) => Span::Fallback(g.span_open()), - } - } - - pub fn span_close(&self) -> Span { - match self { - #[cfg(proc_macro_span)] - Group::Compiler(g) => Span::Compiler(g.span_close()), - #[cfg(not(proc_macro_span))] - Group::Compiler(g) => Span::Compiler(g.span()), - Group::Fallback(g) => Span::Fallback(g.span_close()), - } - } - - pub fn set_span(&mut self, span: Span) { - match (self, span) { - (Group::Compiler(g), Span::Compiler(s)) => g.set_span(s), - 
(Group::Fallback(g), Span::Fallback(s)) => g.set_span(s), - _ => mismatch(), - } - } - - fn unwrap_nightly(self) -> proc_macro::Group { - match self { - Group::Compiler(g) => g, - Group::Fallback(_) => mismatch(), - } - } -} - -impl From for Group { - fn from(g: fallback::Group) -> Self { - Group::Fallback(g) - } -} - -impl Display for Group { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - match self { - Group::Compiler(group) => Display::fmt(group, formatter), - Group::Fallback(group) => Display::fmt(group, formatter), - } - } -} - -impl Debug for Group { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - match self { - Group::Compiler(group) => Debug::fmt(group, formatter), - Group::Fallback(group) => Debug::fmt(group, formatter), - } - } -} - -#[derive(Clone)] -pub(crate) enum Ident { - Compiler(proc_macro::Ident), - Fallback(fallback::Ident), -} - -impl Ident { - pub fn new(string: &str, span: Span) -> Ident { - match span { - Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new(string, s)), - Span::Fallback(s) => Ident::Fallback(fallback::Ident::new(string, s)), - } - } - - pub fn new_raw(string: &str, span: Span) -> Ident { - match span { - Span::Compiler(s) => { - let p: proc_macro::TokenStream = string.parse().unwrap(); - let ident = match p.into_iter().next() { - Some(proc_macro::TokenTree::Ident(mut i)) => { - i.set_span(s); - i - } - _ => panic!(), - }; - Ident::Compiler(ident) - } - Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_raw(string, s)), - } - } - - pub fn span(&self) -> Span { - match self { - Ident::Compiler(t) => Span::Compiler(t.span()), - Ident::Fallback(t) => Span::Fallback(t.span()), - } - } - - pub fn set_span(&mut self, span: Span) { - match (self, span) { - (Ident::Compiler(t), Span::Compiler(s)) => t.set_span(s), - (Ident::Fallback(t), Span::Fallback(s)) => t.set_span(s), - _ => mismatch(), - } - } - - fn unwrap_nightly(self) -> proc_macro::Ident { - match self { - Ident::Compiler(s) => s, - Ident::Fallback(_) => mismatch(), - } - } -} - -impl PartialEq for Ident { - fn eq(&self, other: &Ident) -> bool { - match (self, other) { - (Ident::Compiler(t), Ident::Compiler(o)) => t.to_string() == o.to_string(), - (Ident::Fallback(t), Ident::Fallback(o)) => t == o, - _ => mismatch(), - } - } -} - -impl PartialEq for Ident -where - T: ?Sized + AsRef, -{ - fn eq(&self, other: &T) -> bool { - let other = other.as_ref(); - match self { - Ident::Compiler(t) => t.to_string() == other, - Ident::Fallback(t) => t == other, - } - } -} - -impl Display for Ident { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - Ident::Compiler(t) => Display::fmt(t, f), - Ident::Fallback(t) => Display::fmt(t, f), - } - } -} - -impl Debug for Ident { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - Ident::Compiler(t) => Debug::fmt(t, f), - Ident::Fallback(t) => Debug::fmt(t, f), - } - } -} - -#[derive(Clone)] -pub(crate) enum Literal { - Compiler(proc_macro::Literal), - Fallback(fallback::Literal), -} - -macro_rules! suffixed_numbers { - ($($name:ident => $kind:ident,)*) => ($( - pub fn $name(n: $kind) -> Literal { - if inside_proc_macro() { - Literal::Compiler(proc_macro::Literal::$name(n)) - } else { - Literal::Fallback(fallback::Literal::$name(n)) - } - } - )*) -} - -macro_rules! 
unsuffixed_integers { - ($($name:ident => $kind:ident,)*) => ($( - pub fn $name(n: $kind) -> Literal { - if inside_proc_macro() { - Literal::Compiler(proc_macro::Literal::$name(n)) - } else { - Literal::Fallback(fallback::Literal::$name(n)) - } - } - )*) -} - -impl Literal { - suffixed_numbers! { - u8_suffixed => u8, - u16_suffixed => u16, - u32_suffixed => u32, - u64_suffixed => u64, - u128_suffixed => u128, - usize_suffixed => usize, - i8_suffixed => i8, - i16_suffixed => i16, - i32_suffixed => i32, - i64_suffixed => i64, - i128_suffixed => i128, - isize_suffixed => isize, - - f32_suffixed => f32, - f64_suffixed => f64, - } - - unsuffixed_integers! { - u8_unsuffixed => u8, - u16_unsuffixed => u16, - u32_unsuffixed => u32, - u64_unsuffixed => u64, - u128_unsuffixed => u128, - usize_unsuffixed => usize, - i8_unsuffixed => i8, - i16_unsuffixed => i16, - i32_unsuffixed => i32, - i64_unsuffixed => i64, - i128_unsuffixed => i128, - isize_unsuffixed => isize, - } - - pub fn f32_unsuffixed(f: f32) -> Literal { - if inside_proc_macro() { - Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f)) - } else { - Literal::Fallback(fallback::Literal::f32_unsuffixed(f)) - } - } - - pub fn f64_unsuffixed(f: f64) -> Literal { - if inside_proc_macro() { - Literal::Compiler(proc_macro::Literal::f64_unsuffixed(f)) - } else { - Literal::Fallback(fallback::Literal::f64_unsuffixed(f)) - } - } - - pub fn string(t: &str) -> Literal { - if inside_proc_macro() { - Literal::Compiler(proc_macro::Literal::string(t)) - } else { - Literal::Fallback(fallback::Literal::string(t)) - } - } - - pub fn character(t: char) -> Literal { - if inside_proc_macro() { - Literal::Compiler(proc_macro::Literal::character(t)) - } else { - Literal::Fallback(fallback::Literal::character(t)) - } - } - - pub fn byte_string(bytes: &[u8]) -> Literal { - if inside_proc_macro() { - Literal::Compiler(proc_macro::Literal::byte_string(bytes)) - } else { - Literal::Fallback(fallback::Literal::byte_string(bytes)) - } - } - - pub fn span(&self) -> Span { - match self { - Literal::Compiler(lit) => Span::Compiler(lit.span()), - Literal::Fallback(lit) => Span::Fallback(lit.span()), - } - } - - pub fn set_span(&mut self, span: Span) { - match (self, span) { - (Literal::Compiler(lit), Span::Compiler(s)) => lit.set_span(s), - (Literal::Fallback(lit), Span::Fallback(s)) => lit.set_span(s), - _ => mismatch(), - } - } - - pub fn subspan>(&self, range: R) -> Option { - match self { - #[cfg(proc_macro_span)] - Literal::Compiler(lit) => lit.subspan(range).map(Span::Compiler), - #[cfg(not(proc_macro_span))] - Literal::Compiler(_lit) => None, - Literal::Fallback(lit) => lit.subspan(range).map(Span::Fallback), - } - } - - fn unwrap_nightly(self) -> proc_macro::Literal { - match self { - Literal::Compiler(s) => s, - Literal::Fallback(_) => mismatch(), - } - } -} - -impl From for Literal { - fn from(s: fallback::Literal) -> Literal { - Literal::Fallback(s) - } -} - -impl FromStr for Literal { - type Err = LexError; - - fn from_str(repr: &str) -> Result { - if inside_proc_macro() { - #[cfg(literal_from_str)] - { - proc_macro::Literal::from_str(repr) - .map(Literal::Compiler) - .map_err(LexError::Compiler) - } - #[cfg(not(literal_from_str))] - { - let tokens = proc_macro_parse(repr)?; - let mut iter = tokens.into_iter(); - if let (Some(proc_macro::TokenTree::Literal(literal)), None) = - (iter.next(), iter.next()) - { - if literal.to_string().len() == repr.len() { - return Ok(Literal::Compiler(literal)); - } - } - Err(LexError::call_site()) - } - } else { - let literal 
= fallback::Literal::from_str(repr)?; - Ok(Literal::Fallback(literal)) - } - } -} - -impl Display for Literal { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - Literal::Compiler(t) => Display::fmt(t, f), - Literal::Fallback(t) => Display::fmt(t, f), - } - } -} - -impl Debug for Literal { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - Literal::Compiler(t) => Debug::fmt(t, f), - Literal::Fallback(t) => Debug::fmt(t, f), - } - } -} diff --git a/vendor/proc-macro2/tests/comments.rs b/vendor/proc-macro2/tests/comments.rs deleted file mode 100644 index 708cccb8..00000000 --- a/vendor/proc-macro2/tests/comments.rs +++ /dev/null @@ -1,103 +0,0 @@ -use proc_macro2::{Delimiter, Literal, Spacing, TokenStream, TokenTree}; - -// #[doc = "..."] -> "..." -fn lit_of_outer_doc_comment(tokens: TokenStream) -> Literal { - lit_of_doc_comment(tokens, false) -} - -// #![doc = "..."] -> "..." -fn lit_of_inner_doc_comment(tokens: TokenStream) -> Literal { - lit_of_doc_comment(tokens, true) -} - -fn lit_of_doc_comment(tokens: TokenStream, inner: bool) -> Literal { - let mut iter = tokens.clone().into_iter(); - match iter.next().unwrap() { - TokenTree::Punct(punct) => { - assert_eq!(punct.as_char(), '#'); - assert_eq!(punct.spacing(), Spacing::Alone); - } - _ => panic!("wrong token {:?}", tokens), - } - if inner { - match iter.next().unwrap() { - TokenTree::Punct(punct) => { - assert_eq!(punct.as_char(), '!'); - assert_eq!(punct.spacing(), Spacing::Alone); - } - _ => panic!("wrong token {:?}", tokens), - } - } - iter = match iter.next().unwrap() { - TokenTree::Group(group) => { - assert_eq!(group.delimiter(), Delimiter::Bracket); - assert!(iter.next().is_none(), "unexpected token {:?}", tokens); - group.stream().into_iter() - } - _ => panic!("wrong token {:?}", tokens), - }; - match iter.next().unwrap() { - TokenTree::Ident(ident) => assert_eq!(ident.to_string(), "doc"), - _ => panic!("wrong token {:?}", tokens), - } - match iter.next().unwrap() { - TokenTree::Punct(punct) => { - assert_eq!(punct.as_char(), '='); - assert_eq!(punct.spacing(), Spacing::Alone); - } - _ => panic!("wrong token {:?}", tokens), - } - match iter.next().unwrap() { - TokenTree::Literal(literal) => { - assert!(iter.next().is_none(), "unexpected token {:?}", tokens); - literal - } - _ => panic!("wrong token {:?}", tokens), - } -} - -#[test] -fn closed_immediately() { - let stream = "/**/".parse::().unwrap(); - let tokens = stream.into_iter().collect::>(); - assert!(tokens.is_empty(), "not empty -- {:?}", tokens); -} - -#[test] -fn incomplete() { - assert!("/*/".parse::().is_err()); -} - -#[test] -fn lit() { - let stream = "/// doc".parse::().unwrap(); - let lit = lit_of_outer_doc_comment(stream); - assert_eq!(lit.to_string(), "\" doc\""); - - let stream = "//! doc".parse::().unwrap(); - let lit = lit_of_inner_doc_comment(stream); - assert_eq!(lit.to_string(), "\" doc\""); - - let stream = "/** doc */".parse::().unwrap(); - let lit = lit_of_outer_doc_comment(stream); - assert_eq!(lit.to_string(), "\" doc \""); - - let stream = "/*! 
doc */".parse::().unwrap(); - let lit = lit_of_inner_doc_comment(stream); - assert_eq!(lit.to_string(), "\" doc \""); -} - -#[test] -fn carriage_return() { - let stream = "///\r\n".parse::().unwrap(); - let lit = lit_of_outer_doc_comment(stream); - assert_eq!(lit.to_string(), "\"\""); - - let stream = "/**\r\n*/".parse::().unwrap(); - let lit = lit_of_outer_doc_comment(stream); - assert_eq!(lit.to_string(), "\"\\r\\n\""); - - "///\r".parse::().unwrap_err(); - "///\r \n".parse::().unwrap_err(); - "/**\r \n*/".parse::().unwrap_err(); -} diff --git a/vendor/proc-macro2/tests/features.rs b/vendor/proc-macro2/tests/features.rs deleted file mode 100644 index 073f6e60..00000000 --- a/vendor/proc-macro2/tests/features.rs +++ /dev/null @@ -1,8 +0,0 @@ -#[test] -#[ignore] -fn make_sure_no_proc_macro() { - assert!( - !cfg!(feature = "proc-macro"), - "still compiled with proc_macro?" - ); -} diff --git a/vendor/proc-macro2/tests/marker.rs b/vendor/proc-macro2/tests/marker.rs deleted file mode 100644 index 70e57677..00000000 --- a/vendor/proc-macro2/tests/marker.rs +++ /dev/null @@ -1,92 +0,0 @@ -use proc_macro2::*; - -macro_rules! assert_impl { - ($ty:ident is $($marker:ident) and +) => { - #[test] - #[allow(non_snake_case)] - fn $ty() { - fn assert_implemented() {} - assert_implemented::<$ty>(); - } - }; - - ($ty:ident is not $($marker:ident) or +) => { - #[test] - #[allow(non_snake_case)] - fn $ty() { - $( - { - // Implemented for types that implement $marker. - trait IsNotImplemented { - fn assert_not_implemented() {} - } - impl IsNotImplemented for T {} - - // Implemented for the type being tested. - trait IsImplemented { - fn assert_not_implemented() {} - } - impl IsImplemented for $ty {} - - // If $ty does not implement $marker, there is no ambiguity - // in the following trait method call. - <$ty>::assert_not_implemented(); - } - )+ - } - }; -} - -assert_impl!(Delimiter is Send and Sync); -assert_impl!(Spacing is Send and Sync); - -assert_impl!(Group is not Send or Sync); -assert_impl!(Ident is not Send or Sync); -assert_impl!(LexError is not Send or Sync); -assert_impl!(Literal is not Send or Sync); -assert_impl!(Punct is not Send or Sync); -assert_impl!(Span is not Send or Sync); -assert_impl!(TokenStream is not Send or Sync); -assert_impl!(TokenTree is not Send or Sync); - -#[cfg(procmacro2_semver_exempt)] -mod semver_exempt { - use super::*; - - assert_impl!(LineColumn is Send and Sync); - - assert_impl!(SourceFile is not Send or Sync); -} - -#[cfg(not(no_libprocmacro_unwind_safe))] -mod unwind_safe { - use super::*; - use std::panic::{RefUnwindSafe, UnwindSafe}; - - macro_rules! assert_unwind_safe { - ($($types:ident)*) => { - $( - assert_impl!($types is UnwindSafe and RefUnwindSafe); - )* - }; - } - - assert_unwind_safe! { - Delimiter - Group - Ident - LexError - Literal - Punct - Spacing - Span - TokenStream - TokenTree - } - - #[cfg(procmacro2_semver_exempt)] - assert_unwind_safe! 
{ - LineColumn - SourceFile - } -} diff --git a/vendor/proc-macro2/tests/test.rs b/vendor/proc-macro2/tests/test.rs deleted file mode 100644 index 75a880f5..00000000 --- a/vendor/proc-macro2/tests/test.rs +++ /dev/null @@ -1,562 +0,0 @@ -use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree}; -use std::panic; -use std::str::{self, FromStr}; - -#[test] -fn idents() { - assert_eq!( - Ident::new("String", Span::call_site()).to_string(), - "String" - ); - assert_eq!(Ident::new("fn", Span::call_site()).to_string(), "fn"); - assert_eq!(Ident::new("_", Span::call_site()).to_string(), "_"); -} - -#[test] -#[cfg(procmacro2_semver_exempt)] -fn raw_idents() { - assert_eq!( - Ident::new_raw("String", Span::call_site()).to_string(), - "r#String" - ); - assert_eq!(Ident::new_raw("fn", Span::call_site()).to_string(), "r#fn"); - assert_eq!(Ident::new_raw("_", Span::call_site()).to_string(), "r#_"); -} - -#[test] -#[should_panic(expected = "Ident is not allowed to be empty; use Option")] -fn ident_empty() { - Ident::new("", Span::call_site()); -} - -#[test] -#[should_panic(expected = "Ident cannot be a number; use Literal instead")] -fn ident_number() { - Ident::new("255", Span::call_site()); -} - -#[test] -#[should_panic(expected = "\"a#\" is not a valid Ident")] -fn ident_invalid() { - Ident::new("a#", Span::call_site()); -} - -#[test] -#[should_panic(expected = "not a valid Ident")] -fn raw_ident_empty() { - Ident::new("r#", Span::call_site()); -} - -#[test] -#[should_panic(expected = "not a valid Ident")] -fn raw_ident_number() { - Ident::new("r#255", Span::call_site()); -} - -#[test] -#[should_panic(expected = "\"r#a#\" is not a valid Ident")] -fn raw_ident_invalid() { - Ident::new("r#a#", Span::call_site()); -} - -#[test] -#[should_panic(expected = "not a valid Ident")] -fn lifetime_empty() { - Ident::new("'", Span::call_site()); -} - -#[test] -#[should_panic(expected = "not a valid Ident")] -fn lifetime_number() { - Ident::new("'255", Span::call_site()); -} - -#[test] -fn lifetime_invalid() { - let result = panic::catch_unwind(|| Ident::new("'a#", Span::call_site())); - match result { - Err(box_any) => { - let message = box_any.downcast_ref::().unwrap(); - let expected1 = r#""\'a#" is not a valid Ident"#; // 1.31.0 .. 1.53.0 - let expected2 = r#""'a#" is not a valid Ident"#; // 1.53.0 .. 
- assert!( - message == expected1 || message == expected2, - "panic message does not match expected string\n\ - \x20 panic message: `{:?}`\n\ - \x20expected message: `{:?}`", - message, - expected2, - ); - } - Ok(_) => panic!("test did not panic as expected"), - } -} - -#[test] -fn literal_string() { - assert_eq!(Literal::string("foo").to_string(), "\"foo\""); - assert_eq!(Literal::string("\"").to_string(), "\"\\\"\""); - assert_eq!(Literal::string("didn't").to_string(), "\"didn't\""); -} - -#[test] -fn literal_raw_string() { - "r\"\r\n\"".parse::().unwrap(); -} - -#[test] -fn literal_character() { - assert_eq!(Literal::character('x').to_string(), "'x'"); - assert_eq!(Literal::character('\'').to_string(), "'\\''"); - assert_eq!(Literal::character('"').to_string(), "'\"'"); -} - -#[test] -fn literal_float() { - assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0"); -} - -#[test] -fn literal_suffix() { - fn token_count(p: &str) -> usize { - p.parse::().unwrap().into_iter().count() - } - - assert_eq!(token_count("999u256"), 1); - assert_eq!(token_count("999r#u256"), 3); - assert_eq!(token_count("1."), 1); - assert_eq!(token_count("1.f32"), 3); - assert_eq!(token_count("1.0_0"), 1); - assert_eq!(token_count("1._0"), 3); - assert_eq!(token_count("1._m"), 3); - assert_eq!(token_count("\"\"s"), 1); - assert_eq!(token_count("r\"\"r"), 1); - assert_eq!(token_count("b\"\"b"), 1); - assert_eq!(token_count("br\"\"br"), 1); - assert_eq!(token_count("r#\"\"#r"), 1); - assert_eq!(token_count("'c'c"), 1); - assert_eq!(token_count("b'b'b"), 1); - assert_eq!(token_count("0E"), 1); - assert_eq!(token_count("0o0A"), 1); - assert_eq!(token_count("0E--0"), 4); - assert_eq!(token_count("0.0ECMA"), 1); -} - -#[test] -fn literal_iter_negative() { - let negative_literal = Literal::i32_suffixed(-3); - let tokens = TokenStream::from(TokenTree::Literal(negative_literal)); - let mut iter = tokens.into_iter(); - match iter.next().unwrap() { - TokenTree::Punct(punct) => { - assert_eq!(punct.as_char(), '-'); - assert_eq!(punct.spacing(), Spacing::Alone); - } - unexpected => panic!("unexpected token {:?}", unexpected), - } - match iter.next().unwrap() { - TokenTree::Literal(literal) => { - assert_eq!(literal.to_string(), "3i32"); - } - unexpected => panic!("unexpected token {:?}", unexpected), - } - assert!(iter.next().is_none()); -} - -#[test] -fn literal_parse() { - assert!("1".parse::().is_ok()); - assert!("1.0".parse::().is_ok()); - assert!("'a'".parse::().is_ok()); - assert!("\"\n\"".parse::().is_ok()); - assert!("0 1".parse::().is_err()); - assert!(" 0".parse::().is_err()); - assert!("0 ".parse::().is_err()); - assert!("/* comment */0".parse::().is_err()); - assert!("0/* comment */".parse::().is_err()); - assert!("0// comment".parse::().is_err()); -} - -#[test] -fn roundtrip() { - fn roundtrip(p: &str) { - println!("parse: {}", p); - let s = p.parse::().unwrap().to_string(); - println!("first: {}", s); - let s2 = s.to_string().parse::().unwrap().to_string(); - assert_eq!(s, s2); - } - roundtrip("a"); - roundtrip("<<"); - roundtrip("<<="); - roundtrip( - " - 1 - 1.0 - 1f32 - 2f64 - 1usize - 4isize - 4e10 - 1_000 - 1_0i32 - 8u8 - 9 - 0 - 0xffffffffffffffffffffffffffffffff - 1x - 1u80 - 1f320 - ", - ); - roundtrip("'a"); - roundtrip("'_"); - roundtrip("'static"); - roundtrip("'\\u{10__FFFF}'"); - roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\""); -} - -#[test] -fn fail() { - fn fail(p: &str) { - if let Ok(s) = p.parse::() { - panic!("should have failed to parse: {}\n{:#?}", p, s); - } - } - fail("' static"); - 
fail("r#1"); - fail("r#_"); - fail("\"\\u{0000000}\""); // overlong unicode escape (rust allows at most 6 hex digits) - fail("\"\\u{999999}\""); // outside of valid range of char - fail("\"\\u{_0}\""); // leading underscore - fail("\"\\u{}\""); // empty - fail("b\"\r\""); // bare carriage return in byte string - fail("r\"\r\""); // bare carriage return in raw string - fail("\"\\\r \""); // backslash carriage return - fail("'aa'aa"); - fail("br##\"\"#"); - fail("\"\\\n\u{85}\r\""); -} - -#[cfg(span_locations)] -#[test] -fn span_test() { - check_spans( - "\ -/// This is a document comment -testing 123 -{ - testing 234 -}", - &[ - (1, 0, 1, 30), // # - (1, 0, 1, 30), // [ ... ] - (1, 0, 1, 30), // doc - (1, 0, 1, 30), // = - (1, 0, 1, 30), // "This is..." - (2, 0, 2, 7), // testing - (2, 8, 2, 11), // 123 - (3, 0, 5, 1), // { ... } - (4, 2, 4, 9), // testing - (4, 10, 4, 13), // 234 - ], - ); -} - -#[cfg(procmacro2_semver_exempt)] -#[cfg(not(nightly))] -#[test] -fn default_span() { - let start = Span::call_site().start(); - assert_eq!(start.line, 1); - assert_eq!(start.column, 0); - let end = Span::call_site().end(); - assert_eq!(end.line, 1); - assert_eq!(end.column, 0); - let source_file = Span::call_site().source_file(); - assert_eq!(source_file.path().to_string_lossy(), ""); - assert!(!source_file.is_real()); -} - -#[cfg(procmacro2_semver_exempt)] -#[test] -fn span_join() { - let source1 = "aaa\nbbb" - .parse::() - .unwrap() - .into_iter() - .collect::>(); - let source2 = "ccc\nddd" - .parse::() - .unwrap() - .into_iter() - .collect::>(); - - assert!(source1[0].span().source_file() != source2[0].span().source_file()); - assert_eq!( - source1[0].span().source_file(), - source1[1].span().source_file() - ); - - let joined1 = source1[0].span().join(source1[1].span()); - let joined2 = source1[0].span().join(source2[0].span()); - assert!(joined1.is_some()); - assert!(joined2.is_none()); - - let start = joined1.unwrap().start(); - let end = joined1.unwrap().end(); - assert_eq!(start.line, 1); - assert_eq!(start.column, 0); - assert_eq!(end.line, 2); - assert_eq!(end.column, 3); - - assert_eq!( - joined1.unwrap().source_file(), - source1[0].span().source_file() - ); -} - -#[test] -fn no_panic() { - let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap(); - assert!(s.parse::().is_err()); -} - -#[test] -fn punct_before_comment() { - let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter(); - match tts.next().unwrap() { - TokenTree::Punct(tt) => { - assert_eq!(tt.as_char(), '~'); - assert_eq!(tt.spacing(), Spacing::Alone); - } - wrong => panic!("wrong token {:?}", wrong), - } -} - -#[test] -fn joint_last_token() { - // This test verifies that we match the behavior of libproc_macro *not* in - // the range nightly-2020-09-06 through nightly-2020-09-10, in which this - // behavior was temporarily broken. 
- // See https://github.com/rust-lang/rust/issues/76399 - - let joint_punct = Punct::new(':', Spacing::Joint); - let stream = TokenStream::from(TokenTree::Punct(joint_punct)); - let punct = match stream.into_iter().next().unwrap() { - TokenTree::Punct(punct) => punct, - _ => unreachable!(), - }; - assert_eq!(punct.spacing(), Spacing::Joint); -} - -#[test] -fn raw_identifier() { - let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter(); - match tts.next().unwrap() { - TokenTree::Ident(raw) => assert_eq!("r#dyn", raw.to_string()), - wrong => panic!("wrong token {:?}", wrong), - } - assert!(tts.next().is_none()); -} - -#[test] -fn test_debug_ident() { - let ident = Ident::new("proc_macro", Span::call_site()); - - #[cfg(not(span_locations))] - let expected = "Ident(proc_macro)"; - - #[cfg(span_locations)] - let expected = "Ident { sym: proc_macro }"; - - assert_eq!(expected, format!("{:?}", ident)); -} - -#[test] -fn test_debug_tokenstream() { - let tts = TokenStream::from_str("[a + 1]").unwrap(); - - #[cfg(not(span_locations))] - let expected = "\ -TokenStream [ - Group { - delimiter: Bracket, - stream: TokenStream [ - Ident { - sym: a, - }, - Punct { - char: '+', - spacing: Alone, - }, - Literal { - lit: 1, - }, - ], - }, -]\ - "; - - #[cfg(not(span_locations))] - let expected_before_trailing_commas = "\ -TokenStream [ - Group { - delimiter: Bracket, - stream: TokenStream [ - Ident { - sym: a - }, - Punct { - char: '+', - spacing: Alone - }, - Literal { - lit: 1 - } - ] - } -]\ - "; - - #[cfg(span_locations)] - let expected = "\ -TokenStream [ - Group { - delimiter: Bracket, - stream: TokenStream [ - Ident { - sym: a, - span: bytes(2..3), - }, - Punct { - char: '+', - spacing: Alone, - span: bytes(4..5), - }, - Literal { - lit: 1, - span: bytes(6..7), - }, - ], - span: bytes(1..8), - }, -]\ - "; - - #[cfg(span_locations)] - let expected_before_trailing_commas = "\ -TokenStream [ - Group { - delimiter: Bracket, - stream: TokenStream [ - Ident { - sym: a, - span: bytes(2..3) - }, - Punct { - char: '+', - spacing: Alone, - span: bytes(4..5) - }, - Literal { - lit: 1, - span: bytes(6..7) - } - ], - span: bytes(1..8) - } -]\ - "; - - let actual = format!("{:#?}", tts); - if actual.ends_with(",\n]") { - assert_eq!(expected, actual); - } else { - assert_eq!(expected_before_trailing_commas, actual); - } -} - -#[test] -fn default_tokenstream_is_empty() { - let default_token_stream: TokenStream = Default::default(); - - assert!(default_token_stream.is_empty()); -} - -#[test] -fn tuple_indexing() { - // This behavior may change depending on https://github.com/rust-lang/rust/pull/71322 - let mut tokens = "tuple.0.0".parse::().unwrap().into_iter(); - assert_eq!("tuple", tokens.next().unwrap().to_string()); - assert_eq!(".", tokens.next().unwrap().to_string()); - assert_eq!("0.0", tokens.next().unwrap().to_string()); - assert!(tokens.next().is_none()); -} - -#[cfg(span_locations)] -#[test] -fn non_ascii_tokens() { - check_spans("// abc", &[]); - check_spans("// ábc", &[]); - check_spans("// abc x", &[]); - check_spans("// ábc x", &[]); - check_spans("/* abc */ x", &[(1, 10, 1, 11)]); - check_spans("/* ábc */ x", &[(1, 10, 1, 11)]); - check_spans("/* ab\nc */ x", &[(2, 5, 2, 6)]); - check_spans("/* áb\nc */ x", &[(2, 5, 2, 6)]); - check_spans("/*** abc */ x", &[(1, 12, 1, 13)]); - check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]); - check_spans(r#""abc""#, &[(1, 0, 1, 5)]); - check_spans(r#""ábc""#, &[(1, 0, 1, 5)]); - check_spans(r###"r#"abc"#"###, &[(1, 0, 1, 8)]); - 
check_spans(r###"r#"ábc"#"###, &[(1, 0, 1, 8)]); - check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]); - check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]); - check_spans("'a'", &[(1, 0, 1, 3)]); - check_spans("'á'", &[(1, 0, 1, 3)]); - check_spans("//! abc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]); - check_spans("//! ábc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]); - check_spans("//! abc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]); - check_spans("//! ábc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]); - check_spans("/*! abc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]); - check_spans("/*! ábc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]); - check_spans("/*! a\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]); - check_spans("/*! á\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]); - check_spans("abc", &[(1, 0, 1, 3)]); - check_spans("ábc", &[(1, 0, 1, 3)]); - check_spans("ábć", &[(1, 0, 1, 3)]); - check_spans("abc// foo", &[(1, 0, 1, 3)]); - check_spans("ábc// foo", &[(1, 0, 1, 3)]); - check_spans("ábć// foo", &[(1, 0, 1, 3)]); - check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]); - check_spans("b\"a\\\n\u{00a0}c\"", &[(1, 0, 2, 3)]); -} - -#[cfg(span_locations)] -fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) { - let ts = p.parse::().unwrap(); - check_spans_internal(ts, &mut lines); - assert!(lines.is_empty(), "leftover ranges: {:?}", lines); -} - -#[cfg(span_locations)] -fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) { - for i in ts { - if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() { - *lines = rest; - - let start = i.span().start(); - assert_eq!(start.line, sline, "sline did not match for {}", i); - assert_eq!(start.column, scol, "scol did not match for {}", i); - - let end = i.span().end(); - assert_eq!(end.line, eline, "eline did not match for {}", i); - assert_eq!(end.column, ecol, "ecol did not match for {}", i); - - if let TokenTree::Group(g) = i { - check_spans_internal(g.stream().clone(), lines); - } - } - } -} diff --git a/vendor/proc-macro2/tests/test_fmt.rs b/vendor/proc-macro2/tests/test_fmt.rs deleted file mode 100644 index 99a0aee5..00000000 --- a/vendor/proc-macro2/tests/test_fmt.rs +++ /dev/null @@ -1,26 +0,0 @@ -use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree}; -use std::iter::{self, FromIterator}; - -#[test] -fn test_fmt_group() { - let ident = Ident::new("x", Span::call_site()); - let inner = TokenStream::from_iter(iter::once(TokenTree::Ident(ident))); - let parens_empty = Group::new(Delimiter::Parenthesis, TokenStream::new()); - let parens_nonempty = Group::new(Delimiter::Parenthesis, inner.clone()); - let brackets_empty = Group::new(Delimiter::Bracket, TokenStream::new()); - let brackets_nonempty = Group::new(Delimiter::Bracket, inner.clone()); - let braces_empty = Group::new(Delimiter::Brace, TokenStream::new()); - let braces_nonempty = Group::new(Delimiter::Brace, inner.clone()); - let none_empty = Group::new(Delimiter::None, TokenStream::new()); - let none_nonempty = Group::new(Delimiter::None, inner.clone()); - - // Matches libproc_macro. 
- assert_eq!("()", parens_empty.to_string()); - assert_eq!("(x)", parens_nonempty.to_string()); - assert_eq!("[]", brackets_empty.to_string()); - assert_eq!("[x]", brackets_nonempty.to_string()); - assert_eq!("{ }", braces_empty.to_string()); - assert_eq!("{ x }", braces_nonempty.to_string()); - assert_eq!("", none_empty.to_string()); - assert_eq!("x", none_nonempty.to_string()); -} diff --git a/vendor/quote/.cargo-checksum.json b/vendor/quote/.cargo-checksum.json deleted file mode 100644 index 768055df..00000000 --- a/vendor/quote/.cargo-checksum.json +++ /dev/null @@ -1 +0,0 @@ -{"files":{"Cargo.toml":"d1e6bb8b4ac54b84f367a4e1b46e7dca3b1a744017d8f7fa2f4c11a8730e657a","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"9209682116de84bb9cc7be6ccf44478b46b909c7857f9e186d90bcff522af864","src/ext.rs":"a9fed3a1a4c9d3f2de717ba808af99291b995db2cbf8067f4b6927c39cc62bc6","src/format.rs":"a9c3e3a333c6dacf6e330d02b4c726862e273df1c2c6be6da199049cd1e521db","src/ident_fragment.rs":"e66a63f6e9020f2639a71f120d627bc6cfd60081a6caf8a1d735b59ee2413d29","src/lib.rs":"2500b1955d139e5b467df046cda4f2837fb1edace838aa190020752ab79314c4","src/runtime.rs":"f2d1fa6084764d98f98b96344cf675886a79b46a845c592e604f96bbde9aca07","src/spanned.rs":"adc0ed742ad17327c375879472d435cea168c208c303f53eb93cb2c0f10f3650","src/to_tokens.rs":"e589c1643479a9003d4dd1d9fa63714042b106f1b16d8ea3903cfe2f73a020f5","tests/compiletest.rs":"0a52a44786aea1c299c695bf948b2ed2081e4cc344e5c2cadceab4eb03d0010d","tests/test.rs":"6eb200350fa78405d4fd920ecf71d226e258c61aa88f850750efa99e065f06d6","tests/ui/does-not-have-iter-interpolated-dup.rs":"ad13eea21d4cdd2ab6c082f633392e1ff20fb0d1af5f2177041e0bf7f30da695","tests/ui/does-not-have-iter-interpolated.rs":"83a5b3f240651adcbe4b6e51076d76d653ad439b37442cf4054f1fd3c073f3b7","tests/ui/does-not-have-iter-separated.rs":"fe413c48331d5e3a7ae5fef6a5892a90c72f610d54595879eb49d0a94154ba3f","tests/ui/does-not-have-iter.rs":"09dc9499d861b63cebb0848b855b78e2dc9497bfde37ba6339f3625ae009a62f","tests/ui/not-quotable.rs":"5759d0884943417609f28faadc70254a3e2fd3d9bd6ff7297a3fb70a77fafd8a","tests/ui/not-repeatable.rs":"a4b115c04e4e41049a05f5b69450503fbffeba031218b4189cb931839f7f9a9c","tests/ui/wrong-type-span.rs":"5f310cb7fde3ef51bad01e7f286d244e3b6e67396cd2ea7eab77275c9d902699"},"package":"c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7"} \ No newline at end of file diff --git a/vendor/quote/Cargo.toml b/vendor/quote/Cargo.toml deleted file mode 100644 index 411f943a..00000000 --- a/vendor/quote/Cargo.toml +++ /dev/null @@ -1,40 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies -# -# If you believe there's an error in this file please file an -# issue against the rust-lang/cargo repository. 
If you're -# editing this file be aware that the upstream Cargo.toml -# will likely look very different (and much more reasonable) - -[package] -edition = "2018" -name = "quote" -version = "1.0.9" -authors = ["David Tolnay "] -include = ["Cargo.toml", "src/**/*.rs", "tests/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"] -description = "Quasi-quoting macro quote!(...)" -documentation = "https://docs.rs/quote/" -readme = "README.md" -keywords = ["syn"] -categories = ["development-tools::procedural-macro-helpers"] -license = "MIT OR Apache-2.0" -repository = "https://github.com/dtolnay/quote" -[package.metadata.docs.rs] -targets = ["x86_64-unknown-linux-gnu"] -[dependencies.proc-macro2] -version = "1.0.20" -default-features = false -[dev-dependencies.rustversion] -version = "1.0" - -[dev-dependencies.trybuild] -version = "1.0.19" -features = ["diff"] - -[features] -default = ["proc-macro"] -proc-macro = ["proc-macro2/proc-macro"] diff --git a/vendor/quote/LICENSE-APACHE b/vendor/quote/LICENSE-APACHE deleted file mode 100644 index 16fe87b0..00000000 --- a/vendor/quote/LICENSE-APACHE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/vendor/quote/LICENSE-MIT b/vendor/quote/LICENSE-MIT deleted file mode 100644 index 40b8817a..00000000 --- a/vendor/quote/LICENSE-MIT +++ /dev/null @@ -1,25 +0,0 @@ -Copyright (c) 2016 The Rust Project Developers - -Permission is hereby granted, free of charge, to any -person obtaining a copy of this software and associated -documentation files (the "Software"), to deal in the -Software without restriction, including without -limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software -is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice -shall be included in all copies or substantial portions -of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. diff --git a/vendor/quote/README.md b/vendor/quote/README.md deleted file mode 100644 index 57b2a629..00000000 --- a/vendor/quote/README.md +++ /dev/null @@ -1,261 +0,0 @@ -Rust Quasi-Quoting -================== - -[github](https://github.com/dtolnay/quote) -[crates.io](https://crates.io/crates/quote) -[docs.rs](https://docs.rs/quote) -[build status](https://github.com/dtolnay/quote/actions?query=branch%3Amaster) - -This crate provides the [`quote!`] macro for turning Rust syntax tree data -structures into tokens of source code. - -[`quote!`]: https://docs.rs/quote/1.0/quote/macro.quote.html - -Procedural macros in Rust receive a stream of tokens as input, execute arbitrary -Rust code to determine how to manipulate those tokens, and produce a stream of -tokens to hand back to the compiler to compile into the caller's crate. -Quasi-quoting is a solution to one piece of that — producing tokens to -return to the compiler. - -The idea of quasi-quoting is that we write *code* that we treat as *data*. -Within the `quote!` macro, we can write what looks like code to our text editor -or IDE. We get all the benefits of the editor's brace matching, syntax -highlighting, indentation, and maybe autocompletion. But rather than compiling -that as code into the current crate, we can treat it as data, pass it around, -mutate it, and eventually hand it back to the compiler as tokens to compile into -the macro caller's crate. - -This crate is motivated by the procedural macro use case, but is a -general-purpose Rust quasi-quoting library and is not specific to procedural -macros. - -```toml -[dependencies] -quote = "1.0" -``` - -*Version requirement: Quote supports rustc 1.31 and up.*
-[*Release notes*](https://github.com/dtolnay/quote/releases)
-
-<br>
-
-## Syntax
-
-The quote crate provides a [`quote!`] macro within which you can write Rust code
-that gets packaged into a [`TokenStream`] and can be treated as data. You should
-think of `TokenStream` as representing a fragment of Rust source code.
-
-[`TokenStream`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.TokenStream.html
-
-Within the `quote!` macro, interpolation is done with `#var`. Any type
-implementing the [`quote::ToTokens`] trait can be interpolated. This includes
-most Rust primitive types as well as most of the syntax tree types from [`syn`].
-
-[`quote::ToTokens`]: https://docs.rs/quote/1.0/quote/trait.ToTokens.html
-[`syn`]: https://github.com/dtolnay/syn
-
-```rust
-let tokens = quote! {
-    struct SerializeWith #generics #where_clause {
-        value: &'a #field_ty,
-        phantom: core::marker::PhantomData<#item_ty>,
-    }
-
-    impl #generics serde::Serialize for SerializeWith #generics #where_clause {
-        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-        where
-            S: serde::Serializer,
-        {
-            #path(self.value, serializer)
-        }
-    }
-
-    SerializeWith {
-        value: #value,
-        phantom: core::marker::PhantomData::<#item_ty>,
-    }
-};
-```
-
-<br>
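
A minimal, self-contained sketch of `#var` interpolation outside of a procedural
macro; it assumes `quote` and `proc-macro2` as ordinary dependencies, and the
struct and field names are purely illustrative:

```rust
use proc_macro2::{Ident, Span};
use quote::quote;

fn main() {
    // Any value implementing `ToTokens` can be interpolated with `#var`.
    let name = Ident::new("Point", Span::call_site());
    let ty = quote!(f64);

    let tokens = quote! {
        struct #name {
            x: #ty,
            y: #ty,
        }
    };

    // `TokenStream` implements `Display`, so the generated code can be inspected.
    println!("{}", tokens);
}
```
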
- -## Repetition - -Repetition is done using `#(...)*` or `#(...),*` similar to `macro_rules!`. This -iterates through the elements of any variable interpolated within the repetition -and inserts a copy of the repetition body for each one. The variables in an -interpolation may be anything that implements `IntoIterator`, including `Vec` or -a pre-existing iterator. - -- `#(#var)*` — no separators -- `#(#var),*` — the character before the asterisk is used as a separator -- `#( struct #var; )*` — the repetition can contain other things -- `#( #k => println!("{}", #v), )*` — even multiple interpolations - -Note that there is a difference between `#(#var ,)*` and `#(#var),*`—the latter -does not produce a trailing comma. This matches the behavior of delimiters in -`macro_rules!`. - -
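
The repetition forms above can also be exercised outside of a macro; a short
sketch, assuming `quote` and `proc-macro2` as dependencies and hypothetical
field names:

```rust
use proc_macro2::{Ident, Span};
use quote::quote;

fn main() {
    // Any `IntoIterator` of `ToTokens` values may drive a repetition.
    let fields: Vec<Ident> = ["x", "y", "z"]
        .iter()
        .map(|&name| Ident::new(name, Span::call_site()))
        .collect();

    // `#(...),*` places the separator between copies and omits a trailing comma.
    let strukt = quote! {
        struct Triple { #(#fields: f64),* }
    };

    // Putting the separator inside the body, `#( ... )*`, keeps the trailing comma.
    let inits = quote! { #( #fields: 0.0, )* };

    println!("{}", strukt);
    println!("{}", inits);
}
```
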
- -## Returning tokens to the compiler - -The `quote!` macro evaluates to an expression of type -`proc_macro2::TokenStream`. Meanwhile Rust procedural macros are expected to -return the type `proc_macro::TokenStream`. - -The difference between the two types is that `proc_macro` types are entirely -specific to procedural macros and cannot ever exist in code outside of a -procedural macro, while `proc_macro2` types may exist anywhere including tests -and non-macro code like main.rs and build.rs. This is why even the procedural -macro ecosystem is largely built around `proc_macro2`, because that ensures the -libraries are unit testable and accessible in non-macro contexts. - -There is a [`From`]-conversion in both directions so returning the output of -`quote!` from a procedural macro usually looks like `tokens.into()` or -`proc_macro::TokenStream::from(tokens)`. - -[`From`]: https://doc.rust-lang.org/std/convert/trait.From.html - -
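
A sketch of that conversion inside a function-like procedural macro; the crate
and macro names here are illustrative, and the crate would need
`proc-macro = true` in its manifest:

```rust
use proc_macro::TokenStream;
use quote::quote;

#[proc_macro]
pub fn answer(_input: TokenStream) -> TokenStream {
    // `quote!` yields a `proc_macro2::TokenStream` ...
    let expanded = quote! {
        fn answer() -> u32 { 42 }
    };

    // ... which is converted into `proc_macro::TokenStream` through the
    // `From` impl before being handed back to the compiler.
    expanded.into()
}
```
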
-
-## Examples
-
-### Combining quoted fragments
-
-Usually you don't end up constructing an entire final `TokenStream` in one
-piece. Different parts may come from different helper functions. The tokens
-produced by `quote!` themselves implement `ToTokens` and so can be interpolated
-into later `quote!` invocations to build up a final result.
-
-```rust
-let type_definition = quote! {...};
-let methods = quote! {...};
-
-let tokens = quote! {
-    #type_definition
-    #methods
-};
-```
-
-### Constructing identifiers
-
-Suppose we have an identifier `ident` which came from somewhere in a macro
-input and we need to modify it in some way for the macro output. Let's consider
-prepending the identifier with an underscore.
-
-Simply interpolating the identifier next to an underscore will not have the
-behavior of concatenating them. The underscore and the identifier will continue
-to be two separate tokens as if you had written `_ x`.
-
-```rust
-// incorrect
-quote! {
-    let mut _#ident = 0;
-}
-```
-
-The solution is to build a new identifier token with the correct value. As this
-is such a common case, the `format_ident!` macro provides a convenient utility
-for doing so correctly.
-
-```rust
-let varname = format_ident!("_{}", ident);
-quote! {
-    let mut #varname = 0;
-}
-```
-
-Alternatively, the APIs provided by Syn and proc-macro2 can be used to directly
-build the identifier. This is roughly equivalent to the above, but will not
-handle `ident` being a raw identifier.
-
-```rust
-let concatenated = format!("_{}", ident);
-let varname = syn::Ident::new(&concatenated, ident.span());
-quote! {
-    let mut #varname = 0;
-}
-```
-
-### Making method calls
-
-Let's say our macro requires some type specified in the macro input to have a
-constructor called `new`. We have the type in a variable called `field_type` of
-type `syn::Type` and want to invoke the constructor.
-
-```rust
-// incorrect
-quote! {
-    let value = #field_type::new();
-}
-```
-
-This works only sometimes. If `field_type` is `String`, the expanded code
-contains `String::new()` which is fine. But if `field_type` is something like
-`Vec<i32>` then the expanded code is `Vec<i32>::new()` which is invalid syntax.
-Ordinarily in handwritten Rust we would write `Vec::<i32>::new()` but for macros
-often the following is more convenient.
-
-```rust
-quote! {
-    let value = <#field_type>::new();
-}
-```
-
-This expands to `<Vec<i32>>::new()` which behaves correctly.
-
-A similar pattern is appropriate for trait methods.
-
-```rust
-quote! {
-    let value = <#field_type as core::default::Default>::default();
-}
-```
-
-<br>
- -## Hygiene - -Any interpolated tokens preserve the `Span` information provided by their -`ToTokens` implementation. Tokens that originate within a `quote!` invocation -are spanned with [`Span::call_site()`]. - -[`Span::call_site()`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.Span.html#method.call_site - -A different span can be provided explicitly through the [`quote_spanned!`] -macro. - -[`quote_spanned!`]: https://docs.rs/quote/1.0/quote/macro.quote_spanned.html - -
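
Where a specific span is wanted, `quote_spanned!` takes it explicitly. A small
sketch; `Span::call_site()` is used only as a stand-in for a span that a real
macro would take from its input:

```rust
use proc_macro2::Span;
use quote::quote_spanned;

fn main() {
    // In a real macro this would be the span of an input token, so that any
    // resulting compile error points at the user's code.
    let span = Span::call_site();

    let assertion = quote_spanned! {span=>
        struct _AssertSend where String: Send;
    };
    println!("{}", assertion);
}
```
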
- -## Non-macro code generators - -When using `quote` in a build.rs or main.rs and writing the output out to a -file, consider having the code generator pass the tokens through [rustfmt] -before writing (either by shelling out to the `rustfmt` binary or by pulling in -the `rustfmt` library as a dependency). This way if an error occurs in the -generated code it is convenient for a human to read and debug. - -Be aware that no kind of hygiene or span information is retained when tokens are -written to a file; the conversion from tokens to source code is lossy. - -[rustfmt]: https://github.com/rust-lang/rustfmt - -
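
One possible shape for such a generator, assuming `quote` as a dependency; the
output path and the call to the `rustfmt` binary are illustrative:

```rust
use std::{fs, io, process::Command};

use quote::quote;

fn main() -> io::Result<()> {
    let tokens = quote! {
        pub const GENERATED: u32 = 1 + 1;
    };

    // The token-to-source conversion is lossy: spans are gone and the text is
    // unformatted, so run it through rustfmt before anyone has to read it.
    let path = "src/generated.rs"; // illustrative output location
    fs::write(path, tokens.to_string())?;

    // Best-effort formatting; ignore the result if rustfmt is not installed.
    let _ = Command::new("rustfmt").arg(path).status();
    Ok(())
}
```
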
- -#### License - - -Licensed under either of Apache License, Version -2.0 or MIT license at your option. - - -
- - -Unless you explicitly state otherwise, any contribution intentionally submitted -for inclusion in this crate by you, as defined in the Apache-2.0 license, shall -be dual licensed as above, without any additional terms or conditions. - diff --git a/vendor/quote/src/ext.rs b/vendor/quote/src/ext.rs deleted file mode 100644 index 9e9b4a50..00000000 --- a/vendor/quote/src/ext.rs +++ /dev/null @@ -1,112 +0,0 @@ -use super::ToTokens; - -use std::iter; - -use proc_macro2::{TokenStream, TokenTree}; - -/// TokenStream extension trait with methods for appending tokens. -/// -/// This trait is sealed and cannot be implemented outside of the `quote` crate. -pub trait TokenStreamExt: private::Sealed { - /// For use by `ToTokens` implementations. - /// - /// Appends the token specified to this list of tokens. - fn append(&mut self, token: U) - where - U: Into; - - /// For use by `ToTokens` implementations. - /// - /// ``` - /// # use quote::{quote, TokenStreamExt, ToTokens}; - /// # use proc_macro2::TokenStream; - /// # - /// struct X; - /// - /// impl ToTokens for X { - /// fn to_tokens(&self, tokens: &mut TokenStream) { - /// tokens.append_all(&[true, false]); - /// } - /// } - /// - /// let tokens = quote!(#X); - /// assert_eq!(tokens.to_string(), "true false"); - /// ``` - fn append_all(&mut self, iter: I) - where - I: IntoIterator, - I::Item: ToTokens; - - /// For use by `ToTokens` implementations. - /// - /// Appends all of the items in the iterator `I`, separated by the tokens - /// `U`. - fn append_separated(&mut self, iter: I, op: U) - where - I: IntoIterator, - I::Item: ToTokens, - U: ToTokens; - - /// For use by `ToTokens` implementations. - /// - /// Appends all tokens in the iterator `I`, appending `U` after each - /// element, including after the last element of the iterator. - fn append_terminated(&mut self, iter: I, term: U) - where - I: IntoIterator, - I::Item: ToTokens, - U: ToTokens; -} - -impl TokenStreamExt for TokenStream { - fn append(&mut self, token: U) - where - U: Into, - { - self.extend(iter::once(token.into())); - } - - fn append_all(&mut self, iter: I) - where - I: IntoIterator, - I::Item: ToTokens, - { - for token in iter { - token.to_tokens(self); - } - } - - fn append_separated(&mut self, iter: I, op: U) - where - I: IntoIterator, - I::Item: ToTokens, - U: ToTokens, - { - for (i, token) in iter.into_iter().enumerate() { - if i > 0 { - op.to_tokens(self); - } - token.to_tokens(self); - } - } - - fn append_terminated(&mut self, iter: I, term: U) - where - I: IntoIterator, - I::Item: ToTokens, - U: ToTokens, - { - for token in iter { - token.to_tokens(self); - term.to_tokens(self); - } - } -} - -mod private { - use proc_macro2::TokenStream; - - pub trait Sealed {} - - impl Sealed for TokenStream {} -} diff --git a/vendor/quote/src/format.rs b/vendor/quote/src/format.rs deleted file mode 100644 index 745cb5d2..00000000 --- a/vendor/quote/src/format.rs +++ /dev/null @@ -1,164 +0,0 @@ -/// Formatting macro for constructing `Ident`s. -/// -///
-/// -/// # Syntax -/// -/// Syntax is copied from the [`format!`] macro, supporting both positional and -/// named arguments. -/// -/// Only a limited set of formatting traits are supported. The current mapping -/// of format types to traits is: -/// -/// * `{}` ⇒ [`IdentFragment`] -/// * `{:o}` ⇒ [`Octal`](`std::fmt::Octal`) -/// * `{:x}` ⇒ [`LowerHex`](`std::fmt::LowerHex`) -/// * `{:X}` ⇒ [`UpperHex`](`std::fmt::UpperHex`) -/// * `{:b}` ⇒ [`Binary`](`std::fmt::Binary`) -/// -/// See [`std::fmt`] for more information. -/// -///
-/// -/// # IdentFragment -/// -/// Unlike `format!`, this macro uses the [`IdentFragment`] formatting trait by -/// default. This trait is like `Display`, with a few differences: -/// -/// * `IdentFragment` is only implemented for a limited set of types, such as -/// unsigned integers and strings. -/// * [`Ident`] arguments will have their `r#` prefixes stripped, if present. -/// -/// [`Ident`]: `proc_macro2::Ident` -/// -///
-/// -/// # Hygiene -/// -/// The [`Span`] of the first `Ident` argument is used as the span of the final -/// identifier, falling back to [`Span::call_site`] when no identifiers are -/// provided. -/// -/// ``` -/// # use quote::format_ident; -/// # let ident = format_ident!("Ident"); -/// // If `ident` is an Ident, the span of `my_ident` will be inherited from it. -/// let my_ident = format_ident!("My{}{}", ident, "IsCool"); -/// assert_eq!(my_ident, "MyIdentIsCool"); -/// ``` -/// -/// Alternatively, the span can be overridden by passing the `span` named -/// argument. -/// -/// ``` -/// # use quote::format_ident; -/// # const IGNORE_TOKENS: &'static str = stringify! { -/// let my_span = /* ... */; -/// # }; -/// # let my_span = proc_macro2::Span::call_site(); -/// format_ident!("MyIdent", span = my_span); -/// ``` -/// -/// [`Span`]: `proc_macro2::Span` -/// [`Span::call_site`]: `proc_macro2::Span::call_site` -/// -///


-/// -/// # Panics -/// -/// This method will panic if the resulting formatted string is not a valid -/// identifier. -/// -///
-/// -/// # Examples -/// -/// Composing raw and non-raw identifiers: -/// ``` -/// # use quote::format_ident; -/// let my_ident = format_ident!("My{}", "Ident"); -/// assert_eq!(my_ident, "MyIdent"); -/// -/// let raw = format_ident!("r#Raw"); -/// assert_eq!(raw, "r#Raw"); -/// -/// let my_ident_raw = format_ident!("{}Is{}", my_ident, raw); -/// assert_eq!(my_ident_raw, "MyIdentIsRaw"); -/// ``` -/// -/// Integer formatting options: -/// ``` -/// # use quote::format_ident; -/// let num: u32 = 10; -/// -/// let decimal = format_ident!("Id_{}", num); -/// assert_eq!(decimal, "Id_10"); -/// -/// let octal = format_ident!("Id_{:o}", num); -/// assert_eq!(octal, "Id_12"); -/// -/// let binary = format_ident!("Id_{:b}", num); -/// assert_eq!(binary, "Id_1010"); -/// -/// let lower_hex = format_ident!("Id_{:x}", num); -/// assert_eq!(lower_hex, "Id_a"); -/// -/// let upper_hex = format_ident!("Id_{:X}", num); -/// assert_eq!(upper_hex, "Id_A"); -/// ``` -#[macro_export] -macro_rules! format_ident { - ($fmt:expr) => { - $crate::format_ident_impl!([ - ::std::option::Option::None, - $fmt - ]) - }; - - ($fmt:expr, $($rest:tt)*) => { - $crate::format_ident_impl!([ - ::std::option::Option::None, - $fmt - ] $($rest)*) - }; -} - -#[macro_export] -#[doc(hidden)] -macro_rules! format_ident_impl { - // Final state - ([$span:expr, $($fmt:tt)*]) => { - $crate::__private::mk_ident(&format!($($fmt)*), $span) - }; - - // Span argument - ([$old:expr, $($fmt:tt)*] span = $span:expr) => { - $crate::format_ident_impl!([$old, $($fmt)*] span = $span,) - }; - ([$old:expr, $($fmt:tt)*] span = $span:expr, $($rest:tt)*) => { - $crate::format_ident_impl!([ - ::std::option::Option::Some::<$crate::__private::Span>($span), - $($fmt)* - ] $($rest)*) - }; - - // Named argument - ([$span:expr, $($fmt:tt)*] $name:ident = $arg:expr) => { - $crate::format_ident_impl!([$span, $($fmt)*] $name = $arg,) - }; - ([$span:expr, $($fmt:tt)*] $name:ident = $arg:expr, $($rest:tt)*) => { - match $crate::__private::IdentFragmentAdapter(&$arg) { - arg => $crate::format_ident_impl!([$span.or(arg.span()), $($fmt)*, $name = arg] $($rest)*), - } - }; - - // Positional argument - ([$span:expr, $($fmt:tt)*] $arg:expr) => { - $crate::format_ident_impl!([$span, $($fmt)*] $arg,) - }; - ([$span:expr, $($fmt:tt)*] $arg:expr, $($rest:tt)*) => { - match $crate::__private::IdentFragmentAdapter(&$arg) { - arg => $crate::format_ident_impl!([$span.or(arg.span()), $($fmt)*, arg] $($rest)*), - } - }; -} diff --git a/vendor/quote/src/ident_fragment.rs b/vendor/quote/src/ident_fragment.rs deleted file mode 100644 index e7472fe1..00000000 --- a/vendor/quote/src/ident_fragment.rs +++ /dev/null @@ -1,86 +0,0 @@ -use proc_macro2::{Ident, Span}; -use std::borrow::Cow; -use std::fmt; - -/// Specialized formatting trait used by `format_ident!`. -/// -/// [`Ident`] arguments formatted using this trait will have their `r#` prefix -/// stripped, if present. -/// -/// See [`format_ident!`] for more information. -pub trait IdentFragment { - /// Format this value as an identifier fragment. - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result; - - /// Span associated with this `IdentFragment`. - /// - /// If non-`None`, may be inherited by formatted identifiers. 
- fn span(&self) -> Option { - None - } -} - -impl IdentFragment for &T { - fn span(&self) -> Option { - ::span(*self) - } - - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - IdentFragment::fmt(*self, f) - } -} - -impl IdentFragment for &mut T { - fn span(&self) -> Option { - ::span(*self) - } - - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - IdentFragment::fmt(*self, f) - } -} - -impl IdentFragment for Ident { - fn span(&self) -> Option { - Some(self.span()) - } - - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let id = self.to_string(); - if id.starts_with("r#") { - fmt::Display::fmt(&id[2..], f) - } else { - fmt::Display::fmt(&id[..], f) - } - } -} - -impl IdentFragment for Cow<'_, T> -where - T: IdentFragment + ToOwned + ?Sized, -{ - fn span(&self) -> Option { - T::span(self) - } - - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - T::fmt(self, f) - } -} - -// Limited set of types which this is implemented for, as we want to avoid types -// which will often include non-identifier characters in their `Display` impl. -macro_rules! ident_fragment_display { - ($($T:ty),*) => { - $( - impl IdentFragment for $T { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(self, f) - } - } - )* - } -} - -ident_fragment_display!(bool, str, String, char); -ident_fragment_display!(u8, u16, u32, u64, u128, usize); diff --git a/vendor/quote/src/lib.rs b/vendor/quote/src/lib.rs deleted file mode 100644 index 356e43a0..00000000 --- a/vendor/quote/src/lib.rs +++ /dev/null @@ -1,1267 +0,0 @@ -//! [![github]](https://github.com/dtolnay/quote) [![crates-io]](https://crates.io/crates/quote) [![docs-rs]](https://docs.rs/quote) -//! -//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github -//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust -//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K -//! -//!
-//! -//! This crate provides the [`quote!`] macro for turning Rust syntax tree data -//! structures into tokens of source code. -//! -//! [`quote!`]: macro.quote.html -//! -//! Procedural macros in Rust receive a stream of tokens as input, execute -//! arbitrary Rust code to determine how to manipulate those tokens, and produce -//! a stream of tokens to hand back to the compiler to compile into the caller's -//! crate. Quasi-quoting is a solution to one piece of that — producing -//! tokens to return to the compiler. -//! -//! The idea of quasi-quoting is that we write *code* that we treat as *data*. -//! Within the `quote!` macro, we can write what looks like code to our text -//! editor or IDE. We get all the benefits of the editor's brace matching, -//! syntax highlighting, indentation, and maybe autocompletion. But rather than -//! compiling that as code into the current crate, we can treat it as data, pass -//! it around, mutate it, and eventually hand it back to the compiler as tokens -//! to compile into the macro caller's crate. -//! -//! This crate is motivated by the procedural macro use case, but is a -//! general-purpose Rust quasi-quoting library and is not specific to procedural -//! macros. -//! -//! ```toml -//! [dependencies] -//! quote = "1.0" -//! ``` -//! -//!
-//! -//! # Example -//! -//! The following quasi-quoted block of code is something you might find in [a] -//! procedural macro having to do with data structure serialization. The `#var` -//! syntax performs interpolation of runtime variables into the quoted tokens. -//! Check out the documentation of the [`quote!`] macro for more detail about -//! the syntax. See also the [`quote_spanned!`] macro which is important for -//! implementing hygienic procedural macros. -//! -//! [a]: https://serde.rs/ -//! [`quote_spanned!`]: macro.quote_spanned.html -//! -//! ``` -//! # use quote::quote; -//! # -//! # let generics = ""; -//! # let where_clause = ""; -//! # let field_ty = ""; -//! # let item_ty = ""; -//! # let path = ""; -//! # let value = ""; -//! # -//! let tokens = quote! { -//! struct SerializeWith #generics #where_clause { -//! value: &'a #field_ty, -//! phantom: core::marker::PhantomData<#item_ty>, -//! } -//! -//! impl #generics serde::Serialize for SerializeWith #generics #where_clause { -//! fn serialize(&self, serializer: S) -> Result -//! where -//! S: serde::Serializer, -//! { -//! #path(self.value, serializer) -//! } -//! } -//! -//! SerializeWith { -//! value: #value, -//! phantom: core::marker::PhantomData::<#item_ty>, -//! } -//! }; -//! ``` - -// Quote types in rustdoc of other crates get linked to here. -#![doc(html_root_url = "https://docs.rs/quote/1.0.9")] -#![allow( - clippy::doc_markdown, - clippy::missing_errors_doc, - clippy::missing_panics_doc, - clippy::module_name_repetitions -)] - -#[cfg(all( - not(all(target_arch = "wasm32", target_os = "unknown")), - feature = "proc-macro" -))] -extern crate proc_macro; - -mod ext; -mod format; -mod ident_fragment; -mod to_tokens; - -// Not public API. -#[doc(hidden)] -#[path = "runtime.rs"] -pub mod __private; - -pub use crate::ext::TokenStreamExt; -pub use crate::ident_fragment::IdentFragment; -pub use crate::to_tokens::ToTokens; - -// Not public API. -#[doc(hidden)] -pub mod spanned; - -/// The whole point. -/// -/// Performs variable interpolation against the input and produces it as -/// [`proc_macro2::TokenStream`]. -/// -/// Note: for returning tokens to the compiler in a procedural macro, use -/// `.into()` on the result to convert to [`proc_macro::TokenStream`]. -/// -/// [`TokenStream`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.TokenStream.html -/// -///
-/// -/// # Interpolation -/// -/// Variable interpolation is done with `#var` (similar to `$var` in -/// `macro_rules!` macros). This grabs the `var` variable that is currently in -/// scope and inserts it in that location in the output tokens. Any type -/// implementing the [`ToTokens`] trait can be interpolated. This includes most -/// Rust primitive types as well as most of the syntax tree types from the [Syn] -/// crate. -/// -/// [`ToTokens`]: trait.ToTokens.html -/// [Syn]: https://github.com/dtolnay/syn -/// -/// Repetition is done using `#(...)*` or `#(...),*` again similar to -/// `macro_rules!`. This iterates through the elements of any variable -/// interpolated within the repetition and inserts a copy of the repetition body -/// for each one. The variables in an interpolation may be a `Vec`, slice, -/// `BTreeSet`, or any `Iterator`. -/// -/// - `#(#var)*` — no separators -/// - `#(#var),*` — the character before the asterisk is used as a separator -/// - `#( struct #var; )*` — the repetition can contain other tokens -/// - `#( #k => println!("{}", #v), )*` — even multiple interpolations -/// -///
-/// -/// # Hygiene -/// -/// Any interpolated tokens preserve the `Span` information provided by their -/// `ToTokens` implementation. Tokens that originate within the `quote!` -/// invocation are spanned with [`Span::call_site()`]. -/// -/// [`Span::call_site()`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.Span.html#method.call_site -/// -/// A different span can be provided through the [`quote_spanned!`] macro. -/// -/// [`quote_spanned!`]: macro.quote_spanned.html -/// -///
-/// -/// # Return type -/// -/// The macro evaluates to an expression of type `proc_macro2::TokenStream`. -/// Meanwhile Rust procedural macros are expected to return the type -/// `proc_macro::TokenStream`. -/// -/// The difference between the two types is that `proc_macro` types are entirely -/// specific to procedural macros and cannot ever exist in code outside of a -/// procedural macro, while `proc_macro2` types may exist anywhere including -/// tests and non-macro code like main.rs and build.rs. This is why even the -/// procedural macro ecosystem is largely built around `proc_macro2`, because -/// that ensures the libraries are unit testable and accessible in non-macro -/// contexts. -/// -/// There is a [`From`]-conversion in both directions so returning the output of -/// `quote!` from a procedural macro usually looks like `tokens.into()` or -/// `proc_macro::TokenStream::from(tokens)`. -/// -/// [`From`]: https://doc.rust-lang.org/std/convert/trait.From.html -/// -///
-/// -/// # Examples -/// -/// ### Procedural macro -/// -/// The structure of a basic procedural macro is as follows. Refer to the [Syn] -/// crate for further useful guidance on using `quote!` as part of a procedural -/// macro. -/// -/// [Syn]: https://github.com/dtolnay/syn -/// -/// ``` -/// # #[cfg(any())] -/// extern crate proc_macro; -/// # extern crate proc_macro2; -/// -/// # #[cfg(any())] -/// use proc_macro::TokenStream; -/// # use proc_macro2::TokenStream; -/// use quote::quote; -/// -/// # const IGNORE_TOKENS: &'static str = stringify! { -/// #[proc_macro_derive(HeapSize)] -/// # }; -/// pub fn derive_heap_size(input: TokenStream) -> TokenStream { -/// // Parse the input and figure out what implementation to generate... -/// # const IGNORE_TOKENS: &'static str = stringify! { -/// let name = /* ... */; -/// let expr = /* ... */; -/// # }; -/// # -/// # let name = 0; -/// # let expr = 0; -/// -/// let expanded = quote! { -/// // The generated impl. -/// impl heapsize::HeapSize for #name { -/// fn heap_size_of_children(&self) -> usize { -/// #expr -/// } -/// } -/// }; -/// -/// // Hand the output tokens back to the compiler. -/// TokenStream::from(expanded) -/// } -/// ``` -/// -///


-/// -/// ### Combining quoted fragments -/// -/// Usually you don't end up constructing an entire final `TokenStream` in one -/// piece. Different parts may come from different helper functions. The tokens -/// produced by `quote!` themselves implement `ToTokens` and so can be -/// interpolated into later `quote!` invocations to build up a final result. -/// -/// ``` -/// # use quote::quote; -/// # -/// let type_definition = quote! {...}; -/// let methods = quote! {...}; -/// -/// let tokens = quote! { -/// #type_definition -/// #methods -/// }; -/// ``` -/// -///


-/// -/// ### Constructing identifiers -/// -/// Suppose we have an identifier `ident` which came from somewhere in a macro -/// input and we need to modify it in some way for the macro output. Let's -/// consider prepending the identifier with an underscore. -/// -/// Simply interpolating the identifier next to an underscore will not have the -/// behavior of concatenating them. The underscore and the identifier will -/// continue to be two separate tokens as if you had written `_ x`. -/// -/// ``` -/// # use proc_macro2::{self as syn, Span}; -/// # use quote::quote; -/// # -/// # let ident = syn::Ident::new("i", Span::call_site()); -/// # -/// // incorrect -/// quote! { -/// let mut _#ident = 0; -/// } -/// # ; -/// ``` -/// -/// The solution is to build a new identifier token with the correct value. As -/// this is such a common case, the [`format_ident!`] macro provides a -/// convenient utility for doing so correctly. -/// -/// ``` -/// # use proc_macro2::{Ident, Span}; -/// # use quote::{format_ident, quote}; -/// # -/// # let ident = Ident::new("i", Span::call_site()); -/// # -/// let varname = format_ident!("_{}", ident); -/// quote! { -/// let mut #varname = 0; -/// } -/// # ; -/// ``` -/// -/// Alternatively, the APIs provided by Syn and proc-macro2 can be used to -/// directly build the identifier. This is roughly equivalent to the above, but -/// will not handle `ident` being a raw identifier. -/// -/// ``` -/// # use proc_macro2::{self as syn, Span}; -/// # use quote::quote; -/// # -/// # let ident = syn::Ident::new("i", Span::call_site()); -/// # -/// let concatenated = format!("_{}", ident); -/// let varname = syn::Ident::new(&concatenated, ident.span()); -/// quote! { -/// let mut #varname = 0; -/// } -/// # ; -/// ``` -/// -///


-///
-/// ### Making method calls
-///
-/// Let's say our macro requires some type specified in the macro input to have
-/// a constructor called `new`. We have the type in a variable called
-/// `field_type` of type `syn::Type` and want to invoke the constructor.
-///
-/// ```
-/// # use quote::quote;
-/// #
-/// # let field_type = quote!(...);
-/// #
-/// // incorrect
-/// quote! {
-///     let value = #field_type::new();
-/// }
-/// # ;
-/// ```
-///
-/// This works only sometimes. If `field_type` is `String`, the expanded code
-/// contains `String::new()` which is fine. But if `field_type` is something
-/// like `Vec<i32>` then the expanded code is `Vec<i32>::new()` which is invalid
-/// syntax. Ordinarily in handwritten Rust we would write `Vec::<i32>::new()`
-/// but for macros often the following is more convenient.
-///
-/// ```
-/// # use quote::quote;
-/// #
-/// # let field_type = quote!(...);
-/// #
-/// quote! {
-///     let value = <#field_type>::new();
-/// }
-/// # ;
-/// ```
-///
-/// This expands to `<Vec<i32>>::new()` which behaves correctly.
-///
-/// A similar pattern is appropriate for trait methods.
-///
-/// ```
-/// # use quote::quote;
-/// #
-/// # let field_type = quote!(...);
-/// #
-/// quote! {
-///     let value = <#field_type as core::default::Default>::default();
-/// }
-/// # ;
-/// ```
-///
-///


-/// -/// ### Interpolating text inside of doc comments -/// -/// Neither doc comments nor string literals get interpolation behavior in -/// quote: -/// -/// ```compile_fail -/// quote! { -/// /// try to interpolate: #ident -/// /// -/// /// ... -/// } -/// ``` -/// -/// ```compile_fail -/// quote! { -/// #[doc = "try to interpolate: #ident"] -/// } -/// ``` -/// -/// Macro calls in a doc attribute are not valid syntax: -/// -/// ```compile_fail -/// quote! { -/// #[doc = concat!("try to interpolate: ", stringify!(#ident))] -/// } -/// ``` -/// -/// Instead the best way to build doc comments that involve variables is by -/// formatting the doc string literal outside of quote. -/// -/// ```rust -/// # use proc_macro2::{Ident, Span}; -/// # use quote::quote; -/// # -/// # const IGNORE: &str = stringify! { -/// let msg = format!(...); -/// # }; -/// # -/// # let ident = Ident::new("var", Span::call_site()); -/// # let msg = format!("try to interpolate: {}", ident); -/// quote! { -/// #[doc = #msg] -/// /// -/// /// ... -/// } -/// # ; -/// ``` -/// -///


-/// -/// ### Indexing into a tuple struct -/// -/// When interpolating indices of a tuple or tuple struct, we need them not to -/// appears suffixed as integer literals by interpolating them as [`syn::Index`] -/// instead. -/// -/// [`syn::Index`]: https://docs.rs/syn/1.0/syn/struct.Index.html -/// -/// ```compile_fail -/// let i = 0usize..self.fields.len(); -/// -/// // expands to 0 + self.0usize.heap_size() + self.1usize.heap_size() + ... -/// // which is not valid syntax -/// quote! { -/// 0 #( + self.#i.heap_size() )* -/// } -/// ``` -/// -/// ``` -/// # use proc_macro2::{Ident, TokenStream}; -/// # use quote::quote; -/// # -/// # mod syn { -/// # use proc_macro2::{Literal, TokenStream}; -/// # use quote::{ToTokens, TokenStreamExt}; -/// # -/// # pub struct Index(usize); -/// # -/// # impl From for Index { -/// # fn from(i: usize) -> Self { -/// # Index(i) -/// # } -/// # } -/// # -/// # impl ToTokens for Index { -/// # fn to_tokens(&self, tokens: &mut TokenStream) { -/// # tokens.append(Literal::usize_unsuffixed(self.0)); -/// # } -/// # } -/// # } -/// # -/// # struct Struct { -/// # fields: Vec, -/// # } -/// # -/// # impl Struct { -/// # fn example(&self) -> TokenStream { -/// let i = (0..self.fields.len()).map(syn::Index::from); -/// -/// // expands to 0 + self.0.heap_size() + self.1.heap_size() + ... -/// quote! { -/// 0 #( + self.#i.heap_size() )* -/// } -/// # } -/// # } -/// ``` -#[macro_export] -macro_rules! quote { - () => { - $crate::__private::TokenStream::new() - }; - ($($tt:tt)*) => {{ - let mut _s = $crate::__private::TokenStream::new(); - $crate::quote_each_token!(_s $($tt)*); - _s - }}; -} - -/// Same as `quote!`, but applies a given span to all tokens originating within -/// the macro invocation. -/// -///
-/// -/// # Syntax -/// -/// A span expression of type [`Span`], followed by `=>`, followed by the tokens -/// to quote. The span expression should be brief — use a variable for -/// anything more than a few characters. There should be no space before the -/// `=>` token. -/// -/// [`Span`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.Span.html -/// -/// ``` -/// # use proc_macro2::Span; -/// # use quote::quote_spanned; -/// # -/// # const IGNORE_TOKENS: &'static str = stringify! { -/// let span = /* ... */; -/// # }; -/// # let span = Span::call_site(); -/// # let init = 0; -/// -/// // On one line, use parentheses. -/// let tokens = quote_spanned!(span=> Box::into_raw(Box::new(#init))); -/// -/// // On multiple lines, place the span at the top and use braces. -/// let tokens = quote_spanned! {span=> -/// Box::into_raw(Box::new(#init)) -/// }; -/// ``` -/// -/// The lack of space before the `=>` should look jarring to Rust programmers -/// and this is intentional. The formatting is designed to be visibly -/// off-balance and draw the eye a particular way, due to the span expression -/// being evaluated in the context of the procedural macro and the remaining -/// tokens being evaluated in the generated code. -/// -///
-/// -/// # Hygiene -/// -/// Any interpolated tokens preserve the `Span` information provided by their -/// `ToTokens` implementation. Tokens that originate within the `quote_spanned!` -/// invocation are spanned with the given span argument. -/// -///
-/// -/// # Example -/// -/// The following procedural macro code uses `quote_spanned!` to assert that a -/// particular Rust type implements the [`Sync`] trait so that references can be -/// safely shared between threads. -/// -/// [`Sync`]: https://doc.rust-lang.org/std/marker/trait.Sync.html -/// -/// ``` -/// # use quote::{quote_spanned, TokenStreamExt, ToTokens}; -/// # use proc_macro2::{Span, TokenStream}; -/// # -/// # struct Type; -/// # -/// # impl Type { -/// # fn span(&self) -> Span { -/// # Span::call_site() -/// # } -/// # } -/// # -/// # impl ToTokens for Type { -/// # fn to_tokens(&self, _tokens: &mut TokenStream) {} -/// # } -/// # -/// # let ty = Type; -/// # let call_site = Span::call_site(); -/// # -/// let ty_span = ty.span(); -/// let assert_sync = quote_spanned! {ty_span=> -/// struct _AssertSync where #ty: Sync; -/// }; -/// ``` -/// -/// If the assertion fails, the user will see an error like the following. The -/// input span of their type is highlighted in the error. -/// -/// ```text -/// error[E0277]: the trait bound `*const (): std::marker::Sync` is not satisfied -/// --> src/main.rs:10:21 -/// | -/// 10 | static ref PTR: *const () = &(); -/// | ^^^^^^^^^ `*const ()` cannot be shared between threads safely -/// ``` -/// -/// In this example it is important for the where-clause to be spanned with the -/// line/column information of the user's input type so that error messages are -/// placed appropriately by the compiler. -#[macro_export] -macro_rules! quote_spanned { - ($span:expr=>) => {{ - let _: $crate::__private::Span = $span; - $crate::__private::TokenStream::new() - }}; - ($span:expr=> $($tt:tt)*) => {{ - let mut _s = $crate::__private::TokenStream::new(); - let _span: $crate::__private::Span = $span; - $crate::quote_each_token_spanned!(_s _span $($tt)*); - _s - }}; -} - -// Extract the names of all #metavariables and pass them to the $call macro. -// -// in: pounded_var_names!(then!(...) a #b c #( #d )* #e) -// out: then!(... b); -// then!(... d); -// then!(... e); -#[macro_export] -#[doc(hidden)] -macro_rules! pounded_var_names { - ($call:ident! $extra:tt $($tts:tt)*) => { - $crate::pounded_var_names_with_context!($call! $extra - (@ $($tts)*) - ($($tts)* @) - ) - }; -} - -#[macro_export] -#[doc(hidden)] -macro_rules! pounded_var_names_with_context { - ($call:ident! $extra:tt ($($b1:tt)*) ($($curr:tt)*)) => { - $( - $crate::pounded_var_with_context!($call! $extra $b1 $curr); - )* - }; -} - -#[macro_export] -#[doc(hidden)] -macro_rules! pounded_var_with_context { - ($call:ident! $extra:tt $b1:tt ( $($inner:tt)* )) => { - $crate::pounded_var_names!($call! $extra $($inner)*); - }; - - ($call:ident! $extra:tt $b1:tt [ $($inner:tt)* ]) => { - $crate::pounded_var_names!($call! $extra $($inner)*); - }; - - ($call:ident! $extra:tt $b1:tt { $($inner:tt)* }) => { - $crate::pounded_var_names!($call! $extra $($inner)*); - }; - - ($call:ident!($($extra:tt)*) # $var:ident) => { - $crate::$call!($($extra)* $var); - }; - - ($call:ident! $extra:tt $b1:tt $curr:tt) => {}; -} - -#[macro_export] -#[doc(hidden)] -macro_rules! quote_bind_into_iter { - ($has_iter:ident $var:ident) => { - // `mut` may be unused if $var occurs multiple times in the list. - #[allow(unused_mut)] - let (mut $var, i) = $var.quote_into_iter(); - let $has_iter = $has_iter | i; - }; -} - -#[macro_export] -#[doc(hidden)] -macro_rules! 
quote_bind_next_or_break { - ($var:ident) => { - let $var = match $var.next() { - Some(_x) => $crate::__private::RepInterp(_x), - None => break, - }; - }; -} - -#[macro_export] -#[doc(hidden)] -macro_rules! quote_each_token { - ($tokens:ident $($tts:tt)*) => { - $crate::quote_tokens_with_context!($tokens - (@ @ @ @ @ @ $($tts)*) - (@ @ @ @ @ $($tts)* @) - (@ @ @ @ $($tts)* @ @) - (@ @ @ $(($tts))* @ @ @) - (@ @ $($tts)* @ @ @ @) - (@ $($tts)* @ @ @ @ @) - ($($tts)* @ @ @ @ @ @) - ); - }; -} - -#[macro_export] -#[doc(hidden)] -macro_rules! quote_each_token_spanned { - ($tokens:ident $span:ident $($tts:tt)*) => { - $crate::quote_tokens_with_context_spanned!($tokens $span - (@ @ @ @ @ @ $($tts)*) - (@ @ @ @ @ $($tts)* @) - (@ @ @ @ $($tts)* @ @) - (@ @ @ $(($tts))* @ @ @) - (@ @ $($tts)* @ @ @ @) - (@ $($tts)* @ @ @ @ @) - ($($tts)* @ @ @ @ @ @) - ); - }; -} - -#[macro_export] -#[doc(hidden)] -macro_rules! quote_tokens_with_context { - ($tokens:ident - ($($b3:tt)*) ($($b2:tt)*) ($($b1:tt)*) - ($($curr:tt)*) - ($($a1:tt)*) ($($a2:tt)*) ($($a3:tt)*) - ) => { - $( - $crate::quote_token_with_context!($tokens $b3 $b2 $b1 $curr $a1 $a2 $a3); - )* - }; -} - -#[macro_export] -#[doc(hidden)] -macro_rules! quote_tokens_with_context_spanned { - ($tokens:ident $span:ident - ($($b3:tt)*) ($($b2:tt)*) ($($b1:tt)*) - ($($curr:tt)*) - ($($a1:tt)*) ($($a2:tt)*) ($($a3:tt)*) - ) => { - $( - $crate::quote_token_with_context_spanned!($tokens $span $b3 $b2 $b1 $curr $a1 $a2 $a3); - )* - }; -} - -#[macro_export] -#[doc(hidden)] -macro_rules! quote_token_with_context { - ($tokens:ident $b3:tt $b2:tt $b1:tt @ $a1:tt $a2:tt $a3:tt) => {}; - - ($tokens:ident $b3:tt $b2:tt $b1:tt (#) ( $($inner:tt)* ) * $a3:tt) => {{ - use $crate::__private::ext::*; - let has_iter = $crate::__private::ThereIsNoIteratorInRepetition; - $crate::pounded_var_names!(quote_bind_into_iter!(has_iter) () $($inner)*); - let _: $crate::__private::HasIterator = has_iter; - // This is `while true` instead of `loop` because if there are no - // iterators used inside of this repetition then the body would not - // contain any `break`, so the compiler would emit unreachable code - // warnings on anything below the loop. We use has_iter to detect and - // fail to compile when there are no iterators, so here we just work - // around the unneeded extra warning. 
- while true { - $crate::pounded_var_names!(quote_bind_next_or_break!() () $($inner)*); - $crate::quote_each_token!($tokens $($inner)*); - } - }}; - ($tokens:ident $b3:tt $b2:tt # (( $($inner:tt)* )) * $a2:tt $a3:tt) => {}; - ($tokens:ident $b3:tt # ( $($inner:tt)* ) (*) $a1:tt $a2:tt $a3:tt) => {}; - - ($tokens:ident $b3:tt $b2:tt $b1:tt (#) ( $($inner:tt)* ) $sep:tt *) => {{ - use $crate::__private::ext::*; - let mut _i = 0usize; - let has_iter = $crate::__private::ThereIsNoIteratorInRepetition; - $crate::pounded_var_names!(quote_bind_into_iter!(has_iter) () $($inner)*); - let _: $crate::__private::HasIterator = has_iter; - while true { - $crate::pounded_var_names!(quote_bind_next_or_break!() () $($inner)*); - if _i > 0 { - $crate::quote_token!($tokens $sep); - } - _i += 1; - $crate::quote_each_token!($tokens $($inner)*); - } - }}; - ($tokens:ident $b3:tt $b2:tt # (( $($inner:tt)* )) $sep:tt * $a3:tt) => {}; - ($tokens:ident $b3:tt # ( $($inner:tt)* ) ($sep:tt) * $a2:tt $a3:tt) => {}; - ($tokens:ident # ( $($inner:tt)* ) * (*) $a1:tt $a2:tt $a3:tt) => { - // https://github.com/dtolnay/quote/issues/130 - $crate::quote_token!($tokens *); - }; - ($tokens:ident # ( $($inner:tt)* ) $sep:tt (*) $a1:tt $a2:tt $a3:tt) => {}; - - ($tokens:ident $b3:tt $b2:tt $b1:tt (#) $var:ident $a2:tt $a3:tt) => { - $crate::ToTokens::to_tokens(&$var, &mut $tokens); - }; - ($tokens:ident $b3:tt $b2:tt # ($var:ident) $a1:tt $a2:tt $a3:tt) => {}; - ($tokens:ident $b3:tt $b2:tt $b1:tt ($curr:tt) $a1:tt $a2:tt $a3:tt) => { - $crate::quote_token!($tokens $curr); - }; -} - -#[macro_export] -#[doc(hidden)] -macro_rules! quote_token_with_context_spanned { - ($tokens:ident $span:ident $b3:tt $b2:tt $b1:tt @ $a1:tt $a2:tt $a3:tt) => {}; - - ($tokens:ident $span:ident $b3:tt $b2:tt $b1:tt (#) ( $($inner:tt)* ) * $a3:tt) => {{ - use $crate::__private::ext::*; - let has_iter = $crate::__private::ThereIsNoIteratorInRepetition; - $crate::pounded_var_names!(quote_bind_into_iter!(has_iter) () $($inner)*); - let _: $crate::__private::HasIterator = has_iter; - // This is `while true` instead of `loop` because if there are no - // iterators used inside of this repetition then the body would not - // contain any `break`, so the compiler would emit unreachable code - // warnings on anything below the loop. We use has_iter to detect and - // fail to compile when there are no iterators, so here we just work - // around the unneeded extra warning. 
- while true { - $crate::pounded_var_names!(quote_bind_next_or_break!() () $($inner)*); - $crate::quote_each_token_spanned!($tokens $span $($inner)*); - } - }}; - ($tokens:ident $span:ident $b3:tt $b2:tt # (( $($inner:tt)* )) * $a2:tt $a3:tt) => {}; - ($tokens:ident $span:ident $b3:tt # ( $($inner:tt)* ) (*) $a1:tt $a2:tt $a3:tt) => {}; - - ($tokens:ident $span:ident $b3:tt $b2:tt $b1:tt (#) ( $($inner:tt)* ) $sep:tt *) => {{ - use $crate::__private::ext::*; - let mut _i = 0usize; - let has_iter = $crate::__private::ThereIsNoIteratorInRepetition; - $crate::pounded_var_names!(quote_bind_into_iter!(has_iter) () $($inner)*); - let _: $crate::__private::HasIterator = has_iter; - while true { - $crate::pounded_var_names!(quote_bind_next_or_break!() () $($inner)*); - if _i > 0 { - $crate::quote_token_spanned!($tokens $span $sep); - } - _i += 1; - $crate::quote_each_token_spanned!($tokens $span $($inner)*); - } - }}; - ($tokens:ident $span:ident $b3:tt $b2:tt # (( $($inner:tt)* )) $sep:tt * $a3:tt) => {}; - ($tokens:ident $span:ident $b3:tt # ( $($inner:tt)* ) ($sep:tt) * $a2:tt $a3:tt) => {}; - ($tokens:ident $span:ident # ( $($inner:tt)* ) * (*) $a1:tt $a2:tt $a3:tt) => { - // https://github.com/dtolnay/quote/issues/130 - $crate::quote_token_spanned!($tokens $span *); - }; - ($tokens:ident $span:ident # ( $($inner:tt)* ) $sep:tt (*) $a1:tt $a2:tt $a3:tt) => {}; - - ($tokens:ident $span:ident $b3:tt $b2:tt $b1:tt (#) $var:ident $a2:tt $a3:tt) => { - $crate::ToTokens::to_tokens(&$var, &mut $tokens); - }; - ($tokens:ident $span:ident $b3:tt $b2:tt # ($var:ident) $a1:tt $a2:tt $a3:tt) => {}; - ($tokens:ident $span:ident $b3:tt $b2:tt $b1:tt ($curr:tt) $a1:tt $a2:tt $a3:tt) => { - $crate::quote_token_spanned!($tokens $span $curr); - }; -} - -#[macro_export] -#[doc(hidden)] -macro_rules! quote_token { - ($tokens:ident ( $($inner:tt)* )) => { - $crate::__private::push_group( - &mut $tokens, - $crate::__private::Delimiter::Parenthesis, - $crate::quote!($($inner)*), - ); - }; - - ($tokens:ident [ $($inner:tt)* ]) => { - $crate::__private::push_group( - &mut $tokens, - $crate::__private::Delimiter::Bracket, - $crate::quote!($($inner)*), - ); - }; - - ($tokens:ident { $($inner:tt)* }) => { - $crate::__private::push_group( - &mut $tokens, - $crate::__private::Delimiter::Brace, - $crate::quote!($($inner)*), - ); - }; - - ($tokens:ident +) => { - $crate::__private::push_add(&mut $tokens); - }; - - ($tokens:ident +=) => { - $crate::__private::push_add_eq(&mut $tokens); - }; - - ($tokens:ident &) => { - $crate::__private::push_and(&mut $tokens); - }; - - ($tokens:ident &&) => { - $crate::__private::push_and_and(&mut $tokens); - }; - - ($tokens:ident &=) => { - $crate::__private::push_and_eq(&mut $tokens); - }; - - ($tokens:ident @) => { - $crate::__private::push_at(&mut $tokens); - }; - - ($tokens:ident !) => { - $crate::__private::push_bang(&mut $tokens); - }; - - ($tokens:ident ^) => { - $crate::__private::push_caret(&mut $tokens); - }; - - ($tokens:ident ^=) => { - $crate::__private::push_caret_eq(&mut $tokens); - }; - - ($tokens:ident :) => { - $crate::__private::push_colon(&mut $tokens); - }; - - ($tokens:ident ::) => { - $crate::__private::push_colon2(&mut $tokens); - }; - - ($tokens:ident ,) => { - $crate::__private::push_comma(&mut $tokens); - }; - - ($tokens:ident /) => { - $crate::__private::push_div(&mut $tokens); - }; - - ($tokens:ident /=) => { - $crate::__private::push_div_eq(&mut $tokens); - }; - - ($tokens:ident .) => { - $crate::__private::push_dot(&mut $tokens); - }; - - ($tokens:ident ..) 
=> { - $crate::__private::push_dot2(&mut $tokens); - }; - - ($tokens:ident ...) => { - $crate::__private::push_dot3(&mut $tokens); - }; - - ($tokens:ident ..=) => { - $crate::__private::push_dot_dot_eq(&mut $tokens); - }; - - ($tokens:ident =) => { - $crate::__private::push_eq(&mut $tokens); - }; - - ($tokens:ident ==) => { - $crate::__private::push_eq_eq(&mut $tokens); - }; - - ($tokens:ident >=) => { - $crate::__private::push_ge(&mut $tokens); - }; - - ($tokens:ident >) => { - $crate::__private::push_gt(&mut $tokens); - }; - - ($tokens:ident <=) => { - $crate::__private::push_le(&mut $tokens); - }; - - ($tokens:ident <) => { - $crate::__private::push_lt(&mut $tokens); - }; - - ($tokens:ident *=) => { - $crate::__private::push_mul_eq(&mut $tokens); - }; - - ($tokens:ident !=) => { - $crate::__private::push_ne(&mut $tokens); - }; - - ($tokens:ident |) => { - $crate::__private::push_or(&mut $tokens); - }; - - ($tokens:ident |=) => { - $crate::__private::push_or_eq(&mut $tokens); - }; - - ($tokens:ident ||) => { - $crate::__private::push_or_or(&mut $tokens); - }; - - ($tokens:ident #) => { - $crate::__private::push_pound(&mut $tokens); - }; - - ($tokens:ident ?) => { - $crate::__private::push_question(&mut $tokens); - }; - - ($tokens:ident ->) => { - $crate::__private::push_rarrow(&mut $tokens); - }; - - ($tokens:ident <-) => { - $crate::__private::push_larrow(&mut $tokens); - }; - - ($tokens:ident %) => { - $crate::__private::push_rem(&mut $tokens); - }; - - ($tokens:ident %=) => { - $crate::__private::push_rem_eq(&mut $tokens); - }; - - ($tokens:ident =>) => { - $crate::__private::push_fat_arrow(&mut $tokens); - }; - - ($tokens:ident ;) => { - $crate::__private::push_semi(&mut $tokens); - }; - - ($tokens:ident <<) => { - $crate::__private::push_shl(&mut $tokens); - }; - - ($tokens:ident <<=) => { - $crate::__private::push_shl_eq(&mut $tokens); - }; - - ($tokens:ident >>) => { - $crate::__private::push_shr(&mut $tokens); - }; - - ($tokens:ident >>=) => { - $crate::__private::push_shr_eq(&mut $tokens); - }; - - ($tokens:ident *) => { - $crate::__private::push_star(&mut $tokens); - }; - - ($tokens:ident -) => { - $crate::__private::push_sub(&mut $tokens); - }; - - ($tokens:ident -=) => { - $crate::__private::push_sub_eq(&mut $tokens); - }; - - ($tokens:ident $ident:ident) => { - $crate::__private::push_ident(&mut $tokens, stringify!($ident)); - }; - - ($tokens:ident $other:tt) => { - $crate::__private::parse(&mut $tokens, stringify!($other)); - }; -} - -#[macro_export] -#[doc(hidden)] -macro_rules! 
quote_token_spanned { - ($tokens:ident $span:ident ( $($inner:tt)* )) => { - $crate::__private::push_group_spanned( - &mut $tokens, - $span, - $crate::__private::Delimiter::Parenthesis, - $crate::quote_spanned!($span=> $($inner)*), - ); - }; - - ($tokens:ident $span:ident [ $($inner:tt)* ]) => { - $crate::__private::push_group_spanned( - &mut $tokens, - $span, - $crate::__private::Delimiter::Bracket, - $crate::quote_spanned!($span=> $($inner)*), - ); - }; - - ($tokens:ident $span:ident { $($inner:tt)* }) => { - $crate::__private::push_group_spanned( - &mut $tokens, - $span, - $crate::__private::Delimiter::Brace, - $crate::quote_spanned!($span=> $($inner)*), - ); - }; - - ($tokens:ident $span:ident +) => { - $crate::__private::push_add_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident +=) => { - $crate::__private::push_add_eq_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident &) => { - $crate::__private::push_and_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident &&) => { - $crate::__private::push_and_and_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident &=) => { - $crate::__private::push_and_eq_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident @) => { - $crate::__private::push_at_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident !) => { - $crate::__private::push_bang_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident ^) => { - $crate::__private::push_caret_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident ^=) => { - $crate::__private::push_caret_eq_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident :) => { - $crate::__private::push_colon_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident ::) => { - $crate::__private::push_colon2_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident ,) => { - $crate::__private::push_comma_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident /) => { - $crate::__private::push_div_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident /=) => { - $crate::__private::push_div_eq_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident .) => { - $crate::__private::push_dot_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident ..) => { - $crate::__private::push_dot2_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident ...) 
=> { - $crate::__private::push_dot3_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident ..=) => { - $crate::__private::push_dot_dot_eq_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident =) => { - $crate::__private::push_eq_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident ==) => { - $crate::__private::push_eq_eq_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident >=) => { - $crate::__private::push_ge_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident >) => { - $crate::__private::push_gt_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident <=) => { - $crate::__private::push_le_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident <) => { - $crate::__private::push_lt_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident *=) => { - $crate::__private::push_mul_eq_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident !=) => { - $crate::__private::push_ne_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident |) => { - $crate::__private::push_or_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident |=) => { - $crate::__private::push_or_eq_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident ||) => { - $crate::__private::push_or_or_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident #) => { - $crate::__private::push_pound_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident ?) => { - $crate::__private::push_question_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident ->) => { - $crate::__private::push_rarrow_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident <-) => { - $crate::__private::push_larrow_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident %) => { - $crate::__private::push_rem_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident %=) => { - $crate::__private::push_rem_eq_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident =>) => { - $crate::__private::push_fat_arrow_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident ;) => { - $crate::__private::push_semi_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident <<) => { - $crate::__private::push_shl_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident <<=) => { - $crate::__private::push_shl_eq_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident >>) => { - $crate::__private::push_shr_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident >>=) => { - $crate::__private::push_shr_eq_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident *) => { - $crate::__private::push_star_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident -) => { - $crate::__private::push_sub_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident -=) => { - $crate::__private::push_sub_eq_spanned(&mut $tokens, $span); - }; - - ($tokens:ident $span:ident $ident:ident) => { - $crate::__private::push_ident_spanned(&mut $tokens, $span, stringify!($ident)); - }; - - ($tokens:ident $span:ident $other:tt) => { - $crate::__private::parse_spanned(&mut $tokens, $span, stringify!($other)); - }; -} diff --git a/vendor/quote/src/runtime.rs b/vendor/quote/src/runtime.rs deleted file mode 100644 index db3b6a93..00000000 --- a/vendor/quote/src/runtime.rs +++ /dev/null @@ -1,402 +0,0 @@ -use crate::{IdentFragment, ToTokens, TokenStreamExt}; -use std::fmt; -use std::ops::BitOr; - -pub use proc_macro2::*; - -pub struct HasIterator; // True 
-pub struct ThereIsNoIteratorInRepetition; // False - -impl BitOr for ThereIsNoIteratorInRepetition { - type Output = ThereIsNoIteratorInRepetition; - fn bitor(self, _rhs: ThereIsNoIteratorInRepetition) -> ThereIsNoIteratorInRepetition { - ThereIsNoIteratorInRepetition - } -} - -impl BitOr for HasIterator { - type Output = HasIterator; - fn bitor(self, _rhs: ThereIsNoIteratorInRepetition) -> HasIterator { - HasIterator - } -} - -impl BitOr for ThereIsNoIteratorInRepetition { - type Output = HasIterator; - fn bitor(self, _rhs: HasIterator) -> HasIterator { - HasIterator - } -} - -impl BitOr for HasIterator { - type Output = HasIterator; - fn bitor(self, _rhs: HasIterator) -> HasIterator { - HasIterator - } -} - -/// Extension traits used by the implementation of `quote!`. These are defined -/// in separate traits, rather than as a single trait due to ambiguity issues. -/// -/// These traits expose a `quote_into_iter` method which should allow calling -/// whichever impl happens to be applicable. Calling that method repeatedly on -/// the returned value should be idempotent. -pub mod ext { - use super::RepInterp; - use super::{HasIterator as HasIter, ThereIsNoIteratorInRepetition as DoesNotHaveIter}; - use crate::ToTokens; - use std::collections::btree_set::{self, BTreeSet}; - use std::slice; - - /// Extension trait providing the `quote_into_iter` method on iterators. - pub trait RepIteratorExt: Iterator + Sized { - fn quote_into_iter(self) -> (Self, HasIter) { - (self, HasIter) - } - } - - impl RepIteratorExt for T {} - - /// Extension trait providing the `quote_into_iter` method for - /// non-iterable types. These types interpolate the same value in each - /// iteration of the repetition. - pub trait RepToTokensExt { - /// Pretend to be an iterator for the purposes of `quote_into_iter`. - /// This allows repeated calls to `quote_into_iter` to continue - /// correctly returning DoesNotHaveIter. - fn next(&self) -> Option<&Self> { - Some(self) - } - - fn quote_into_iter(&self) -> (&Self, DoesNotHaveIter) { - (self, DoesNotHaveIter) - } - } - - impl RepToTokensExt for T {} - - /// Extension trait providing the `quote_into_iter` method for types that - /// can be referenced as an iterator. - pub trait RepAsIteratorExt<'q> { - type Iter: Iterator; - - fn quote_into_iter(&'q self) -> (Self::Iter, HasIter); - } - - impl<'q, 'a, T: RepAsIteratorExt<'q> + ?Sized> RepAsIteratorExt<'q> for &'a T { - type Iter = T::Iter; - - fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) { - ::quote_into_iter(*self) - } - } - - impl<'q, 'a, T: RepAsIteratorExt<'q> + ?Sized> RepAsIteratorExt<'q> for &'a mut T { - type Iter = T::Iter; - - fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) { - ::quote_into_iter(*self) - } - } - - impl<'q, T: 'q> RepAsIteratorExt<'q> for [T] { - type Iter = slice::Iter<'q, T>; - - fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) { - (self.iter(), HasIter) - } - } - - impl<'q, T: 'q> RepAsIteratorExt<'q> for Vec { - type Iter = slice::Iter<'q, T>; - - fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) { - (self.iter(), HasIter) - } - } - - impl<'q, T: 'q> RepAsIteratorExt<'q> for BTreeSet { - type Iter = btree_set::Iter<'q, T>; - - fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) { - (self.iter(), HasIter) - } - } - - macro_rules! 
array_rep_slice { - ($($l:tt)*) => { - $( - impl<'q, T: 'q> RepAsIteratorExt<'q> for [T; $l] { - type Iter = slice::Iter<'q, T>; - - fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) { - (self.iter(), HasIter) - } - } - )* - } - } - - array_rep_slice!( - 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 - 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 - ); - - impl<'q, T: RepAsIteratorExt<'q>> RepAsIteratorExt<'q> for RepInterp { - type Iter = T::Iter; - - fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) { - self.0.quote_into_iter() - } - } -} - -// Helper type used within interpolations to allow for repeated binding names. -// Implements the relevant traits, and exports a dummy `next()` method. -#[derive(Copy, Clone)] -pub struct RepInterp(pub T); - -impl RepInterp { - // This method is intended to look like `Iterator::next`, and is called when - // a name is bound multiple times, as the previous binding will shadow the - // original `Iterator` object. This allows us to avoid advancing the - // iterator multiple times per iteration. - pub fn next(self) -> Option { - Some(self.0) - } -} - -impl Iterator for RepInterp { - type Item = T::Item; - - fn next(&mut self) -> Option { - self.0.next() - } -} - -impl ToTokens for RepInterp { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.0.to_tokens(tokens); - } -} - -pub fn push_group(tokens: &mut TokenStream, delimiter: Delimiter, inner: TokenStream) { - tokens.append(Group::new(delimiter, inner)); -} - -pub fn push_group_spanned( - tokens: &mut TokenStream, - span: Span, - delimiter: Delimiter, - inner: TokenStream, -) { - let mut g = Group::new(delimiter, inner); - g.set_span(span); - tokens.append(g); -} - -pub fn parse(tokens: &mut TokenStream, s: &str) { - let s: TokenStream = s.parse().expect("invalid token stream"); - tokens.extend(s); -} - -pub fn parse_spanned(tokens: &mut TokenStream, span: Span, s: &str) { - let s: TokenStream = s.parse().expect("invalid token stream"); - tokens.extend(s.into_iter().map(|mut t| { - t.set_span(span); - t - })); -} - -pub fn push_ident(tokens: &mut TokenStream, s: &str) { - // Optimization over `mk_ident`, as `s` is guaranteed to be a valid ident. - // - // FIXME: When `Ident::new_raw` becomes stable, this method should be - // updated to call it when available. - if s.starts_with("r#") { - parse(tokens, s); - } else { - tokens.append(Ident::new(s, Span::call_site())); - } -} - -pub fn push_ident_spanned(tokens: &mut TokenStream, span: Span, s: &str) { - // Optimization over `mk_ident`, as `s` is guaranteed to be a valid ident. - // - // FIXME: When `Ident::new_raw` becomes stable, this method should be - // updated to call it when available. - if s.starts_with("r#") { - parse_spanned(tokens, span, s); - } else { - tokens.append(Ident::new(s, span)); - } -} - -macro_rules! 
push_punct { - ($name:ident $spanned:ident $char1:tt) => { - pub fn $name(tokens: &mut TokenStream) { - tokens.append(Punct::new($char1, Spacing::Alone)); - } - pub fn $spanned(tokens: &mut TokenStream, span: Span) { - let mut punct = Punct::new($char1, Spacing::Alone); - punct.set_span(span); - tokens.append(punct); - } - }; - ($name:ident $spanned:ident $char1:tt $char2:tt) => { - pub fn $name(tokens: &mut TokenStream) { - tokens.append(Punct::new($char1, Spacing::Joint)); - tokens.append(Punct::new($char2, Spacing::Alone)); - } - pub fn $spanned(tokens: &mut TokenStream, span: Span) { - let mut punct = Punct::new($char1, Spacing::Joint); - punct.set_span(span); - tokens.append(punct); - let mut punct = Punct::new($char2, Spacing::Alone); - punct.set_span(span); - tokens.append(punct); - } - }; - ($name:ident $spanned:ident $char1:tt $char2:tt $char3:tt) => { - pub fn $name(tokens: &mut TokenStream) { - tokens.append(Punct::new($char1, Spacing::Joint)); - tokens.append(Punct::new($char2, Spacing::Joint)); - tokens.append(Punct::new($char3, Spacing::Alone)); - } - pub fn $spanned(tokens: &mut TokenStream, span: Span) { - let mut punct = Punct::new($char1, Spacing::Joint); - punct.set_span(span); - tokens.append(punct); - let mut punct = Punct::new($char2, Spacing::Joint); - punct.set_span(span); - tokens.append(punct); - let mut punct = Punct::new($char3, Spacing::Alone); - punct.set_span(span); - tokens.append(punct); - } - }; -} - -push_punct!(push_add push_add_spanned '+'); -push_punct!(push_add_eq push_add_eq_spanned '+' '='); -push_punct!(push_and push_and_spanned '&'); -push_punct!(push_and_and push_and_and_spanned '&' '&'); -push_punct!(push_and_eq push_and_eq_spanned '&' '='); -push_punct!(push_at push_at_spanned '@'); -push_punct!(push_bang push_bang_spanned '!'); -push_punct!(push_caret push_caret_spanned '^'); -push_punct!(push_caret_eq push_caret_eq_spanned '^' '='); -push_punct!(push_colon push_colon_spanned ':'); -push_punct!(push_colon2 push_colon2_spanned ':' ':'); -push_punct!(push_comma push_comma_spanned ','); -push_punct!(push_div push_div_spanned '/'); -push_punct!(push_div_eq push_div_eq_spanned '/' '='); -push_punct!(push_dot push_dot_spanned '.'); -push_punct!(push_dot2 push_dot2_spanned '.' '.'); -push_punct!(push_dot3 push_dot3_spanned '.' '.' '.'); -push_punct!(push_dot_dot_eq push_dot_dot_eq_spanned '.' '.' '='); -push_punct!(push_eq push_eq_spanned '='); -push_punct!(push_eq_eq push_eq_eq_spanned '=' '='); -push_punct!(push_ge push_ge_spanned '>' '='); -push_punct!(push_gt push_gt_spanned '>'); -push_punct!(push_le push_le_spanned '<' '='); -push_punct!(push_lt push_lt_spanned '<'); -push_punct!(push_mul_eq push_mul_eq_spanned '*' '='); -push_punct!(push_ne push_ne_spanned '!' 
'='); -push_punct!(push_or push_or_spanned '|'); -push_punct!(push_or_eq push_or_eq_spanned '|' '='); -push_punct!(push_or_or push_or_or_spanned '|' '|'); -push_punct!(push_pound push_pound_spanned '#'); -push_punct!(push_question push_question_spanned '?'); -push_punct!(push_rarrow push_rarrow_spanned '-' '>'); -push_punct!(push_larrow push_larrow_spanned '<' '-'); -push_punct!(push_rem push_rem_spanned '%'); -push_punct!(push_rem_eq push_rem_eq_spanned '%' '='); -push_punct!(push_fat_arrow push_fat_arrow_spanned '=' '>'); -push_punct!(push_semi push_semi_spanned ';'); -push_punct!(push_shl push_shl_spanned '<' '<'); -push_punct!(push_shl_eq push_shl_eq_spanned '<' '<' '='); -push_punct!(push_shr push_shr_spanned '>' '>'); -push_punct!(push_shr_eq push_shr_eq_spanned '>' '>' '='); -push_punct!(push_star push_star_spanned '*'); -push_punct!(push_sub push_sub_spanned '-'); -push_punct!(push_sub_eq push_sub_eq_spanned '-' '='); - -// Helper method for constructing identifiers from the `format_ident!` macro, -// handling `r#` prefixes. -// -// Directly parsing the input string may produce a valid identifier, -// although the input string was invalid, due to ignored characters such as -// whitespace and comments. Instead, we always create a non-raw identifier -// to validate that the string is OK, and only parse again if needed. -pub fn mk_ident(id: &str, span: Option) -> Ident { - let span = span.unwrap_or_else(Span::call_site); - - let is_raw = id.starts_with("r#"); - let unraw = Ident::new(if is_raw { &id[2..] } else { id }, span); - if !is_raw { - return unraw; - } - - // At this point, the identifier is raw, and the unraw-ed version of it was - // successfully converted into an identifier. Try to produce a valid raw - // identifier by running the `TokenStream` parser, and unwrapping the first - // token as an `Ident`. - // - // FIXME: When `Ident::new_raw` becomes stable, this method should be - // updated to call it when available. - if let Ok(ts) = id.parse::() { - let mut iter = ts.into_iter(); - if let (Some(TokenTree::Ident(mut id)), None) = (iter.next(), iter.next()) { - id.set_span(span); - return id; - } - } - - panic!("not allowed as a raw identifier: `{}`", id); -} - -// Adapts from `IdentFragment` to `fmt::Display` for use by the `format_ident!` -// macro, and exposes span information from these fragments. -// -// This struct also has forwarding implementations of the formatting traits -// `Octal`, `LowerHex`, `UpperHex`, and `Binary` to allow for their use within -// `format_ident!`. 
-#[derive(Copy, Clone)] -pub struct IdentFragmentAdapter(pub T); - -impl IdentFragmentAdapter { - pub fn span(&self) -> Option { - self.0.span() - } -} - -impl fmt::Display for IdentFragmentAdapter { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - IdentFragment::fmt(&self.0, f) - } -} - -impl fmt::Octal for IdentFragmentAdapter { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Octal::fmt(&self.0, f) - } -} - -impl fmt::LowerHex for IdentFragmentAdapter { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::LowerHex::fmt(&self.0, f) - } -} - -impl fmt::UpperHex for IdentFragmentAdapter { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::UpperHex::fmt(&self.0, f) - } -} - -impl fmt::Binary for IdentFragmentAdapter { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Binary::fmt(&self.0, f) - } -} diff --git a/vendor/quote/src/spanned.rs b/vendor/quote/src/spanned.rs deleted file mode 100644 index 55168bdd..00000000 --- a/vendor/quote/src/spanned.rs +++ /dev/null @@ -1,42 +0,0 @@ -use crate::ToTokens; -use proc_macro2::{Span, TokenStream}; - -pub trait Spanned { - fn __span(&self) -> Span; -} - -impl Spanned for Span { - fn __span(&self) -> Span { - *self - } -} - -impl Spanned for T { - fn __span(&self) -> Span { - join_spans(self.into_token_stream()) - } -} - -fn join_spans(tokens: TokenStream) -> Span { - let mut iter = tokens.into_iter().filter_map(|tt| { - // FIXME: This shouldn't be required, since optimally spans should - // never be invalid. This filter_map can probably be removed when - // https://github.com/rust-lang/rust/issues/43081 is resolved. - let span = tt.span(); - let debug = format!("{:?}", span); - if debug.ends_with("bytes(0..0)") { - None - } else { - Some(span) - } - }); - - let first = match iter.next() { - Some(span) => span, - None => return Span::call_site(), - }; - - iter.fold(None, |_prev, next| Some(next)) - .and_then(|last| first.join(last)) - .unwrap_or(first) -} diff --git a/vendor/quote/src/to_tokens.rs b/vendor/quote/src/to_tokens.rs deleted file mode 100644 index 7f980839..00000000 --- a/vendor/quote/src/to_tokens.rs +++ /dev/null @@ -1,209 +0,0 @@ -use super::TokenStreamExt; - -use std::borrow::Cow; -use std::iter; -use std::rc::Rc; - -use proc_macro2::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree}; - -/// Types that can be interpolated inside a `quote!` invocation. -/// -/// [`quote!`]: macro.quote.html -pub trait ToTokens { - /// Write `self` to the given `TokenStream`. - /// - /// The token append methods provided by the [`TokenStreamExt`] extension - /// trait may be useful for implementing `ToTokens`. 
- /// - /// [`TokenStreamExt`]: trait.TokenStreamExt.html - /// - /// # Example - /// - /// Example implementation for a struct representing Rust paths like - /// `std::cmp::PartialEq`: - /// - /// ``` - /// use proc_macro2::{TokenTree, Spacing, Span, Punct, TokenStream}; - /// use quote::{TokenStreamExt, ToTokens}; - /// - /// pub struct Path { - /// pub global: bool, - /// pub segments: Vec, - /// } - /// - /// impl ToTokens for Path { - /// fn to_tokens(&self, tokens: &mut TokenStream) { - /// for (i, segment) in self.segments.iter().enumerate() { - /// if i > 0 || self.global { - /// // Double colon `::` - /// tokens.append(Punct::new(':', Spacing::Joint)); - /// tokens.append(Punct::new(':', Spacing::Alone)); - /// } - /// segment.to_tokens(tokens); - /// } - /// } - /// } - /// # - /// # pub struct PathSegment; - /// # - /// # impl ToTokens for PathSegment { - /// # fn to_tokens(&self, tokens: &mut TokenStream) { - /// # unimplemented!() - /// # } - /// # } - /// ``` - fn to_tokens(&self, tokens: &mut TokenStream); - - /// Convert `self` directly into a `TokenStream` object. - /// - /// This method is implicitly implemented using `to_tokens`, and acts as a - /// convenience method for consumers of the `ToTokens` trait. - fn to_token_stream(&self) -> TokenStream { - let mut tokens = TokenStream::new(); - self.to_tokens(&mut tokens); - tokens - } - - /// Convert `self` directly into a `TokenStream` object. - /// - /// This method is implicitly implemented using `to_tokens`, and acts as a - /// convenience method for consumers of the `ToTokens` trait. - fn into_token_stream(self) -> TokenStream - where - Self: Sized, - { - self.to_token_stream() - } -} - -impl<'a, T: ?Sized + ToTokens> ToTokens for &'a T { - fn to_tokens(&self, tokens: &mut TokenStream) { - (**self).to_tokens(tokens); - } -} - -impl<'a, T: ?Sized + ToTokens> ToTokens for &'a mut T { - fn to_tokens(&self, tokens: &mut TokenStream) { - (**self).to_tokens(tokens); - } -} - -impl<'a, T: ?Sized + ToOwned + ToTokens> ToTokens for Cow<'a, T> { - fn to_tokens(&self, tokens: &mut TokenStream) { - (**self).to_tokens(tokens); - } -} - -impl ToTokens for Box { - fn to_tokens(&self, tokens: &mut TokenStream) { - (**self).to_tokens(tokens); - } -} - -impl ToTokens for Rc { - fn to_tokens(&self, tokens: &mut TokenStream) { - (**self).to_tokens(tokens); - } -} - -impl ToTokens for Option { - fn to_tokens(&self, tokens: &mut TokenStream) { - if let Some(ref t) = *self { - t.to_tokens(tokens); - } - } -} - -impl ToTokens for str { - fn to_tokens(&self, tokens: &mut TokenStream) { - tokens.append(Literal::string(self)); - } -} - -impl ToTokens for String { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.as_str().to_tokens(tokens); - } -} - -macro_rules! primitive { - ($($t:ident => $name:ident)*) => ($( - impl ToTokens for $t { - fn to_tokens(&self, tokens: &mut TokenStream) { - tokens.append(Literal::$name(*self)); - } - } - )*) -} - -primitive! 
{ - i8 => i8_suffixed - i16 => i16_suffixed - i32 => i32_suffixed - i64 => i64_suffixed - i128 => i128_suffixed - isize => isize_suffixed - - u8 => u8_suffixed - u16 => u16_suffixed - u32 => u32_suffixed - u64 => u64_suffixed - u128 => u128_suffixed - usize => usize_suffixed - - f32 => f32_suffixed - f64 => f64_suffixed -} - -impl ToTokens for char { - fn to_tokens(&self, tokens: &mut TokenStream) { - tokens.append(Literal::character(*self)); - } -} - -impl ToTokens for bool { - fn to_tokens(&self, tokens: &mut TokenStream) { - let word = if *self { "true" } else { "false" }; - tokens.append(Ident::new(word, Span::call_site())); - } -} - -impl ToTokens for Group { - fn to_tokens(&self, tokens: &mut TokenStream) { - tokens.append(self.clone()); - } -} - -impl ToTokens for Ident { - fn to_tokens(&self, tokens: &mut TokenStream) { - tokens.append(self.clone()); - } -} - -impl ToTokens for Punct { - fn to_tokens(&self, tokens: &mut TokenStream) { - tokens.append(self.clone()); - } -} - -impl ToTokens for Literal { - fn to_tokens(&self, tokens: &mut TokenStream) { - tokens.append(self.clone()); - } -} - -impl ToTokens for TokenTree { - fn to_tokens(&self, dst: &mut TokenStream) { - dst.append(self.clone()); - } -} - -impl ToTokens for TokenStream { - fn to_tokens(&self, dst: &mut TokenStream) { - dst.extend(iter::once(self.clone())); - } - - fn into_token_stream(self) -> TokenStream { - self - } -} diff --git a/vendor/quote/tests/compiletest.rs b/vendor/quote/tests/compiletest.rs deleted file mode 100644 index f9aea23b..00000000 --- a/vendor/quote/tests/compiletest.rs +++ /dev/null @@ -1,6 +0,0 @@ -#[rustversion::attr(not(nightly), ignore)] -#[test] -fn ui() { - let t = trybuild::TestCases::new(); - t.compile_fail("tests/ui/*.rs"); -} diff --git a/vendor/quote/tests/test.rs b/vendor/quote/tests/test.rs deleted file mode 100644 index d5a3490c..00000000 --- a/vendor/quote/tests/test.rs +++ /dev/null @@ -1,459 +0,0 @@ -#![cfg_attr(feature = "cargo-clippy", allow(blacklisted_name))] - -use std::borrow::Cow; -use std::collections::BTreeSet; - -use proc_macro2::{Ident, Span, TokenStream}; -use quote::{format_ident, quote, quote_spanned, TokenStreamExt}; - -struct X; - -impl quote::ToTokens for X { - fn to_tokens(&self, tokens: &mut TokenStream) { - tokens.append(Ident::new("X", Span::call_site())); - } -} - -#[test] -fn test_quote_impl() { - let tokens = quote! { - impl<'a, T: ToTokens> ToTokens for &'a T { - fn to_tokens(&self, tokens: &mut TokenStream) { - (**self).to_tokens(tokens) - } - } - }; - - let expected = concat!( - "impl < 'a , T : ToTokens > ToTokens for & 'a T { ", - "fn to_tokens (& self , tokens : & mut TokenStream) { ", - "(* * self) . to_tokens (tokens) ", - "} ", - "}" - ); - - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_quote_spanned_impl() { - let span = Span::call_site(); - let tokens = quote_spanned! {span=> - impl<'a, T: ToTokens> ToTokens for &'a T { - fn to_tokens(&self, tokens: &mut TokenStream) { - (**self).to_tokens(tokens) - } - } - }; - - let expected = concat!( - "impl < 'a , T : ToTokens > ToTokens for & 'a T { ", - "fn to_tokens (& self , tokens : & mut TokenStream) { ", - "(* * self) . 
to_tokens (tokens) ", - "} ", - "}" - ); - - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_substitution() { - let x = X; - let tokens = quote!(#x <#x> (#x) [#x] {#x}); - - let expected = "X < X > (X) [X] { X }"; - - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_iter() { - let primes = &[X, X, X, X]; - - assert_eq!("X X X X", quote!(#(#primes)*).to_string()); - - assert_eq!("X , X , X , X ,", quote!(#(#primes,)*).to_string()); - - assert_eq!("X , X , X , X", quote!(#(#primes),*).to_string()); -} - -#[test] -fn test_advanced() { - let generics = quote!( <'a, T> ); - - let where_clause = quote!( where T: Serialize ); - - let field_ty = quote!(String); - - let item_ty = quote!(Cow<'a, str>); - - let path = quote!(SomeTrait::serialize_with); - - let value = quote!(self.x); - - let tokens = quote! { - struct SerializeWith #generics #where_clause { - value: &'a #field_ty, - phantom: ::std::marker::PhantomData<#item_ty>, - } - - impl #generics ::serde::Serialize for SerializeWith #generics #where_clause { - fn serialize(&self, s: &mut S) -> Result<(), S::Error> - where S: ::serde::Serializer - { - #path(self.value, s) - } - } - - SerializeWith { - value: #value, - phantom: ::std::marker::PhantomData::<#item_ty>, - } - }; - - let expected = concat!( - "struct SerializeWith < 'a , T > where T : Serialize { ", - "value : & 'a String , ", - "phantom : :: std :: marker :: PhantomData < Cow < 'a , str > > , ", - "} ", - "impl < 'a , T > :: serde :: Serialize for SerializeWith < 'a , T > where T : Serialize { ", - "fn serialize < S > (& self , s : & mut S) -> Result < () , S :: Error > ", - "where S : :: serde :: Serializer ", - "{ ", - "SomeTrait :: serialize_with (self . value , s) ", - "} ", - "} ", - "SerializeWith { ", - "value : self . x , ", - "phantom : :: std :: marker :: PhantomData :: < Cow < 'a , str > > , ", - "}" - ); - - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_integer() { - let ii8 = -1i8; - let ii16 = -1i16; - let ii32 = -1i32; - let ii64 = -1i64; - let ii128 = -1i128; - let iisize = -1isize; - let uu8 = 1u8; - let uu16 = 1u16; - let uu32 = 1u32; - let uu64 = 1u64; - let uu128 = 1u128; - let uusize = 1usize; - - let tokens = quote! { - #ii8 #ii16 #ii32 #ii64 #ii128 #iisize - #uu8 #uu16 #uu32 #uu64 #uu128 #uusize - }; - let expected = "- 1i8 - 1i16 - 1i32 - 1i64 - 1i128 - 1isize 1u8 1u16 1u32 1u64 1u128 1usize"; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_floating() { - let e32 = 2.345f32; - - let e64 = 2.345f64; - - let tokens = quote! { - #e32 - #e64 - }; - let expected = concat!("2.345f32 2.345f64"); - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_char() { - let zero = '\0'; - let pound = '#'; - let quote = '"'; - let apost = '\''; - let newline = '\n'; - let heart = '\u{2764}'; - - let tokens = quote! 
{ - #zero #pound #quote #apost #newline #heart - }; - let expected = "'\\u{0}' '#' '\"' '\\'' '\\n' '\u{2764}'"; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_str() { - let s = "\0 a 'b \" c"; - let tokens = quote!(#s); - let expected = "\"\\u{0} a 'b \\\" c\""; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_string() { - let s = "\0 a 'b \" c".to_string(); - let tokens = quote!(#s); - let expected = "\"\\u{0} a 'b \\\" c\""; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_ident() { - let foo = Ident::new("Foo", Span::call_site()); - let bar = Ident::new(&format!("Bar{}", 7), Span::call_site()); - let tokens = quote!(struct #foo; enum #bar {}); - let expected = "struct Foo ; enum Bar7 { }"; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_duplicate() { - let ch = 'x'; - - let tokens = quote!(#ch #ch); - - let expected = "'x' 'x'"; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_fancy_repetition() { - let foo = vec!["a", "b"]; - let bar = vec![true, false]; - - let tokens = quote! { - #(#foo: #bar),* - }; - - let expected = r#""a" : true , "b" : false"#; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_nested_fancy_repetition() { - let nested = vec![vec!['a', 'b', 'c'], vec!['x', 'y', 'z']]; - - let tokens = quote! { - #( - #(#nested)* - ),* - }; - - let expected = "'a' 'b' 'c' , 'x' 'y' 'z'"; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_duplicate_name_repetition() { - let foo = &["a", "b"]; - - let tokens = quote! { - #(#foo: #foo),* - #(#foo: #foo),* - }; - - let expected = r#""a" : "a" , "b" : "b" "a" : "a" , "b" : "b""#; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_duplicate_name_repetition_no_copy() { - let foo = vec!["a".to_owned(), "b".to_owned()]; - - let tokens = quote! { - #(#foo: #foo),* - }; - - let expected = r#""a" : "a" , "b" : "b""#; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_btreeset_repetition() { - let mut set = BTreeSet::new(); - set.insert("a".to_owned()); - set.insert("b".to_owned()); - - let tokens = quote! { - #(#set: #set),* - }; - - let expected = r#""a" : "a" , "b" : "b""#; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_variable_name_conflict() { - // The implementation of `#(...),*` uses the variable `_i` but it should be - // fine, if a little confusing when debugging. - let _i = vec!['a', 'b']; - let tokens = quote! { #(#_i),* }; - let expected = "'a' , 'b'"; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_nonrep_in_repetition() { - let rep = vec!["a", "b"]; - let nonrep = "c"; - - let tokens = quote! { - #(#rep #rep : #nonrep #nonrep),* - }; - - let expected = r#""a" "a" : "c" "c" , "b" "b" : "c" "c""#; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_empty_quote() { - let tokens = quote!(); - assert_eq!("", tokens.to_string()); -} - -#[test] -fn test_box_str() { - let b = "str".to_owned().into_boxed_str(); - let tokens = quote! { #b }; - assert_eq!("\"str\"", tokens.to_string()); -} - -#[test] -fn test_cow() { - let owned: Cow = Cow::Owned(Ident::new("owned", Span::call_site())); - - let ident = Ident::new("borrowed", Span::call_site()); - let borrowed = Cow::Borrowed(&ident); - - let tokens = quote! 
{ #owned #borrowed }; - assert_eq!("owned borrowed", tokens.to_string()); -} - -#[test] -fn test_closure() { - fn field_i(i: usize) -> Ident { - format_ident!("__field{}", i) - } - - let fields = (0usize..3) - .map(field_i as fn(_) -> _) - .map(|var| quote! { #var }); - - let tokens = quote! { #(#fields)* }; - assert_eq!("__field0 __field1 __field2", tokens.to_string()); -} - -#[test] -fn test_append_tokens() { - let mut a = quote!(a); - let b = quote!(b); - a.append_all(b); - assert_eq!("a b", a.to_string()); -} - -#[test] -fn test_format_ident() { - let id0 = format_ident!("Aa"); - let id1 = format_ident!("Hello{x}", x = id0); - let id2 = format_ident!("Hello{x}", x = 5usize); - let id3 = format_ident!("Hello{}_{x}", id0, x = 10usize); - let id4 = format_ident!("Aa", span = Span::call_site()); - let id5 = format_ident!("Hello{}", Cow::Borrowed("World")); - - assert_eq!(id0, "Aa"); - assert_eq!(id1, "HelloAa"); - assert_eq!(id2, "Hello5"); - assert_eq!(id3, "HelloAa_10"); - assert_eq!(id4, "Aa"); - assert_eq!(id5, "HelloWorld"); -} - -#[test] -fn test_format_ident_strip_raw() { - let id = format_ident!("r#struct"); - let my_id = format_ident!("MyId{}", id); - let raw_my_id = format_ident!("r#MyId{}", id); - - assert_eq!(id, "r#struct"); - assert_eq!(my_id, "MyIdstruct"); - assert_eq!(raw_my_id, "r#MyIdstruct"); -} - -#[test] -fn test_outer_line_comment() { - let tokens = quote! { - /// doc - }; - let expected = "# [doc = r\" doc\"]"; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_inner_line_comment() { - let tokens = quote! { - //! doc - }; - let expected = "# ! [doc = r\" doc\"]"; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_outer_block_comment() { - let tokens = quote! { - /** doc */ - }; - let expected = "# [doc = r\" doc \"]"; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_inner_block_comment() { - let tokens = quote! { - /*! doc */ - }; - let expected = "# ! [doc = r\" doc \"]"; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_outer_attr() { - let tokens = quote! { - #[inline] - }; - let expected = "# [inline]"; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_inner_attr() { - let tokens = quote! { - #![no_std] - }; - let expected = "# ! [no_std]"; - assert_eq!(expected, tokens.to_string()); -} - -// https://github.com/dtolnay/quote/issues/130 -#[test] -fn test_star_after_repetition() { - let c = vec!['0', '1']; - let tokens = quote! { - #( - f(#c); - )* - *out = None; - }; - let expected = "f ('0') ; f ('1') ; * out = None ;"; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_quote_raw_id() { - let id = quote!(r#raw_id); - assert_eq!(id.to_string(), "r#raw_id"); -} diff --git a/vendor/quote/tests/ui/does-not-have-iter-interpolated-dup.rs b/vendor/quote/tests/ui/does-not-have-iter-interpolated-dup.rs deleted file mode 100644 index 0a39f415..00000000 --- a/vendor/quote/tests/ui/does-not-have-iter-interpolated-dup.rs +++ /dev/null @@ -1,9 +0,0 @@ -use quote::quote; - -fn main() { - let nonrep = ""; - - // Without some protection against repetitions with no iterator somewhere - // inside, this would loop infinitely. 
- quote!(#(#nonrep #nonrep)*); -} diff --git a/vendor/quote/tests/ui/does-not-have-iter-interpolated.rs b/vendor/quote/tests/ui/does-not-have-iter-interpolated.rs deleted file mode 100644 index 2c740cc0..00000000 --- a/vendor/quote/tests/ui/does-not-have-iter-interpolated.rs +++ /dev/null @@ -1,9 +0,0 @@ -use quote::quote; - -fn main() { - let nonrep = ""; - - // Without some protection against repetitions with no iterator somewhere - // inside, this would loop infinitely. - quote!(#(#nonrep)*); -} diff --git a/vendor/quote/tests/ui/does-not-have-iter-separated.rs b/vendor/quote/tests/ui/does-not-have-iter-separated.rs deleted file mode 100644 index c027243d..00000000 --- a/vendor/quote/tests/ui/does-not-have-iter-separated.rs +++ /dev/null @@ -1,5 +0,0 @@ -use quote::quote; - -fn main() { - quote!(#(a b),*); -} diff --git a/vendor/quote/tests/ui/does-not-have-iter.rs b/vendor/quote/tests/ui/does-not-have-iter.rs deleted file mode 100644 index 8908353b..00000000 --- a/vendor/quote/tests/ui/does-not-have-iter.rs +++ /dev/null @@ -1,5 +0,0 @@ -use quote::quote; - -fn main() { - quote!(#(a b)*); -} diff --git a/vendor/quote/tests/ui/not-quotable.rs b/vendor/quote/tests/ui/not-quotable.rs deleted file mode 100644 index f991c188..00000000 --- a/vendor/quote/tests/ui/not-quotable.rs +++ /dev/null @@ -1,7 +0,0 @@ -use quote::quote; -use std::net::Ipv4Addr; - -fn main() { - let ip = Ipv4Addr::LOCALHOST; - let _ = quote! { #ip }; -} diff --git a/vendor/quote/tests/ui/not-repeatable.rs b/vendor/quote/tests/ui/not-repeatable.rs deleted file mode 100644 index a8f0fe77..00000000 --- a/vendor/quote/tests/ui/not-repeatable.rs +++ /dev/null @@ -1,8 +0,0 @@ -use quote::quote; - -struct Ipv4Addr; - -fn main() { - let ip = Ipv4Addr; - let _ = quote! { #(#ip)* }; -} diff --git a/vendor/quote/tests/ui/wrong-type-span.rs b/vendor/quote/tests/ui/wrong-type-span.rs deleted file mode 100644 index 1ce391c8..00000000 --- a/vendor/quote/tests/ui/wrong-type-span.rs +++ /dev/null @@ -1,7 +0,0 @@ -use quote::quote_spanned; - -fn main() { - let span = ""; - let x = 0; - quote_spanned!(span=> #x); -} diff --git a/vendor/serde/.cargo-checksum.json b/vendor/serde/.cargo-checksum.json deleted file mode 100644 index d44c6da7..00000000 --- a/vendor/serde/.cargo-checksum.json +++ /dev/null @@ -1 +0,0 @@ 
-{"files":{"Cargo.toml":"6d29815d77bf771150eb70b68cb7e153c745d4933fee235555204f92bb340c7a","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"5cf9d2158d70048a2916360ad59d9079f6233c6f68781a7a792e70f8b772d8ce","build.rs":"adfdafeb547084efb49f2845325b97f7105e48fa9e8c5045d0bf6c3597d28d14","crates-io.md":"25ed421fe25d0f6f74c4b78674144bef2843a5f78bf552d0a8ec633be69d282b","src/de/ignored_any.rs":"c69d6071191c2075372218442e9e73991335c6b4be18736a7a789f04bb305525","src/de/impls.rs":"8505b47b4fa97e426bedf97082005ee2d5700bfac0da41da9127c9826004c163","src/de/mod.rs":"5c176d8d909910a100f67eb26de8228c3e6465886100cdc3bcc146c16aec111e","src/de/seed.rs":"e8cf0233afe0af5b8fb9e4c94f301c92729c5ba417280af9e2201b732e374a72","src/de/utf8.rs":"f17524ee0af98ec3abcfd7d0b812fbd1033263bd8e2ce2f57c1e1999ce153558","src/de/value.rs":"82d530d0bc50cba75a095c819b4269d58229a7384043f7f6e674891cc6dae7bb","src/integer128.rs":"12f6ce6a513c1c293398db38cf1d3ea7c0c5a6717152621bcba61f49abc7b5b1","src/lib.rs":"5d3bbac10e459e7c4001cc790bdad2acfc660214d9be1e41ebd63a9056541a75","src/macros.rs":"3d695a51f0a07f9f719dcb5620012c21a1b084c06a6283349cabf574ceba8123","src/private/de.rs":"abcd02697fc887d6a9c450dfa1ceb640e069683532998ed4ba3c0b859b4744d7","src/private/doc.rs":"e9801a43c3088fccd5f1fac76416698f948e65b647024aa9da17d673e1e8c217","src/private/mod.rs":"761d198c739508117beeaae44ae4e11769aaa6c2e9a4acf584eb9adc1952879f","src/private/ser.rs":"3a90dfb5c17e81bf1d959fed60a9477713498e9d0934463627c98709132f066e","src/private/size_hint.rs":"605521227e9ba3100fbb9d5ea7fd5853385097c35015ce6908bd5f1ea20d59ad","src/ser/fmt.rs":"7827ed07fd8897e6324f75625ba0c926a4c4e7ec2914cd067391ce54d942ac7b","src/ser/impls.rs":"c99000b33b2b7cb1c9b275f769f0cb5dd6ecb3caf260b66f2d0157e6faf04d96","src/ser/impossible.rs":"db17913522c1c27389c5a085113911353b9813c1b116518681362e7c8b692c3a","src/ser/mod.rs":"4f686acd03f310a966194ef225a0cec5f96810bf73f636f670601c3d2d9018c6","src/std_error.rs":"3aac687856c035517fae44ed2906dd4a1e3184bae4bf613adcdeb73f74126c57"},"package":"8b9875c23cf305cd1fd7eb77234cbb705f21ea6a72c637a5c6db5fe4b8e7f008"} \ No newline at end of file diff --git a/vendor/serde/Cargo.toml b/vendor/serde/Cargo.toml deleted file mode 100644 index b119a61a..00000000 --- a/vendor/serde/Cargo.toml +++ /dev/null @@ -1,44 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies. -# -# If you are reading this file be aware that the original Cargo.toml -# will likely look very different (and much more reasonable). -# See Cargo.toml.orig for the original contents. 
- -[package] -rust-version = "1.15" -name = "serde" -version = "1.0.132" -authors = ["Erick Tryzelaar ", "David Tolnay "] -build = "build.rs" -include = ["build.rs", "src/**/*.rs", "crates-io.md", "README.md", "LICENSE-APACHE", "LICENSE-MIT"] -description = "A generic serialization/deserialization framework" -homepage = "https://serde.rs" -documentation = "https://docs.serde.rs/serde/" -readme = "crates-io.md" -keywords = ["serde", "serialization", "no_std"] -categories = ["encoding"] -license = "MIT OR Apache-2.0" -repository = "https://github.com/serde-rs/serde" -[package.metadata.docs.rs] -targets = ["x86_64-unknown-linux-gnu"] - -[package.metadata.playground] -features = ["derive", "rc"] -[dependencies.serde_derive] -version = "=1.0.132" -optional = true -[dev-dependencies.serde_derive] -version = "1.0" - -[features] -alloc = [] -default = ["std"] -derive = ["serde_derive"] -rc = [] -std = [] -unstable = [] diff --git a/vendor/serde/LICENSE-APACHE b/vendor/serde/LICENSE-APACHE deleted file mode 100644 index 16fe87b0..00000000 --- a/vendor/serde/LICENSE-APACHE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/vendor/serde/LICENSE-MIT b/vendor/serde/LICENSE-MIT deleted file mode 100644 index 31aa7938..00000000 --- a/vendor/serde/LICENSE-MIT +++ /dev/null @@ -1,23 +0,0 @@ -Permission is hereby granted, free of charge, to any -person obtaining a copy of this software and associated -documentation files (the "Software"), to deal in the -Software without restriction, including without -limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software -is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice -shall be included in all copies or substantial portions -of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. diff --git a/vendor/serde/README.md b/vendor/serde/README.md deleted file mode 100644 index 14b00982..00000000 --- a/vendor/serde/README.md +++ /dev/null @@ -1,111 +0,0 @@ -# Serde   [![Build Status]][actions] [![Latest Version]][crates.io] [![serde: rustc 1.13+]][Rust 1.13] [![serde_derive: rustc 1.31+]][Rust 1.31] - -[Build Status]: https://img.shields.io/github/workflow/status/serde-rs/serde/CI/master -[actions]: https://github.com/serde-rs/serde/actions?query=branch%3Amaster -[Latest Version]: https://img.shields.io/crates/v/serde.svg -[crates.io]: https://crates.io/crates/serde -[serde: rustc 1.13+]: https://img.shields.io/badge/serde-rustc_1.13+-lightgray.svg -[serde_derive: rustc 1.31+]: https://img.shields.io/badge/serde_derive-rustc_1.31+-lightgray.svg -[Rust 1.13]: https://blog.rust-lang.org/2016/11/10/Rust-1.13.html -[Rust 1.31]: https://blog.rust-lang.org/2018/12/06/Rust-1.31-and-rust-2018.html - -**Serde is a framework for *ser*ializing and *de*serializing Rust data structures efficiently and generically.** - ---- - -You may be looking for: - -- [An overview of Serde](https://serde.rs/) -- [Data formats supported by Serde](https://serde.rs/#data-formats) -- [Setting up `#[derive(Serialize, Deserialize)]`](https://serde.rs/derive.html) -- [Examples](https://serde.rs/examples.html) -- [API documentation](https://docs.serde.rs/serde/) -- [Release notes](https://github.com/serde-rs/serde/releases) - -## Serde in action - -
- -Click to show Cargo.toml. -Run this code in the playground. - - -```toml -[dependencies] - -# The core APIs, including the Serialize and Deserialize traits. Always -# required when using Serde. The "derive" feature is only required when -# using #[derive(Serialize, Deserialize)] to make Serde work with structs -# and enums defined in your crate. -serde = { version = "1.0", features = ["derive"] } - -# Each data format lives in its own crate; the sample code below uses JSON -# but you may be using a different one. -serde_json = "1.0" -``` - -
-

- -```rust -use serde::{Serialize, Deserialize}; - -#[derive(Serialize, Deserialize, Debug)] -struct Point { - x: i32, - y: i32, -} - -fn main() { - let point = Point { x: 1, y: 2 }; - - // Convert the Point to a JSON string. - let serialized = serde_json::to_string(&point).unwrap(); - - // Prints serialized = {"x":1,"y":2} - println!("serialized = {}", serialized); - - // Convert the JSON string back to a Point. - let deserialized: Point = serde_json::from_str(&serialized).unwrap(); - - // Prints deserialized = Point { x: 1, y: 2 } - println!("deserialized = {:?}", deserialized); -} -``` - -## Getting help - -Serde is one of the most widely used Rust libraries so any place that Rustaceans -congregate will be able to help you out. For chat, consider trying the -[#general] or [#beginners] channels of the unofficial community Discord, the -[#rust-usage] channel of the official Rust Project Discord, or the -[#general][zulip] stream in Zulip. For asynchronous, consider the [\[rust\] tag -on StackOverflow][stackoverflow], the [/r/rust] subreddit which has a pinned -weekly easy questions post, or the Rust [Discourse forum][discourse]. It's -acceptable to file a support issue in this repo but they tend not to get as many -eyes as any of the above and may get closed without a response after some time. - -[#general]: https://discord.com/channels/273534239310479360/274215136414400513 -[#beginners]: https://discord.com/channels/273534239310479360/273541522815713281 -[#rust-usage]: https://discord.com/channels/442252698964721669/443150878111694848 -[zulip]: https://rust-lang.zulipchat.com/#narrow/stream/122651-general -[stackoverflow]: https://stackoverflow.com/questions/tagged/rust -[/r/rust]: https://www.reddit.com/r/rust -[discourse]: https://users.rust-lang.org - -
- -#### License - - -Licensed under either of Apache License, Version -2.0 or MIT license at your option. - - -
- - -Unless you explicitly state otherwise, any contribution intentionally submitted -for inclusion in Serde by you, as defined in the Apache-2.0 license, shall be -dual licensed as above, without any additional terms or conditions. - diff --git a/vendor/serde/build.rs b/vendor/serde/build.rs deleted file mode 100644 index c5307ab4..00000000 --- a/vendor/serde/build.rs +++ /dev/null @@ -1,139 +0,0 @@ -use std::env; -use std::process::Command; -use std::str::{self, FromStr}; - -// The rustc-cfg strings below are *not* public API. Please let us know by -// opening a GitHub issue if your build environment requires some way to enable -// these cfgs other than by executing our build script. -fn main() { - let minor = match rustc_minor_version() { - Some(minor) => minor, - None => return, - }; - - let target = env::var("TARGET").unwrap(); - let emscripten = target == "asmjs-unknown-emscripten" || target == "wasm32-unknown-emscripten"; - - // std::collections::Bound was stabilized in Rust 1.17 - // but it was moved to core::ops later in Rust 1.26: - // https://doc.rust-lang.org/core/ops/enum.Bound.html - if minor >= 26 { - println!("cargo:rustc-cfg=ops_bound"); - } else if minor >= 17 && cfg!(feature = "std") { - println!("cargo:rustc-cfg=collections_bound"); - } - - // core::cmp::Reverse stabilized in Rust 1.19: - // https://doc.rust-lang.org/stable/core/cmp/struct.Reverse.html - if minor >= 19 { - println!("cargo:rustc-cfg=core_reverse"); - } - - // CString::into_boxed_c_str and PathBuf::into_boxed_path stabilized in Rust 1.20: - // https://doc.rust-lang.org/std/ffi/struct.CString.html#method.into_boxed_c_str - // https://doc.rust-lang.org/std/path/struct.PathBuf.html#method.into_boxed_path - if minor >= 20 { - println!("cargo:rustc-cfg=de_boxed_c_str"); - println!("cargo:rustc-cfg=de_boxed_path"); - } - - // From> for Rc / Arc stabilized in Rust 1.21: - // https://doc.rust-lang.org/std/rc/struct.Rc.html#impl-From> - // https://doc.rust-lang.org/std/sync/struct.Arc.html#impl-From> - if minor >= 21 { - println!("cargo:rustc-cfg=de_rc_dst"); - } - - // Duration available in core since Rust 1.25: - // https://blog.rust-lang.org/2018/03/29/Rust-1.25.html#library-stabilizations - if minor >= 25 { - println!("cargo:rustc-cfg=core_duration"); - } - - // 128-bit integers stabilized in Rust 1.26: - // https://blog.rust-lang.org/2018/05/10/Rust-1.26.html - // - // Disabled on Emscripten targets before Rust 1.40 since - // Emscripten did not support 128-bit integers until Rust 1.40 - // (https://github.com/rust-lang/rust/pull/65251) - if minor >= 26 && (!emscripten || minor >= 40) { - println!("cargo:rustc-cfg=integer128"); - } - - // Inclusive ranges methods stabilized in Rust 1.27: - // https://github.com/rust-lang/rust/pull/50758 - // Also Iterator::try_for_each: - // https://blog.rust-lang.org/2018/06/21/Rust-1.27.html#library-stabilizations - if minor >= 27 { - println!("cargo:rustc-cfg=range_inclusive"); - println!("cargo:rustc-cfg=iterator_try_fold"); - } - - // Non-zero integers stabilized in Rust 1.28: - // https://blog.rust-lang.org/2018/08/02/Rust-1.28.html#library-stabilizations - if minor >= 28 { - println!("cargo:rustc-cfg=num_nonzero"); - } - - // Current minimum supported version of serde_derive crate is Rust 1.31. 
- if minor >= 31 { - println!("cargo:rustc-cfg=serde_derive"); - } - - // TryFrom, Atomic types, non-zero signed integers, and SystemTime::checked_add - // stabilized in Rust 1.34: - // https://blog.rust-lang.org/2019/04/11/Rust-1.34.0.html#tryfrom-and-tryinto - // https://blog.rust-lang.org/2019/04/11/Rust-1.34.0.html#library-stabilizations - if minor >= 34 { - println!("cargo:rustc-cfg=core_try_from"); - println!("cargo:rustc-cfg=num_nonzero_signed"); - println!("cargo:rustc-cfg=systemtime_checked_add"); - - // Whitelist of archs that support std::sync::atomic module. Ideally we - // would use #[cfg(target_has_atomic = "...")] but it is not stable yet. - // Instead this is based on rustc's compiler/rustc_target/src/spec/*.rs. - let has_atomic64 = target.starts_with("x86_64") - || target.starts_with("i686") - || target.starts_with("aarch64") - || target.starts_with("powerpc64") - || target.starts_with("sparc64") - || target.starts_with("mips64el") - || target.starts_with("riscv64"); - let has_atomic32 = has_atomic64 || emscripten; - if has_atomic64 { - println!("cargo:rustc-cfg=std_atomic64"); - } - if has_atomic32 { - println!("cargo:rustc-cfg=std_atomic"); - } - } -} - -fn rustc_minor_version() -> Option { - let rustc = match env::var_os("RUSTC") { - Some(rustc) => rustc, - None => return None, - }; - - let output = match Command::new(rustc).arg("--version").output() { - Ok(output) => output, - Err(_) => return None, - }; - - let version = match str::from_utf8(&output.stdout) { - Ok(version) => version, - Err(_) => return None, - }; - - let mut pieces = version.split('.'); - if pieces.next() != Some("rustc 1") { - return None; - } - - let next = match pieces.next() { - Some(next) => next, - None => return None, - }; - - u32::from_str(next).ok() -} diff --git a/vendor/serde/crates-io.md b/vendor/serde/crates-io.md deleted file mode 100644 index 07757614..00000000 --- a/vendor/serde/crates-io.md +++ /dev/null @@ -1,62 +0,0 @@ - - -**Serde is a framework for *ser*ializing and *de*serializing Rust data structures efficiently and generically.** - ---- - -You may be looking for: - -- [An overview of Serde](https://serde.rs/) -- [Data formats supported by Serde](https://serde.rs/#data-formats) -- [Setting up `#[derive(Serialize, Deserialize)]`](https://serde.rs/derive.html) -- [Examples](https://serde.rs/examples.html) -- [API documentation](https://docs.serde.rs/serde/) -- [Release notes](https://github.com/serde-rs/serde/releases) - -## Serde in action - -```rust -use serde::{Serialize, Deserialize}; - -#[derive(Serialize, Deserialize, Debug)] -struct Point { - x: i32, - y: i32, -} - -fn main() { - let point = Point { x: 1, y: 2 }; - - // Convert the Point to a JSON string. - let serialized = serde_json::to_string(&point).unwrap(); - - // Prints serialized = {"x":1,"y":2} - println!("serialized = {}", serialized); - - // Convert the JSON string back to a Point. - let deserialized: Point = serde_json::from_str(&serialized).unwrap(); - - // Prints deserialized = Point { x: 1, y: 2 } - println!("deserialized = {:?}", deserialized); -} -``` - -## Getting help - -Serde is one of the most widely used Rust libraries so any place that Rustaceans -congregate will be able to help you out. For chat, consider trying the -[#general] or [#beginners] channels of the unofficial community Discord, the -[#rust-usage] channel of the official Rust Project Discord, or the -[#general][zulip] stream in Zulip. 
For asynchronous, consider the [\[rust\] tag -on StackOverflow][stackoverflow], the [/r/rust] subreddit which has a pinned -weekly easy questions post, or the Rust [Discourse forum][discourse]. It's -acceptable to file a support issue in this repo but they tend not to get as many -eyes as any of the above and may get closed without a response after some time. - -[#general]: https://discord.com/channels/273534239310479360/274215136414400513 -[#beginners]: https://discord.com/channels/273534239310479360/273541522815713281 -[#rust-usage]: https://discord.com/channels/442252698964721669/443150878111694848 -[zulip]: https://rust-lang.zulipchat.com/#narrow/stream/122651-general -[stackoverflow]: https://stackoverflow.com/questions/tagged/rust -[/r/rust]: https://www.reddit.com/r/rust -[discourse]: https://users.rust-lang.org diff --git a/vendor/serde/src/de/ignored_any.rs b/vendor/serde/src/de/ignored_any.rs deleted file mode 100644 index 1d50f5ec..00000000 --- a/vendor/serde/src/de/ignored_any.rs +++ /dev/null @@ -1,243 +0,0 @@ -use lib::*; - -use de::{ - Deserialize, Deserializer, EnumAccess, Error, MapAccess, SeqAccess, VariantAccess, Visitor, -}; - -/// An efficient way of discarding data from a deserializer. -/// -/// Think of this like `serde_json::Value` in that it can be deserialized from -/// any type, except that it does not store any information about the data that -/// gets deserialized. -/// -/// ```edition2018 -/// use std::fmt; -/// use std::marker::PhantomData; -/// -/// use serde::de::{ -/// self, Deserialize, DeserializeSeed, Deserializer, IgnoredAny, SeqAccess, Visitor, -/// }; -/// -/// /// A seed that can be used to deserialize only the `n`th element of a sequence -/// /// while efficiently discarding elements of any type before or after index `n`. -/// /// -/// /// For example to deserialize only the element at index 3: -/// /// -/// /// ``` -/// /// NthElement::new(3).deserialize(deserializer) -/// /// ``` -/// pub struct NthElement { -/// n: usize, -/// marker: PhantomData, -/// } -/// -/// impl NthElement { -/// pub fn new(n: usize) -> Self { -/// NthElement { -/// n: n, -/// marker: PhantomData, -/// } -/// } -/// } -/// -/// impl<'de, T> Visitor<'de> for NthElement -/// where -/// T: Deserialize<'de>, -/// { -/// type Value = T; -/// -/// fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { -/// write!( -/// formatter, -/// "a sequence in which we care about element {}", -/// self.n -/// ) -/// } -/// -/// fn visit_seq(self, mut seq: A) -> Result -/// where -/// A: SeqAccess<'de>, -/// { -/// // Skip over the first `n` elements. -/// for i in 0..self.n { -/// // It is an error if the sequence ends before we get to element `n`. -/// if seq.next_element::()?.is_none() { -/// return Err(de::Error::invalid_length(i, &self)); -/// } -/// } -/// -/// // Deserialize the one we care about. -/// let nth = match seq.next_element()? { -/// Some(nth) => nth, -/// None => { -/// return Err(de::Error::invalid_length(self.n, &self)); -/// } -/// }; -/// -/// // Skip over any remaining elements in the sequence after `n`. -/// while let Some(IgnoredAny) = seq.next_element()? 
{ -/// // ignore -/// } -/// -/// Ok(nth) -/// } -/// } -/// -/// impl<'de, T> DeserializeSeed<'de> for NthElement -/// where -/// T: Deserialize<'de>, -/// { -/// type Value = T; -/// -/// fn deserialize(self, deserializer: D) -> Result -/// where -/// D: Deserializer<'de>, -/// { -/// deserializer.deserialize_seq(self) -/// } -/// } -/// -/// # fn example<'de, D>(deserializer: D) -> Result<(), D::Error> -/// # where -/// # D: Deserializer<'de>, -/// # { -/// // Deserialize only the sequence element at index 3 from this deserializer. -/// // The element at index 3 is required to be a string. Elements before and -/// // after index 3 are allowed to be of any type. -/// let s: String = NthElement::new(3).deserialize(deserializer)?; -/// # Ok(()) -/// # } -/// ``` -#[derive(Copy, Clone, Debug, Default)] -pub struct IgnoredAny; - -impl<'de> Visitor<'de> for IgnoredAny { - type Value = IgnoredAny; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("anything at all") - } - - #[inline] - fn visit_bool(self, x: bool) -> Result { - let _ = x; - Ok(IgnoredAny) - } - - #[inline] - fn visit_i64(self, x: i64) -> Result { - let _ = x; - Ok(IgnoredAny) - } - - serde_if_integer128! { - #[inline] - fn visit_i128(self, x: i128) -> Result { - let _ = x; - Ok(IgnoredAny) - } - } - - #[inline] - fn visit_u64(self, x: u64) -> Result { - let _ = x; - Ok(IgnoredAny) - } - - serde_if_integer128! { - #[inline] - fn visit_u128(self, x: u128) -> Result { - let _ = x; - Ok(IgnoredAny) - } - } - - #[inline] - fn visit_f64(self, x: f64) -> Result { - let _ = x; - Ok(IgnoredAny) - } - - #[inline] - fn visit_str(self, s: &str) -> Result - where - E: Error, - { - let _ = s; - Ok(IgnoredAny) - } - - #[inline] - fn visit_none(self) -> Result { - Ok(IgnoredAny) - } - - #[inline] - fn visit_some(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - IgnoredAny::deserialize(deserializer) - } - - #[inline] - fn visit_newtype_struct(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - IgnoredAny::deserialize(deserializer) - } - - #[inline] - fn visit_unit(self) -> Result { - Ok(IgnoredAny) - } - - #[inline] - fn visit_seq(self, mut seq: A) -> Result - where - A: SeqAccess<'de>, - { - while let Some(IgnoredAny) = try!(seq.next_element()) { - // Gobble - } - Ok(IgnoredAny) - } - - #[inline] - fn visit_map(self, mut map: A) -> Result - where - A: MapAccess<'de>, - { - while let Some((IgnoredAny, IgnoredAny)) = try!(map.next_entry()) { - // Gobble - } - Ok(IgnoredAny) - } - - #[inline] - fn visit_bytes(self, bytes: &[u8]) -> Result - where - E: Error, - { - let _ = bytes; - Ok(IgnoredAny) - } - - fn visit_enum(self, data: A) -> Result - where - A: EnumAccess<'de>, - { - data.variant::()?.1.newtype_variant() - } -} - -impl<'de> Deserialize<'de> for IgnoredAny { - #[inline] - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_ignored_any(IgnoredAny) - } -} diff --git a/vendor/serde/src/de/impls.rs b/vendor/serde/src/de/impls.rs deleted file mode 100644 index 59d90d2c..00000000 --- a/vendor/serde/src/de/impls.rs +++ /dev/null @@ -1,2664 +0,0 @@ -use lib::*; - -use de::{ - Deserialize, Deserializer, EnumAccess, Error, SeqAccess, Unexpected, VariantAccess, Visitor, -}; - -#[cfg(any(core_duration, feature = "std", feature = "alloc"))] -use de::MapAccess; - -use seed::InPlaceSeed; - -#[cfg(any(feature = "std", feature = "alloc"))] -use __private::size_hint; - 
-//////////////////////////////////////////////////////////////////////////////// - -struct UnitVisitor; - -impl<'de> Visitor<'de> for UnitVisitor { - type Value = (); - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("unit") - } - - fn visit_unit(self) -> Result - where - E: Error, - { - Ok(()) - } -} - -impl<'de> Deserialize<'de> for () { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_unit(UnitVisitor) - } -} - -#[cfg(feature = "unstable")] -impl<'de> Deserialize<'de> for ! { - fn deserialize(_deserializer: D) -> Result - where - D: Deserializer<'de>, - { - Err(Error::custom("cannot deserialize `!`")) - } -} - -//////////////////////////////////////////////////////////////////////////////// - -struct BoolVisitor; - -impl<'de> Visitor<'de> for BoolVisitor { - type Value = bool; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("a boolean") - } - - fn visit_bool(self, v: bool) -> Result - where - E: Error, - { - Ok(v) - } -} - -impl<'de> Deserialize<'de> for bool { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_bool(BoolVisitor) - } -} - -//////////////////////////////////////////////////////////////////////////////// - -macro_rules! impl_deserialize_num { - ($ty:ident, $deserialize:ident $($methods:tt)*) => { - impl<'de> Deserialize<'de> for $ty { - #[inline] - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - struct PrimitiveVisitor; - - impl<'de> Visitor<'de> for PrimitiveVisitor { - type Value = $ty; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str(stringify!($ty)) - } - - $($methods)* - } - - deserializer.$deserialize(PrimitiveVisitor) - } - } - }; -} - -macro_rules! num_self { - ($ty:ident : $visit:ident) => { - #[inline] - fn $visit(self, v: $ty) -> Result - where - E: Error, - { - Ok(v) - } - }; -} - -macro_rules! num_as_self { - ($($ty:ident : $visit:ident)*) => { - $( - #[inline] - fn $visit(self, v: $ty) -> Result - where - E: Error, - { - Ok(v as Self::Value) - } - )* - }; -} - -macro_rules! int_to_int { - ($($ty:ident : $visit:ident)*) => { - $( - #[inline] - fn $visit(self, v: $ty) -> Result - where - E: Error, - { - if Self::Value::min_value() as i64 <= v as i64 && v as i64 <= Self::Value::max_value() as i64 { - Ok(v as Self::Value) - } else { - Err(Error::invalid_value(Unexpected::Signed(v as i64), &self)) - } - } - )* - }; -} - -macro_rules! int_to_uint { - ($($ty:ident : $visit:ident)*) => { - $( - #[inline] - fn $visit(self, v: $ty) -> Result - where - E: Error, - { - if 0 <= v && v as u64 <= Self::Value::max_value() as u64 { - Ok(v as Self::Value) - } else { - Err(Error::invalid_value(Unexpected::Signed(v as i64), &self)) - } - } - )* - }; -} - -macro_rules! uint_to_self { - ($($ty:ident : $visit:ident)*) => { - $( - #[inline] - fn $visit(self, v: $ty) -> Result - where - E: Error, - { - if v as u64 <= Self::Value::max_value() as u64 { - Ok(v as Self::Value) - } else { - Err(Error::invalid_value(Unexpected::Unsigned(v as u64), &self)) - } - } - )* - }; -} - -impl_deserialize_num! { - i8, deserialize_i8 - num_self!(i8:visit_i8); - int_to_int!(i16:visit_i16 i32:visit_i32 i64:visit_i64); - uint_to_self!(u8:visit_u8 u16:visit_u16 u32:visit_u32 u64:visit_u64); -} - -impl_deserialize_num! 
{ - i16, deserialize_i16 - num_self!(i16:visit_i16); - num_as_self!(i8:visit_i8); - int_to_int!(i32:visit_i32 i64:visit_i64); - uint_to_self!(u8:visit_u8 u16:visit_u16 u32:visit_u32 u64:visit_u64); -} - -impl_deserialize_num! { - i32, deserialize_i32 - num_self!(i32:visit_i32); - num_as_self!(i8:visit_i8 i16:visit_i16); - int_to_int!(i64:visit_i64); - uint_to_self!(u8:visit_u8 u16:visit_u16 u32:visit_u32 u64:visit_u64); -} - -impl_deserialize_num! { - i64, deserialize_i64 - num_self!(i64:visit_i64); - num_as_self!(i8:visit_i8 i16:visit_i16 i32:visit_i32); - uint_to_self!(u8:visit_u8 u16:visit_u16 u32:visit_u32 u64:visit_u64); -} - -impl_deserialize_num! { - isize, deserialize_i64 - num_as_self!(i8:visit_i8 i16:visit_i16); - int_to_int!(i32:visit_i32 i64:visit_i64); - uint_to_self!(u8:visit_u8 u16:visit_u16 u32:visit_u32 u64:visit_u64); -} - -impl_deserialize_num! { - u8, deserialize_u8 - num_self!(u8:visit_u8); - int_to_uint!(i8:visit_i8 i16:visit_i16 i32:visit_i32 i64:visit_i64); - uint_to_self!(u16:visit_u16 u32:visit_u32 u64:visit_u64); -} - -impl_deserialize_num! { - u16, deserialize_u16 - num_self!(u16:visit_u16); - num_as_self!(u8:visit_u8); - int_to_uint!(i8:visit_i8 i16:visit_i16 i32:visit_i32 i64:visit_i64); - uint_to_self!(u32:visit_u32 u64:visit_u64); -} - -impl_deserialize_num! { - u32, deserialize_u32 - num_self!(u32:visit_u32); - num_as_self!(u8:visit_u8 u16:visit_u16); - int_to_uint!(i8:visit_i8 i16:visit_i16 i32:visit_i32 i64:visit_i64); - uint_to_self!(u64:visit_u64); -} - -impl_deserialize_num! { - u64, deserialize_u64 - num_self!(u64:visit_u64); - num_as_self!(u8:visit_u8 u16:visit_u16 u32:visit_u32); - int_to_uint!(i8:visit_i8 i16:visit_i16 i32:visit_i32 i64:visit_i64); -} - -impl_deserialize_num! { - usize, deserialize_u64 - num_as_self!(u8:visit_u8 u16:visit_u16); - int_to_uint!(i8:visit_i8 i16:visit_i16 i32:visit_i32 i64:visit_i64); - uint_to_self!(u32:visit_u32 u64:visit_u64); -} - -impl_deserialize_num! { - f32, deserialize_f32 - num_self!(f32:visit_f32); - num_as_self!(f64:visit_f64); - num_as_self!(i8:visit_i8 i16:visit_i16 i32:visit_i32 i64:visit_i64); - num_as_self!(u8:visit_u8 u16:visit_u16 u32:visit_u32 u64:visit_u64); -} - -impl_deserialize_num! { - f64, deserialize_f64 - num_self!(f64:visit_f64); - num_as_self!(f32:visit_f32); - num_as_self!(i8:visit_i8 i16:visit_i16 i32:visit_i32 i64:visit_i64); - num_as_self!(u8:visit_u8 u16:visit_u16 u32:visit_u32 u64:visit_u64); -} - -serde_if_integer128! { - impl_deserialize_num! { - i128, deserialize_i128 - num_self!(i128:visit_i128); - num_as_self!(i8:visit_i8 i16:visit_i16 i32:visit_i32 i64:visit_i64); - num_as_self!(u8:visit_u8 u16:visit_u16 u32:visit_u32 u64:visit_u64); - - #[inline] - fn visit_u128(self, v: u128) -> Result - where - E: Error, - { - if v <= i128::max_value() as u128 { - Ok(v as i128) - } else { - Err(Error::invalid_value(Unexpected::Other("u128"), &self)) - } - } - } - - impl_deserialize_num! 
{ - u128, deserialize_u128 - num_self!(u128:visit_u128); - num_as_self!(u8:visit_u8 u16:visit_u16 u32:visit_u32 u64:visit_u64); - int_to_uint!(i8:visit_i8 i16:visit_i16 i32:visit_i32 i64:visit_i64); - - #[inline] - fn visit_i128(self, v: i128) -> Result - where - E: Error, - { - if 0 <= v { - Ok(v as u128) - } else { - Err(Error::invalid_value(Unexpected::Other("i128"), &self)) - } - } - } -} - -//////////////////////////////////////////////////////////////////////////////// - -struct CharVisitor; - -impl<'de> Visitor<'de> for CharVisitor { - type Value = char; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("a character") - } - - #[inline] - fn visit_char(self, v: char) -> Result - where - E: Error, - { - Ok(v) - } - - #[inline] - fn visit_str(self, v: &str) -> Result - where - E: Error, - { - let mut iter = v.chars(); - match (iter.next(), iter.next()) { - (Some(c), None) => Ok(c), - _ => Err(Error::invalid_value(Unexpected::Str(v), &self)), - } - } -} - -impl<'de> Deserialize<'de> for char { - #[inline] - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_char(CharVisitor) - } -} - -//////////////////////////////////////////////////////////////////////////////// - -#[cfg(any(feature = "std", feature = "alloc"))] -struct StringVisitor; -#[cfg(any(feature = "std", feature = "alloc"))] -struct StringInPlaceVisitor<'a>(&'a mut String); - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'de> Visitor<'de> for StringVisitor { - type Value = String; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("a string") - } - - fn visit_str(self, v: &str) -> Result - where - E: Error, - { - Ok(v.to_owned()) - } - - fn visit_string(self, v: String) -> Result - where - E: Error, - { - Ok(v) - } - - fn visit_bytes(self, v: &[u8]) -> Result - where - E: Error, - { - match str::from_utf8(v) { - Ok(s) => Ok(s.to_owned()), - Err(_) => Err(Error::invalid_value(Unexpected::Bytes(v), &self)), - } - } - - fn visit_byte_buf(self, v: Vec) -> Result - where - E: Error, - { - match String::from_utf8(v) { - Ok(s) => Ok(s), - Err(e) => Err(Error::invalid_value( - Unexpected::Bytes(&e.into_bytes()), - &self, - )), - } - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'a, 'de> Visitor<'de> for StringInPlaceVisitor<'a> { - type Value = (); - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("a string") - } - - fn visit_str(self, v: &str) -> Result - where - E: Error, - { - self.0.clear(); - self.0.push_str(v); - Ok(()) - } - - fn visit_string(self, v: String) -> Result - where - E: Error, - { - *self.0 = v; - Ok(()) - } - - fn visit_bytes(self, v: &[u8]) -> Result - where - E: Error, - { - match str::from_utf8(v) { - Ok(s) => { - self.0.clear(); - self.0.push_str(s); - Ok(()) - } - Err(_) => Err(Error::invalid_value(Unexpected::Bytes(v), &self)), - } - } - - fn visit_byte_buf(self, v: Vec) -> Result - where - E: Error, - { - match String::from_utf8(v) { - Ok(s) => { - *self.0 = s; - Ok(()) - } - Err(e) => Err(Error::invalid_value( - Unexpected::Bytes(&e.into_bytes()), - &self, - )), - } - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'de> Deserialize<'de> for String { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_string(StringVisitor) - } - - fn deserialize_in_place(deserializer: D, place: &mut Self) -> Result<(), D::Error> - where - D: 
Deserializer<'de>, - { - deserializer.deserialize_string(StringInPlaceVisitor(place)) - } -} - -//////////////////////////////////////////////////////////////////////////////// - -struct StrVisitor; - -impl<'a> Visitor<'a> for StrVisitor { - type Value = &'a str; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("a borrowed string") - } - - fn visit_borrowed_str(self, v: &'a str) -> Result - where - E: Error, - { - Ok(v) // so easy - } - - fn visit_borrowed_bytes(self, v: &'a [u8]) -> Result - where - E: Error, - { - str::from_utf8(v).map_err(|_| Error::invalid_value(Unexpected::Bytes(v), &self)) - } -} - -impl<'de: 'a, 'a> Deserialize<'de> for &'a str { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_str(StrVisitor) - } -} - -//////////////////////////////////////////////////////////////////////////////// - -struct BytesVisitor; - -impl<'a> Visitor<'a> for BytesVisitor { - type Value = &'a [u8]; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("a borrowed byte array") - } - - fn visit_borrowed_bytes(self, v: &'a [u8]) -> Result - where - E: Error, - { - Ok(v) - } - - fn visit_borrowed_str(self, v: &'a str) -> Result - where - E: Error, - { - Ok(v.as_bytes()) - } -} - -impl<'de: 'a, 'a> Deserialize<'de> for &'a [u8] { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_bytes(BytesVisitor) - } -} - -//////////////////////////////////////////////////////////////////////////////// - -#[cfg(feature = "std")] -struct CStringVisitor; - -#[cfg(feature = "std")] -impl<'de> Visitor<'de> for CStringVisitor { - type Value = CString; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("byte array") - } - - fn visit_seq(self, mut seq: A) -> Result - where - A: SeqAccess<'de>, - { - let len = size_hint::cautious(seq.size_hint()); - let mut values = Vec::with_capacity(len); - - while let Some(value) = try!(seq.next_element()) { - values.push(value); - } - - CString::new(values).map_err(Error::custom) - } - - fn visit_bytes(self, v: &[u8]) -> Result - where - E: Error, - { - CString::new(v).map_err(Error::custom) - } - - fn visit_byte_buf(self, v: Vec) -> Result - where - E: Error, - { - CString::new(v).map_err(Error::custom) - } - - fn visit_str(self, v: &str) -> Result - where - E: Error, - { - CString::new(v).map_err(Error::custom) - } - - fn visit_string(self, v: String) -> Result - where - E: Error, - { - CString::new(v).map_err(Error::custom) - } -} - -#[cfg(feature = "std")] -impl<'de> Deserialize<'de> for CString { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_byte_buf(CStringVisitor) - } -} - -macro_rules! 
forwarded_impl { - ( - $(#[doc = $doc:tt])* - ( $($id: ident),* ), $ty: ty, $func: expr - ) => { - $(#[doc = $doc])* - impl<'de $(, $id : Deserialize<'de>,)*> Deserialize<'de> for $ty { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - Deserialize::deserialize(deserializer).map($func) - } - } - } -} - -#[cfg(all(feature = "std", de_boxed_c_str))] -forwarded_impl!((), Box, CString::into_boxed_c_str); - -#[cfg(core_reverse)] -forwarded_impl!((T), Reverse, Reverse); - -//////////////////////////////////////////////////////////////////////////////// - -struct OptionVisitor { - marker: PhantomData, -} - -impl<'de, T> Visitor<'de> for OptionVisitor -where - T: Deserialize<'de>, -{ - type Value = Option; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("option") - } - - #[inline] - fn visit_unit(self) -> Result - where - E: Error, - { - Ok(None) - } - - #[inline] - fn visit_none(self) -> Result - where - E: Error, - { - Ok(None) - } - - #[inline] - fn visit_some(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - T::deserialize(deserializer).map(Some) - } - - #[doc(hidden)] - fn __private_visit_untagged_option(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - Ok(T::deserialize(deserializer).ok()) - } -} - -impl<'de, T> Deserialize<'de> for Option -where - T: Deserialize<'de>, -{ - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_option(OptionVisitor { - marker: PhantomData, - }) - } - - // The Some variant's repr is opaque, so we can't play cute tricks with its - // tag to have deserialize_in_place build the content in place unconditionally. - // - // FIXME: investigate whether branching on the old value being Some to - // deserialize_in_place the value is profitable (probably data-dependent?) -} - -//////////////////////////////////////////////////////////////////////////////// - -struct PhantomDataVisitor { - marker: PhantomData, -} - -impl<'de, T: ?Sized> Visitor<'de> for PhantomDataVisitor { - type Value = PhantomData; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("unit") - } - - #[inline] - fn visit_unit(self) -> Result - where - E: Error, - { - Ok(PhantomData) - } -} - -impl<'de, T: ?Sized> Deserialize<'de> for PhantomData { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let visitor = PhantomDataVisitor { - marker: PhantomData, - }; - deserializer.deserialize_unit_struct("PhantomData", visitor) - } -} - -//////////////////////////////////////////////////////////////////////////////// - -#[cfg(any(feature = "std", feature = "alloc"))] -macro_rules! 
seq_impl { - ( - $ty:ident < T $(: $tbound1:ident $(+ $tbound2:ident)*)* $(, $typaram:ident : $bound1:ident $(+ $bound2:ident)*)* >, - $access:ident, - $clear:expr, - $with_capacity:expr, - $reserve:expr, - $insert:expr - ) => { - impl<'de, T $(, $typaram)*> Deserialize<'de> for $ty - where - T: Deserialize<'de> $(+ $tbound1 $(+ $tbound2)*)*, - $($typaram: $bound1 $(+ $bound2)*,)* - { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - struct SeqVisitor { - marker: PhantomData<$ty>, - } - - impl<'de, T $(, $typaram)*> Visitor<'de> for SeqVisitor - where - T: Deserialize<'de> $(+ $tbound1 $(+ $tbound2)*)*, - $($typaram: $bound1 $(+ $bound2)*,)* - { - type Value = $ty; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("a sequence") - } - - #[inline] - fn visit_seq(self, mut $access: A) -> Result - where - A: SeqAccess<'de>, - { - let mut values = $with_capacity; - - while let Some(value) = try!($access.next_element()) { - $insert(&mut values, value); - } - - Ok(values) - } - } - - let visitor = SeqVisitor { marker: PhantomData }; - deserializer.deserialize_seq(visitor) - } - - fn deserialize_in_place(deserializer: D, place: &mut Self) -> Result<(), D::Error> - where - D: Deserializer<'de>, - { - struct SeqInPlaceVisitor<'a, T: 'a $(, $typaram: 'a)*>(&'a mut $ty); - - impl<'a, 'de, T $(, $typaram)*> Visitor<'de> for SeqInPlaceVisitor<'a, T $(, $typaram)*> - where - T: Deserialize<'de> $(+ $tbound1 $(+ $tbound2)*)*, - $($typaram: $bound1 $(+ $bound2)*,)* - { - type Value = (); - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("a sequence") - } - - #[inline] - fn visit_seq(mut self, mut $access: A) -> Result - where - A: SeqAccess<'de>, - { - $clear(&mut self.0); - $reserve(&mut self.0, size_hint::cautious($access.size_hint())); - - // FIXME: try to overwrite old values here? 
(Vec, VecDeque, LinkedList) - while let Some(value) = try!($access.next_element()) { - $insert(&mut self.0, value); - } - - Ok(()) - } - } - - deserializer.deserialize_seq(SeqInPlaceVisitor(place)) - } - } - } -} - -// Dummy impl of reserve -#[cfg(any(feature = "std", feature = "alloc"))] -fn nop_reserve(_seq: T, _n: usize) {} - -#[cfg(any(feature = "std", feature = "alloc"))] -seq_impl!( - BinaryHeap, - seq, - BinaryHeap::clear, - BinaryHeap::with_capacity(size_hint::cautious(seq.size_hint())), - BinaryHeap::reserve, - BinaryHeap::push -); - -#[cfg(any(feature = "std", feature = "alloc"))] -seq_impl!( - BTreeSet, - seq, - BTreeSet::clear, - BTreeSet::new(), - nop_reserve, - BTreeSet::insert -); - -#[cfg(any(feature = "std", feature = "alloc"))] -seq_impl!( - LinkedList, - seq, - LinkedList::clear, - LinkedList::new(), - nop_reserve, - LinkedList::push_back -); - -#[cfg(feature = "std")] -seq_impl!( - HashSet, - seq, - HashSet::clear, - HashSet::with_capacity_and_hasher(size_hint::cautious(seq.size_hint()), S::default()), - HashSet::reserve, - HashSet::insert); - -#[cfg(any(feature = "std", feature = "alloc"))] -seq_impl!( - VecDeque, - seq, - VecDeque::clear, - VecDeque::with_capacity(size_hint::cautious(seq.size_hint())), - VecDeque::reserve, - VecDeque::push_back -); - -//////////////////////////////////////////////////////////////////////////////// - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'de, T> Deserialize<'de> for Vec -where - T: Deserialize<'de>, -{ - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - struct VecVisitor { - marker: PhantomData, - } - - impl<'de, T> Visitor<'de> for VecVisitor - where - T: Deserialize<'de>, - { - type Value = Vec; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("a sequence") - } - - fn visit_seq(self, mut seq: A) -> Result - where - A: SeqAccess<'de>, - { - let mut values = Vec::with_capacity(size_hint::cautious(seq.size_hint())); - - while let Some(value) = try!(seq.next_element()) { - values.push(value); - } - - Ok(values) - } - } - - let visitor = VecVisitor { - marker: PhantomData, - }; - deserializer.deserialize_seq(visitor) - } - - fn deserialize_in_place(deserializer: D, place: &mut Self) -> Result<(), D::Error> - where - D: Deserializer<'de>, - { - struct VecInPlaceVisitor<'a, T: 'a>(&'a mut Vec); - - impl<'a, 'de, T> Visitor<'de> for VecInPlaceVisitor<'a, T> - where - T: Deserialize<'de>, - { - type Value = (); - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("a sequence") - } - - fn visit_seq(self, mut seq: A) -> Result - where - A: SeqAccess<'de>, - { - let hint = size_hint::cautious(seq.size_hint()); - if let Some(additional) = hint.checked_sub(self.0.len()) { - self.0.reserve(additional); - } - - for i in 0..self.0.len() { - let next = { - let next_place = InPlaceSeed(&mut self.0[i]); - try!(seq.next_element_seed(next_place)) - }; - if next.is_none() { - self.0.truncate(i); - return Ok(()); - } - } - - while let Some(value) = try!(seq.next_element()) { - self.0.push(value); - } - - Ok(()) - } - } - - deserializer.deserialize_seq(VecInPlaceVisitor(place)) - } -} - -//////////////////////////////////////////////////////////////////////////////// - -struct ArrayVisitor { - marker: PhantomData, -} -struct ArrayInPlaceVisitor<'a, A: 'a>(&'a mut A); - -impl ArrayVisitor { - fn new() -> Self { - ArrayVisitor { - marker: PhantomData, - } - } -} - -impl<'de, T> Visitor<'de> for ArrayVisitor<[T; 0]> { - type 
Value = [T; 0]; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("an empty array") - } - - #[inline] - fn visit_seq(self, _: A) -> Result - where - A: SeqAccess<'de>, - { - Ok([]) - } -} - -// Does not require T: Deserialize<'de>. -impl<'de, T> Deserialize<'de> for [T; 0] { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_tuple(0, ArrayVisitor::<[T; 0]>::new()) - } -} - -macro_rules! array_impls { - ($($len:expr => ($($n:tt)+))+) => { - $( - impl<'de, T> Visitor<'de> for ArrayVisitor<[T; $len]> - where - T: Deserialize<'de>, - { - type Value = [T; $len]; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str(concat!("an array of length ", $len)) - } - - #[inline] - fn visit_seq(self, mut seq: A) -> Result - where - A: SeqAccess<'de>, - { - Ok([$( - match try!(seq.next_element()) { - Some(val) => val, - None => return Err(Error::invalid_length($n, &self)), - } - ),+]) - } - } - - impl<'a, 'de, T> Visitor<'de> for ArrayInPlaceVisitor<'a, [T; $len]> - where - T: Deserialize<'de>, - { - type Value = (); - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str(concat!("an array of length ", $len)) - } - - #[inline] - fn visit_seq(self, mut seq: A) -> Result - where - A: SeqAccess<'de>, - { - let mut fail_idx = None; - for (idx, dest) in self.0[..].iter_mut().enumerate() { - if try!(seq.next_element_seed(InPlaceSeed(dest))).is_none() { - fail_idx = Some(idx); - break; - } - } - if let Some(idx) = fail_idx { - return Err(Error::invalid_length(idx, &self)); - } - Ok(()) - } - } - - impl<'de, T> Deserialize<'de> for [T; $len] - where - T: Deserialize<'de>, - { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_tuple($len, ArrayVisitor::<[T; $len]>::new()) - } - - fn deserialize_in_place(deserializer: D, place: &mut Self) -> Result<(), D::Error> - where - D: Deserializer<'de>, - { - deserializer.deserialize_tuple($len, ArrayInPlaceVisitor(place)) - } - } - )+ - } -} - -array_impls! 
{ - 1 => (0) - 2 => (0 1) - 3 => (0 1 2) - 4 => (0 1 2 3) - 5 => (0 1 2 3 4) - 6 => (0 1 2 3 4 5) - 7 => (0 1 2 3 4 5 6) - 8 => (0 1 2 3 4 5 6 7) - 9 => (0 1 2 3 4 5 6 7 8) - 10 => (0 1 2 3 4 5 6 7 8 9) - 11 => (0 1 2 3 4 5 6 7 8 9 10) - 12 => (0 1 2 3 4 5 6 7 8 9 10 11) - 13 => (0 1 2 3 4 5 6 7 8 9 10 11 12) - 14 => (0 1 2 3 4 5 6 7 8 9 10 11 12 13) - 15 => (0 1 2 3 4 5 6 7 8 9 10 11 12 13 14) - 16 => (0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15) - 17 => (0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16) - 18 => (0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17) - 19 => (0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18) - 20 => (0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19) - 21 => (0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20) - 22 => (0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21) - 23 => (0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22) - 24 => (0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23) - 25 => (0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24) - 26 => (0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25) - 27 => (0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26) - 28 => (0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27) - 29 => (0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28) - 30 => (0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29) - 31 => (0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30) - 32 => (0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31) -} - -//////////////////////////////////////////////////////////////////////////////// - -macro_rules! tuple_impls { - ($($len:tt => ($($n:tt $name:ident)+))+) => { - $( - impl<'de, $($name: Deserialize<'de>),+> Deserialize<'de> for ($($name,)+) { - #[inline] - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - struct TupleVisitor<$($name,)+> { - marker: PhantomData<($($name,)+)>, - } - - impl<'de, $($name: Deserialize<'de>),+> Visitor<'de> for TupleVisitor<$($name,)+> { - type Value = ($($name,)+); - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str(concat!("a tuple of size ", $len)) - } - - #[inline] - #[allow(non_snake_case)] - fn visit_seq(self, mut seq: A) -> Result - where - A: SeqAccess<'de>, - { - $( - let $name = match try!(seq.next_element()) { - Some(value) => value, - None => return Err(Error::invalid_length($n, &self)), - }; - )+ - - Ok(($($name,)+)) - } - } - - deserializer.deserialize_tuple($len, TupleVisitor { marker: PhantomData }) - } - - #[inline] - fn deserialize_in_place(deserializer: D, place: &mut Self) -> Result<(), D::Error> - where - D: Deserializer<'de>, - { - struct TupleInPlaceVisitor<'a, $($name: 'a,)+>(&'a mut ($($name,)+)); - - impl<'a, 'de, $($name: Deserialize<'de>),+> Visitor<'de> for TupleInPlaceVisitor<'a, $($name,)+> { - type Value = (); - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str(concat!("a tuple of size ", $len)) - } - - #[inline] - #[allow(non_snake_case)] - fn visit_seq(self, mut seq: A) -> Result - where - A: SeqAccess<'de>, - { - $( - if try!(seq.next_element_seed(InPlaceSeed(&mut (self.0).$n))).is_none() { - return Err(Error::invalid_length($n, &self)); - } - )+ - - Ok(()) - } - } - - deserializer.deserialize_tuple($len, TupleInPlaceVisitor(place)) - } - } - )+ - } -} - -tuple_impls! 
{ - 1 => (0 T0) - 2 => (0 T0 1 T1) - 3 => (0 T0 1 T1 2 T2) - 4 => (0 T0 1 T1 2 T2 3 T3) - 5 => (0 T0 1 T1 2 T2 3 T3 4 T4) - 6 => (0 T0 1 T1 2 T2 3 T3 4 T4 5 T5) - 7 => (0 T0 1 T1 2 T2 3 T3 4 T4 5 T5 6 T6) - 8 => (0 T0 1 T1 2 T2 3 T3 4 T4 5 T5 6 T6 7 T7) - 9 => (0 T0 1 T1 2 T2 3 T3 4 T4 5 T5 6 T6 7 T7 8 T8) - 10 => (0 T0 1 T1 2 T2 3 T3 4 T4 5 T5 6 T6 7 T7 8 T8 9 T9) - 11 => (0 T0 1 T1 2 T2 3 T3 4 T4 5 T5 6 T6 7 T7 8 T8 9 T9 10 T10) - 12 => (0 T0 1 T1 2 T2 3 T3 4 T4 5 T5 6 T6 7 T7 8 T8 9 T9 10 T10 11 T11) - 13 => (0 T0 1 T1 2 T2 3 T3 4 T4 5 T5 6 T6 7 T7 8 T8 9 T9 10 T10 11 T11 12 T12) - 14 => (0 T0 1 T1 2 T2 3 T3 4 T4 5 T5 6 T6 7 T7 8 T8 9 T9 10 T10 11 T11 12 T12 13 T13) - 15 => (0 T0 1 T1 2 T2 3 T3 4 T4 5 T5 6 T6 7 T7 8 T8 9 T9 10 T10 11 T11 12 T12 13 T13 14 T14) - 16 => (0 T0 1 T1 2 T2 3 T3 4 T4 5 T5 6 T6 7 T7 8 T8 9 T9 10 T10 11 T11 12 T12 13 T13 14 T14 15 T15) -} - -//////////////////////////////////////////////////////////////////////////////// - -#[cfg(any(feature = "std", feature = "alloc"))] -macro_rules! map_impl { - ( - $ty:ident < K $(: $kbound1:ident $(+ $kbound2:ident)*)*, V $(, $typaram:ident : $bound1:ident $(+ $bound2:ident)*)* >, - $access:ident, - $with_capacity:expr - ) => { - impl<'de, K, V $(, $typaram)*> Deserialize<'de> for $ty - where - K: Deserialize<'de> $(+ $kbound1 $(+ $kbound2)*)*, - V: Deserialize<'de>, - $($typaram: $bound1 $(+ $bound2)*),* - { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - struct MapVisitor { - marker: PhantomData<$ty>, - } - - impl<'de, K, V $(, $typaram)*> Visitor<'de> for MapVisitor - where - K: Deserialize<'de> $(+ $kbound1 $(+ $kbound2)*)*, - V: Deserialize<'de>, - $($typaram: $bound1 $(+ $bound2)*),* - { - type Value = $ty; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("a map") - } - - #[inline] - fn visit_map(self, mut $access: A) -> Result - where - A: MapAccess<'de>, - { - let mut values = $with_capacity; - - while let Some((key, value)) = try!($access.next_entry()) { - values.insert(key, value); - } - - Ok(values) - } - } - - let visitor = MapVisitor { marker: PhantomData }; - deserializer.deserialize_map(visitor) - } - } - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -map_impl!( - BTreeMap, - map, - BTreeMap::new()); - -#[cfg(feature = "std")] -map_impl!( - HashMap, - map, - HashMap::with_capacity_and_hasher(size_hint::cautious(map.size_hint()), S::default())); - -//////////////////////////////////////////////////////////////////////////////// - -#[cfg(feature = "std")] -macro_rules! parse_ip_impl { - ($expecting:tt $ty:ty; $size:tt) => { - impl<'de> Deserialize<'de> for $ty { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - if deserializer.is_human_readable() { - deserializer.deserialize_str(FromStrVisitor::new($expecting)) - } else { - <[u8; $size]>::deserialize(deserializer).map(<$ty>::from) - } - } - } - }; -} - -#[cfg(feature = "std")] -macro_rules! 
variant_identifier { - ( - $name_kind: ident ( $($variant: ident; $bytes: expr; $index: expr),* ) - $expecting_message: expr, - $variants_name: ident - ) => { - enum $name_kind { - $( $variant ),* - } - - static $variants_name: &'static [&'static str] = &[ $( stringify!($variant) ),*]; - - impl<'de> Deserialize<'de> for $name_kind { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - struct KindVisitor; - - impl<'de> Visitor<'de> for KindVisitor { - type Value = $name_kind; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str($expecting_message) - } - - fn visit_u64(self, value: u64) -> Result - where - E: Error, - { - match value { - $( - $index => Ok($name_kind :: $variant), - )* - _ => Err(Error::invalid_value(Unexpected::Unsigned(value), &self),), - } - } - - fn visit_str(self, value: &str) -> Result - where - E: Error, - { - match value { - $( - stringify!($variant) => Ok($name_kind :: $variant), - )* - _ => Err(Error::unknown_variant(value, $variants_name)), - } - } - - fn visit_bytes(self, value: &[u8]) -> Result - where - E: Error, - { - match value { - $( - $bytes => Ok($name_kind :: $variant), - )* - _ => { - match str::from_utf8(value) { - Ok(value) => Err(Error::unknown_variant(value, $variants_name)), - Err(_) => Err(Error::invalid_value(Unexpected::Bytes(value), &self)), - } - } - } - } - } - - deserializer.deserialize_identifier(KindVisitor) - } - } - } -} - -#[cfg(feature = "std")] -macro_rules! deserialize_enum { - ( - $name: ident $name_kind: ident ( $($variant: ident; $bytes: expr; $index: expr),* ) - $expecting_message: expr, - $deserializer: expr - ) => { - variant_identifier!{ - $name_kind ( $($variant; $bytes; $index),* ) - $expecting_message, - VARIANTS - } - - struct EnumVisitor; - impl<'de> Visitor<'de> for EnumVisitor { - type Value = $name; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str(concat!("a ", stringify!($name))) - } - - - fn visit_enum(self, data: A) -> Result - where - A: EnumAccess<'de>, - { - match try!(data.variant()) { - $( - ($name_kind :: $variant, v) => v.newtype_variant().map($name :: $variant), - )* - } - } - } - $deserializer.deserialize_enum(stringify!($name), VARIANTS, EnumVisitor) - } -} - -#[cfg(feature = "std")] -impl<'de> Deserialize<'de> for net::IpAddr { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - if deserializer.is_human_readable() { - deserializer.deserialize_str(FromStrVisitor::new("IP address")) - } else { - use lib::net::IpAddr; - deserialize_enum! { - IpAddr IpAddrKind (V4; b"V4"; 0, V6; b"V6"; 1) - "`V4` or `V6`", - deserializer - } - } - } -} - -#[cfg(feature = "std")] -parse_ip_impl!("IPv4 address" net::Ipv4Addr; 4); - -#[cfg(feature = "std")] -parse_ip_impl!("IPv6 address" net::Ipv6Addr; 16); - -#[cfg(feature = "std")] -macro_rules! 
parse_socket_impl { - ($expecting:tt $ty:ty, $new:expr) => { - impl<'de> Deserialize<'de> for $ty { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - if deserializer.is_human_readable() { - deserializer.deserialize_str(FromStrVisitor::new($expecting)) - } else { - <(_, u16)>::deserialize(deserializer).map(|(ip, port)| $new(ip, port)) - } - } - } - }; -} - -#[cfg(feature = "std")] -impl<'de> Deserialize<'de> for net::SocketAddr { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - if deserializer.is_human_readable() { - deserializer.deserialize_str(FromStrVisitor::new("socket address")) - } else { - use lib::net::SocketAddr; - deserialize_enum! { - SocketAddr SocketAddrKind (V4; b"V4"; 0, V6; b"V6"; 1) - "`V4` or `V6`", - deserializer - } - } - } -} - -#[cfg(feature = "std")] -parse_socket_impl!("IPv4 socket address" net::SocketAddrV4, net::SocketAddrV4::new); - -#[cfg(feature = "std")] -parse_socket_impl!("IPv6 socket address" net::SocketAddrV6, |ip, port| net::SocketAddrV6::new( - ip, port, 0, 0 -)); - -//////////////////////////////////////////////////////////////////////////////// - -#[cfg(feature = "std")] -struct PathVisitor; - -#[cfg(feature = "std")] -impl<'a> Visitor<'a> for PathVisitor { - type Value = &'a Path; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("a borrowed path") - } - - fn visit_borrowed_str(self, v: &'a str) -> Result - where - E: Error, - { - Ok(v.as_ref()) - } - - fn visit_borrowed_bytes(self, v: &'a [u8]) -> Result - where - E: Error, - { - str::from_utf8(v) - .map(AsRef::as_ref) - .map_err(|_| Error::invalid_value(Unexpected::Bytes(v), &self)) - } -} - -#[cfg(feature = "std")] -impl<'de: 'a, 'a> Deserialize<'de> for &'a Path { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_str(PathVisitor) - } -} - -#[cfg(feature = "std")] -struct PathBufVisitor; - -#[cfg(feature = "std")] -impl<'de> Visitor<'de> for PathBufVisitor { - type Value = PathBuf; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("path string") - } - - fn visit_str(self, v: &str) -> Result - where - E: Error, - { - Ok(From::from(v)) - } - - fn visit_string(self, v: String) -> Result - where - E: Error, - { - Ok(From::from(v)) - } - - fn visit_bytes(self, v: &[u8]) -> Result - where - E: Error, - { - str::from_utf8(v) - .map(From::from) - .map_err(|_| Error::invalid_value(Unexpected::Bytes(v), &self)) - } - - fn visit_byte_buf(self, v: Vec) -> Result - where - E: Error, - { - String::from_utf8(v) - .map(From::from) - .map_err(|e| Error::invalid_value(Unexpected::Bytes(&e.into_bytes()), &self)) - } -} - -#[cfg(feature = "std")] -impl<'de> Deserialize<'de> for PathBuf { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_string(PathBufVisitor) - } -} - -#[cfg(all(feature = "std", de_boxed_path))] -forwarded_impl!((), Box, PathBuf::into_boxed_path); - -//////////////////////////////////////////////////////////////////////////////// - -// If this were outside of the serde crate, it would just use: -// -// #[derive(Deserialize)] -// #[serde(variant_identifier)] -#[cfg(all(feature = "std", any(unix, windows)))] -variant_identifier! 
{ - OsStringKind (Unix; b"Unix"; 0, Windows; b"Windows"; 1) - "`Unix` or `Windows`", - OSSTR_VARIANTS -} - -#[cfg(all(feature = "std", any(unix, windows)))] -struct OsStringVisitor; - -#[cfg(all(feature = "std", any(unix, windows)))] -impl<'de> Visitor<'de> for OsStringVisitor { - type Value = OsString; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("os string") - } - - #[cfg(unix)] - fn visit_enum(self, data: A) -> Result - where - A: EnumAccess<'de>, - { - use std::os::unix::ffi::OsStringExt; - - match try!(data.variant()) { - (OsStringKind::Unix, v) => v.newtype_variant().map(OsString::from_vec), - (OsStringKind::Windows, _) => Err(Error::custom( - "cannot deserialize Windows OS string on Unix", - )), - } - } - - #[cfg(windows)] - fn visit_enum(self, data: A) -> Result - where - A: EnumAccess<'de>, - { - use std::os::windows::ffi::OsStringExt; - - match try!(data.variant()) { - (OsStringKind::Windows, v) => v - .newtype_variant::>() - .map(|vec| OsString::from_wide(&vec)), - (OsStringKind::Unix, _) => Err(Error::custom( - "cannot deserialize Unix OS string on Windows", - )), - } - } -} - -#[cfg(all(feature = "std", any(unix, windows)))] -impl<'de> Deserialize<'de> for OsString { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_enum("OsString", OSSTR_VARIANTS, OsStringVisitor) - } -} - -//////////////////////////////////////////////////////////////////////////////// - -#[cfg(any(feature = "std", feature = "alloc"))] -forwarded_impl!((T), Box, Box::new); - -#[cfg(any(feature = "std", feature = "alloc"))] -forwarded_impl!((T), Box<[T]>, Vec::into_boxed_slice); - -#[cfg(any(feature = "std", feature = "alloc"))] -forwarded_impl!((), Box, String::into_boxed_str); - -#[cfg(all( - not(de_rc_dst), - feature = "rc", - any(feature = "std", feature = "alloc") -))] -forwarded_impl! { - /// This impl requires the [`"rc"`] Cargo feature of Serde. - /// - /// Deserializing a data structure containing `Arc` will not attempt to - /// deduplicate `Arc` references to the same data. Every deserialized `Arc` - /// will end up with a strong count of 1. - /// - /// [`"rc"`]: https://serde.rs/feature-flags.html#-features-rc - (T), Arc, Arc::new -} - -#[cfg(all( - not(de_rc_dst), - feature = "rc", - any(feature = "std", feature = "alloc") -))] -forwarded_impl! { - /// This impl requires the [`"rc"`] Cargo feature of Serde. - /// - /// Deserializing a data structure containing `Rc` will not attempt to - /// deduplicate `Rc` references to the same data. Every deserialized `Rc` - /// will end up with a strong count of 1. - /// - /// [`"rc"`]: https://serde.rs/feature-flags.html#-features-rc - (T), Rc, Rc::new -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'de, 'a, T: ?Sized> Deserialize<'de> for Cow<'a, T> -where - T: ToOwned, - T::Owned: Deserialize<'de>, -{ - #[inline] - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - T::Owned::deserialize(deserializer).map(Cow::Owned) - } -} - -//////////////////////////////////////////////////////////////////////////////// - -/// This impl requires the [`"rc"`] Cargo feature of Serde. The resulting -/// `Weak` has a reference count of 0 and cannot be upgraded. 
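(Illustrative aside, not part of the patch hunk above.) The `Weak` impls in this hunk deliberately discard the deserialized value; a minimal sketch of that documented behaviour, assuming serde is built with the `rc` feature and using `serde_json` purely for demonstration:

use std::rc::Weak;

fn weak_demo() {
    // Any input that parses as an Option<u64> is accepted, but the resulting
    // Weak never points at live data, so upgrade() always yields None.
    let w: Weak<u64> = serde_json::from_str("null").unwrap();
    assert!(w.upgrade().is_none());
}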
-/// -/// [`"rc"`]: https://serde.rs/feature-flags.html#-features-rc -#[cfg(all(feature = "rc", any(feature = "std", feature = "alloc")))] -impl<'de, T: ?Sized> Deserialize<'de> for RcWeak -where - T: Deserialize<'de>, -{ - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - try!(Option::::deserialize(deserializer)); - Ok(RcWeak::new()) - } -} - -/// This impl requires the [`"rc"`] Cargo feature of Serde. The resulting -/// `Weak` has a reference count of 0 and cannot be upgraded. -/// -/// [`"rc"`]: https://serde.rs/feature-flags.html#-features-rc -#[cfg(all(feature = "rc", any(feature = "std", feature = "alloc")))] -impl<'de, T: ?Sized> Deserialize<'de> for ArcWeak -where - T: Deserialize<'de>, -{ - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - try!(Option::::deserialize(deserializer)); - Ok(ArcWeak::new()) - } -} - -//////////////////////////////////////////////////////////////////////////////// - -#[cfg(all(de_rc_dst, feature = "rc", any(feature = "std", feature = "alloc")))] -macro_rules! box_forwarded_impl { - ( - $(#[doc = $doc:tt])* - $t:ident - ) => { - $(#[doc = $doc])* - impl<'de, T: ?Sized> Deserialize<'de> for $t - where - Box: Deserialize<'de>, - { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - Box::deserialize(deserializer).map(Into::into) - } - } - }; -} - -#[cfg(all(de_rc_dst, feature = "rc", any(feature = "std", feature = "alloc")))] -box_forwarded_impl! { - /// This impl requires the [`"rc"`] Cargo feature of Serde. - /// - /// Deserializing a data structure containing `Rc` will not attempt to - /// deduplicate `Rc` references to the same data. Every deserialized `Rc` - /// will end up with a strong count of 1. - /// - /// [`"rc"`]: https://serde.rs/feature-flags.html#-features-rc - Rc -} - -#[cfg(all(de_rc_dst, feature = "rc", any(feature = "std", feature = "alloc")))] -box_forwarded_impl! { - /// This impl requires the [`"rc"`] Cargo feature of Serde. - /// - /// Deserializing a data structure containing `Arc` will not attempt to - /// deduplicate `Arc` references to the same data. Every deserialized `Arc` - /// will end up with a strong count of 1. 
- /// - /// [`"rc"`]: https://serde.rs/feature-flags.html#-features-rc - Arc -} - -//////////////////////////////////////////////////////////////////////////////// - -impl<'de, T> Deserialize<'de> for Cell -where - T: Deserialize<'de> + Copy, -{ - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - T::deserialize(deserializer).map(Cell::new) - } -} - -forwarded_impl!((T), RefCell, RefCell::new); - -#[cfg(feature = "std")] -forwarded_impl!((T), Mutex, Mutex::new); - -#[cfg(feature = "std")] -forwarded_impl!((T), RwLock, RwLock::new); - -//////////////////////////////////////////////////////////////////////////////// - -// This is a cleaned-up version of the impl generated by: -// -// #[derive(Deserialize)] -// #[serde(deny_unknown_fields)] -// struct Duration { -// secs: u64, -// nanos: u32, -// } -#[cfg(any(core_duration, feature = "std"))] -impl<'de> Deserialize<'de> for Duration { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - // If this were outside of the serde crate, it would just use: - // - // #[derive(Deserialize)] - // #[serde(field_identifier, rename_all = "lowercase")] - enum Field { - Secs, - Nanos, - } - - impl<'de> Deserialize<'de> for Field { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - struct FieldVisitor; - - impl<'de> Visitor<'de> for FieldVisitor { - type Value = Field; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("`secs` or `nanos`") - } - - fn visit_str(self, value: &str) -> Result - where - E: Error, - { - match value { - "secs" => Ok(Field::Secs), - "nanos" => Ok(Field::Nanos), - _ => Err(Error::unknown_field(value, FIELDS)), - } - } - - fn visit_bytes(self, value: &[u8]) -> Result - where - E: Error, - { - match value { - b"secs" => Ok(Field::Secs), - b"nanos" => Ok(Field::Nanos), - _ => { - let value = ::__private::from_utf8_lossy(value); - Err(Error::unknown_field(&value, FIELDS)) - } - } - } - } - - deserializer.deserialize_identifier(FieldVisitor) - } - } - - fn check_overflow(secs: u64, nanos: u32) -> Result<(), E> - where - E: Error, - { - static NANOS_PER_SEC: u32 = 1_000_000_000; - match secs.checked_add((nanos / NANOS_PER_SEC) as u64) { - Some(_) => Ok(()), - None => Err(E::custom("overflow deserializing Duration")), - } - } - - struct DurationVisitor; - - impl<'de> Visitor<'de> for DurationVisitor { - type Value = Duration; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("struct Duration") - } - - fn visit_seq(self, mut seq: A) -> Result - where - A: SeqAccess<'de>, - { - let secs: u64 = match try!(seq.next_element()) { - Some(value) => value, - None => { - return Err(Error::invalid_length(0, &self)); - } - }; - let nanos: u32 = match try!(seq.next_element()) { - Some(value) => value, - None => { - return Err(Error::invalid_length(1, &self)); - } - }; - try!(check_overflow(secs, nanos)); - Ok(Duration::new(secs, nanos)) - } - - fn visit_map(self, mut map: A) -> Result - where - A: MapAccess<'de>, - { - let mut secs: Option = None; - let mut nanos: Option = None; - while let Some(key) = try!(map.next_key()) { - match key { - Field::Secs => { - if secs.is_some() { - return Err(::duplicate_field("secs")); - } - secs = Some(try!(map.next_value())); - } - Field::Nanos => { - if nanos.is_some() { - return Err(::duplicate_field("nanos")); - } - nanos = Some(try!(map.next_value())); - } - } - } - let secs = match secs { - Some(secs) => secs, - None => return 
Err(::missing_field("secs")), - }; - let nanos = match nanos { - Some(nanos) => nanos, - None => return Err(::missing_field("nanos")), - }; - try!(check_overflow(secs, nanos)); - Ok(Duration::new(secs, nanos)) - } - } - - const FIELDS: &'static [&'static str] = &["secs", "nanos"]; - deserializer.deserialize_struct("Duration", FIELDS, DurationVisitor) - } -} - -//////////////////////////////////////////////////////////////////////////////// - -#[cfg(feature = "std")] -impl<'de> Deserialize<'de> for SystemTime { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - // Reuse duration - enum Field { - Secs, - Nanos, - } - - impl<'de> Deserialize<'de> for Field { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - struct FieldVisitor; - - impl<'de> Visitor<'de> for FieldVisitor { - type Value = Field; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("`secs_since_epoch` or `nanos_since_epoch`") - } - - fn visit_str(self, value: &str) -> Result - where - E: Error, - { - match value { - "secs_since_epoch" => Ok(Field::Secs), - "nanos_since_epoch" => Ok(Field::Nanos), - _ => Err(Error::unknown_field(value, FIELDS)), - } - } - - fn visit_bytes(self, value: &[u8]) -> Result - where - E: Error, - { - match value { - b"secs_since_epoch" => Ok(Field::Secs), - b"nanos_since_epoch" => Ok(Field::Nanos), - _ => { - let value = String::from_utf8_lossy(value); - Err(Error::unknown_field(&value, FIELDS)) - } - } - } - } - - deserializer.deserialize_identifier(FieldVisitor) - } - } - - fn check_overflow(secs: u64, nanos: u32) -> Result<(), E> - where - E: Error, - { - static NANOS_PER_SEC: u32 = 1_000_000_000; - match secs.checked_add((nanos / NANOS_PER_SEC) as u64) { - Some(_) => Ok(()), - None => Err(E::custom("overflow deserializing SystemTime epoch offset")), - } - } - - struct DurationVisitor; - - impl<'de> Visitor<'de> for DurationVisitor { - type Value = Duration; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("struct SystemTime") - } - - fn visit_seq(self, mut seq: A) -> Result - where - A: SeqAccess<'de>, - { - let secs: u64 = match try!(seq.next_element()) { - Some(value) => value, - None => { - return Err(Error::invalid_length(0, &self)); - } - }; - let nanos: u32 = match try!(seq.next_element()) { - Some(value) => value, - None => { - return Err(Error::invalid_length(1, &self)); - } - }; - try!(check_overflow(secs, nanos)); - Ok(Duration::new(secs, nanos)) - } - - fn visit_map(self, mut map: A) -> Result - where - A: MapAccess<'de>, - { - let mut secs: Option = None; - let mut nanos: Option = None; - while let Some(key) = try!(map.next_key()) { - match key { - Field::Secs => { - if secs.is_some() { - return Err(::duplicate_field( - "secs_since_epoch", - )); - } - secs = Some(try!(map.next_value())); - } - Field::Nanos => { - if nanos.is_some() { - return Err(::duplicate_field( - "nanos_since_epoch", - )); - } - nanos = Some(try!(map.next_value())); - } - } - } - let secs = match secs { - Some(secs) => secs, - None => return Err(::missing_field("secs_since_epoch")), - }; - let nanos = match nanos { - Some(nanos) => nanos, - None => return Err(::missing_field("nanos_since_epoch")), - }; - try!(check_overflow(secs, nanos)); - Ok(Duration::new(secs, nanos)) - } - } - - const FIELDS: &'static [&'static str] = &["secs_since_epoch", "nanos_since_epoch"]; - let duration = try!(deserializer.deserialize_struct("SystemTime", FIELDS, DurationVisitor)); - 
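(Illustrative aside, not part of the patch hunk above.) The hand-written `Duration` visitor earlier in this hunk accepts the same `secs`/`nanos` shape a derived struct would; a minimal round-trip sketch, assuming `serde_json` solely for demonstration:

use std::time::Duration;

fn duration_demo() -> Result<(), serde_json::Error> {
    // The visitor accepts both positional (seq) and named-field (map) input;
    // JSON supplies the named-field form here.
    let d: Duration = serde_json::from_str(r#"{"secs": 2, "nanos": 5}"#)?;
    assert_eq!(d, Duration::new(2, 5));
    Ok(())
}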
#[cfg(systemtime_checked_add)] - let ret = UNIX_EPOCH - .checked_add(duration) - .ok_or_else(|| D::Error::custom("overflow deserializing SystemTime")); - #[cfg(not(systemtime_checked_add))] - let ret = Ok(UNIX_EPOCH + duration); - ret - } -} - -//////////////////////////////////////////////////////////////////////////////// - -// Similar to: -// -// #[derive(Deserialize)] -// #[serde(deny_unknown_fields)] -// struct Range { -// start: u64, -// end: u32, -// } -impl<'de, Idx> Deserialize<'de> for Range -where - Idx: Deserialize<'de>, -{ - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let (start, end) = deserializer.deserialize_struct( - "Range", - range::FIELDS, - range::RangeVisitor { - expecting: "struct Range", - phantom: PhantomData, - }, - )?; - Ok(start..end) - } -} - -#[cfg(range_inclusive)] -impl<'de, Idx> Deserialize<'de> for RangeInclusive -where - Idx: Deserialize<'de>, -{ - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let (start, end) = deserializer.deserialize_struct( - "RangeInclusive", - range::FIELDS, - range::RangeVisitor { - expecting: "struct RangeInclusive", - phantom: PhantomData, - }, - )?; - Ok(RangeInclusive::new(start, end)) - } -} - -mod range { - use lib::*; - - use de::{Deserialize, Deserializer, Error, MapAccess, SeqAccess, Visitor}; - - pub const FIELDS: &'static [&'static str] = &["start", "end"]; - - // If this were outside of the serde crate, it would just use: - // - // #[derive(Deserialize)] - // #[serde(field_identifier, rename_all = "lowercase")] - enum Field { - Start, - End, - } - - impl<'de> Deserialize<'de> for Field { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - struct FieldVisitor; - - impl<'de> Visitor<'de> for FieldVisitor { - type Value = Field; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("`start` or `end`") - } - - fn visit_str(self, value: &str) -> Result - where - E: Error, - { - match value { - "start" => Ok(Field::Start), - "end" => Ok(Field::End), - _ => Err(Error::unknown_field(value, FIELDS)), - } - } - - fn visit_bytes(self, value: &[u8]) -> Result - where - E: Error, - { - match value { - b"start" => Ok(Field::Start), - b"end" => Ok(Field::End), - _ => { - let value = ::__private::from_utf8_lossy(value); - Err(Error::unknown_field(&value, FIELDS)) - } - } - } - } - - deserializer.deserialize_identifier(FieldVisitor) - } - } - - pub struct RangeVisitor { - pub expecting: &'static str, - pub phantom: PhantomData, - } - - impl<'de, Idx> Visitor<'de> for RangeVisitor - where - Idx: Deserialize<'de>, - { - type Value = (Idx, Idx); - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str(self.expecting) - } - - fn visit_seq(self, mut seq: A) -> Result - where - A: SeqAccess<'de>, - { - let start: Idx = match try!(seq.next_element()) { - Some(value) => value, - None => { - return Err(Error::invalid_length(0, &self)); - } - }; - let end: Idx = match try!(seq.next_element()) { - Some(value) => value, - None => { - return Err(Error::invalid_length(1, &self)); - } - }; - Ok((start, end)) - } - - fn visit_map(self, mut map: A) -> Result - where - A: MapAccess<'de>, - { - let mut start: Option = None; - let mut end: Option = None; - while let Some(key) = try!(map.next_key()) { - match key { - Field::Start => { - if start.is_some() { - return Err(::duplicate_field("start")); - } - start = Some(try!(map.next_value())); - } - Field::End => { - if end.is_some() 
{ - return Err(::duplicate_field("end")); - } - end = Some(try!(map.next_value())); - } - } - } - let start = match start { - Some(start) => start, - None => return Err(::missing_field("start")), - }; - let end = match end { - Some(end) => end, - None => return Err(::missing_field("end")), - }; - Ok((start, end)) - } - } -} - -//////////////////////////////////////////////////////////////////////////////// - -#[cfg(any(ops_bound, collections_bound))] -impl<'de, T> Deserialize<'de> for Bound -where - T: Deserialize<'de>, -{ - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - enum Field { - Unbounded, - Included, - Excluded, - } - - impl<'de> Deserialize<'de> for Field { - #[inline] - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - struct FieldVisitor; - - impl<'de> Visitor<'de> for FieldVisitor { - type Value = Field; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("`Unbounded`, `Included` or `Excluded`") - } - - fn visit_u64(self, value: u64) -> Result - where - E: Error, - { - match value { - 0 => Ok(Field::Unbounded), - 1 => Ok(Field::Included), - 2 => Ok(Field::Excluded), - _ => Err(Error::invalid_value(Unexpected::Unsigned(value), &self)), - } - } - - fn visit_str(self, value: &str) -> Result - where - E: Error, - { - match value { - "Unbounded" => Ok(Field::Unbounded), - "Included" => Ok(Field::Included), - "Excluded" => Ok(Field::Excluded), - _ => Err(Error::unknown_variant(value, VARIANTS)), - } - } - - fn visit_bytes(self, value: &[u8]) -> Result - where - E: Error, - { - match value { - b"Unbounded" => Ok(Field::Unbounded), - b"Included" => Ok(Field::Included), - b"Excluded" => Ok(Field::Excluded), - _ => match str::from_utf8(value) { - Ok(value) => Err(Error::unknown_variant(value, VARIANTS)), - Err(_) => { - Err(Error::invalid_value(Unexpected::Bytes(value), &self)) - } - }, - } - } - } - - deserializer.deserialize_identifier(FieldVisitor) - } - } - - struct BoundVisitor(PhantomData>); - - impl<'de, T> Visitor<'de> for BoundVisitor - where - T: Deserialize<'de>, - { - type Value = Bound; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("enum Bound") - } - - fn visit_enum(self, data: A) -> Result - where - A: EnumAccess<'de>, - { - match try!(data.variant()) { - (Field::Unbounded, v) => v.unit_variant().map(|()| Bound::Unbounded), - (Field::Included, v) => v.newtype_variant().map(Bound::Included), - (Field::Excluded, v) => v.newtype_variant().map(Bound::Excluded), - } - } - } - - const VARIANTS: &'static [&'static str] = &["Unbounded", "Included", "Excluded"]; - - deserializer.deserialize_enum("Bound", VARIANTS, BoundVisitor(PhantomData)) - } -} - -//////////////////////////////////////////////////////////////////////////////// - -macro_rules! nonzero_integers { - ( $( $T: ident, )+ ) => { - $( - #[cfg(num_nonzero)] - impl<'de> Deserialize<'de> for num::$T { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let value = try!(Deserialize::deserialize(deserializer)); - match ::new(value) { - Some(nonzero) => Ok(nonzero), - None => Err(Error::custom("expected a non-zero value")), - } - } - } - )+ - }; -} - -nonzero_integers! { - NonZeroU8, - NonZeroU16, - NonZeroU32, - NonZeroU64, - NonZeroUsize, -} - -#[cfg(num_nonzero_signed)] -nonzero_integers! 
{ - NonZeroI8, - NonZeroI16, - NonZeroI32, - NonZeroI64, - NonZeroIsize, -} - -// Currently 128-bit integers do not work on Emscripten targets so we need an -// additional `#[cfg]` -serde_if_integer128! { - nonzero_integers! { - NonZeroU128, - } - - #[cfg(num_nonzero_signed)] - nonzero_integers! { - NonZeroI128, - } -} - -//////////////////////////////////////////////////////////////////////////////// - -impl<'de, T, E> Deserialize<'de> for Result -where - T: Deserialize<'de>, - E: Deserialize<'de>, -{ - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - // If this were outside of the serde crate, it would just use: - // - // #[derive(Deserialize)] - // #[serde(variant_identifier)] - enum Field { - Ok, - Err, - } - - impl<'de> Deserialize<'de> for Field { - #[inline] - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - struct FieldVisitor; - - impl<'de> Visitor<'de> for FieldVisitor { - type Value = Field; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("`Ok` or `Err`") - } - - fn visit_u64(self, value: u64) -> Result - where - E: Error, - { - match value { - 0 => Ok(Field::Ok), - 1 => Ok(Field::Err), - _ => Err(Error::invalid_value(Unexpected::Unsigned(value), &self)), - } - } - - fn visit_str(self, value: &str) -> Result - where - E: Error, - { - match value { - "Ok" => Ok(Field::Ok), - "Err" => Ok(Field::Err), - _ => Err(Error::unknown_variant(value, VARIANTS)), - } - } - - fn visit_bytes(self, value: &[u8]) -> Result - where - E: Error, - { - match value { - b"Ok" => Ok(Field::Ok), - b"Err" => Ok(Field::Err), - _ => match str::from_utf8(value) { - Ok(value) => Err(Error::unknown_variant(value, VARIANTS)), - Err(_) => { - Err(Error::invalid_value(Unexpected::Bytes(value), &self)) - } - }, - } - } - } - - deserializer.deserialize_identifier(FieldVisitor) - } - } - - struct ResultVisitor(PhantomData>); - - impl<'de, T, E> Visitor<'de> for ResultVisitor - where - T: Deserialize<'de>, - E: Deserialize<'de>, - { - type Value = Result; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("enum Result") - } - - fn visit_enum(self, data: A) -> Result - where - A: EnumAccess<'de>, - { - match try!(data.variant()) { - (Field::Ok, v) => v.newtype_variant().map(Ok), - (Field::Err, v) => v.newtype_variant().map(Err), - } - } - } - - const VARIANTS: &'static [&'static str] = &["Ok", "Err"]; - - deserializer.deserialize_enum("Result", VARIANTS, ResultVisitor(PhantomData)) - } -} - -//////////////////////////////////////////////////////////////////////////////// - -impl<'de, T> Deserialize<'de> for Wrapping -where - T: Deserialize<'de>, -{ - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - Deserialize::deserialize(deserializer).map(Wrapping) - } -} - -#[cfg(all(feature = "std", std_atomic))] -macro_rules! atomic_impl { - ($($ty:ident)*) => { - $( - impl<'de> Deserialize<'de> for $ty { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - Deserialize::deserialize(deserializer).map(Self::new) - } - } - )* - }; -} - -#[cfg(all(feature = "std", std_atomic))] -atomic_impl! { - AtomicBool - AtomicI8 AtomicI16 AtomicI32 AtomicIsize - AtomicU8 AtomicU16 AtomicU32 AtomicUsize -} - -#[cfg(all(feature = "std", std_atomic64))] -atomic_impl! 
{ - AtomicI64 AtomicU64 -} - -#[cfg(feature = "std")] -struct FromStrVisitor { - expecting: &'static str, - ty: PhantomData, -} - -#[cfg(feature = "std")] -impl FromStrVisitor { - fn new(expecting: &'static str) -> Self { - FromStrVisitor { - expecting: expecting, - ty: PhantomData, - } - } -} - -#[cfg(feature = "std")] -impl<'de, T> Visitor<'de> for FromStrVisitor -where - T: str::FromStr, - T::Err: fmt::Display, -{ - type Value = T; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str(self.expecting) - } - - fn visit_str(self, s: &str) -> Result - where - E: Error, - { - s.parse().map_err(Error::custom) - } -} diff --git a/vendor/serde/src/de/mod.rs b/vendor/serde/src/de/mod.rs deleted file mode 100644 index 54e2fd64..00000000 --- a/vendor/serde/src/de/mod.rs +++ /dev/null @@ -1,2275 +0,0 @@ -//! Generic data structure deserialization framework. -//! -//! The two most important traits in this module are [`Deserialize`] and -//! [`Deserializer`]. -//! -//! - **A type that implements `Deserialize` is a data structure** that can be -//! deserialized from any data format supported by Serde, and conversely -//! - **A type that implements `Deserializer` is a data format** that can -//! deserialize any data structure supported by Serde. -//! -//! # The Deserialize trait -//! -//! Serde provides [`Deserialize`] implementations for many Rust primitive and -//! standard library types. The complete list is below. All of these can be -//! deserialized using Serde out of the box. -//! -//! Additionally, Serde provides a procedural macro called [`serde_derive`] to -//! automatically generate [`Deserialize`] implementations for structs and enums -//! in your program. See the [derive section of the manual] for how to use this. -//! -//! In rare cases it may be necessary to implement [`Deserialize`] manually for -//! some type in your program. See the [Implementing `Deserialize`] section of -//! the manual for more about this. -//! -//! Third-party crates may provide [`Deserialize`] implementations for types -//! that they expose. For example the [`linked-hash-map`] crate provides a -//! [`LinkedHashMap`] type that is deserializable by Serde because the -//! crate provides an implementation of [`Deserialize`] for it. -//! -//! # The Deserializer trait -//! -//! [`Deserializer`] implementations are provided by third-party crates, for -//! example [`serde_json`], [`serde_yaml`] and [`bincode`]. -//! -//! A partial list of well-maintained formats is given on the [Serde -//! website][data formats]. -//! -//! # Implementations of Deserialize provided by Serde -//! -//! This is a slightly different set of types than what is supported for -//! serialization. Some types can be serialized by Serde but not deserialized. -//! One example is `OsStr`. -//! -//! - **Primitive types**: -//! - bool -//! - i8, i16, i32, i64, i128, isize -//! - u8, u16, u32, u64, u128, usize -//! - f32, f64 -//! - char -//! - **Compound types**: -//! - \[T; 0\] through \[T; 32\] -//! - tuples up to size 16 -//! - **Common standard library types**: -//! - String -//! - Option\ -//! - Result\ -//! - PhantomData\ -//! - **Wrapper types**: -//! - Box\ -//! - Box\<\[T\]\> -//! - Box\ -//! - Cow\<'a, T\> -//! - Cell\ -//! - RefCell\ -//! - Mutex\ -//! - RwLock\ -//! - Rc\ *(if* features = ["rc"] *is enabled)* -//! - Arc\ *(if* features = ["rc"] *is enabled)* -//! - **Collection types**: -//! - BTreeMap\ -//! - BTreeSet\ -//! - BinaryHeap\ -//! - HashMap\ -//! - HashSet\ -//! - LinkedList\ -//! 
- VecDeque\ -//! - Vec\ -//! - **Zero-copy types**: -//! - &str -//! - &\[u8\] -//! - **FFI types**: -//! - CString -//! - Box\ -//! - OsString -//! - **Miscellaneous standard library types**: -//! - Duration -//! - SystemTime -//! - Path -//! - PathBuf -//! - Range\ -//! - RangeInclusive\ -//! - Bound\ -//! - num::NonZero* -//! - `!` *(unstable)* -//! - **Net types**: -//! - IpAddr -//! - Ipv4Addr -//! - Ipv6Addr -//! - SocketAddr -//! - SocketAddrV4 -//! - SocketAddrV6 -//! -//! [Implementing `Deserialize`]: https://serde.rs/impl-deserialize.html -//! [`Deserialize`]: ../trait.Deserialize.html -//! [`Deserializer`]: ../trait.Deserializer.html -//! [`LinkedHashMap`]: https://docs.rs/linked-hash-map/*/linked_hash_map/struct.LinkedHashMap.html -//! [`bincode`]: https://github.com/servo/bincode -//! [`linked-hash-map`]: https://crates.io/crates/linked-hash-map -//! [`serde_derive`]: https://crates.io/crates/serde_derive -//! [`serde_json`]: https://github.com/serde-rs/json -//! [`serde_yaml`]: https://github.com/dtolnay/serde-yaml -//! [derive section of the manual]: https://serde.rs/derive.html -//! [data formats]: https://serde.rs/#data-formats - -use lib::*; - -//////////////////////////////////////////////////////////////////////////////// - -pub mod value; - -mod ignored_any; -mod impls; -mod utf8; - -pub use self::ignored_any::IgnoredAny; - -#[cfg(feature = "std")] -#[doc(no_inline)] -pub use std::error::Error as StdError; -#[cfg(not(feature = "std"))] -#[doc(no_inline)] -pub use std_error::Error as StdError; - -//////////////////////////////////////////////////////////////////////////////// - -macro_rules! declare_error_trait { - (Error: Sized $(+ $($supertrait:ident)::+)*) => { - /// The `Error` trait allows `Deserialize` implementations to create descriptive - /// error messages belonging to the `Deserializer` against which they are - /// currently running. - /// - /// Every `Deserializer` declares an `Error` type that encompasses both - /// general-purpose deserialization errors as well as errors specific to the - /// particular deserialization format. For example the `Error` type of - /// `serde_json` can represent errors like an invalid JSON escape sequence or an - /// unterminated string literal, in addition to the error cases that are part of - /// this trait. - /// - /// Most deserializers should only need to provide the `Error::custom` method - /// and inherit the default behavior for the other methods. - /// - /// # Example implementation - /// - /// The [example data format] presented on the website shows an error - /// type appropriate for a basic JSON data format. - /// - /// [example data format]: https://serde.rs/data-format.html - pub trait Error: Sized $(+ $($supertrait)::+)* { - /// Raised when there is general error when deserializing a type. - /// - /// The message should not be capitalized and should not end with a period. 
- /// - /// ```edition2018 - /// # use std::str::FromStr; - /// # - /// # struct IpAddr; - /// # - /// # impl FromStr for IpAddr { - /// # type Err = String; - /// # - /// # fn from_str(_: &str) -> Result { - /// # unimplemented!() - /// # } - /// # } - /// # - /// use serde::de::{self, Deserialize, Deserializer}; - /// - /// impl<'de> Deserialize<'de> for IpAddr { - /// fn deserialize(deserializer: D) -> Result - /// where - /// D: Deserializer<'de>, - /// { - /// let s = String::deserialize(deserializer)?; - /// s.parse().map_err(de::Error::custom) - /// } - /// } - /// ``` - fn custom(msg: T) -> Self - where - T: Display; - - /// Raised when a `Deserialize` receives a type different from what it was - /// expecting. - /// - /// The `unexp` argument provides information about what type was received. - /// This is the type that was present in the input file or other source data - /// of the Deserializer. - /// - /// The `exp` argument provides information about what type was being - /// expected. This is the type that is written in the program. - /// - /// For example if we try to deserialize a String out of a JSON file - /// containing an integer, the unexpected type is the integer and the - /// expected type is the string. - #[cold] - fn invalid_type(unexp: Unexpected, exp: &Expected) -> Self { - Error::custom(format_args!("invalid type: {}, expected {}", unexp, exp)) - } - - /// Raised when a `Deserialize` receives a value of the right type but that - /// is wrong for some other reason. - /// - /// The `unexp` argument provides information about what value was received. - /// This is the value that was present in the input file or other source - /// data of the Deserializer. - /// - /// The `exp` argument provides information about what value was being - /// expected. This is the type that is written in the program. - /// - /// For example if we try to deserialize a String out of some binary data - /// that is not valid UTF-8, the unexpected value is the bytes and the - /// expected value is a string. - #[cold] - fn invalid_value(unexp: Unexpected, exp: &Expected) -> Self { - Error::custom(format_args!("invalid value: {}, expected {}", unexp, exp)) - } - - /// Raised when deserializing a sequence or map and the input data contains - /// too many or too few elements. - /// - /// The `len` argument is the number of elements encountered. The sequence - /// or map may have expected more arguments or fewer arguments. - /// - /// The `exp` argument provides information about what data was being - /// expected. For example `exp` might say that a tuple of size 6 was - /// expected. - #[cold] - fn invalid_length(len: usize, exp: &Expected) -> Self { - Error::custom(format_args!("invalid length {}, expected {}", len, exp)) - } - - /// Raised when a `Deserialize` enum type received a variant with an - /// unrecognized name. - #[cold] - fn unknown_variant(variant: &str, expected: &'static [&'static str]) -> Self { - if expected.is_empty() { - Error::custom(format_args!( - "unknown variant `{}`, there are no variants", - variant - )) - } else { - Error::custom(format_args!( - "unknown variant `{}`, expected {}", - variant, - OneOf { names: expected } - )) - } - } - - /// Raised when a `Deserialize` struct type received a field with an - /// unrecognized name. 
- #[cold] - fn unknown_field(field: &str, expected: &'static [&'static str]) -> Self { - if expected.is_empty() { - Error::custom(format_args!( - "unknown field `{}`, there are no fields", - field - )) - } else { - Error::custom(format_args!( - "unknown field `{}`, expected {}", - field, - OneOf { names: expected } - )) - } - } - - /// Raised when a `Deserialize` struct type expected to receive a required - /// field with a particular name but that field was not present in the - /// input. - #[cold] - fn missing_field(field: &'static str) -> Self { - Error::custom(format_args!("missing field `{}`", field)) - } - - /// Raised when a `Deserialize` struct type received more than one of the - /// same field. - #[cold] - fn duplicate_field(field: &'static str) -> Self { - Error::custom(format_args!("duplicate field `{}`", field)) - } - } - } -} - -#[cfg(feature = "std")] -declare_error_trait!(Error: Sized + StdError); - -#[cfg(not(feature = "std"))] -declare_error_trait!(Error: Sized + Debug + Display); - -/// `Unexpected` represents an unexpected invocation of any one of the `Visitor` -/// trait methods. -/// -/// This is used as an argument to the `invalid_type`, `invalid_value`, and -/// `invalid_length` methods of the `Error` trait to build error messages. -/// -/// ```edition2018 -/// # use std::fmt; -/// # -/// # use serde::de::{self, Unexpected, Visitor}; -/// # -/// # struct Example; -/// # -/// # impl<'de> Visitor<'de> for Example { -/// # type Value = (); -/// # -/// # fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { -/// # write!(formatter, "definitely not a boolean") -/// # } -/// # -/// fn visit_bool(self, v: bool) -> Result -/// where -/// E: de::Error, -/// { -/// Err(de::Error::invalid_type(Unexpected::Bool(v), &self)) -/// } -/// # } -/// ``` -#[derive(Copy, Clone, PartialEq, Debug)] -pub enum Unexpected<'a> { - /// The input contained a boolean value that was not expected. - Bool(bool), - - /// The input contained an unsigned integer `u8`, `u16`, `u32` or `u64` that - /// was not expected. - Unsigned(u64), - - /// The input contained a signed integer `i8`, `i16`, `i32` or `i64` that - /// was not expected. - Signed(i64), - - /// The input contained a floating point `f32` or `f64` that was not - /// expected. - Float(f64), - - /// The input contained a `char` that was not expected. - Char(char), - - /// The input contained a `&str` or `String` that was not expected. - Str(&'a str), - - /// The input contained a `&[u8]` or `Vec` that was not expected. - Bytes(&'a [u8]), - - /// The input contained a unit `()` that was not expected. - Unit, - - /// The input contained an `Option` that was not expected. - Option, - - /// The input contained a newtype struct that was not expected. - NewtypeStruct, - - /// The input contained a sequence that was not expected. - Seq, - - /// The input contained a map that was not expected. - Map, - - /// The input contained an enum that was not expected. - Enum, - - /// The input contained a unit variant that was not expected. - UnitVariant, - - /// The input contained a newtype variant that was not expected. - NewtypeVariant, - - /// The input contained a tuple variant that was not expected. - TupleVariant, - - /// The input contained a struct variant that was not expected. - StructVariant, - - /// A message stating what uncategorized thing the input contained that was - /// not expected. - /// - /// The message should be a noun or noun phrase, not capitalized and without - /// a period. 
An example message is "unoriginal superhero". - Other(&'a str), -} - -impl<'a> fmt::Display for Unexpected<'a> { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - use self::Unexpected::*; - match *self { - Bool(b) => write!(formatter, "boolean `{}`", b), - Unsigned(i) => write!(formatter, "integer `{}`", i), - Signed(i) => write!(formatter, "integer `{}`", i), - Float(f) => write!(formatter, "floating point `{}`", f), - Char(c) => write!(formatter, "character `{}`", c), - Str(s) => write!(formatter, "string {:?}", s), - Bytes(_) => write!(formatter, "byte array"), - Unit => write!(formatter, "unit value"), - Option => write!(formatter, "Option value"), - NewtypeStruct => write!(formatter, "newtype struct"), - Seq => write!(formatter, "sequence"), - Map => write!(formatter, "map"), - Enum => write!(formatter, "enum"), - UnitVariant => write!(formatter, "unit variant"), - NewtypeVariant => write!(formatter, "newtype variant"), - TupleVariant => write!(formatter, "tuple variant"), - StructVariant => write!(formatter, "struct variant"), - Other(other) => formatter.write_str(other), - } - } -} - -/// `Expected` represents an explanation of what data a `Visitor` was expecting -/// to receive. -/// -/// This is used as an argument to the `invalid_type`, `invalid_value`, and -/// `invalid_length` methods of the `Error` trait to build error messages. The -/// message should be a noun or noun phrase that completes the sentence "This -/// Visitor expects to receive ...", for example the message could be "an -/// integer between 0 and 64". The message should not be capitalized and should -/// not end with a period. -/// -/// Within the context of a `Visitor` implementation, the `Visitor` itself -/// (`&self`) is an implementation of this trait. -/// -/// ```edition2018 -/// # use std::fmt; -/// # -/// # use serde::de::{self, Unexpected, Visitor}; -/// # -/// # struct Example; -/// # -/// # impl<'de> Visitor<'de> for Example { -/// # type Value = (); -/// # -/// # fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { -/// # write!(formatter, "definitely not a boolean") -/// # } -/// # -/// fn visit_bool(self, v: bool) -> Result -/// where -/// E: de::Error, -/// { -/// Err(de::Error::invalid_type(Unexpected::Bool(v), &self)) -/// } -/// # } -/// ``` -/// -/// Outside of a `Visitor`, `&"..."` can be used. -/// -/// ```edition2018 -/// # use serde::de::{self, Unexpected}; -/// # -/// # fn example() -> Result<(), E> -/// # where -/// # E: de::Error, -/// # { -/// # let v = true; -/// return Err(de::Error::invalid_type(Unexpected::Bool(v), &"a negative integer")); -/// # } -/// ``` -pub trait Expected { - /// Format an explanation of what data was being expected. Same signature as - /// the `Display` and `Debug` traits. - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result; -} - -impl<'de, T> Expected for T -where - T: Visitor<'de>, -{ - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - self.expecting(formatter) - } -} - -impl<'a> Expected for &'a str { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str(self) - } -} - -impl<'a> Display for Expected + 'a { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - Expected::fmt(self, formatter) - } -} - -//////////////////////////////////////////////////////////////////////////////// - -/// A **data structure** that can be deserialized from any data format supported -/// by Serde. 
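(Illustrative aside, not part of the patch hunk above.) Most `Deserialize` impls are not written by hand like the ones deleted in this file; a minimal sketch of the usual derive route, assuming the separate `serde_derive` crate (or the `derive` feature) is available:

use serde::Deserialize;

// The derive macro expands to a field-identifier enum plus a visitor, much
// like the hand-written Duration and Range impls removed in this hunk.
#[derive(Deserialize, Debug)]
struct Rgb {
    r: u8,
    g: u8,
    b: u8,
}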
-/// -/// Serde provides `Deserialize` implementations for many Rust primitive and -/// standard library types. The complete list is [here][de]. All of these can -/// be deserialized using Serde out of the box. -/// -/// Additionally, Serde provides a procedural macro called `serde_derive` to -/// automatically generate `Deserialize` implementations for structs and enums -/// in your program. See the [derive section of the manual][derive] for how to -/// use this. -/// -/// In rare cases it may be necessary to implement `Deserialize` manually for -/// some type in your program. See the [Implementing -/// `Deserialize`][impl-deserialize] section of the manual for more about this. -/// -/// Third-party crates may provide `Deserialize` implementations for types that -/// they expose. For example the `linked-hash-map` crate provides a -/// `LinkedHashMap` type that is deserializable by Serde because the crate -/// provides an implementation of `Deserialize` for it. -/// -/// [de]: https://docs.serde.rs/serde/de/index.html -/// [derive]: https://serde.rs/derive.html -/// [impl-deserialize]: https://serde.rs/impl-deserialize.html -/// -/// # Lifetime -/// -/// The `'de` lifetime of this trait is the lifetime of data that may be -/// borrowed by `Self` when deserialized. See the page [Understanding -/// deserializer lifetimes] for a more detailed explanation of these lifetimes. -/// -/// [Understanding deserializer lifetimes]: https://serde.rs/lifetimes.html -pub trait Deserialize<'de>: Sized { - /// Deserialize this value from the given Serde deserializer. - /// - /// See the [Implementing `Deserialize`][impl-deserialize] section of the - /// manual for more information about how to implement this method. - /// - /// [impl-deserialize]: https://serde.rs/impl-deserialize.html - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>; - - /// Deserializes a value into `self` from the given Deserializer. - /// - /// The purpose of this method is to allow the deserializer to reuse - /// resources and avoid copies. As such, if this method returns an error, - /// `self` will be in an indeterminate state where some parts of the struct - /// have been overwritten. Although whatever state that is will be - /// memory-safe. - /// - /// This is generally useful when repeatedly deserializing values that - /// are processed one at a time, where the value of `self` doesn't matter - /// when the next deserialization occurs. - /// - /// If you manually implement this, your recursive deserializations should - /// use `deserialize_in_place`. - /// - /// This method is stable and an official public API, but hidden from the - /// documentation because it is almost never what newbies are looking for. - /// Showing it in rustdoc would cause it to be featured more prominently - /// than it deserves. - #[doc(hidden)] - fn deserialize_in_place(deserializer: D, place: &mut Self) -> Result<(), D::Error> - where - D: Deserializer<'de>, - { - // Default implementation just delegates to `deserialize` impl. - *place = Deserialize::deserialize(deserializer)?; - Ok(()) - } -} - -/// A data structure that can be deserialized without borrowing any data from -/// the deserializer. -/// -/// This is primarily useful for trait bounds on functions. For example a -/// `from_str` function may be able to deserialize a data structure that borrows -/// from the input string, but a `from_reader` function may only deserialize -/// owned data. 
-/// -/// ```edition2018 -/// # use serde::de::{Deserialize, DeserializeOwned}; -/// # use std::io::{Read, Result}; -/// # -/// # trait Ignore { -/// fn from_str<'a, T>(s: &'a str) -> Result -/// where -/// T: Deserialize<'a>; -/// -/// fn from_reader(rdr: R) -> Result -/// where -/// R: Read, -/// T: DeserializeOwned; -/// # } -/// ``` -/// -/// # Lifetime -/// -/// The relationship between `Deserialize` and `DeserializeOwned` in trait -/// bounds is explained in more detail on the page [Understanding deserializer -/// lifetimes]. -/// -/// [Understanding deserializer lifetimes]: https://serde.rs/lifetimes.html -pub trait DeserializeOwned: for<'de> Deserialize<'de> {} -impl DeserializeOwned for T where T: for<'de> Deserialize<'de> {} - -/// `DeserializeSeed` is the stateful form of the `Deserialize` trait. If you -/// ever find yourself looking for a way to pass data into a `Deserialize` impl, -/// this trait is the way to do it. -/// -/// As one example of stateful deserialization consider deserializing a JSON -/// array into an existing buffer. Using the `Deserialize` trait we could -/// deserialize a JSON array into a `Vec` but it would be a freshly allocated -/// `Vec`; there is no way for `Deserialize` to reuse a previously allocated -/// buffer. Using `DeserializeSeed` instead makes this possible as in the -/// example code below. -/// -/// The canonical API for stateless deserialization looks like this: -/// -/// ```edition2018 -/// # use serde::Deserialize; -/// # -/// # enum Error {} -/// # -/// fn func<'de, T: Deserialize<'de>>() -> Result -/// # { -/// # unimplemented!() -/// # } -/// ``` -/// -/// Adjusting an API like this to support stateful deserialization is a matter -/// of accepting a seed as input: -/// -/// ```edition2018 -/// # use serde::de::DeserializeSeed; -/// # -/// # enum Error {} -/// # -/// fn func_seed<'de, T: DeserializeSeed<'de>>(seed: T) -> Result -/// # { -/// # let _ = seed; -/// # unimplemented!() -/// # } -/// ``` -/// -/// In practice the majority of deserialization is stateless. An API expecting a -/// seed can be appeased by passing `std::marker::PhantomData` as a seed in the -/// case of stateless deserialization. -/// -/// # Lifetime -/// -/// The `'de` lifetime of this trait is the lifetime of data that may be -/// borrowed by `Self::Value` when deserialized. See the page [Understanding -/// deserializer lifetimes] for a more detailed explanation of these lifetimes. -/// -/// [Understanding deserializer lifetimes]: https://serde.rs/lifetimes.html -/// -/// # Example -/// -/// Suppose we have JSON that looks like `[[1, 2], [3, 4, 5], [6]]` and we need -/// to deserialize it into a flat representation like `vec![1, 2, 3, 4, 5, 6]`. -/// Allocating a brand new `Vec` for each subarray would be slow. Instead we -/// would like to allocate a single `Vec` and then deserialize each subarray -/// into it. This requires stateful deserialization using the `DeserializeSeed` -/// trait. -/// -/// ```edition2018 -/// use std::fmt; -/// use std::marker::PhantomData; -/// -/// use serde::de::{Deserialize, DeserializeSeed, Deserializer, SeqAccess, Visitor}; -/// -/// // A DeserializeSeed implementation that uses stateful deserialization to -/// // append array elements onto the end of an existing vector. The preexisting -/// // state ("seed") in this case is the Vec. The `deserialize` method of -/// // `ExtendVec` will be traversing the inner arrays of the JSON input and -/// // appending each integer into the existing Vec. 
-/// struct ExtendVec<'a, T: 'a>(&'a mut Vec); -/// -/// impl<'de, 'a, T> DeserializeSeed<'de> for ExtendVec<'a, T> -/// where -/// T: Deserialize<'de>, -/// { -/// // The return type of the `deserialize` method. This implementation -/// // appends onto an existing vector but does not create any new data -/// // structure, so the return type is (). -/// type Value = (); -/// -/// fn deserialize(self, deserializer: D) -> Result -/// where -/// D: Deserializer<'de>, -/// { -/// // Visitor implementation that will walk an inner array of the JSON -/// // input. -/// struct ExtendVecVisitor<'a, T: 'a>(&'a mut Vec); -/// -/// impl<'de, 'a, T> Visitor<'de> for ExtendVecVisitor<'a, T> -/// where -/// T: Deserialize<'de>, -/// { -/// type Value = (); -/// -/// fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { -/// write!(formatter, "an array of integers") -/// } -/// -/// fn visit_seq(self, mut seq: A) -> Result<(), A::Error> -/// where -/// A: SeqAccess<'de>, -/// { -/// // Visit each element in the inner array and push it onto -/// // the existing vector. -/// while let Some(elem) = seq.next_element()? { -/// self.0.push(elem); -/// } -/// Ok(()) -/// } -/// } -/// -/// deserializer.deserialize_seq(ExtendVecVisitor(self.0)) -/// } -/// } -/// -/// // Visitor implementation that will walk the outer array of the JSON input. -/// struct FlattenedVecVisitor(PhantomData); -/// -/// impl<'de, T> Visitor<'de> for FlattenedVecVisitor -/// where -/// T: Deserialize<'de>, -/// { -/// // This Visitor constructs a single Vec to hold the flattened -/// // contents of the inner arrays. -/// type Value = Vec; -/// -/// fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { -/// write!(formatter, "an array of arrays") -/// } -/// -/// fn visit_seq(self, mut seq: A) -> Result, A::Error> -/// where -/// A: SeqAccess<'de>, -/// { -/// // Create a single Vec to hold the flattened contents. -/// let mut vec = Vec::new(); -/// -/// // Each iteration through this loop is one inner array. -/// while let Some(()) = seq.next_element_seed(ExtendVec(&mut vec))? { -/// // Nothing to do; inner array has been appended into `vec`. -/// } -/// -/// // Return the finished vec. -/// Ok(vec) -/// } -/// } -/// -/// # fn example<'de, D>(deserializer: D) -> Result<(), D::Error> -/// # where -/// # D: Deserializer<'de>, -/// # { -/// let visitor = FlattenedVecVisitor(PhantomData); -/// let flattened: Vec = deserializer.deserialize_seq(visitor)?; -/// # Ok(()) -/// # } -/// ``` -pub trait DeserializeSeed<'de>: Sized { - /// The type produced by using this seed. - type Value; - - /// Equivalent to the more common `Deserialize::deserialize` method, except - /// with some initial piece of data (the seed) passed in. - fn deserialize(self, deserializer: D) -> Result - where - D: Deserializer<'de>; -} - -impl<'de, T> DeserializeSeed<'de> for PhantomData -where - T: Deserialize<'de>, -{ - type Value = T; - - #[inline] - fn deserialize(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - T::deserialize(deserializer) - } -} - -//////////////////////////////////////////////////////////////////////////////// - -/// A **data format** that can deserialize any data structure supported by -/// Serde. -/// -/// The role of this trait is to define the deserialization half of the [Serde -/// data model], which is a way to categorize every Rust data type into one of -/// 29 possible types. Each method of the `Deserializer` trait corresponds to one -/// of the types of the data model. 
-/// -/// Implementations of `Deserialize` map themselves into this data model by -/// passing to the `Deserializer` a `Visitor` implementation that can receive -/// these various types. -/// -/// The types that make up the Serde data model are: -/// -/// - **14 primitive types** -/// - bool -/// - i8, i16, i32, i64, i128 -/// - u8, u16, u32, u64, u128 -/// - f32, f64 -/// - char -/// - **string** -/// - UTF-8 bytes with a length and no null terminator. -/// - When serializing, all strings are handled equally. When deserializing, -/// there are three flavors of strings: transient, owned, and borrowed. -/// - **byte array** - \[u8\] -/// - Similar to strings, during deserialization byte arrays can be -/// transient, owned, or borrowed. -/// - **option** -/// - Either none or some value. -/// - **unit** -/// - The type of `()` in Rust. It represents an anonymous value containing -/// no data. -/// - **unit_struct** -/// - For example `struct Unit` or `PhantomData`. It represents a named -/// value containing no data. -/// - **unit_variant** -/// - For example the `E::A` and `E::B` in `enum E { A, B }`. -/// - **newtype_struct** -/// - For example `struct Millimeters(u8)`. -/// - **newtype_variant** -/// - For example the `E::N` in `enum E { N(u8) }`. -/// - **seq** -/// - A variably sized heterogeneous sequence of values, for example `Vec` -/// or `HashSet`. When serializing, the length may or may not be known -/// before iterating through all the data. When deserializing, the length -/// is determined by looking at the serialized data. -/// - **tuple** -/// - A statically sized heterogeneous sequence of values for which the -/// length will be known at deserialization time without looking at the -/// serialized data, for example `(u8,)` or `(String, u64, Vec)` or -/// `[u64; 10]`. -/// - **tuple_struct** -/// - A named tuple, for example `struct Rgb(u8, u8, u8)`. -/// - **tuple_variant** -/// - For example the `E::T` in `enum E { T(u8, u8) }`. -/// - **map** -/// - A heterogeneous key-value pairing, for example `BTreeMap`. -/// - **struct** -/// - A heterogeneous key-value pairing in which the keys are strings and -/// will be known at deserialization time without looking at the serialized -/// data, for example `struct S { r: u8, g: u8, b: u8 }`. -/// - **struct_variant** -/// - For example the `E::S` in `enum E { S { r: u8, g: u8, b: u8 } }`. -/// -/// The `Deserializer` trait supports two entry point styles which enables -/// different kinds of deserialization. -/// -/// 1. The `deserialize` method. Self-describing data formats like JSON are able -/// to look at the serialized data and tell what it represents. For example -/// the JSON deserializer may see an opening curly brace (`{`) and know that -/// it is seeing a map. If the data format supports -/// `Deserializer::deserialize_any`, it will drive the Visitor using whatever -/// type it sees in the input. JSON uses this approach when deserializing -/// `serde_json::Value` which is an enum that can represent any JSON -/// document. Without knowing what is in a JSON document, we can deserialize -/// it to `serde_json::Value` by going through -/// `Deserializer::deserialize_any`. -/// -/// 2. The various `deserialize_*` methods. Non-self-describing formats like -/// Bincode need to be told what is in the input in order to deserialize it. -/// The `deserialize_*` methods are hints to the deserializer for how to -/// interpret the next piece of input. 
Non-self-describing formats are not -/// able to deserialize something like `serde_json::Value` which relies on -/// `Deserializer::deserialize_any`. -/// -/// When implementing `Deserialize`, you should avoid relying on -/// `Deserializer::deserialize_any` unless you need to be told by the -/// Deserializer what type is in the input. Know that relying on -/// `Deserializer::deserialize_any` means your data type will be able to -/// deserialize from self-describing formats only, ruling out Bincode and many -/// others. -/// -/// [Serde data model]: https://serde.rs/data-model.html -/// -/// # Lifetime -/// -/// The `'de` lifetime of this trait is the lifetime of data that may be -/// borrowed from the input when deserializing. See the page [Understanding -/// deserializer lifetimes] for a more detailed explanation of these lifetimes. -/// -/// [Understanding deserializer lifetimes]: https://serde.rs/lifetimes.html -/// -/// # Example implementation -/// -/// The [example data format] presented on the website contains example code for -/// a basic JSON `Deserializer`. -/// -/// [example data format]: https://serde.rs/data-format.html -pub trait Deserializer<'de>: Sized { - /// The error type that can be returned if some error occurs during - /// deserialization. - type Error: Error; - - /// Require the `Deserializer` to figure out how to drive the visitor based - /// on what data type is in the input. - /// - /// When implementing `Deserialize`, you should avoid relying on - /// `Deserializer::deserialize_any` unless you need to be told by the - /// Deserializer what type is in the input. Know that relying on - /// `Deserializer::deserialize_any` means your data type will be able to - /// deserialize from self-describing formats only, ruling out Bincode and - /// many others. - fn deserialize_any(self, visitor: V) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting a `bool` value. - fn deserialize_bool(self, visitor: V) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting an `i8` value. - fn deserialize_i8(self, visitor: V) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting an `i16` value. - fn deserialize_i16(self, visitor: V) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting an `i32` value. - fn deserialize_i32(self, visitor: V) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting an `i64` value. - fn deserialize_i64(self, visitor: V) -> Result - where - V: Visitor<'de>; - - serde_if_integer128! { - /// Hint that the `Deserialize` type is expecting an `i128` value. - /// - /// This method is available only on Rust compiler versions >=1.26. The - /// default behavior unconditionally returns an error. - fn deserialize_i128(self, visitor: V) -> Result - where - V: Visitor<'de> - { - let _ = visitor; - Err(Error::custom("i128 is not supported")) - } - } - - /// Hint that the `Deserialize` type is expecting a `u8` value. - fn deserialize_u8(self, visitor: V) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting a `u16` value. - fn deserialize_u16(self, visitor: V) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting a `u32` value. - fn deserialize_u32(self, visitor: V) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting a `u64` value. 
- fn deserialize_u64(self, visitor: V) -> Result - where - V: Visitor<'de>; - - serde_if_integer128! { - /// Hint that the `Deserialize` type is expecting an `u128` value. - /// - /// This method is available only on Rust compiler versions >=1.26. The - /// default behavior unconditionally returns an error. - fn deserialize_u128(self, visitor: V) -> Result - where - V: Visitor<'de> - { - let _ = visitor; - Err(Error::custom("u128 is not supported")) - } - } - - /// Hint that the `Deserialize` type is expecting a `f32` value. - fn deserialize_f32(self, visitor: V) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting a `f64` value. - fn deserialize_f64(self, visitor: V) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting a `char` value. - fn deserialize_char(self, visitor: V) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting a string value and does - /// not benefit from taking ownership of buffered data owned by the - /// `Deserializer`. - /// - /// If the `Visitor` would benefit from taking ownership of `String` data, - /// indicate this to the `Deserializer` by using `deserialize_string` - /// instead. - fn deserialize_str(self, visitor: V) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting a string value and would - /// benefit from taking ownership of buffered data owned by the - /// `Deserializer`. - /// - /// If the `Visitor` would not benefit from taking ownership of `String` - /// data, indicate that to the `Deserializer` by using `deserialize_str` - /// instead. - fn deserialize_string(self, visitor: V) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting a byte array and does not - /// benefit from taking ownership of buffered data owned by the - /// `Deserializer`. - /// - /// If the `Visitor` would benefit from taking ownership of `Vec` data, - /// indicate this to the `Deserializer` by using `deserialize_byte_buf` - /// instead. - fn deserialize_bytes(self, visitor: V) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting a byte array and would - /// benefit from taking ownership of buffered data owned by the - /// `Deserializer`. - /// - /// If the `Visitor` would not benefit from taking ownership of `Vec` - /// data, indicate that to the `Deserializer` by using `deserialize_bytes` - /// instead. - fn deserialize_byte_buf(self, visitor: V) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting an optional value. - /// - /// This allows deserializers that encode an optional value as a nullable - /// value to convert the null value into `None` and a regular value into - /// `Some(value)`. - fn deserialize_option(self, visitor: V) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting a unit value. - fn deserialize_unit(self, visitor: V) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting a unit struct with a - /// particular name. - fn deserialize_unit_struct( - self, - name: &'static str, - visitor: V, - ) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting a newtype struct with a - /// particular name. - fn deserialize_newtype_struct( - self, - name: &'static str, - visitor: V, - ) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting a sequence of values. 
- fn deserialize_seq(self, visitor: V) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting a sequence of values and - /// knows how many values there are without looking at the serialized data. - fn deserialize_tuple(self, len: usize, visitor: V) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting a tuple struct with a - /// particular name and number of fields. - fn deserialize_tuple_struct( - self, - name: &'static str, - len: usize, - visitor: V, - ) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting a map of key-value pairs. - fn deserialize_map(self, visitor: V) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting a struct with a particular - /// name and fields. - fn deserialize_struct( - self, - name: &'static str, - fields: &'static [&'static str], - visitor: V, - ) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting an enum value with a - /// particular name and possible variants. - fn deserialize_enum( - self, - name: &'static str, - variants: &'static [&'static str], - visitor: V, - ) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type is expecting the name of a struct - /// field or the discriminant of an enum variant. - fn deserialize_identifier(self, visitor: V) -> Result - where - V: Visitor<'de>; - - /// Hint that the `Deserialize` type needs to deserialize a value whose type - /// doesn't matter because it is ignored. - /// - /// Deserializers for non-self-describing formats may not support this mode. - fn deserialize_ignored_any(self, visitor: V) -> Result - where - V: Visitor<'de>; - - /// Determine whether `Deserialize` implementations should expect to - /// deserialize their human-readable form. - /// - /// Some types have a human-readable form that may be somewhat expensive to - /// construct, as well as a binary form that is compact and efficient. - /// Generally text-based formats like JSON and YAML will prefer to use the - /// human-readable one and binary formats like Bincode will prefer the - /// compact one. - /// - /// ```edition2018 - /// # use std::ops::Add; - /// # use std::str::FromStr; - /// # - /// # struct Timestamp; - /// # - /// # impl Timestamp { - /// # const EPOCH: Timestamp = Timestamp; - /// # } - /// # - /// # impl FromStr for Timestamp { - /// # type Err = String; - /// # fn from_str(_: &str) -> Result { - /// # unimplemented!() - /// # } - /// # } - /// # - /// # struct Duration; - /// # - /// # impl Duration { - /// # fn seconds(_: u64) -> Self { unimplemented!() } - /// # } - /// # - /// # impl Add for Timestamp { - /// # type Output = Timestamp; - /// # fn add(self, _: Duration) -> Self::Output { - /// # unimplemented!() - /// # } - /// # } - /// # - /// use serde::de::{self, Deserialize, Deserializer}; - /// - /// impl<'de> Deserialize<'de> for Timestamp { - /// fn deserialize(deserializer: D) -> Result - /// where - /// D: Deserializer<'de>, - /// { - /// if deserializer.is_human_readable() { - /// // Deserialize from a human-readable string like "2015-05-15T17:01:00Z". - /// let s = String::deserialize(deserializer)?; - /// Timestamp::from_str(&s).map_err(de::Error::custom) - /// } else { - /// // Deserialize from a compact binary representation, seconds since - /// // the Unix epoch. 
- /// let n = u64::deserialize(deserializer)?; - /// Ok(Timestamp::EPOCH + Duration::seconds(n)) - /// } - /// } - /// } - /// ``` - /// - /// The default implementation of this method returns `true`. Data formats - /// may override this to `false` to request a compact form for types that - /// support one. Note that modifying this method to change a format from - /// human-readable to compact or vice versa should be regarded as a breaking - /// change, as a value serialized in human-readable mode is not required to - /// deserialize from the same data in compact mode. - #[inline] - fn is_human_readable(&self) -> bool { - true - } -} - -//////////////////////////////////////////////////////////////////////////////// - -/// This trait represents a visitor that walks through a deserializer. -/// -/// # Lifetime -/// -/// The `'de` lifetime of this trait is the requirement for lifetime of data -/// that may be borrowed by `Self::Value`. See the page [Understanding -/// deserializer lifetimes] for a more detailed explanation of these lifetimes. -/// -/// [Understanding deserializer lifetimes]: https://serde.rs/lifetimes.html -/// -/// # Example -/// -/// ```edition2018 -/// # use std::fmt; -/// # -/// # use serde::de::{self, Unexpected, Visitor}; -/// # -/// /// A visitor that deserializes a long string - a string containing at least -/// /// some minimum number of bytes. -/// struct LongString { -/// min: usize, -/// } -/// -/// impl<'de> Visitor<'de> for LongString { -/// type Value = String; -/// -/// fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { -/// write!(formatter, "a string containing at least {} bytes", self.min) -/// } -/// -/// fn visit_str(self, s: &str) -> Result -/// where -/// E: de::Error, -/// { -/// if s.len() >= self.min { -/// Ok(s.to_owned()) -/// } else { -/// Err(de::Error::invalid_value(Unexpected::Str(s), &self)) -/// } -/// } -/// } -/// ``` -pub trait Visitor<'de>: Sized { - /// The value produced by this visitor. - type Value; - - /// Format a message stating what data this Visitor expects to receive. - /// - /// This is used in error messages. The message should complete the sentence - /// "This Visitor expects to receive ...", for example the message could be - /// "an integer between 0 and 64". The message should not be capitalized and - /// should not end with a period. - /// - /// ```edition2018 - /// # use std::fmt; - /// # - /// # struct S { - /// # max: usize, - /// # } - /// # - /// # impl<'de> serde::de::Visitor<'de> for S { - /// # type Value = (); - /// # - /// fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - /// write!(formatter, "an integer between 0 and {}", self.max) - /// } - /// # } - /// ``` - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result; - - /// The input contains a boolean. - /// - /// The default implementation fails with a type error. - fn visit_bool(self, v: bool) -> Result - where - E: Error, - { - Err(Error::invalid_type(Unexpected::Bool(v), &self)) - } - - /// The input contains an `i8`. - /// - /// The default implementation forwards to [`visit_i64`]. - /// - /// [`visit_i64`]: #method.visit_i64 - fn visit_i8(self, v: i8) -> Result - where - E: Error, - { - self.visit_i64(v as i64) - } - - /// The input contains an `i16`. - /// - /// The default implementation forwards to [`visit_i64`]. - /// - /// [`visit_i64`]: #method.visit_i64 - fn visit_i16(self, v: i16) -> Result - where - E: Error, - { - self.visit_i64(v as i64) - } - - /// The input contains an `i32`. 
- /// - /// The default implementation forwards to [`visit_i64`]. - /// - /// [`visit_i64`]: #method.visit_i64 - fn visit_i32(self, v: i32) -> Result - where - E: Error, - { - self.visit_i64(v as i64) - } - - /// The input contains an `i64`. - /// - /// The default implementation fails with a type error. - fn visit_i64(self, v: i64) -> Result - where - E: Error, - { - Err(Error::invalid_type(Unexpected::Signed(v), &self)) - } - - serde_if_integer128! { - /// The input contains a `i128`. - /// - /// This method is available only on Rust compiler versions >=1.26. The - /// default implementation fails with a type error. - fn visit_i128(self, v: i128) -> Result - where - E: Error, - { - let _ = v; - Err(Error::invalid_type(Unexpected::Other("i128"), &self)) - } - } - - /// The input contains a `u8`. - /// - /// The default implementation forwards to [`visit_u64`]. - /// - /// [`visit_u64`]: #method.visit_u64 - fn visit_u8(self, v: u8) -> Result - where - E: Error, - { - self.visit_u64(v as u64) - } - - /// The input contains a `u16`. - /// - /// The default implementation forwards to [`visit_u64`]. - /// - /// [`visit_u64`]: #method.visit_u64 - fn visit_u16(self, v: u16) -> Result - where - E: Error, - { - self.visit_u64(v as u64) - } - - /// The input contains a `u32`. - /// - /// The default implementation forwards to [`visit_u64`]. - /// - /// [`visit_u64`]: #method.visit_u64 - fn visit_u32(self, v: u32) -> Result - where - E: Error, - { - self.visit_u64(v as u64) - } - - /// The input contains a `u64`. - /// - /// The default implementation fails with a type error. - fn visit_u64(self, v: u64) -> Result - where - E: Error, - { - Err(Error::invalid_type(Unexpected::Unsigned(v), &self)) - } - - serde_if_integer128! { - /// The input contains a `u128`. - /// - /// This method is available only on Rust compiler versions >=1.26. The - /// default implementation fails with a type error. - fn visit_u128(self, v: u128) -> Result - where - E: Error, - { - let _ = v; - Err(Error::invalid_type(Unexpected::Other("u128"), &self)) - } - } - - /// The input contains an `f32`. - /// - /// The default implementation forwards to [`visit_f64`]. - /// - /// [`visit_f64`]: #method.visit_f64 - fn visit_f32(self, v: f32) -> Result - where - E: Error, - { - self.visit_f64(v as f64) - } - - /// The input contains an `f64`. - /// - /// The default implementation fails with a type error. - fn visit_f64(self, v: f64) -> Result - where - E: Error, - { - Err(Error::invalid_type(Unexpected::Float(v), &self)) - } - - /// The input contains a `char`. - /// - /// The default implementation forwards to [`visit_str`] as a one-character - /// string. - /// - /// [`visit_str`]: #method.visit_str - #[inline] - fn visit_char(self, v: char) -> Result - where - E: Error, - { - self.visit_str(utf8::encode(v).as_str()) - } - - /// The input contains a string. The lifetime of the string is ephemeral and - /// it may be destroyed after this method returns. - /// - /// This method allows the `Deserializer` to avoid a copy by retaining - /// ownership of any buffered data. `Deserialize` implementations that do - /// not benefit from taking ownership of `String` data should indicate that - /// to the deserializer by using `Deserializer::deserialize_str` rather than - /// `Deserializer::deserialize_string`. - /// - /// It is never correct to implement `visit_string` without implementing - /// `visit_str`. Implement neither, both, or just `visit_str`. 
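To make the ownership distinction above concrete, here is a hedged sketch, with a hypothetical `OwnedStringVisitor` and assuming only the `serde` crate, of a visitor that implements both methods so that buffered input avoids a copy:

```rust
use std::fmt;

use serde::de::{Error, Visitor};

/// Hypothetical visitor that produces an owned `String`.
struct OwnedStringVisitor;

impl<'de> Visitor<'de> for OwnedStringVisitor {
    type Value = String;

    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("a string")
    }

    // Transient input: the &str may not outlive this call, so copy it.
    fn visit_str<E: Error>(self, v: &str) -> Result<String, E> {
        Ok(v.to_owned())
    }

    // Buffered input: take ownership of the String and avoid the copy.
    fn visit_string<E: Error>(self, v: String) -> Result<String, E> {
        Ok(v)
    }
}
```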
- fn visit_str(self, v: &str) -> Result - where - E: Error, - { - Err(Error::invalid_type(Unexpected::Str(v), &self)) - } - - /// The input contains a string that lives at least as long as the - /// `Deserializer`. - /// - /// This enables zero-copy deserialization of strings in some formats. For - /// example JSON input containing the JSON string `"borrowed"` can be - /// deserialized with zero copying into a `&'a str` as long as the input - /// data outlives `'a`. - /// - /// The default implementation forwards to `visit_str`. - #[inline] - fn visit_borrowed_str(self, v: &'de str) -> Result - where - E: Error, - { - self.visit_str(v) - } - - /// The input contains a string and ownership of the string is being given - /// to the `Visitor`. - /// - /// This method allows the `Visitor` to avoid a copy by taking ownership of - /// a string created by the `Deserializer`. `Deserialize` implementations - /// that benefit from taking ownership of `String` data should indicate that - /// to the deserializer by using `Deserializer::deserialize_string` rather - /// than `Deserializer::deserialize_str`, although not every deserializer - /// will honor such a request. - /// - /// It is never correct to implement `visit_string` without implementing - /// `visit_str`. Implement neither, both, or just `visit_str`. - /// - /// The default implementation forwards to `visit_str` and then drops the - /// `String`. - #[inline] - #[cfg(any(feature = "std", feature = "alloc"))] - fn visit_string(self, v: String) -> Result - where - E: Error, - { - self.visit_str(&v) - } - - /// The input contains a byte array. The lifetime of the byte array is - /// ephemeral and it may be destroyed after this method returns. - /// - /// This method allows the `Deserializer` to avoid a copy by retaining - /// ownership of any buffered data. `Deserialize` implementations that do - /// not benefit from taking ownership of `Vec` data should indicate that - /// to the deserializer by using `Deserializer::deserialize_bytes` rather - /// than `Deserializer::deserialize_byte_buf`. - /// - /// It is never correct to implement `visit_byte_buf` without implementing - /// `visit_bytes`. Implement neither, both, or just `visit_bytes`. - fn visit_bytes(self, v: &[u8]) -> Result - where - E: Error, - { - let _ = v; - Err(Error::invalid_type(Unexpected::Bytes(v), &self)) - } - - /// The input contains a byte array that lives at least as long as the - /// `Deserializer`. - /// - /// This enables zero-copy deserialization of bytes in some formats. For - /// example Bincode data containing bytes can be deserialized with zero - /// copying into a `&'a [u8]` as long as the input data outlives `'a`. - /// - /// The default implementation forwards to `visit_bytes`. - #[inline] - fn visit_borrowed_bytes(self, v: &'de [u8]) -> Result - where - E: Error, - { - self.visit_bytes(v) - } - - /// The input contains a byte array and ownership of the byte array is being - /// given to the `Visitor`. - /// - /// This method allows the `Visitor` to avoid a copy by taking ownership of - /// a byte buffer created by the `Deserializer`. `Deserialize` - /// implementations that benefit from taking ownership of `Vec` data - /// should indicate that to the deserializer by using - /// `Deserializer::deserialize_byte_buf` rather than - /// `Deserializer::deserialize_bytes`, although not every deserializer will - /// honor such a request. - /// - /// It is never correct to implement `visit_byte_buf` without implementing - /// `visit_bytes`. 
Implement neither, both, or just `visit_bytes`. - /// - /// The default implementation forwards to `visit_bytes` and then drops the - /// `Vec`. - #[cfg(any(feature = "std", feature = "alloc"))] - fn visit_byte_buf(self, v: Vec) -> Result - where - E: Error, - { - self.visit_bytes(&v) - } - - /// The input contains an optional that is absent. - /// - /// The default implementation fails with a type error. - fn visit_none(self) -> Result - where - E: Error, - { - Err(Error::invalid_type(Unexpected::Option, &self)) - } - - /// The input contains an optional that is present. - /// - /// The default implementation fails with a type error. - fn visit_some(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let _ = deserializer; - Err(Error::invalid_type(Unexpected::Option, &self)) - } - - /// The input contains a unit `()`. - /// - /// The default implementation fails with a type error. - fn visit_unit(self) -> Result - where - E: Error, - { - Err(Error::invalid_type(Unexpected::Unit, &self)) - } - - /// The input contains a newtype struct. - /// - /// The content of the newtype struct may be read from the given - /// `Deserializer`. - /// - /// The default implementation fails with a type error. - fn visit_newtype_struct(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let _ = deserializer; - Err(Error::invalid_type(Unexpected::NewtypeStruct, &self)) - } - - /// The input contains a sequence of elements. - /// - /// The default implementation fails with a type error. - fn visit_seq(self, seq: A) -> Result - where - A: SeqAccess<'de>, - { - let _ = seq; - Err(Error::invalid_type(Unexpected::Seq, &self)) - } - - /// The input contains a key-value map. - /// - /// The default implementation fails with a type error. - fn visit_map(self, map: A) -> Result - where - A: MapAccess<'de>, - { - let _ = map; - Err(Error::invalid_type(Unexpected::Map, &self)) - } - - /// The input contains an enum. - /// - /// The default implementation fails with a type error. - fn visit_enum(self, data: A) -> Result - where - A: EnumAccess<'de>, - { - let _ = data; - Err(Error::invalid_type(Unexpected::Enum, &self)) - } - - // Used when deserializing a flattened Option field. Not public API. - #[doc(hidden)] - fn __private_visit_untagged_option(self, _: D) -> Result - where - D: Deserializer<'de>, - { - Err(()) - } -} - -//////////////////////////////////////////////////////////////////////////////// - -/// Provides a `Visitor` access to each element of a sequence in the input. -/// -/// This is a trait that a `Deserializer` passes to a `Visitor` implementation, -/// which deserializes each item in a sequence. -/// -/// # Lifetime -/// -/// The `'de` lifetime of this trait is the lifetime of data that may be -/// borrowed by deserialized sequence elements. See the page [Understanding -/// deserializer lifetimes] for a more detailed explanation of these lifetimes. -/// -/// [Understanding deserializer lifetimes]: https://serde.rs/lifetimes.html -/// -/// # Example implementation -/// -/// The [example data format] presented on the website demonstrates an -/// implementation of `SeqAccess` for a basic JSON data format. -/// -/// [example data format]: https://serde.rs/data-format.html -pub trait SeqAccess<'de> { - /// The error type that can be returned if some error occurs during - /// deserialization. - type Error: Error; - - /// This returns `Ok(Some(value))` for the next value in the sequence, or - /// `Ok(None)` if there are no more remaining items. 
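As a hedged sketch of how a visitor typically drives `SeqAccess`, the hypothetical `I32VecVisitor` below (assuming only the `serde` crate) loops over `next_element` and pre-allocates from the optional `size_hint`:

```rust
use std::fmt;

use serde::de::{SeqAccess, Visitor};

/// Hypothetical visitor that collects a sequence of 32-bit integers.
struct I32VecVisitor;

impl<'de> Visitor<'de> for I32VecVisitor {
    type Value = Vec<i32>;

    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("a sequence of 32-bit integers")
    }

    fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
    where
        A: SeqAccess<'de>,
    {
        // size_hint is Some(n) only when the format knows the length up front.
        let mut values = Vec::with_capacity(seq.size_hint().unwrap_or(0));
        while let Some(value) = seq.next_element::<i32>()? {
            values.push(value);
        }
        Ok(values)
    }
}
```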
- /// - /// `Deserialize` implementations should typically use - /// `SeqAccess::next_element` instead. - fn next_element_seed(&mut self, seed: T) -> Result, Self::Error> - where - T: DeserializeSeed<'de>; - - /// This returns `Ok(Some(value))` for the next value in the sequence, or - /// `Ok(None)` if there are no more remaining items. - /// - /// This method exists as a convenience for `Deserialize` implementations. - /// `SeqAccess` implementations should not override the default behavior. - #[inline] - fn next_element(&mut self) -> Result, Self::Error> - where - T: Deserialize<'de>, - { - self.next_element_seed(PhantomData) - } - - /// Returns the number of elements remaining in the sequence, if known. - #[inline] - fn size_hint(&self) -> Option { - None - } -} - -impl<'de, 'a, A: ?Sized> SeqAccess<'de> for &'a mut A -where - A: SeqAccess<'de>, -{ - type Error = A::Error; - - #[inline] - fn next_element_seed(&mut self, seed: T) -> Result, Self::Error> - where - T: DeserializeSeed<'de>, - { - (**self).next_element_seed(seed) - } - - #[inline] - fn next_element(&mut self) -> Result, Self::Error> - where - T: Deserialize<'de>, - { - (**self).next_element() - } - - #[inline] - fn size_hint(&self) -> Option { - (**self).size_hint() - } -} - -//////////////////////////////////////////////////////////////////////////////// - -/// Provides a `Visitor` access to each entry of a map in the input. -/// -/// This is a trait that a `Deserializer` passes to a `Visitor` implementation. -/// -/// # Lifetime -/// -/// The `'de` lifetime of this trait is the lifetime of data that may be -/// borrowed by deserialized map entries. See the page [Understanding -/// deserializer lifetimes] for a more detailed explanation of these lifetimes. -/// -/// [Understanding deserializer lifetimes]: https://serde.rs/lifetimes.html -/// -/// # Example implementation -/// -/// The [example data format] presented on the website demonstrates an -/// implementation of `MapAccess` for a basic JSON data format. -/// -/// [example data format]: https://serde.rs/data-format.html -pub trait MapAccess<'de> { - /// The error type that can be returned if some error occurs during - /// deserialization. - type Error: Error; - - /// This returns `Ok(Some(key))` for the next key in the map, or `Ok(None)` - /// if there are no more remaining entries. - /// - /// `Deserialize` implementations should typically use - /// `MapAccess::next_key` or `MapAccess::next_entry` instead. - fn next_key_seed(&mut self, seed: K) -> Result, Self::Error> - where - K: DeserializeSeed<'de>; - - /// This returns a `Ok(value)` for the next value in the map. - /// - /// `Deserialize` implementations should typically use - /// `MapAccess::next_value` instead. - /// - /// # Panics - /// - /// Calling `next_value_seed` before `next_key_seed` is incorrect and is - /// allowed to panic or return bogus results. - fn next_value_seed(&mut self, seed: V) -> Result - where - V: DeserializeSeed<'de>; - - /// This returns `Ok(Some((key, value)))` for the next (key-value) pair in - /// the map, or `Ok(None)` if there are no more remaining items. - /// - /// `MapAccess` implementations should override the default behavior if a - /// more efficient implementation is possible. - /// - /// `Deserialize` implementations should typically use - /// `MapAccess::next_entry` instead. 
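The `next_entry` convenience described above is usually driven from a `visit_map` loop; the hypothetical `IntMapVisitor` below is a hedged sketch assuming only the `serde` crate:

```rust
use std::collections::BTreeMap;
use std::fmt;

use serde::de::{MapAccess, Visitor};

/// Hypothetical visitor that collects arbitrary string-to-integer entries.
struct IntMapVisitor;

impl<'de> Visitor<'de> for IntMapVisitor {
    type Value = BTreeMap<String, i64>;

    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("a map of strings to integers")
    }

    fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
    where
        A: MapAccess<'de>,
    {
        let mut out = BTreeMap::new();
        // next_entry deserializes one key-value pair at a time and returns
        // None once the map in the input is exhausted.
        while let Some((key, value)) = map.next_entry::<String, i64>()? {
            out.insert(key, value);
        }
        Ok(out)
    }
}
```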
- #[inline] - fn next_entry_seed( - &mut self, - kseed: K, - vseed: V, - ) -> Result, Self::Error> - where - K: DeserializeSeed<'de>, - V: DeserializeSeed<'de>, - { - match try!(self.next_key_seed(kseed)) { - Some(key) => { - let value = try!(self.next_value_seed(vseed)); - Ok(Some((key, value))) - } - None => Ok(None), - } - } - - /// This returns `Ok(Some(key))` for the next key in the map, or `Ok(None)` - /// if there are no more remaining entries. - /// - /// This method exists as a convenience for `Deserialize` implementations. - /// `MapAccess` implementations should not override the default behavior. - #[inline] - fn next_key(&mut self) -> Result, Self::Error> - where - K: Deserialize<'de>, - { - self.next_key_seed(PhantomData) - } - - /// This returns a `Ok(value)` for the next value in the map. - /// - /// This method exists as a convenience for `Deserialize` implementations. - /// `MapAccess` implementations should not override the default behavior. - /// - /// # Panics - /// - /// Calling `next_value` before `next_key` is incorrect and is allowed to - /// panic or return bogus results. - #[inline] - fn next_value(&mut self) -> Result - where - V: Deserialize<'de>, - { - self.next_value_seed(PhantomData) - } - - /// This returns `Ok(Some((key, value)))` for the next (key-value) pair in - /// the map, or `Ok(None)` if there are no more remaining items. - /// - /// This method exists as a convenience for `Deserialize` implementations. - /// `MapAccess` implementations should not override the default behavior. - #[inline] - fn next_entry(&mut self) -> Result, Self::Error> - where - K: Deserialize<'de>, - V: Deserialize<'de>, - { - self.next_entry_seed(PhantomData, PhantomData) - } - - /// Returns the number of entries remaining in the map, if known. - #[inline] - fn size_hint(&self) -> Option { - None - } -} - -impl<'de, 'a, A: ?Sized> MapAccess<'de> for &'a mut A -where - A: MapAccess<'de>, -{ - type Error = A::Error; - - #[inline] - fn next_key_seed(&mut self, seed: K) -> Result, Self::Error> - where - K: DeserializeSeed<'de>, - { - (**self).next_key_seed(seed) - } - - #[inline] - fn next_value_seed(&mut self, seed: V) -> Result - where - V: DeserializeSeed<'de>, - { - (**self).next_value_seed(seed) - } - - #[inline] - fn next_entry_seed( - &mut self, - kseed: K, - vseed: V, - ) -> Result, Self::Error> - where - K: DeserializeSeed<'de>, - V: DeserializeSeed<'de>, - { - (**self).next_entry_seed(kseed, vseed) - } - - #[inline] - fn next_entry(&mut self) -> Result, Self::Error> - where - K: Deserialize<'de>, - V: Deserialize<'de>, - { - (**self).next_entry() - } - - #[inline] - fn next_key(&mut self) -> Result, Self::Error> - where - K: Deserialize<'de>, - { - (**self).next_key() - } - - #[inline] - fn next_value(&mut self) -> Result - where - V: Deserialize<'de>, - { - (**self).next_value() - } - - #[inline] - fn size_hint(&self) -> Option { - (**self).size_hint() - } -} - -//////////////////////////////////////////////////////////////////////////////// - -/// Provides a `Visitor` access to the data of an enum in the input. -/// -/// `EnumAccess` is created by the `Deserializer` and passed to the -/// `Visitor` in order to identify which variant of an enum to deserialize. -/// -/// # Lifetime -/// -/// The `'de` lifetime of this trait is the lifetime of data that may be -/// borrowed by the deserialized enum variant. See the page [Understanding -/// deserializer lifetimes] for a more detailed explanation of these lifetimes. 
-/// -/// [Understanding deserializer lifetimes]: https://serde.rs/lifetimes.html -/// -/// # Example implementation -/// -/// The [example data format] presented on the website demonstrates an -/// implementation of `EnumAccess` for a basic JSON data format. -/// -/// [example data format]: https://serde.rs/data-format.html -pub trait EnumAccess<'de>: Sized { - /// The error type that can be returned if some error occurs during - /// deserialization. - type Error: Error; - /// The `Visitor` that will be used to deserialize the content of the enum - /// variant. - type Variant: VariantAccess<'de, Error = Self::Error>; - - /// `variant` is called to identify which variant to deserialize. - /// - /// `Deserialize` implementations should typically use `EnumAccess::variant` - /// instead. - fn variant_seed(self, seed: V) -> Result<(V::Value, Self::Variant), Self::Error> - where - V: DeserializeSeed<'de>; - - /// `variant` is called to identify which variant to deserialize. - /// - /// This method exists as a convenience for `Deserialize` implementations. - /// `EnumAccess` implementations should not override the default behavior. - #[inline] - fn variant(self) -> Result<(V, Self::Variant), Self::Error> - where - V: Deserialize<'de>, - { - self.variant_seed(PhantomData) - } -} - -/// `VariantAccess` is a visitor that is created by the `Deserializer` and -/// passed to the `Deserialize` to deserialize the content of a particular enum -/// variant. -/// -/// # Lifetime -/// -/// The `'de` lifetime of this trait is the lifetime of data that may be -/// borrowed by the deserialized enum variant. See the page [Understanding -/// deserializer lifetimes] for a more detailed explanation of these lifetimes. -/// -/// [Understanding deserializer lifetimes]: https://serde.rs/lifetimes.html -/// -/// # Example implementation -/// -/// The [example data format] presented on the website demonstrates an -/// implementation of `VariantAccess` for a basic JSON data format. -/// -/// [example data format]: https://serde.rs/data-format.html -pub trait VariantAccess<'de>: Sized { - /// The error type that can be returned if some error occurs during - /// deserialization. Must match the error type of our `EnumAccess`. - type Error: Error; - - /// Called when deserializing a variant with no values. - /// - /// If the data contains a different type of variant, the following - /// `invalid_type` error should be constructed: - /// - /// ```edition2018 - /// # use serde::de::{self, value, DeserializeSeed, Visitor, VariantAccess, Unexpected}; - /// # - /// # struct X; - /// # - /// # impl<'de> VariantAccess<'de> for X { - /// # type Error = value::Error; - /// # - /// fn unit_variant(self) -> Result<(), Self::Error> { - /// // What the data actually contained; suppose it is a tuple variant. - /// let unexp = Unexpected::TupleVariant; - /// Err(de::Error::invalid_type(unexp, &"unit variant")) - /// } - /// # - /// # fn newtype_variant_seed(self, _: T) -> Result - /// # where - /// # T: DeserializeSeed<'de>, - /// # { unimplemented!() } - /// # - /// # fn tuple_variant(self, _: usize, _: V) -> Result - /// # where - /// # V: Visitor<'de>, - /// # { unimplemented!() } - /// # - /// # fn struct_variant(self, _: &[&str], _: V) -> Result - /// # where - /// # V: Visitor<'de>, - /// # { unimplemented!() } - /// # } - /// ``` - fn unit_variant(self) -> Result<(), Self::Error>; - - /// Called when deserializing a variant with a single value. 
- /// - /// `Deserialize` implementations should typically use - /// `VariantAccess::newtype_variant` instead. - /// - /// If the data contains a different type of variant, the following - /// `invalid_type` error should be constructed: - /// - /// ```edition2018 - /// # use serde::de::{self, value, DeserializeSeed, Visitor, VariantAccess, Unexpected}; - /// # - /// # struct X; - /// # - /// # impl<'de> VariantAccess<'de> for X { - /// # type Error = value::Error; - /// # - /// # fn unit_variant(self) -> Result<(), Self::Error> { - /// # unimplemented!() - /// # } - /// # - /// fn newtype_variant_seed(self, _seed: T) -> Result - /// where - /// T: DeserializeSeed<'de>, - /// { - /// // What the data actually contained; suppose it is a unit variant. - /// let unexp = Unexpected::UnitVariant; - /// Err(de::Error::invalid_type(unexp, &"newtype variant")) - /// } - /// # - /// # fn tuple_variant(self, _: usize, _: V) -> Result - /// # where - /// # V: Visitor<'de>, - /// # { unimplemented!() } - /// # - /// # fn struct_variant(self, _: &[&str], _: V) -> Result - /// # where - /// # V: Visitor<'de>, - /// # { unimplemented!() } - /// # } - /// ``` - fn newtype_variant_seed(self, seed: T) -> Result - where - T: DeserializeSeed<'de>; - - /// Called when deserializing a variant with a single value. - /// - /// This method exists as a convenience for `Deserialize` implementations. - /// `VariantAccess` implementations should not override the default - /// behavior. - #[inline] - fn newtype_variant(self) -> Result - where - T: Deserialize<'de>, - { - self.newtype_variant_seed(PhantomData) - } - - /// Called when deserializing a tuple-like variant. - /// - /// The `len` is the number of fields expected in the tuple variant. - /// - /// If the data contains a different type of variant, the following - /// `invalid_type` error should be constructed: - /// - /// ```edition2018 - /// # use serde::de::{self, value, DeserializeSeed, Visitor, VariantAccess, Unexpected}; - /// # - /// # struct X; - /// # - /// # impl<'de> VariantAccess<'de> for X { - /// # type Error = value::Error; - /// # - /// # fn unit_variant(self) -> Result<(), Self::Error> { - /// # unimplemented!() - /// # } - /// # - /// # fn newtype_variant_seed(self, _: T) -> Result - /// # where - /// # T: DeserializeSeed<'de>, - /// # { unimplemented!() } - /// # - /// fn tuple_variant( - /// self, - /// _len: usize, - /// _visitor: V, - /// ) -> Result - /// where - /// V: Visitor<'de>, - /// { - /// // What the data actually contained; suppose it is a unit variant. - /// let unexp = Unexpected::UnitVariant; - /// Err(de::Error::invalid_type(unexp, &"tuple variant")) - /// } - /// # - /// # fn struct_variant(self, _: &[&str], _: V) -> Result - /// # where - /// # V: Visitor<'de>, - /// # { unimplemented!() } - /// # } - /// ``` - fn tuple_variant(self, len: usize, visitor: V) -> Result - where - V: Visitor<'de>; - - /// Called when deserializing a struct-like variant. - /// - /// The `fields` are the names of the fields of the struct variant. 
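Combining `EnumAccess` and `VariantAccess`, the following hedged sketch (a hypothetical `Shape` enum and `ShapeVisitor`, assuming only the `serde` crate) identifies the variant and then reads its content with the matching accessor method:

```rust
use std::fmt;

use serde::de::{self, EnumAccess, VariantAccess, Visitor};

/// Hypothetical target type: `enum Shape { Unit, Radius(f64) }`.
enum Shape {
    Unit,
    Radius(f64),
}

struct ShapeVisitor;

impl<'de> Visitor<'de> for ShapeVisitor {
    type Value = Shape;

    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("enum Shape")
    }

    fn visit_enum<A>(self, data: A) -> Result<Shape, A::Error>
    where
        A: EnumAccess<'de>,
    {
        // `variant` yields the variant identifier (deserialized here as a
        // String for brevity) plus a VariantAccess for that variant's content.
        let (name, variant): (String, A::Variant) = data.variant()?;
        match name.as_str() {
            "Unit" => {
                variant.unit_variant()?;
                Ok(Shape::Unit)
            }
            "Radius" => Ok(Shape::Radius(variant.newtype_variant()?)),
            other => Err(de::Error::unknown_variant(other, &["Unit", "Radius"])),
        }
    }
}
```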
- /// - /// If the data contains a different type of variant, the following - /// `invalid_type` error should be constructed: - /// - /// ```edition2018 - /// # use serde::de::{self, value, DeserializeSeed, Visitor, VariantAccess, Unexpected}; - /// # - /// # struct X; - /// # - /// # impl<'de> VariantAccess<'de> for X { - /// # type Error = value::Error; - /// # - /// # fn unit_variant(self) -> Result<(), Self::Error> { - /// # unimplemented!() - /// # } - /// # - /// # fn newtype_variant_seed(self, _: T) -> Result - /// # where - /// # T: DeserializeSeed<'de>, - /// # { unimplemented!() } - /// # - /// # fn tuple_variant(self, _: usize, _: V) -> Result - /// # where - /// # V: Visitor<'de>, - /// # { unimplemented!() } - /// # - /// fn struct_variant( - /// self, - /// _fields: &'static [&'static str], - /// _visitor: V, - /// ) -> Result - /// where - /// V: Visitor<'de>, - /// { - /// // What the data actually contained; suppose it is a unit variant. - /// let unexp = Unexpected::UnitVariant; - /// Err(de::Error::invalid_type(unexp, &"struct variant")) - /// } - /// # } - /// ``` - fn struct_variant( - self, - fields: &'static [&'static str], - visitor: V, - ) -> Result - where - V: Visitor<'de>; -} - -//////////////////////////////////////////////////////////////////////////////// - -/// Converts an existing value into a `Deserializer` from which other values can -/// be deserialized. -/// -/// # Lifetime -/// -/// The `'de` lifetime of this trait is the lifetime of data that may be -/// borrowed from the resulting `Deserializer`. See the page [Understanding -/// deserializer lifetimes] for a more detailed explanation of these lifetimes. -/// -/// [Understanding deserializer lifetimes]: https://serde.rs/lifetimes.html -/// -/// # Example -/// -/// ```edition2018 -/// use std::str::FromStr; -/// use serde::Deserialize; -/// use serde::de::{value, IntoDeserializer}; -/// -/// #[derive(Deserialize)] -/// enum Setting { -/// On, -/// Off, -/// } -/// -/// impl FromStr for Setting { -/// type Err = value::Error; -/// -/// fn from_str(s: &str) -> Result { -/// Self::deserialize(s.into_deserializer()) -/// } -/// } -/// ``` -pub trait IntoDeserializer<'de, E: Error = value::Error> { - /// The type of the deserializer being converted into. - type Deserializer: Deserializer<'de, Error = E>; - - /// Convert this value into a deserializer. - fn into_deserializer(self) -> Self::Deserializer; -} - -//////////////////////////////////////////////////////////////////////////////// - -/// Used in error messages. -/// -/// - expected `a` -/// - expected `a` or `b` -/// - expected one of `a`, `b`, `c` -/// -/// The slice of names must not be empty. 
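Beyond the `FromStr` example above, `IntoDeserializer` also lets an existing collection act directly as a `Deserializer`. The sketch below is illustrative only; it assumes the `serde` crate with its `derive` feature, and the `Rgb` struct is hypothetical:

```rust
use std::collections::BTreeMap;

use serde::de::{value, IntoDeserializer};
use serde::Deserialize;

/// Hypothetical target struct.
#[derive(Deserialize, Debug, PartialEq)]
struct Rgb {
    r: u8,
    g: u8,
    b: u8,
}

fn main() -> Result<(), value::Error> {
    let mut fields = BTreeMap::new();
    fields.insert("r", 5u8);
    fields.insert("g", 10u8);
    fields.insert("b", 15u8);

    // The BTreeMap itself becomes the Deserializer; no textual format is involved.
    let rgb = Rgb::deserialize(fields.into_deserializer())?;
    assert_eq!(rgb, Rgb { r: 5, g: 10, b: 15 });
    Ok(())
}
```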
-struct OneOf { - names: &'static [&'static str], -} - -impl Display for OneOf { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - match self.names.len() { - 0 => panic!(), // special case elsewhere - 1 => write!(formatter, "`{}`", self.names[0]), - 2 => write!(formatter, "`{}` or `{}`", self.names[0], self.names[1]), - _ => { - try!(write!(formatter, "one of ")); - for (i, alt) in self.names.iter().enumerate() { - if i > 0 { - try!(write!(formatter, ", ")); - } - try!(write!(formatter, "`{}`", alt)); - } - Ok(()) - } - } - } -} diff --git a/vendor/serde/src/de/seed.rs b/vendor/serde/src/de/seed.rs deleted file mode 100644 index 13b7ea46..00000000 --- a/vendor/serde/src/de/seed.rs +++ /dev/null @@ -1,19 +0,0 @@ -use de::{Deserialize, DeserializeSeed, Deserializer}; - -/// A DeserializeSeed helper for implementing deserialize_in_place Visitors. -/// -/// Wraps a mutable reference and calls deserialize_in_place on it. -pub struct InPlaceSeed<'a, T: 'a>(pub &'a mut T); - -impl<'a, 'de, T> DeserializeSeed<'de> for InPlaceSeed<'a, T> -where - T: Deserialize<'de>, -{ - type Value = (); - fn deserialize(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - T::deserialize_in_place(deserializer, self.0) - } -} diff --git a/vendor/serde/src/de/utf8.rs b/vendor/serde/src/de/utf8.rs deleted file mode 100644 index 576fd03c..00000000 --- a/vendor/serde/src/de/utf8.rs +++ /dev/null @@ -1,46 +0,0 @@ -use lib::*; - -const TAG_CONT: u8 = 0b1000_0000; -const TAG_TWO_B: u8 = 0b1100_0000; -const TAG_THREE_B: u8 = 0b1110_0000; -const TAG_FOUR_B: u8 = 0b1111_0000; -const MAX_ONE_B: u32 = 0x80; -const MAX_TWO_B: u32 = 0x800; -const MAX_THREE_B: u32 = 0x10000; - -#[inline] -pub fn encode(c: char) -> Encode { - let code = c as u32; - let mut buf = [0; 4]; - let pos = if code < MAX_ONE_B { - buf[3] = code as u8; - 3 - } else if code < MAX_TWO_B { - buf[2] = (code >> 6 & 0x1F) as u8 | TAG_TWO_B; - buf[3] = (code & 0x3F) as u8 | TAG_CONT; - 2 - } else if code < MAX_THREE_B { - buf[1] = (code >> 12 & 0x0F) as u8 | TAG_THREE_B; - buf[2] = (code >> 6 & 0x3F) as u8 | TAG_CONT; - buf[3] = (code & 0x3F) as u8 | TAG_CONT; - 1 - } else { - buf[0] = (code >> 18 & 0x07) as u8 | TAG_FOUR_B; - buf[1] = (code >> 12 & 0x3F) as u8 | TAG_CONT; - buf[2] = (code >> 6 & 0x3F) as u8 | TAG_CONT; - buf[3] = (code & 0x3F) as u8 | TAG_CONT; - 0 - }; - Encode { buf: buf, pos: pos } -} - -pub struct Encode { - buf: [u8; 4], - pos: usize, -} - -impl Encode { - pub fn as_str(&self) -> &str { - str::from_utf8(&self.buf[self.pos..]).unwrap() - } -} diff --git a/vendor/serde/src/de/value.rs b/vendor/serde/src/de/value.rs deleted file mode 100644 index 1b154c3a..00000000 --- a/vendor/serde/src/de/value.rs +++ /dev/null @@ -1,1635 +0,0 @@ -//! Building blocks for deserializing basic values using the `IntoDeserializer` -//! trait. -//! -//! ```edition2018 -//! use std::str::FromStr; -//! use serde::Deserialize; -//! use serde::de::{value, IntoDeserializer}; -//! -//! #[derive(Deserialize)] -//! enum Setting { -//! On, -//! Off, -//! } -//! -//! impl FromStr for Setting { -//! type Err = value::Error; -//! -//! fn from_str(s: &str) -> Result { -//! Self::deserialize(s.into_deserializer()) -//! } -//! } -//! ``` - -use lib::*; - -use self::private::{First, Second}; -use __private::size_hint; -use de::{self, Deserializer, Expected, IntoDeserializer, SeqAccess, Visitor}; -use ser; - -//////////////////////////////////////////////////////////////////////////////// - -// For structs that contain a PhantomData. 
We do not want the trait -// bound `E: Clone` inferred by derive(Clone). -macro_rules! impl_copy_clone { - ($ty:ident $(<$lifetime:tt>)*) => { - impl<$($lifetime,)* E> Copy for $ty<$($lifetime,)* E> {} - - impl<$($lifetime,)* E> Clone for $ty<$($lifetime,)* E> { - fn clone(&self) -> Self { - *self - } - } - }; -} - -//////////////////////////////////////////////////////////////////////////////// - -/// A minimal representation of all possible errors that can occur using the -/// `IntoDeserializer` trait. -#[derive(Clone, PartialEq)] -pub struct Error { - err: ErrorImpl, -} - -#[cfg(any(feature = "std", feature = "alloc"))] -type ErrorImpl = Box; -#[cfg(not(any(feature = "std", feature = "alloc")))] -type ErrorImpl = (); - -impl de::Error for Error { - #[cfg(any(feature = "std", feature = "alloc"))] - #[cold] - fn custom(msg: T) -> Self - where - T: Display, - { - Error { - err: msg.to_string().into_boxed_str(), - } - } - - #[cfg(not(any(feature = "std", feature = "alloc")))] - #[cold] - fn custom(msg: T) -> Self - where - T: Display, - { - let _ = msg; - Error { err: () } - } -} - -impl ser::Error for Error { - #[cold] - fn custom(msg: T) -> Self - where - T: Display, - { - de::Error::custom(msg) - } -} - -impl Display for Error { - #[cfg(any(feature = "std", feature = "alloc"))] - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str(&self.err) - } - - #[cfg(not(any(feature = "std", feature = "alloc")))] - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("Serde deserialization error") - } -} - -impl Debug for Error { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - let mut debug = formatter.debug_tuple("Error"); - #[cfg(any(feature = "std", feature = "alloc"))] - debug.field(&self.err); - debug.finish() - } -} - -#[cfg(feature = "std")] -impl error::Error for Error { - fn description(&self) -> &str { - &self.err - } -} - -//////////////////////////////////////////////////////////////////////////////// - -impl<'de, E> IntoDeserializer<'de, E> for () -where - E: de::Error, -{ - type Deserializer = UnitDeserializer; - - fn into_deserializer(self) -> UnitDeserializer { - UnitDeserializer { - marker: PhantomData, - } - } -} - -/// A deserializer holding a `()`. -pub struct UnitDeserializer { - marker: PhantomData, -} - -impl_copy_clone!(UnitDeserializer); - -impl<'de, E> de::Deserializer<'de> for UnitDeserializer -where - E: de::Error, -{ - type Error = E; - - forward_to_deserialize_any! { - bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string - bytes byte_buf unit unit_struct newtype_struct seq tuple tuple_struct - map struct enum identifier ignored_any - } - - fn deserialize_any(self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - visitor.visit_unit() - } - - fn deserialize_option(self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - visitor.visit_none() - } -} - -impl Debug for UnitDeserializer { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.debug_struct("UnitDeserializer").finish() - } -} - -//////////////////////////////////////////////////////////////////////////////// - -/// A deserializer that cannot be instantiated. -#[cfg(feature = "unstable")] -pub struct NeverDeserializer { - never: !, - marker: PhantomData, -} - -#[cfg(feature = "unstable")] -impl<'de, E> IntoDeserializer<'de, E> for ! 
-where - E: de::Error, -{ - type Deserializer = NeverDeserializer; - - fn into_deserializer(self) -> Self::Deserializer { - self - } -} - -#[cfg(feature = "unstable")] -impl<'de, E> de::Deserializer<'de> for NeverDeserializer -where - E: de::Error, -{ - type Error = E; - - fn deserialize_any(self, _visitor: V) -> Result - where - V: de::Visitor<'de>, - { - self.never - } - - forward_to_deserialize_any! { - bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string - bytes byte_buf option unit unit_struct newtype_struct seq tuple - tuple_struct map struct enum identifier ignored_any - } -} - -//////////////////////////////////////////////////////////////////////////////// - -macro_rules! primitive_deserializer { - ($ty:ty, $doc:tt, $name:ident, $method:ident $($cast:tt)*) => { - #[doc = "A deserializer holding"] - #[doc = $doc] - pub struct $name { - value: $ty, - marker: PhantomData - } - - impl_copy_clone!($name); - - impl<'de, E> IntoDeserializer<'de, E> for $ty - where - E: de::Error, - { - type Deserializer = $name; - - fn into_deserializer(self) -> $name { - $name { - value: self, - marker: PhantomData, - } - } - } - - impl<'de, E> de::Deserializer<'de> for $name - where - E: de::Error, - { - type Error = E; - - forward_to_deserialize_any! { - bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str - string bytes byte_buf option unit unit_struct newtype_struct seq - tuple tuple_struct map struct enum identifier ignored_any - } - - fn deserialize_any(self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - visitor.$method(self.value $($cast)*) - } - } - - impl Debug for $name { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter - .debug_struct(stringify!($name)) - .field("value", &self.value) - .finish() - } - } - } -} - -primitive_deserializer!(bool, "a `bool`.", BoolDeserializer, visit_bool); -primitive_deserializer!(i8, "an `i8`.", I8Deserializer, visit_i8); -primitive_deserializer!(i16, "an `i16`.", I16Deserializer, visit_i16); -primitive_deserializer!(i32, "an `i32`.", I32Deserializer, visit_i32); -primitive_deserializer!(i64, "an `i64`.", I64Deserializer, visit_i64); -primitive_deserializer!(isize, "an `isize`.", IsizeDeserializer, visit_i64 as i64); -primitive_deserializer!(u8, "a `u8`.", U8Deserializer, visit_u8); -primitive_deserializer!(u16, "a `u16`.", U16Deserializer, visit_u16); -primitive_deserializer!(u64, "a `u64`.", U64Deserializer, visit_u64); -primitive_deserializer!(usize, "a `usize`.", UsizeDeserializer, visit_u64 as u64); -primitive_deserializer!(f32, "an `f32`.", F32Deserializer, visit_f32); -primitive_deserializer!(f64, "an `f64`.", F64Deserializer, visit_f64); -primitive_deserializer!(char, "a `char`.", CharDeserializer, visit_char); - -serde_if_integer128! { - primitive_deserializer!(i128, "an `i128`.", I128Deserializer, visit_i128); - primitive_deserializer!(u128, "a `u128`.", U128Deserializer, visit_u128); -} - -/// A deserializer holding a `u32`. -pub struct U32Deserializer { - value: u32, - marker: PhantomData, -} - -impl_copy_clone!(U32Deserializer); - -impl<'de, E> IntoDeserializer<'de, E> for u32 -where - E: de::Error, -{ - type Deserializer = U32Deserializer; - - fn into_deserializer(self) -> U32Deserializer { - U32Deserializer { - value: self, - marker: PhantomData, - } - } -} - -impl<'de, E> de::Deserializer<'de> for U32Deserializer -where - E: de::Error, -{ - type Error = E; - - forward_to_deserialize_any! 
{ - bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string - bytes byte_buf option unit unit_struct newtype_struct seq tuple - tuple_struct map struct identifier ignored_any - } - - fn deserialize_any(self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - visitor.visit_u32(self.value) - } - - fn deserialize_enum( - self, - name: &str, - variants: &'static [&'static str], - visitor: V, - ) -> Result - where - V: de::Visitor<'de>, - { - let _ = name; - let _ = variants; - visitor.visit_enum(self) - } -} - -impl<'de, E> de::EnumAccess<'de> for U32Deserializer -where - E: de::Error, -{ - type Error = E; - type Variant = private::UnitOnly; - - fn variant_seed(self, seed: T) -> Result<(T::Value, Self::Variant), Self::Error> - where - T: de::DeserializeSeed<'de>, - { - seed.deserialize(self).map(private::unit_only) - } -} - -impl Debug for U32Deserializer { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter - .debug_struct("U32Deserializer") - .field("value", &self.value) - .finish() - } -} - -//////////////////////////////////////////////////////////////////////////////// - -/// A deserializer holding a `&str`. -pub struct StrDeserializer<'a, E> { - value: &'a str, - marker: PhantomData, -} - -impl_copy_clone!(StrDeserializer<'de>); - -impl<'de, 'a, E> IntoDeserializer<'de, E> for &'a str -where - E: de::Error, -{ - type Deserializer = StrDeserializer<'a, E>; - - fn into_deserializer(self) -> StrDeserializer<'a, E> { - StrDeserializer { - value: self, - marker: PhantomData, - } - } -} - -impl<'de, 'a, E> de::Deserializer<'de> for StrDeserializer<'a, E> -where - E: de::Error, -{ - type Error = E; - - fn deserialize_any(self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - visitor.visit_str(self.value) - } - - fn deserialize_enum( - self, - name: &str, - variants: &'static [&'static str], - visitor: V, - ) -> Result - where - V: de::Visitor<'de>, - { - let _ = name; - let _ = variants; - visitor.visit_enum(self) - } - - forward_to_deserialize_any! { - bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string - bytes byte_buf option unit unit_struct newtype_struct seq tuple - tuple_struct map struct identifier ignored_any - } -} - -impl<'de, 'a, E> de::EnumAccess<'de> for StrDeserializer<'a, E> -where - E: de::Error, -{ - type Error = E; - type Variant = private::UnitOnly; - - fn variant_seed(self, seed: T) -> Result<(T::Value, Self::Variant), Self::Error> - where - T: de::DeserializeSeed<'de>, - { - seed.deserialize(self).map(private::unit_only) - } -} - -impl<'a, E> Debug for StrDeserializer<'a, E> { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter - .debug_struct("StrDeserializer") - .field("value", &self.value) - .finish() - } -} - -//////////////////////////////////////////////////////////////////////////////// - -/// A deserializer holding a `&str` with a lifetime tied to another -/// deserializer. -pub struct BorrowedStrDeserializer<'de, E> { - value: &'de str, - marker: PhantomData, -} - -impl_copy_clone!(BorrowedStrDeserializer<'de>); - -impl<'de, E> BorrowedStrDeserializer<'de, E> { - /// Create a new borrowed deserializer from the given string. 
- pub fn new(value: &'de str) -> BorrowedStrDeserializer<'de, E> { - BorrowedStrDeserializer { - value: value, - marker: PhantomData, - } - } -} - -impl<'de, E> de::Deserializer<'de> for BorrowedStrDeserializer<'de, E> -where - E: de::Error, -{ - type Error = E; - - fn deserialize_any(self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - visitor.visit_borrowed_str(self.value) - } - - fn deserialize_enum( - self, - name: &str, - variants: &'static [&'static str], - visitor: V, - ) -> Result - where - V: de::Visitor<'de>, - { - let _ = name; - let _ = variants; - visitor.visit_enum(self) - } - - forward_to_deserialize_any! { - bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string - bytes byte_buf option unit unit_struct newtype_struct seq tuple - tuple_struct map struct identifier ignored_any - } -} - -impl<'de, E> de::EnumAccess<'de> for BorrowedStrDeserializer<'de, E> -where - E: de::Error, -{ - type Error = E; - type Variant = private::UnitOnly; - - fn variant_seed(self, seed: T) -> Result<(T::Value, Self::Variant), Self::Error> - where - T: de::DeserializeSeed<'de>, - { - seed.deserialize(self).map(private::unit_only) - } -} - -impl<'de, E> Debug for BorrowedStrDeserializer<'de, E> { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter - .debug_struct("BorrowedStrDeserializer") - .field("value", &self.value) - .finish() - } -} - -//////////////////////////////////////////////////////////////////////////////// - -/// A deserializer holding a `String`. -#[cfg(any(feature = "std", feature = "alloc"))] -pub struct StringDeserializer { - value: String, - marker: PhantomData, -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl Clone for StringDeserializer { - fn clone(&self) -> Self { - StringDeserializer { - value: self.value.clone(), - marker: PhantomData, - } - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'de, E> IntoDeserializer<'de, E> for String -where - E: de::Error, -{ - type Deserializer = StringDeserializer; - - fn into_deserializer(self) -> StringDeserializer { - StringDeserializer { - value: self, - marker: PhantomData, - } - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'de, E> de::Deserializer<'de> for StringDeserializer -where - E: de::Error, -{ - type Error = E; - - fn deserialize_any(self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - visitor.visit_string(self.value) - } - - fn deserialize_enum( - self, - name: &str, - variants: &'static [&'static str], - visitor: V, - ) -> Result - where - V: de::Visitor<'de>, - { - let _ = name; - let _ = variants; - visitor.visit_enum(self) - } - - forward_to_deserialize_any! 
{ - bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string - bytes byte_buf option unit unit_struct newtype_struct seq tuple - tuple_struct map struct identifier ignored_any - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'de, 'a, E> de::EnumAccess<'de> for StringDeserializer -where - E: de::Error, -{ - type Error = E; - type Variant = private::UnitOnly; - - fn variant_seed(self, seed: T) -> Result<(T::Value, Self::Variant), Self::Error> - where - T: de::DeserializeSeed<'de>, - { - seed.deserialize(self).map(private::unit_only) - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl Debug for StringDeserializer { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter - .debug_struct("StringDeserializer") - .field("value", &self.value) - .finish() - } -} - -//////////////////////////////////////////////////////////////////////////////// - -/// A deserializer holding a `Cow`. -#[cfg(any(feature = "std", feature = "alloc"))] -pub struct CowStrDeserializer<'a, E> { - value: Cow<'a, str>, - marker: PhantomData, -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'a, E> Clone for CowStrDeserializer<'a, E> { - fn clone(&self) -> Self { - CowStrDeserializer { - value: self.value.clone(), - marker: PhantomData, - } - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'de, 'a, E> IntoDeserializer<'de, E> for Cow<'a, str> -where - E: de::Error, -{ - type Deserializer = CowStrDeserializer<'a, E>; - - fn into_deserializer(self) -> CowStrDeserializer<'a, E> { - CowStrDeserializer { - value: self, - marker: PhantomData, - } - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'de, 'a, E> de::Deserializer<'de> for CowStrDeserializer<'a, E> -where - E: de::Error, -{ - type Error = E; - - fn deserialize_any(self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - match self.value { - Cow::Borrowed(string) => visitor.visit_str(string), - Cow::Owned(string) => visitor.visit_string(string), - } - } - - fn deserialize_enum( - self, - name: &str, - variants: &'static [&'static str], - visitor: V, - ) -> Result - where - V: de::Visitor<'de>, - { - let _ = name; - let _ = variants; - visitor.visit_enum(self) - } - - forward_to_deserialize_any! { - bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string - bytes byte_buf option unit unit_struct newtype_struct seq tuple - tuple_struct map struct identifier ignored_any - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'de, 'a, E> de::EnumAccess<'de> for CowStrDeserializer<'a, E> -where - E: de::Error, -{ - type Error = E; - type Variant = private::UnitOnly; - - fn variant_seed(self, seed: T) -> Result<(T::Value, Self::Variant), Self::Error> - where - T: de::DeserializeSeed<'de>, - { - seed.deserialize(self).map(private::unit_only) - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'a, E> Debug for CowStrDeserializer<'a, E> { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter - .debug_struct("CowStrDeserializer") - .field("value", &self.value) - .finish() - } -} - -//////////////////////////////////////////////////////////////////////////////// - -/// A deserializer holding a `&[u8]`. Always calls [`Visitor::visit_bytes`]. -pub struct BytesDeserializer<'a, E> { - value: &'a [u8], - marker: PhantomData, -} - -impl<'a, E> BytesDeserializer<'a, E> { - /// Create a new deserializer from the given bytes. 
- pub fn new(value: &'a [u8]) -> Self { - BytesDeserializer { - value: value, - marker: PhantomData, - } - } -} - -impl_copy_clone!(BytesDeserializer<'a>); - -impl<'de, 'a, E> IntoDeserializer<'de, E> for &'a [u8] -where - E: de::Error, -{ - type Deserializer = BytesDeserializer<'a, E>; - - fn into_deserializer(self) -> BytesDeserializer<'a, E> { - BytesDeserializer::new(self) - } -} - -impl<'de, 'a, E> Deserializer<'de> for BytesDeserializer<'a, E> -where - E: de::Error, -{ - type Error = E; - - fn deserialize_any(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - visitor.visit_bytes(self.value) - } - - forward_to_deserialize_any! { - bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string - bytes byte_buf option unit unit_struct newtype_struct seq tuple - tuple_struct map struct enum identifier ignored_any - } -} - -impl<'a, E> Debug for BytesDeserializer<'a, E> { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter - .debug_struct("BytesDeserializer") - .field("value", &self.value) - .finish() - } -} - -/// A deserializer holding a `&[u8]` with a lifetime tied to another -/// deserializer. Always calls [`Visitor::visit_borrowed_bytes`]. -pub struct BorrowedBytesDeserializer<'de, E> { - value: &'de [u8], - marker: PhantomData, -} - -impl<'de, E> BorrowedBytesDeserializer<'de, E> { - /// Create a new borrowed deserializer from the given borrowed bytes. - pub fn new(value: &'de [u8]) -> Self { - BorrowedBytesDeserializer { - value: value, - marker: PhantomData, - } - } -} - -impl_copy_clone!(BorrowedBytesDeserializer<'de>); - -impl<'de, E> Deserializer<'de> for BorrowedBytesDeserializer<'de, E> -where - E: de::Error, -{ - type Error = E; - - fn deserialize_any(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - visitor.visit_borrowed_bytes(self.value) - } - - forward_to_deserialize_any! { - bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string - bytes byte_buf option unit unit_struct newtype_struct seq tuple - tuple_struct map struct enum identifier ignored_any - } -} - -impl<'de, E> Debug for BorrowedBytesDeserializer<'de, E> { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter - .debug_struct("BorrowedBytesDeserializer") - .field("value", &self.value) - .finish() - } -} - -//////////////////////////////////////////////////////////////////////////////// - -/// A deserializer that iterates over a sequence. -#[derive(Clone)] -pub struct SeqDeserializer { - iter: iter::Fuse, - count: usize, - marker: PhantomData, -} - -impl SeqDeserializer -where - I: Iterator, -{ - /// Construct a new `SeqDeserializer`. - pub fn new(iter: I) -> Self { - SeqDeserializer { - iter: iter.fuse(), - count: 0, - marker: PhantomData, - } - } -} - -impl SeqDeserializer -where - I: Iterator, - E: de::Error, -{ - /// Check for remaining elements after passing a `SeqDeserializer` to - /// `Visitor::visit_seq`. - pub fn end(self) -> Result<(), E> { - let remaining = self.iter.count(); - if remaining == 0 { - Ok(()) - } else { - // First argument is the number of elements in the data, second - // argument is the number of elements expected by the Deserialize. 
- Err(de::Error::invalid_length( - self.count + remaining, - &ExpectedInSeq(self.count), - )) - } - } -} - -impl<'de, I, T, E> de::Deserializer<'de> for SeqDeserializer -where - I: Iterator, - T: IntoDeserializer<'de, E>, - E: de::Error, -{ - type Error = E; - - fn deserialize_any(mut self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - let v = try!(visitor.visit_seq(&mut self)); - try!(self.end()); - Ok(v) - } - - forward_to_deserialize_any! { - bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string - bytes byte_buf option unit unit_struct newtype_struct seq tuple - tuple_struct map struct enum identifier ignored_any - } -} - -impl<'de, I, T, E> de::SeqAccess<'de> for SeqDeserializer -where - I: Iterator, - T: IntoDeserializer<'de, E>, - E: de::Error, -{ - type Error = E; - - fn next_element_seed(&mut self, seed: V) -> Result, Self::Error> - where - V: de::DeserializeSeed<'de>, - { - match self.iter.next() { - Some(value) => { - self.count += 1; - seed.deserialize(value.into_deserializer()).map(Some) - } - None => Ok(None), - } - } - - fn size_hint(&self) -> Option { - size_hint::from_bounds(&self.iter) - } -} - -struct ExpectedInSeq(usize); - -impl Expected for ExpectedInSeq { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - if self.0 == 1 { - write!(formatter, "1 element in sequence") - } else { - write!(formatter, "{} elements in sequence", self.0) - } - } -} - -impl Debug for SeqDeserializer -where - I: Debug, -{ - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter - .debug_struct("SeqDeserializer") - .field("iter", &self.iter) - .field("count", &self.count) - .finish() - } -} - -//////////////////////////////////////////////////////////////////////////////// - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'de, T, E> IntoDeserializer<'de, E> for Vec -where - T: IntoDeserializer<'de, E>, - E: de::Error, -{ - type Deserializer = SeqDeserializer<::IntoIter, E>; - - fn into_deserializer(self) -> Self::Deserializer { - SeqDeserializer::new(self.into_iter()) - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'de, T, E> IntoDeserializer<'de, E> for BTreeSet -where - T: IntoDeserializer<'de, E> + Eq + Ord, - E: de::Error, -{ - type Deserializer = SeqDeserializer<::IntoIter, E>; - - fn into_deserializer(self) -> Self::Deserializer { - SeqDeserializer::new(self.into_iter()) - } -} - -#[cfg(feature = "std")] -impl<'de, T, S, E> IntoDeserializer<'de, E> for HashSet -where - T: IntoDeserializer<'de, E> + Eq + Hash, - S: BuildHasher, - E: de::Error, -{ - type Deserializer = SeqDeserializer<::IntoIter, E>; - - fn into_deserializer(self) -> Self::Deserializer { - SeqDeserializer::new(self.into_iter()) - } -} - -//////////////////////////////////////////////////////////////////////////////// - -/// A deserializer holding a `SeqAccess`. -#[derive(Clone, Debug)] -pub struct SeqAccessDeserializer { - seq: A, -} - -impl SeqAccessDeserializer { - /// Construct a new `SeqAccessDeserializer`. - pub fn new(seq: A) -> Self { - SeqAccessDeserializer { seq: seq } - } -} - -impl<'de, A> de::Deserializer<'de> for SeqAccessDeserializer -where - A: de::SeqAccess<'de>, -{ - type Error = A::Error; - - fn deserialize_any(self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - visitor.visit_seq(self.seq) - } - - forward_to_deserialize_any! 
{ - bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string - bytes byte_buf option unit unit_struct newtype_struct seq tuple - tuple_struct map struct enum identifier ignored_any - } -} - -//////////////////////////////////////////////////////////////////////////////// - -/// A deserializer that iterates over a map. -pub struct MapDeserializer<'de, I, E> -where - I: Iterator, - I::Item: private::Pair, -{ - iter: iter::Fuse, - value: Option>, - count: usize, - lifetime: PhantomData<&'de ()>, - error: PhantomData, -} - -impl<'de, I, E> MapDeserializer<'de, I, E> -where - I: Iterator, - I::Item: private::Pair, -{ - /// Construct a new `MapDeserializer`. - pub fn new(iter: I) -> Self { - MapDeserializer { - iter: iter.fuse(), - value: None, - count: 0, - lifetime: PhantomData, - error: PhantomData, - } - } -} - -impl<'de, I, E> MapDeserializer<'de, I, E> -where - I: Iterator, - I::Item: private::Pair, - E: de::Error, -{ - /// Check for remaining elements after passing a `MapDeserializer` to - /// `Visitor::visit_map`. - pub fn end(self) -> Result<(), E> { - let remaining = self.iter.count(); - if remaining == 0 { - Ok(()) - } else { - // First argument is the number of elements in the data, second - // argument is the number of elements expected by the Deserialize. - Err(de::Error::invalid_length( - self.count + remaining, - &ExpectedInMap(self.count), - )) - } - } -} - -impl<'de, I, E> MapDeserializer<'de, I, E> -where - I: Iterator, - I::Item: private::Pair, -{ - fn next_pair(&mut self) -> Option<(First, Second)> { - match self.iter.next() { - Some(kv) => { - self.count += 1; - Some(private::Pair::split(kv)) - } - None => None, - } - } -} - -impl<'de, I, E> de::Deserializer<'de> for MapDeserializer<'de, I, E> -where - I: Iterator, - I::Item: private::Pair, - First: IntoDeserializer<'de, E>, - Second: IntoDeserializer<'de, E>, - E: de::Error, -{ - type Error = E; - - fn deserialize_any(mut self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - let value = try!(visitor.visit_map(&mut self)); - try!(self.end()); - Ok(value) - } - - fn deserialize_seq(mut self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - let value = try!(visitor.visit_seq(&mut self)); - try!(self.end()); - Ok(value) - } - - fn deserialize_tuple(self, len: usize, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - let _ = len; - self.deserialize_seq(visitor) - } - - forward_to_deserialize_any! { - bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string - bytes byte_buf option unit unit_struct newtype_struct tuple_struct map - struct enum identifier ignored_any - } -} - -impl<'de, I, E> de::MapAccess<'de> for MapDeserializer<'de, I, E> -where - I: Iterator, - I::Item: private::Pair, - First: IntoDeserializer<'de, E>, - Second: IntoDeserializer<'de, E>, - E: de::Error, -{ - type Error = E; - - fn next_key_seed(&mut self, seed: T) -> Result, Self::Error> - where - T: de::DeserializeSeed<'de>, - { - match self.next_pair() { - Some((key, value)) => { - self.value = Some(value); - seed.deserialize(key.into_deserializer()).map(Some) - } - None => Ok(None), - } - } - - fn next_value_seed(&mut self, seed: T) -> Result - where - T: de::DeserializeSeed<'de>, - { - let value = self.value.take(); - // Panic because this indicates a bug in the program rather than an - // expected failure. 
- let value = value.expect("MapAccess::next_value called before next_key"); - seed.deserialize(value.into_deserializer()) - } - - fn next_entry_seed( - &mut self, - kseed: TK, - vseed: TV, - ) -> Result, Self::Error> - where - TK: de::DeserializeSeed<'de>, - TV: de::DeserializeSeed<'de>, - { - match self.next_pair() { - Some((key, value)) => { - let key = try!(kseed.deserialize(key.into_deserializer())); - let value = try!(vseed.deserialize(value.into_deserializer())); - Ok(Some((key, value))) - } - None => Ok(None), - } - } - - fn size_hint(&self) -> Option { - size_hint::from_bounds(&self.iter) - } -} - -impl<'de, I, E> de::SeqAccess<'de> for MapDeserializer<'de, I, E> -where - I: Iterator, - I::Item: private::Pair, - First: IntoDeserializer<'de, E>, - Second: IntoDeserializer<'de, E>, - E: de::Error, -{ - type Error = E; - - fn next_element_seed(&mut self, seed: T) -> Result, Self::Error> - where - T: de::DeserializeSeed<'de>, - { - match self.next_pair() { - Some((k, v)) => { - let de = PairDeserializer(k, v, PhantomData); - seed.deserialize(de).map(Some) - } - None => Ok(None), - } - } - - fn size_hint(&self) -> Option { - size_hint::from_bounds(&self.iter) - } -} - -// Cannot #[derive(Clone)] because of the bound `Second: Clone`. -impl<'de, I, E> Clone for MapDeserializer<'de, I, E> -where - I: Iterator + Clone, - I::Item: private::Pair, - Second: Clone, -{ - fn clone(&self) -> Self { - MapDeserializer { - iter: self.iter.clone(), - value: self.value.clone(), - count: self.count, - lifetime: self.lifetime, - error: self.error, - } - } -} - -impl<'de, I, E> Debug for MapDeserializer<'de, I, E> -where - I: Iterator + Debug, - I::Item: private::Pair, - Second: Debug, -{ - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter - .debug_struct("MapDeserializer") - .field("iter", &self.iter) - .field("value", &self.value) - .field("count", &self.count) - .finish() - } -} - -// Used in the `impl SeqAccess for MapDeserializer` to visit the map as a -// sequence of pairs. -struct PairDeserializer(A, B, PhantomData); - -impl<'de, A, B, E> de::Deserializer<'de> for PairDeserializer -where - A: IntoDeserializer<'de, E>, - B: IntoDeserializer<'de, E>, - E: de::Error, -{ - type Error = E; - - forward_to_deserialize_any! { - bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string - bytes byte_buf option unit unit_struct newtype_struct tuple_struct map - struct enum identifier ignored_any - } - - fn deserialize_any(self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - self.deserialize_seq(visitor) - } - - fn deserialize_seq(self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - let mut pair_visitor = PairVisitor(Some(self.0), Some(self.1), PhantomData); - let pair = try!(visitor.visit_seq(&mut pair_visitor)); - if pair_visitor.1.is_none() { - Ok(pair) - } else { - let remaining = pair_visitor.size_hint().unwrap(); - // First argument is the number of elements in the data, second - // argument is the number of elements expected by the Deserialize. - Err(de::Error::invalid_length(2, &ExpectedInSeq(2 - remaining))) - } - } - - fn deserialize_tuple(self, len: usize, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - if len == 2 { - self.deserialize_seq(visitor) - } else { - // First argument is the number of elements in the data, second - // argument is the number of elements expected by the Deserialize. 
- Err(de::Error::invalid_length(2, &ExpectedInSeq(len))) - } - } -} - -struct PairVisitor(Option, Option, PhantomData); - -impl<'de, A, B, E> de::SeqAccess<'de> for PairVisitor -where - A: IntoDeserializer<'de, E>, - B: IntoDeserializer<'de, E>, - E: de::Error, -{ - type Error = E; - - fn next_element_seed(&mut self, seed: T) -> Result, Self::Error> - where - T: de::DeserializeSeed<'de>, - { - if let Some(k) = self.0.take() { - seed.deserialize(k.into_deserializer()).map(Some) - } else if let Some(v) = self.1.take() { - seed.deserialize(v.into_deserializer()).map(Some) - } else { - Ok(None) - } - } - - fn size_hint(&self) -> Option { - if self.0.is_some() { - Some(2) - } else if self.1.is_some() { - Some(1) - } else { - Some(0) - } - } -} - -struct ExpectedInMap(usize); - -impl Expected for ExpectedInMap { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - if self.0 == 1 { - write!(formatter, "1 element in map") - } else { - write!(formatter, "{} elements in map", self.0) - } - } -} - -//////////////////////////////////////////////////////////////////////////////// - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'de, K, V, E> IntoDeserializer<'de, E> for BTreeMap -where - K: IntoDeserializer<'de, E> + Eq + Ord, - V: IntoDeserializer<'de, E>, - E: de::Error, -{ - type Deserializer = MapDeserializer<'de, ::IntoIter, E>; - - fn into_deserializer(self) -> Self::Deserializer { - MapDeserializer::new(self.into_iter()) - } -} - -#[cfg(feature = "std")] -impl<'de, K, V, S, E> IntoDeserializer<'de, E> for HashMap -where - K: IntoDeserializer<'de, E> + Eq + Hash, - V: IntoDeserializer<'de, E>, - S: BuildHasher, - E: de::Error, -{ - type Deserializer = MapDeserializer<'de, ::IntoIter, E>; - - fn into_deserializer(self) -> Self::Deserializer { - MapDeserializer::new(self.into_iter()) - } -} - -//////////////////////////////////////////////////////////////////////////////// - -/// A deserializer holding a `MapAccess`. -#[derive(Clone, Debug)] -pub struct MapAccessDeserializer { - map: A, -} - -impl MapAccessDeserializer { - /// Construct a new `MapAccessDeserializer`. - pub fn new(map: A) -> Self { - MapAccessDeserializer { map: map } - } -} - -impl<'de, A> de::Deserializer<'de> for MapAccessDeserializer -where - A: de::MapAccess<'de>, -{ - type Error = A::Error; - - fn deserialize_any(self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - visitor.visit_map(self.map) - } - - fn deserialize_enum( - self, - _name: &str, - _variants: &'static [&'static str], - visitor: V, - ) -> Result - where - V: de::Visitor<'de>, - { - visitor.visit_enum(self) - } - - forward_to_deserialize_any! { - bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string - bytes byte_buf option unit unit_struct newtype_struct seq tuple - tuple_struct map struct identifier ignored_any - } -} - -impl<'de, A> de::EnumAccess<'de> for MapAccessDeserializer -where - A: de::MapAccess<'de>, -{ - type Error = A::Error; - type Variant = private::MapAsEnum; - - fn variant_seed(mut self, seed: T) -> Result<(T::Value, Self::Variant), Self::Error> - where - T: de::DeserializeSeed<'de>, - { - match self.map.next_key_seed(seed)? 
{ - Some(key) => Ok((key, private::map_as_enum(self.map))), - None => Err(de::Error::invalid_type(de::Unexpected::Map, &"enum")), - } - } -} - -//////////////////////////////////////////////////////////////////////////////// - -mod private { - use lib::*; - - use de::{self, DeserializeSeed, Deserializer, MapAccess, Unexpected, VariantAccess, Visitor}; - - pub struct UnitOnly { - marker: PhantomData, - } - - pub fn unit_only(t: T) -> (T, UnitOnly) { - ( - t, - UnitOnly { - marker: PhantomData, - }, - ) - } - - impl<'de, E> de::VariantAccess<'de> for UnitOnly - where - E: de::Error, - { - type Error = E; - - fn unit_variant(self) -> Result<(), Self::Error> { - Ok(()) - } - - fn newtype_variant_seed(self, _seed: T) -> Result - where - T: de::DeserializeSeed<'de>, - { - Err(de::Error::invalid_type( - Unexpected::UnitVariant, - &"newtype variant", - )) - } - - fn tuple_variant(self, _len: usize, _visitor: V) -> Result - where - V: de::Visitor<'de>, - { - Err(de::Error::invalid_type( - Unexpected::UnitVariant, - &"tuple variant", - )) - } - - fn struct_variant( - self, - _fields: &'static [&'static str], - _visitor: V, - ) -> Result - where - V: de::Visitor<'de>, - { - Err(de::Error::invalid_type( - Unexpected::UnitVariant, - &"struct variant", - )) - } - } - - pub struct MapAsEnum { - map: A, - } - - pub fn map_as_enum(map: A) -> MapAsEnum { - MapAsEnum { map: map } - } - - impl<'de, A> VariantAccess<'de> for MapAsEnum - where - A: MapAccess<'de>, - { - type Error = A::Error; - - fn unit_variant(mut self) -> Result<(), Self::Error> { - self.map.next_value() - } - - fn newtype_variant_seed(mut self, seed: T) -> Result - where - T: DeserializeSeed<'de>, - { - self.map.next_value_seed(seed) - } - - fn tuple_variant(mut self, len: usize, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.map.next_value_seed(SeedTupleVariant { - len: len, - visitor: visitor, - }) - } - - fn struct_variant( - mut self, - _fields: &'static [&'static str], - visitor: V, - ) -> Result - where - V: Visitor<'de>, - { - self.map - .next_value_seed(SeedStructVariant { visitor: visitor }) - } - } - - struct SeedTupleVariant { - len: usize, - visitor: V, - } - - impl<'de, V> DeserializeSeed<'de> for SeedTupleVariant - where - V: Visitor<'de>, - { - type Value = V::Value; - - fn deserialize(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_tuple(self.len, self.visitor) - } - } - - struct SeedStructVariant { - visitor: V, - } - - impl<'de, V> DeserializeSeed<'de> for SeedStructVariant - where - V: Visitor<'de>, - { - type Value = V::Value; - - fn deserialize(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_map(self.visitor) - } - } - - /// Avoid having to restate the generic types on `MapDeserializer`. The - /// `Iterator::Item` contains enough information to figure out K and V. - pub trait Pair { - type First; - type Second; - fn split(self) -> (Self::First, Self::Second); - } - - impl Pair for (A, B) { - type First = A; - type Second = B; - fn split(self) -> (A, B) { - self - } - } - - pub type First = ::First; - pub type Second = ::Second; -} diff --git a/vendor/serde/src/integer128.rs b/vendor/serde/src/integer128.rs deleted file mode 100644 index 0ee05bdc..00000000 --- a/vendor/serde/src/integer128.rs +++ /dev/null @@ -1,82 +0,0 @@ -/// Conditional compilation depending on whether Serde is built with support for -/// 128-bit integers. 
-/// -/// Data formats that wish to support Rust compiler versions older than 1.26 -/// (or targets that lack 128-bit integers) may place the i128 / u128 methods -/// of their Serializer and Deserializer behind this macro. -/// -/// Data formats that require a minimum Rust compiler version of at least 1.26, -/// or do not target platforms that lack 128-bit integers, do not need to -/// bother with this macro and may assume support for 128-bit integers. -/// -/// ```edition2018 -/// # use serde::__private::doc::Error; -/// # -/// # struct MySerializer; -/// # -/// use serde::{serde_if_integer128, Serializer}; -/// -/// impl Serializer for MySerializer { -/// type Ok = (); -/// type Error = Error; -/// -/// fn serialize_i64(self, v: i64) -> Result { -/// /* ... */ -/// # unimplemented!() -/// } -/// -/// /* ... */ -/// -/// serde_if_integer128! { -/// fn serialize_i128(self, v: i128) -> Result { -/// /* ... */ -/// # unimplemented!() -/// } -/// -/// fn serialize_u128(self, v: u128) -> Result { -/// /* ... */ -/// # unimplemented!() -/// } -/// } -/// # -/// # serde::__serialize_unimplemented! { -/// # bool i8 i16 i32 u8 u16 u32 u64 f32 f64 char str bytes none some -/// # unit unit_struct unit_variant newtype_struct newtype_variant seq -/// # tuple tuple_struct tuple_variant map struct struct_variant -/// # } -/// } -/// ``` -/// -/// When Serde is built with support for 128-bit integers, this macro expands -/// transparently into just the input tokens. -/// -/// ```edition2018 -/// macro_rules! serde_if_integer128 { -/// ($($tt:tt)*) => { -/// $($tt)* -/// }; -/// } -/// ``` -/// -/// When built without support for 128-bit integers, this macro expands to -/// nothing. -/// -/// ```edition2018 -/// macro_rules! serde_if_integer128 { -/// ($($tt:tt)*) => {}; -/// } -/// ``` -#[cfg(integer128)] -#[macro_export] -macro_rules! serde_if_integer128 { - ($($tt:tt)*) => { - $($tt)* - }; -} - -#[cfg(not(integer128))] -#[macro_export] -#[doc(hidden)] -macro_rules! serde_if_integer128 { - ($($tt:tt)*) => {}; -} diff --git a/vendor/serde/src/lib.rs b/vendor/serde/src/lib.rs deleted file mode 100644 index f6a8ac00..00000000 --- a/vendor/serde/src/lib.rs +++ /dev/null @@ -1,297 +0,0 @@ -//! # Serde -//! -//! Serde is a framework for ***ser***ializing and ***de***serializing Rust data -//! structures efficiently and generically. -//! -//! The Serde ecosystem consists of data structures that know how to serialize -//! and deserialize themselves along with data formats that know how to -//! serialize and deserialize other things. Serde provides the layer by which -//! these two groups interact with each other, allowing any supported data -//! structure to be serialized and deserialized using any supported data format. -//! -//! See the Serde website for additional documentation and -//! usage examples. -//! -//! ## Design -//! -//! Where many other languages rely on runtime reflection for serializing data, -//! Serde is instead built on Rust's powerful trait system. A data structure -//! that knows how to serialize and deserialize itself is one that implements -//! Serde's `Serialize` and `Deserialize` traits (or uses Serde's derive -//! attribute to automatically generate implementations at compile time). This -//! avoids any overhead of reflection or runtime type information. In fact in -//! many situations the interaction between data structure and data format can -//! be completely optimized away by the Rust compiler, leaving Serde -//! 
serialization to perform the same speed as a handwritten serializer for the -//! specific selection of data structure and data format. -//! -//! ## Data formats -//! -//! The following is a partial list of data formats that have been implemented -//! for Serde by the community. -//! -//! - [JSON], the ubiquitous JavaScript Object Notation used by many HTTP APIs. -//! - [Bincode], a compact binary format -//! used for IPC within the Servo rendering engine. -//! - [CBOR], a Concise Binary Object Representation designed for small message -//! size without the need for version negotiation. -//! - [YAML], a self-proclaimed human-friendly configuration language that ain't -//! markup language. -//! - [MessagePack], an efficient binary format that resembles a compact JSON. -//! - [TOML], a minimal configuration format used by [Cargo]. -//! - [Pickle], a format common in the Python world. -//! - [RON], a Rusty Object Notation. -//! - [BSON], the data storage and network transfer format used by MongoDB. -//! - [Avro], a binary format used within Apache Hadoop, with support for schema -//! definition. -//! - [JSON5], a superset of JSON including some productions from ES5. -//! - [Postcard], a no\_std and embedded-systems friendly compact binary format. -//! - [URL] query strings, in the x-www-form-urlencoded format. -//! - [Envy], a way to deserialize environment variables into Rust structs. -//! *(deserialization only)* -//! - [Envy Store], a way to deserialize [AWS Parameter Store] parameters into -//! Rust structs. *(deserialization only)* -//! - [S-expressions], the textual representation of code and data used by the -//! Lisp language family. -//! - [D-Bus]'s binary wire format. -//! - [FlexBuffers], the schemaless cousin of Google's FlatBuffers zero-copy serialization format. -//! - [DynamoDB Items], the format used by [rusoto_dynamodb] to transfer data to -//! and from DynamoDB. -//! -//! [JSON]: https://github.com/serde-rs/json -//! [Bincode]: https://github.com/servo/bincode -//! [CBOR]: https://github.com/enarx/ciborium -//! [YAML]: https://github.com/dtolnay/serde-yaml -//! [MessagePack]: https://github.com/3Hren/msgpack-rust -//! [TOML]: https://github.com/alexcrichton/toml-rs -//! [Pickle]: https://github.com/birkenfeld/serde-pickle -//! [RON]: https://github.com/ron-rs/ron -//! [BSON]: https://github.com/zonyitoo/bson-rs -//! [Avro]: https://github.com/flavray/avro-rs -//! [JSON5]: https://github.com/callum-oakley/json5-rs -//! [Postcard]: https://github.com/jamesmunns/postcard -//! [URL]: https://docs.rs/serde_qs -//! [Envy]: https://github.com/softprops/envy -//! [Envy Store]: https://github.com/softprops/envy-store -//! [Cargo]: https://doc.rust-lang.org/cargo/reference/manifest.html -//! [AWS Parameter Store]: https://docs.aws.amazon.com/systems-manager/latest/userguide/systems-manager-paramstore.html -//! [S-expressions]: https://github.com/rotty/lexpr-rs -//! [D-Bus]: https://docs.rs/zvariant -//! [FlexBuffers]: https://github.com/google/flatbuffers/tree/master/rust/flexbuffers -//! [DynamoDB Items]: https://docs.rs/serde_dynamo -//! [rusoto_dynamodb]: https://docs.rs/rusoto_dynamodb - -//////////////////////////////////////////////////////////////////////////////// - -// Serde types in rustdoc of other crates get linked to here. -#![doc(html_root_url = "https://docs.rs/serde/1.0.132")] -// Support using Serde without the standard library! -#![cfg_attr(not(feature = "std"), no_std)] -// Unstable functionality only if the user asks for it. 
For tracking and -// discussion of these features please refer to this issue: -// -// https://github.com/serde-rs/serde/issues/812 -#![cfg_attr(feature = "unstable", feature(never_type))] -#![allow(unknown_lints, bare_trait_objects, deprecated)] -#![cfg_attr(feature = "cargo-clippy", allow(renamed_and_removed_lints))] -// Ignored clippy and clippy_pedantic lints -#![cfg_attr( - feature = "cargo-clippy", - allow( - // clippy bug: https://github.com/rust-lang/rust-clippy/issues/5704 - unnested_or_patterns, - // clippy bug: https://github.com/rust-lang/rust-clippy/issues/7768 - semicolon_if_nothing_returned, - // not available in our oldest supported compiler - checked_conversions, - empty_enum, - redundant_field_names, - redundant_static_lifetimes, - // integer and float ser/de requires these sorts of casts - cast_possible_truncation, - cast_possible_wrap, - cast_sign_loss, - // things are often more readable this way - cast_lossless, - module_name_repetitions, - option_if_let_else, - single_match_else, - type_complexity, - use_self, - zero_prefixed_literal, - // correctly used - enum_glob_use, - let_underscore_drop, - map_err_ignore, - result_unit_err, - wildcard_imports, - // not practical - needless_pass_by_value, - similar_names, - too_many_lines, - // preference - doc_markdown, - unseparated_literal_suffix, - // false positive - needless_doctest_main, - // noisy - missing_errors_doc, - must_use_candidate, - ) -)] -// Rustc lints. -#![deny(missing_docs, unused_imports)] - -//////////////////////////////////////////////////////////////////////////////// - -#[cfg(feature = "alloc")] -extern crate alloc; - -/// A facade around all the types we need from the `std`, `core`, and `alloc` -/// crates. This avoids elaborate import wrangling having to happen in every -/// module. 
-mod lib { - mod core { - #[cfg(not(feature = "std"))] - pub use core::*; - #[cfg(feature = "std")] - pub use std::*; - } - - pub use self::core::{cmp, iter, mem, num, ptr, slice, str}; - pub use self::core::{f32, f64}; - pub use self::core::{i16, i32, i64, i8, isize}; - pub use self::core::{u16, u32, u64, u8, usize}; - - pub use self::core::cell::{Cell, RefCell}; - pub use self::core::clone::{self, Clone}; - pub use self::core::convert::{self, From, Into}; - pub use self::core::default::{self, Default}; - pub use self::core::fmt::{self, Debug, Display}; - pub use self::core::marker::{self, PhantomData}; - pub use self::core::num::Wrapping; - pub use self::core::ops::Range; - pub use self::core::option::{self, Option}; - pub use self::core::result::{self, Result}; - - #[cfg(all(feature = "alloc", not(feature = "std")))] - pub use alloc::borrow::{Cow, ToOwned}; - #[cfg(feature = "std")] - pub use std::borrow::{Cow, ToOwned}; - - #[cfg(all(feature = "alloc", not(feature = "std")))] - pub use alloc::string::{String, ToString}; - #[cfg(feature = "std")] - pub use std::string::{String, ToString}; - - #[cfg(all(feature = "alloc", not(feature = "std")))] - pub use alloc::vec::Vec; - #[cfg(feature = "std")] - pub use std::vec::Vec; - - #[cfg(all(feature = "alloc", not(feature = "std")))] - pub use alloc::boxed::Box; - #[cfg(feature = "std")] - pub use std::boxed::Box; - - #[cfg(all(feature = "rc", feature = "alloc", not(feature = "std")))] - pub use alloc::rc::{Rc, Weak as RcWeak}; - #[cfg(all(feature = "rc", feature = "std"))] - pub use std::rc::{Rc, Weak as RcWeak}; - - #[cfg(all(feature = "rc", feature = "alloc", not(feature = "std")))] - pub use alloc::sync::{Arc, Weak as ArcWeak}; - #[cfg(all(feature = "rc", feature = "std"))] - pub use std::sync::{Arc, Weak as ArcWeak}; - - #[cfg(all(feature = "alloc", not(feature = "std")))] - pub use alloc::collections::{BTreeMap, BTreeSet, BinaryHeap, LinkedList, VecDeque}; - #[cfg(feature = "std")] - pub use std::collections::{BTreeMap, BTreeSet, BinaryHeap, LinkedList, VecDeque}; - - #[cfg(feature = "std")] - pub use std::{error, net}; - - #[cfg(feature = "std")] - pub use std::collections::{HashMap, HashSet}; - #[cfg(feature = "std")] - pub use std::ffi::{CStr, CString, OsStr, OsString}; - #[cfg(feature = "std")] - pub use std::hash::{BuildHasher, Hash}; - #[cfg(feature = "std")] - pub use std::io::Write; - #[cfg(feature = "std")] - pub use std::path::{Path, PathBuf}; - #[cfg(feature = "std")] - pub use std::sync::{Mutex, RwLock}; - #[cfg(feature = "std")] - pub use std::time::{SystemTime, UNIX_EPOCH}; - - #[cfg(all(feature = "std", collections_bound))] - pub use std::collections::Bound; - - #[cfg(core_reverse)] - pub use self::core::cmp::Reverse; - - #[cfg(ops_bound)] - pub use self::core::ops::Bound; - - #[cfg(range_inclusive)] - pub use self::core::ops::RangeInclusive; - - #[cfg(all(feature = "std", std_atomic))] - pub use std::sync::atomic::{ - AtomicBool, AtomicI16, AtomicI32, AtomicI8, AtomicIsize, AtomicU16, AtomicU32, AtomicU8, - AtomicUsize, Ordering, - }; - #[cfg(all(feature = "std", std_atomic64))] - pub use std::sync::atomic::{AtomicI64, AtomicU64}; - - #[cfg(any(core_duration, feature = "std"))] - pub use self::core::time::Duration; -} - -//////////////////////////////////////////////////////////////////////////////// - -#[macro_use] -mod macros; - -#[macro_use] -mod integer128; - -pub mod de; -pub mod ser; - -#[doc(inline)] -pub use de::{Deserialize, Deserializer}; -#[doc(inline)] -pub use ser::{Serialize, Serializer}; - -// Used by 
generated code and doc tests. Not public API. -#[doc(hidden)] -#[path = "private/mod.rs"] -pub mod __private; - -#[allow(unused_imports)] -use self::__private as export; -#[allow(unused_imports)] -use self::__private as private; - -#[path = "de/seed.rs"] -mod seed; - -#[cfg(not(feature = "std"))] -mod std_error; - -// Re-export #[derive(Serialize, Deserialize)]. -// -// The reason re-exporting is not enabled by default is that disabling it would -// be annoying for crates that provide handwritten impls or data formats. They -// would need to disable default features and then explicitly re-enable std. -#[cfg(feature = "serde_derive")] -#[allow(unused_imports)] -#[macro_use] -extern crate serde_derive; -#[cfg(feature = "serde_derive")] -#[doc(hidden)] -pub use serde_derive::*; diff --git a/vendor/serde/src/macros.rs b/vendor/serde/src/macros.rs deleted file mode 100644 index 6502a23a..00000000 --- a/vendor/serde/src/macros.rs +++ /dev/null @@ -1,236 +0,0 @@ -// Super explicit first paragraph because this shows up at the top level and -// trips up people who are just looking for basic Serialize / Deserialize -// documentation. -// -/// Helper macro when implementing the `Deserializer` part of a new data format -/// for Serde. -/// -/// Some [`Deserializer`] implementations for self-describing formats do not -/// care what hint the [`Visitor`] gives them, they just want to blindly call -/// the [`Visitor`] method corresponding to the data they can tell is in the -/// input. This requires repetitive implementations of all the [`Deserializer`] -/// trait methods. -/// -/// ```edition2018 -/// # use serde::forward_to_deserialize_any; -/// # use serde::de::{value, Deserializer, Visitor}; -/// # -/// # struct MyDeserializer; -/// # -/// # impl<'de> Deserializer<'de> for MyDeserializer { -/// # type Error = value::Error; -/// # -/// # fn deserialize_any(self, _: V) -> Result -/// # where -/// # V: Visitor<'de>, -/// # { -/// # unimplemented!() -/// # } -/// # -/// #[inline] -/// fn deserialize_bool(self, visitor: V) -> Result -/// where -/// V: Visitor<'de>, -/// { -/// self.deserialize_any(visitor) -/// } -/// # -/// # forward_to_deserialize_any! { -/// # i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string -/// # bytes byte_buf option unit unit_struct newtype_struct seq tuple -/// # tuple_struct map struct enum identifier ignored_any -/// # } -/// # } -/// ``` -/// -/// The `forward_to_deserialize_any!` macro implements these simple forwarding -/// methods so that they forward directly to [`Deserializer::deserialize_any`]. -/// You can choose which methods to forward. -/// -/// ```edition2018 -/// # use serde::forward_to_deserialize_any; -/// # use serde::de::{value, Deserializer, Visitor}; -/// # -/// # struct MyDeserializer; -/// # -/// impl<'de> Deserializer<'de> for MyDeserializer { -/// # type Error = value::Error; -/// # -/// fn deserialize_any(self, visitor: V) -> Result -/// where -/// V: Visitor<'de>, -/// { -/// /* ... */ -/// # let _ = visitor; -/// # unimplemented!() -/// } -/// -/// forward_to_deserialize_any! { -/// bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string -/// bytes byte_buf option unit unit_struct newtype_struct seq tuple -/// tuple_struct map struct enum identifier ignored_any -/// } -/// } -/// ``` -/// -/// The macro assumes the convention that your `Deserializer` lifetime parameter -/// is called `'de` and that the `Visitor` type parameters on each method are -/// called `V`. 
A different type parameter and a different lifetime can be -/// specified explicitly if necessary. -/// -/// ```edition2018 -/// # use std::marker::PhantomData; -/// # -/// # use serde::forward_to_deserialize_any; -/// # use serde::de::{value, Deserializer, Visitor}; -/// # -/// # struct MyDeserializer(PhantomData); -/// # -/// # impl<'q, V> Deserializer<'q> for MyDeserializer { -/// # type Error = value::Error; -/// # -/// # fn deserialize_any(self, visitor: W) -> Result -/// # where -/// # W: Visitor<'q>, -/// # { -/// # unimplemented!() -/// # } -/// # -/// forward_to_deserialize_any! { -/// > -/// bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string -/// bytes byte_buf option unit unit_struct newtype_struct seq tuple -/// tuple_struct map struct enum identifier ignored_any -/// } -/// # } -/// ``` -/// -/// [`Deserializer`]: trait.Deserializer.html -/// [`Visitor`]: de/trait.Visitor.html -/// [`Deserializer::deserialize_any`]: trait.Deserializer.html#tymethod.deserialize_any -#[macro_export(local_inner_macros)] -macro_rules! forward_to_deserialize_any { - (<$visitor:ident: Visitor<$lifetime:tt>> $($func:ident)*) => { - $(forward_to_deserialize_any_helper!{$func<$lifetime, $visitor>})* - }; - // This case must be after the previous one. - ($($func:ident)*) => { - $(forward_to_deserialize_any_helper!{$func<'de, V>})* - }; -} - -#[doc(hidden)] -#[macro_export] -macro_rules! forward_to_deserialize_any_method { - ($func:ident<$l:tt, $v:ident>($($arg:ident : $ty:ty),*)) => { - #[inline] - fn $func<$v>(self, $($arg: $ty,)* visitor: $v) -> $crate::__private::Result<$v::Value, Self::Error> - where - $v: $crate::de::Visitor<$l>, - { - $( - let _ = $arg; - )* - self.deserialize_any(visitor) - } - }; -} - -#[doc(hidden)] -#[macro_export(local_inner_macros)] -macro_rules! forward_to_deserialize_any_helper { - (bool<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_bool<$l, $v>()} - }; - (i8<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_i8<$l, $v>()} - }; - (i16<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_i16<$l, $v>()} - }; - (i32<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_i32<$l, $v>()} - }; - (i64<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_i64<$l, $v>()} - }; - (i128<$l:tt, $v:ident>) => { - serde_if_integer128! { - forward_to_deserialize_any_method!{deserialize_i128<$l, $v>()} - } - }; - (u8<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_u8<$l, $v>()} - }; - (u16<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_u16<$l, $v>()} - }; - (u32<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_u32<$l, $v>()} - }; - (u64<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_u64<$l, $v>()} - }; - (u128<$l:tt, $v:ident>) => { - serde_if_integer128! 
{ - forward_to_deserialize_any_method!{deserialize_u128<$l, $v>()} - } - }; - (f32<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_f32<$l, $v>()} - }; - (f64<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_f64<$l, $v>()} - }; - (char<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_char<$l, $v>()} - }; - (str<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_str<$l, $v>()} - }; - (string<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_string<$l, $v>()} - }; - (bytes<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_bytes<$l, $v>()} - }; - (byte_buf<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_byte_buf<$l, $v>()} - }; - (option<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_option<$l, $v>()} - }; - (unit<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_unit<$l, $v>()} - }; - (unit_struct<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_unit_struct<$l, $v>(name: &'static str)} - }; - (newtype_struct<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_newtype_struct<$l, $v>(name: &'static str)} - }; - (seq<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_seq<$l, $v>()} - }; - (tuple<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_tuple<$l, $v>(len: usize)} - }; - (tuple_struct<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_tuple_struct<$l, $v>(name: &'static str, len: usize)} - }; - (map<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_map<$l, $v>()} - }; - (struct<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_struct<$l, $v>(name: &'static str, fields: &'static [&'static str])} - }; - (enum<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_enum<$l, $v>(name: &'static str, variants: &'static [&'static str])} - }; - (identifier<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_identifier<$l, $v>()} - }; - (ignored_any<$l:tt, $v:ident>) => { - forward_to_deserialize_any_method!{deserialize_ignored_any<$l, $v>()} - }; -} diff --git a/vendor/serde/src/private/de.rs b/vendor/serde/src/private/de.rs deleted file mode 100644 index a387d404..00000000 --- a/vendor/serde/src/private/de.rs +++ /dev/null @@ -1,2959 +0,0 @@ -use lib::*; - -use de::value::{BorrowedBytesDeserializer, BytesDeserializer}; -use de::{Deserialize, Deserializer, Error, IntoDeserializer, Visitor}; - -#[cfg(any(feature = "std", feature = "alloc"))] -use de::{DeserializeSeed, MapAccess, Unexpected}; - -#[cfg(any(feature = "std", feature = "alloc"))] -pub use self::content::{ - Content, ContentDeserializer, ContentRefDeserializer, EnumDeserializer, - InternallyTaggedUnitVisitor, TagContentOtherField, TagContentOtherFieldVisitor, - TagOrContentField, TagOrContentFieldVisitor, TaggedContentVisitor, UntaggedUnitVisitor, -}; - -pub use seed::InPlaceSeed; - -/// If the missing field is of type `Option` then treat is as `None`, -/// otherwise it is an error. 
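For orientation, a minimal sketch of the user-visible behavior the helper below supports, assuming serde's derive feature and the serde_json crate (neither is part of this file; the `Config` type is hypothetical):

```rust
use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct Config {
    name: String,         // required: absent input yields a "missing field `name`" error
    retries: Option<u32>, // optional: absent input deserializes as `None`
}

fn main() {
    // serde_json stands in for any self-describing format here.
    let ok: Config = serde_json::from_str(r#"{ "name": "demo" }"#).unwrap();
    assert_eq!(ok.retries, None);

    let err = serde_json::from_str::<Config>("{}").unwrap_err();
    assert!(err.to_string().contains("missing field `name`"));
}
```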
-pub fn missing_field<'de, V, E>(field: &'static str) -> Result -where - V: Deserialize<'de>, - E: Error, -{ - struct MissingFieldDeserializer(&'static str, PhantomData); - - impl<'de, E> Deserializer<'de> for MissingFieldDeserializer - where - E: Error, - { - type Error = E; - - fn deserialize_any(self, _visitor: V) -> Result - where - V: Visitor<'de>, - { - Err(Error::missing_field(self.0)) - } - - fn deserialize_option(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - visitor.visit_none() - } - - forward_to_deserialize_any! { - bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string - bytes byte_buf unit unit_struct newtype_struct seq tuple - tuple_struct map struct enum identifier ignored_any - } - } - - let deserializer = MissingFieldDeserializer(field, PhantomData); - Deserialize::deserialize(deserializer) -} - -#[cfg(any(feature = "std", feature = "alloc"))] -pub fn borrow_cow_str<'de: 'a, 'a, D, R>(deserializer: D) -> Result -where - D: Deserializer<'de>, - R: From>, -{ - struct CowStrVisitor; - - impl<'a> Visitor<'a> for CowStrVisitor { - type Value = Cow<'a, str>; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("a string") - } - - fn visit_str(self, v: &str) -> Result - where - E: Error, - { - Ok(Cow::Owned(v.to_owned())) - } - - fn visit_borrowed_str(self, v: &'a str) -> Result - where - E: Error, - { - Ok(Cow::Borrowed(v)) - } - - fn visit_string(self, v: String) -> Result - where - E: Error, - { - Ok(Cow::Owned(v)) - } - - fn visit_bytes(self, v: &[u8]) -> Result - where - E: Error, - { - match str::from_utf8(v) { - Ok(s) => Ok(Cow::Owned(s.to_owned())), - Err(_) => Err(Error::invalid_value(Unexpected::Bytes(v), &self)), - } - } - - fn visit_borrowed_bytes(self, v: &'a [u8]) -> Result - where - E: Error, - { - match str::from_utf8(v) { - Ok(s) => Ok(Cow::Borrowed(s)), - Err(_) => Err(Error::invalid_value(Unexpected::Bytes(v), &self)), - } - } - - fn visit_byte_buf(self, v: Vec) -> Result - where - E: Error, - { - match String::from_utf8(v) { - Ok(s) => Ok(Cow::Owned(s)), - Err(e) => Err(Error::invalid_value( - Unexpected::Bytes(&e.into_bytes()), - &self, - )), - } - } - } - - deserializer.deserialize_str(CowStrVisitor).map(From::from) -} - -#[cfg(any(feature = "std", feature = "alloc"))] -pub fn borrow_cow_bytes<'de: 'a, 'a, D, R>(deserializer: D) -> Result -where - D: Deserializer<'de>, - R: From>, -{ - struct CowBytesVisitor; - - impl<'a> Visitor<'a> for CowBytesVisitor { - type Value = Cow<'a, [u8]>; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("a byte array") - } - - fn visit_str(self, v: &str) -> Result - where - E: Error, - { - Ok(Cow::Owned(v.as_bytes().to_vec())) - } - - fn visit_borrowed_str(self, v: &'a str) -> Result - where - E: Error, - { - Ok(Cow::Borrowed(v.as_bytes())) - } - - fn visit_string(self, v: String) -> Result - where - E: Error, - { - Ok(Cow::Owned(v.into_bytes())) - } - - fn visit_bytes(self, v: &[u8]) -> Result - where - E: Error, - { - Ok(Cow::Owned(v.to_vec())) - } - - fn visit_borrowed_bytes(self, v: &'a [u8]) -> Result - where - E: Error, - { - Ok(Cow::Borrowed(v)) - } - - fn visit_byte_buf(self, v: Vec) -> Result - where - E: Error, - { - Ok(Cow::Owned(v)) - } - } - - deserializer - .deserialize_bytes(CowBytesVisitor) - .map(From::from) -} - -#[cfg(any(feature = "std", feature = "alloc"))] -mod content { - // This module is private and nothing here should be used outside of - // generated code. 
- // - // We will iterate on the implementation for a few releases and only have to - // worry about backward compatibility for the `untagged` and `tag` attributes - // rather than for this entire mechanism. - // - // This issue is tracking making some of this stuff public: - // https://github.com/serde-rs/serde/issues/741 - - use lib::*; - - use __private::size_hint; - use de::{ - self, Deserialize, DeserializeSeed, Deserializer, EnumAccess, Expected, IgnoredAny, - MapAccess, SeqAccess, Unexpected, Visitor, - }; - - /// Used from generated code to buffer the contents of the Deserializer when - /// deserializing untagged enums and internally tagged enums. - /// - /// Not public API. Use serde-value instead. - #[derive(Debug)] - pub enum Content<'de> { - Bool(bool), - - U8(u8), - U16(u16), - U32(u32), - U64(u64), - - I8(i8), - I16(i16), - I32(i32), - I64(i64), - - F32(f32), - F64(f64), - - Char(char), - String(String), - Str(&'de str), - ByteBuf(Vec), - Bytes(&'de [u8]), - - None, - Some(Box>), - - Unit, - Newtype(Box>), - Seq(Vec>), - Map(Vec<(Content<'de>, Content<'de>)>), - } - - impl<'de> Content<'de> { - pub fn as_str(&self) -> Option<&str> { - match *self { - Content::Str(x) => Some(x), - Content::String(ref x) => Some(x), - Content::Bytes(x) => str::from_utf8(x).ok(), - Content::ByteBuf(ref x) => str::from_utf8(x).ok(), - _ => None, - } - } - - #[cold] - fn unexpected(&self) -> Unexpected { - match *self { - Content::Bool(b) => Unexpected::Bool(b), - Content::U8(n) => Unexpected::Unsigned(n as u64), - Content::U16(n) => Unexpected::Unsigned(n as u64), - Content::U32(n) => Unexpected::Unsigned(n as u64), - Content::U64(n) => Unexpected::Unsigned(n), - Content::I8(n) => Unexpected::Signed(n as i64), - Content::I16(n) => Unexpected::Signed(n as i64), - Content::I32(n) => Unexpected::Signed(n as i64), - Content::I64(n) => Unexpected::Signed(n), - Content::F32(f) => Unexpected::Float(f as f64), - Content::F64(f) => Unexpected::Float(f), - Content::Char(c) => Unexpected::Char(c), - Content::String(ref s) => Unexpected::Str(s), - Content::Str(s) => Unexpected::Str(s), - Content::ByteBuf(ref b) => Unexpected::Bytes(b), - Content::Bytes(b) => Unexpected::Bytes(b), - Content::None | Content::Some(_) => Unexpected::Option, - Content::Unit => Unexpected::Unit, - Content::Newtype(_) => Unexpected::NewtypeStruct, - Content::Seq(_) => Unexpected::Seq, - Content::Map(_) => Unexpected::Map, - } - } - } - - impl<'de> Deserialize<'de> for Content<'de> { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - // Untagged and internally tagged enums are only supported in - // self-describing formats. 
- let visitor = ContentVisitor { value: PhantomData }; - deserializer.deserialize_any(visitor) - } - } - - struct ContentVisitor<'de> { - value: PhantomData>, - } - - impl<'de> ContentVisitor<'de> { - fn new() -> Self { - ContentVisitor { value: PhantomData } - } - } - - impl<'de> Visitor<'de> for ContentVisitor<'de> { - type Value = Content<'de>; - - fn expecting(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - fmt.write_str("any value") - } - - fn visit_bool(self, value: bool) -> Result - where - F: de::Error, - { - Ok(Content::Bool(value)) - } - - fn visit_i8(self, value: i8) -> Result - where - F: de::Error, - { - Ok(Content::I8(value)) - } - - fn visit_i16(self, value: i16) -> Result - where - F: de::Error, - { - Ok(Content::I16(value)) - } - - fn visit_i32(self, value: i32) -> Result - where - F: de::Error, - { - Ok(Content::I32(value)) - } - - fn visit_i64(self, value: i64) -> Result - where - F: de::Error, - { - Ok(Content::I64(value)) - } - - fn visit_u8(self, value: u8) -> Result - where - F: de::Error, - { - Ok(Content::U8(value)) - } - - fn visit_u16(self, value: u16) -> Result - where - F: de::Error, - { - Ok(Content::U16(value)) - } - - fn visit_u32(self, value: u32) -> Result - where - F: de::Error, - { - Ok(Content::U32(value)) - } - - fn visit_u64(self, value: u64) -> Result - where - F: de::Error, - { - Ok(Content::U64(value)) - } - - fn visit_f32(self, value: f32) -> Result - where - F: de::Error, - { - Ok(Content::F32(value)) - } - - fn visit_f64(self, value: f64) -> Result - where - F: de::Error, - { - Ok(Content::F64(value)) - } - - fn visit_char(self, value: char) -> Result - where - F: de::Error, - { - Ok(Content::Char(value)) - } - - fn visit_str(self, value: &str) -> Result - where - F: de::Error, - { - Ok(Content::String(value.into())) - } - - fn visit_borrowed_str(self, value: &'de str) -> Result - where - F: de::Error, - { - Ok(Content::Str(value)) - } - - fn visit_string(self, value: String) -> Result - where - F: de::Error, - { - Ok(Content::String(value)) - } - - fn visit_bytes(self, value: &[u8]) -> Result - where - F: de::Error, - { - Ok(Content::ByteBuf(value.into())) - } - - fn visit_borrowed_bytes(self, value: &'de [u8]) -> Result - where - F: de::Error, - { - Ok(Content::Bytes(value)) - } - - fn visit_byte_buf(self, value: Vec) -> Result - where - F: de::Error, - { - Ok(Content::ByteBuf(value)) - } - - fn visit_unit(self) -> Result - where - F: de::Error, - { - Ok(Content::Unit) - } - - fn visit_none(self) -> Result - where - F: de::Error, - { - Ok(Content::None) - } - - fn visit_some(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - Deserialize::deserialize(deserializer).map(|v| Content::Some(Box::new(v))) - } - - fn visit_newtype_struct(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - Deserialize::deserialize(deserializer).map(|v| Content::Newtype(Box::new(v))) - } - - fn visit_seq(self, mut visitor: V) -> Result - where - V: SeqAccess<'de>, - { - let mut vec = Vec::with_capacity(size_hint::cautious(visitor.size_hint())); - while let Some(e) = try!(visitor.next_element()) { - vec.push(e); - } - Ok(Content::Seq(vec)) - } - - fn visit_map(self, mut visitor: V) -> Result - where - V: MapAccess<'de>, - { - let mut vec = Vec::with_capacity(size_hint::cautious(visitor.size_hint())); - while let Some(kv) = try!(visitor.next_entry()) { - vec.push(kv); - } - Ok(Content::Map(vec)) - } - - fn visit_enum(self, _visitor: V) -> Result - where - V: EnumAccess<'de>, - { - Err(de::Error::custom( - "untagged and internally 
tagged enums do not support enum input", - )) - } - } - - /// This is the type of the map keys in an internally tagged enum. - /// - /// Not public API. - pub enum TagOrContent<'de> { - Tag, - Content(Content<'de>), - } - - struct TagOrContentVisitor<'de> { - name: &'static str, - value: PhantomData>, - } - - impl<'de> TagOrContentVisitor<'de> { - fn new(name: &'static str) -> Self { - TagOrContentVisitor { - name: name, - value: PhantomData, - } - } - } - - impl<'de> DeserializeSeed<'de> for TagOrContentVisitor<'de> { - type Value = TagOrContent<'de>; - - fn deserialize(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - // Internally tagged enums are only supported in self-describing - // formats. - deserializer.deserialize_any(self) - } - } - - impl<'de> Visitor<'de> for TagOrContentVisitor<'de> { - type Value = TagOrContent<'de>; - - fn expecting(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - write!(fmt, "a type tag `{}` or any other value", self.name) - } - - fn visit_bool(self, value: bool) -> Result - where - F: de::Error, - { - ContentVisitor::new() - .visit_bool(value) - .map(TagOrContent::Content) - } - - fn visit_i8(self, value: i8) -> Result - where - F: de::Error, - { - ContentVisitor::new() - .visit_i8(value) - .map(TagOrContent::Content) - } - - fn visit_i16(self, value: i16) -> Result - where - F: de::Error, - { - ContentVisitor::new() - .visit_i16(value) - .map(TagOrContent::Content) - } - - fn visit_i32(self, value: i32) -> Result - where - F: de::Error, - { - ContentVisitor::new() - .visit_i32(value) - .map(TagOrContent::Content) - } - - fn visit_i64(self, value: i64) -> Result - where - F: de::Error, - { - ContentVisitor::new() - .visit_i64(value) - .map(TagOrContent::Content) - } - - fn visit_u8(self, value: u8) -> Result - where - F: de::Error, - { - ContentVisitor::new() - .visit_u8(value) - .map(TagOrContent::Content) - } - - fn visit_u16(self, value: u16) -> Result - where - F: de::Error, - { - ContentVisitor::new() - .visit_u16(value) - .map(TagOrContent::Content) - } - - fn visit_u32(self, value: u32) -> Result - where - F: de::Error, - { - ContentVisitor::new() - .visit_u32(value) - .map(TagOrContent::Content) - } - - fn visit_u64(self, value: u64) -> Result - where - F: de::Error, - { - ContentVisitor::new() - .visit_u64(value) - .map(TagOrContent::Content) - } - - fn visit_f32(self, value: f32) -> Result - where - F: de::Error, - { - ContentVisitor::new() - .visit_f32(value) - .map(TagOrContent::Content) - } - - fn visit_f64(self, value: f64) -> Result - where - F: de::Error, - { - ContentVisitor::new() - .visit_f64(value) - .map(TagOrContent::Content) - } - - fn visit_char(self, value: char) -> Result - where - F: de::Error, - { - ContentVisitor::new() - .visit_char(value) - .map(TagOrContent::Content) - } - - fn visit_str(self, value: &str) -> Result - where - F: de::Error, - { - if value == self.name { - Ok(TagOrContent::Tag) - } else { - ContentVisitor::new() - .visit_str(value) - .map(TagOrContent::Content) - } - } - - fn visit_borrowed_str(self, value: &'de str) -> Result - where - F: de::Error, - { - if value == self.name { - Ok(TagOrContent::Tag) - } else { - ContentVisitor::new() - .visit_borrowed_str(value) - .map(TagOrContent::Content) - } - } - - fn visit_string(self, value: String) -> Result - where - F: de::Error, - { - if value == self.name { - Ok(TagOrContent::Tag) - } else { - ContentVisitor::new() - .visit_string(value) - .map(TagOrContent::Content) - } - } - - fn visit_bytes(self, value: &[u8]) -> Result - where - F: 
de::Error, - { - if value == self.name.as_bytes() { - Ok(TagOrContent::Tag) - } else { - ContentVisitor::new() - .visit_bytes(value) - .map(TagOrContent::Content) - } - } - - fn visit_borrowed_bytes(self, value: &'de [u8]) -> Result - where - F: de::Error, - { - if value == self.name.as_bytes() { - Ok(TagOrContent::Tag) - } else { - ContentVisitor::new() - .visit_borrowed_bytes(value) - .map(TagOrContent::Content) - } - } - - fn visit_byte_buf(self, value: Vec) -> Result - where - F: de::Error, - { - if value == self.name.as_bytes() { - Ok(TagOrContent::Tag) - } else { - ContentVisitor::new() - .visit_byte_buf(value) - .map(TagOrContent::Content) - } - } - - fn visit_unit(self) -> Result - where - F: de::Error, - { - ContentVisitor::new() - .visit_unit() - .map(TagOrContent::Content) - } - - fn visit_none(self) -> Result - where - F: de::Error, - { - ContentVisitor::new() - .visit_none() - .map(TagOrContent::Content) - } - - fn visit_some(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - ContentVisitor::new() - .visit_some(deserializer) - .map(TagOrContent::Content) - } - - fn visit_newtype_struct(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - ContentVisitor::new() - .visit_newtype_struct(deserializer) - .map(TagOrContent::Content) - } - - fn visit_seq(self, visitor: V) -> Result - where - V: SeqAccess<'de>, - { - ContentVisitor::new() - .visit_seq(visitor) - .map(TagOrContent::Content) - } - - fn visit_map(self, visitor: V) -> Result - where - V: MapAccess<'de>, - { - ContentVisitor::new() - .visit_map(visitor) - .map(TagOrContent::Content) - } - - fn visit_enum(self, visitor: V) -> Result - where - V: EnumAccess<'de>, - { - ContentVisitor::new() - .visit_enum(visitor) - .map(TagOrContent::Content) - } - } - - /// Used by generated code to deserialize an internally tagged enum. - /// - /// Not public API. - pub struct TaggedContent<'de, T> { - pub tag: T, - pub content: Content<'de>, - } - - /// Not public API. - pub struct TaggedContentVisitor<'de, T> { - tag_name: &'static str, - expecting: &'static str, - value: PhantomData>, - } - - impl<'de, T> TaggedContentVisitor<'de, T> { - /// Visitor for the content of an internally tagged enum with the given - /// tag name. - pub fn new(name: &'static str, expecting: &'static str) -> Self { - TaggedContentVisitor { - tag_name: name, - expecting: expecting, - value: PhantomData, - } - } - } - - impl<'de, T> DeserializeSeed<'de> for TaggedContentVisitor<'de, T> - where - T: Deserialize<'de>, - { - type Value = TaggedContent<'de, T>; - - fn deserialize(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - // Internally tagged enums are only supported in self-describing - // formats. 
- deserializer.deserialize_any(self) - } - } - - impl<'de, T> Visitor<'de> for TaggedContentVisitor<'de, T> - where - T: Deserialize<'de>, - { - type Value = TaggedContent<'de, T>; - - fn expecting(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - fmt.write_str(self.expecting) - } - - fn visit_seq(self, mut seq: S) -> Result - where - S: SeqAccess<'de>, - { - let tag = match try!(seq.next_element()) { - Some(tag) => tag, - None => { - return Err(de::Error::missing_field(self.tag_name)); - } - }; - let rest = de::value::SeqAccessDeserializer::new(seq); - Ok(TaggedContent { - tag: tag, - content: try!(Content::deserialize(rest)), - }) - } - - fn visit_map(self, mut map: M) -> Result - where - M: MapAccess<'de>, - { - let mut tag = None; - let mut vec = Vec::with_capacity(size_hint::cautious(map.size_hint())); - while let Some(k) = try!(map.next_key_seed(TagOrContentVisitor::new(self.tag_name))) { - match k { - TagOrContent::Tag => { - if tag.is_some() { - return Err(de::Error::duplicate_field(self.tag_name)); - } - tag = Some(try!(map.next_value())); - } - TagOrContent::Content(k) => { - let v = try!(map.next_value()); - vec.push((k, v)); - } - } - } - match tag { - None => Err(de::Error::missing_field(self.tag_name)), - Some(tag) => Ok(TaggedContent { - tag: tag, - content: Content::Map(vec), - }), - } - } - } - - /// Used by generated code to deserialize an adjacently tagged enum. - /// - /// Not public API. - pub enum TagOrContentField { - Tag, - Content, - } - - /// Not public API. - pub struct TagOrContentFieldVisitor { - pub tag: &'static str, - pub content: &'static str, - } - - impl<'de> DeserializeSeed<'de> for TagOrContentFieldVisitor { - type Value = TagOrContentField; - - fn deserialize(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_str(self) - } - } - - impl<'de> Visitor<'de> for TagOrContentFieldVisitor { - type Value = TagOrContentField; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - write!(formatter, "{:?} or {:?}", self.tag, self.content) - } - - fn visit_str(self, field: &str) -> Result - where - E: de::Error, - { - if field == self.tag { - Ok(TagOrContentField::Tag) - } else if field == self.content { - Ok(TagOrContentField::Content) - } else { - Err(de::Error::invalid_value(Unexpected::Str(field), &self)) - } - } - } - - /// Used by generated code to deserialize an adjacently tagged enum when - /// ignoring unrelated fields is allowed. - /// - /// Not public API. - pub enum TagContentOtherField { - Tag, - Content, - Other, - } - - /// Not public API. 
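// A minimal sketch of the adjacently tagged layout that these tag/content
// field visitors decode. The attribute values ("t", "c"), the enum, and the
// use of serde_json are illustrative assumptions, not taken from this patch.
use serde::Deserialize;

#[derive(Deserialize, Debug)]
#[serde(tag = "t", content = "c")]
enum Shape {
    Circle { radius: f64 },
    Square { side: f64 },
}

fn parse_shape() -> Shape {
    // The "t" field picks the variant and the buffered "c" field is then
    // deserialized into it; any other key is rejected or skipped depending on
    // whether unknown fields are allowed.
    serde_json::from_str(r#"{"t":"Circle","c":{"radius":1.0}}"#).unwrap()
}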
- pub struct TagContentOtherFieldVisitor { - pub tag: &'static str, - pub content: &'static str, - } - - impl<'de> DeserializeSeed<'de> for TagContentOtherFieldVisitor { - type Value = TagContentOtherField; - - fn deserialize(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_str(self) - } - } - - impl<'de> Visitor<'de> for TagContentOtherFieldVisitor { - type Value = TagContentOtherField; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - write!( - formatter, - "{:?}, {:?}, or other ignored fields", - self.tag, self.content - ) - } - - fn visit_str(self, field: &str) -> Result - where - E: de::Error, - { - if field == self.tag { - Ok(TagContentOtherField::Tag) - } else if field == self.content { - Ok(TagContentOtherField::Content) - } else { - Ok(TagContentOtherField::Other) - } - } - } - - /// Not public API - pub struct ContentDeserializer<'de, E> { - content: Content<'de>, - err: PhantomData, - } - - impl<'de, E> ContentDeserializer<'de, E> - where - E: de::Error, - { - #[cold] - fn invalid_type(self, exp: &Expected) -> E { - de::Error::invalid_type(self.content.unexpected(), exp) - } - - fn deserialize_integer(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match self.content { - Content::U8(v) => visitor.visit_u8(v), - Content::U16(v) => visitor.visit_u16(v), - Content::U32(v) => visitor.visit_u32(v), - Content::U64(v) => visitor.visit_u64(v), - Content::I8(v) => visitor.visit_i8(v), - Content::I16(v) => visitor.visit_i16(v), - Content::I32(v) => visitor.visit_i32(v), - Content::I64(v) => visitor.visit_i64(v), - _ => Err(self.invalid_type(&visitor)), - } - } - - fn deserialize_float(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match self.content { - Content::F32(v) => visitor.visit_f32(v), - Content::F64(v) => visitor.visit_f64(v), - Content::U8(v) => visitor.visit_u8(v), - Content::U16(v) => visitor.visit_u16(v), - Content::U32(v) => visitor.visit_u32(v), - Content::U64(v) => visitor.visit_u64(v), - Content::I8(v) => visitor.visit_i8(v), - Content::I16(v) => visitor.visit_i16(v), - Content::I32(v) => visitor.visit_i32(v), - Content::I64(v) => visitor.visit_i64(v), - _ => Err(self.invalid_type(&visitor)), - } - } - } - - fn visit_content_seq<'de, V, E>(content: Vec>, visitor: V) -> Result - where - V: Visitor<'de>, - E: de::Error, - { - let seq = content.into_iter().map(ContentDeserializer::new); - let mut seq_visitor = de::value::SeqDeserializer::new(seq); - let value = try!(visitor.visit_seq(&mut seq_visitor)); - try!(seq_visitor.end()); - Ok(value) - } - - fn visit_content_map<'de, V, E>( - content: Vec<(Content<'de>, Content<'de>)>, - visitor: V, - ) -> Result - where - V: Visitor<'de>, - E: de::Error, - { - let map = content - .into_iter() - .map(|(k, v)| (ContentDeserializer::new(k), ContentDeserializer::new(v))); - let mut map_visitor = de::value::MapDeserializer::new(map); - let value = try!(visitor.visit_map(&mut map_visitor)); - try!(map_visitor.end()); - Ok(value) - } - - /// Used when deserializing an internally tagged enum because the content - /// will be used exactly once. 
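// A minimal sketch of the internally tagged layout whose buffered `Content`
// the deserializer below replays exactly once. The tag name, the enum, and
// the use of serde_json are illustrative assumptions.
use serde::Deserialize;

#[derive(Deserialize, Debug)]
#[serde(tag = "type")]
enum Event {
    Click { x: i64, y: i64 },
    Paste { text: String },
}

fn parse_event() -> Event {
    // The "type" key selects the variant; the remaining entries were buffered
    // as `Content` and are consumed in a single pass by the variant's fields.
    serde_json::from_str(r#"{"type":"Click","x":10,"y":20}"#).unwrap()
}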
- impl<'de, E> Deserializer<'de> for ContentDeserializer<'de, E> - where - E: de::Error, - { - type Error = E; - - fn deserialize_any(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match self.content { - Content::Bool(v) => visitor.visit_bool(v), - Content::U8(v) => visitor.visit_u8(v), - Content::U16(v) => visitor.visit_u16(v), - Content::U32(v) => visitor.visit_u32(v), - Content::U64(v) => visitor.visit_u64(v), - Content::I8(v) => visitor.visit_i8(v), - Content::I16(v) => visitor.visit_i16(v), - Content::I32(v) => visitor.visit_i32(v), - Content::I64(v) => visitor.visit_i64(v), - Content::F32(v) => visitor.visit_f32(v), - Content::F64(v) => visitor.visit_f64(v), - Content::Char(v) => visitor.visit_char(v), - Content::String(v) => visitor.visit_string(v), - Content::Str(v) => visitor.visit_borrowed_str(v), - Content::ByteBuf(v) => visitor.visit_byte_buf(v), - Content::Bytes(v) => visitor.visit_borrowed_bytes(v), - Content::Unit => visitor.visit_unit(), - Content::None => visitor.visit_none(), - Content::Some(v) => visitor.visit_some(ContentDeserializer::new(*v)), - Content::Newtype(v) => visitor.visit_newtype_struct(ContentDeserializer::new(*v)), - Content::Seq(v) => visit_content_seq(v, visitor), - Content::Map(v) => visit_content_map(v, visitor), - } - } - - fn deserialize_bool(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match self.content { - Content::Bool(v) => visitor.visit_bool(v), - _ => Err(self.invalid_type(&visitor)), - } - } - - fn deserialize_i8(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_integer(visitor) - } - - fn deserialize_i16(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_integer(visitor) - } - - fn deserialize_i32(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_integer(visitor) - } - - fn deserialize_i64(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_integer(visitor) - } - - fn deserialize_u8(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_integer(visitor) - } - - fn deserialize_u16(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_integer(visitor) - } - - fn deserialize_u32(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_integer(visitor) - } - - fn deserialize_u64(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_integer(visitor) - } - - fn deserialize_f32(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_float(visitor) - } - - fn deserialize_f64(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_float(visitor) - } - - fn deserialize_char(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match self.content { - Content::Char(v) => visitor.visit_char(v), - Content::String(v) => visitor.visit_string(v), - Content::Str(v) => visitor.visit_borrowed_str(v), - _ => Err(self.invalid_type(&visitor)), - } - } - - fn deserialize_str(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_string(visitor) - } - - fn deserialize_string(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match self.content { - Content::String(v) => visitor.visit_string(v), - Content::Str(v) => visitor.visit_borrowed_str(v), - Content::ByteBuf(v) => visitor.visit_byte_buf(v), - Content::Bytes(v) => visitor.visit_borrowed_bytes(v), - _ => Err(self.invalid_type(&visitor)), - } - } - - fn deserialize_bytes(self, visitor: V) -> Result - where 
- V: Visitor<'de>, - { - self.deserialize_byte_buf(visitor) - } - - fn deserialize_byte_buf(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match self.content { - Content::String(v) => visitor.visit_string(v), - Content::Str(v) => visitor.visit_borrowed_str(v), - Content::ByteBuf(v) => visitor.visit_byte_buf(v), - Content::Bytes(v) => visitor.visit_borrowed_bytes(v), - Content::Seq(v) => visit_content_seq(v, visitor), - _ => Err(self.invalid_type(&visitor)), - } - } - - fn deserialize_option(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match self.content { - Content::None => visitor.visit_none(), - Content::Some(v) => visitor.visit_some(ContentDeserializer::new(*v)), - Content::Unit => visitor.visit_unit(), - _ => visitor.visit_some(self), - } - } - - fn deserialize_unit(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match self.content { - Content::Unit => visitor.visit_unit(), - _ => Err(self.invalid_type(&visitor)), - } - } - - fn deserialize_unit_struct( - self, - _name: &'static str, - visitor: V, - ) -> Result - where - V: Visitor<'de>, - { - match self.content { - // As a special case, allow deserializing untagged newtype - // variant containing unit struct. - // - // #[derive(Deserialize)] - // struct Info; - // - // #[derive(Deserialize)] - // #[serde(tag = "topic")] - // enum Message { - // Info(Info), - // } - // - // We want {"topic":"Info"} to deserialize even though - // ordinarily unit structs do not deserialize from empty map/seq. - Content::Map(ref v) if v.is_empty() => visitor.visit_unit(), - Content::Seq(ref v) if v.is_empty() => visitor.visit_unit(), - _ => self.deserialize_any(visitor), - } - } - - fn deserialize_newtype_struct( - self, - _name: &str, - visitor: V, - ) -> Result - where - V: Visitor<'de>, - { - match self.content { - Content::Newtype(v) => visitor.visit_newtype_struct(ContentDeserializer::new(*v)), - _ => visitor.visit_newtype_struct(self), - } - } - - fn deserialize_seq(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match self.content { - Content::Seq(v) => visit_content_seq(v, visitor), - _ => Err(self.invalid_type(&visitor)), - } - } - - fn deserialize_tuple(self, _len: usize, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_seq(visitor) - } - - fn deserialize_tuple_struct( - self, - _name: &'static str, - _len: usize, - visitor: V, - ) -> Result - where - V: Visitor<'de>, - { - self.deserialize_seq(visitor) - } - - fn deserialize_map(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match self.content { - Content::Map(v) => visit_content_map(v, visitor), - _ => Err(self.invalid_type(&visitor)), - } - } - - fn deserialize_struct( - self, - _name: &'static str, - _fields: &'static [&'static str], - visitor: V, - ) -> Result - where - V: Visitor<'de>, - { - match self.content { - Content::Seq(v) => visit_content_seq(v, visitor), - Content::Map(v) => visit_content_map(v, visitor), - _ => Err(self.invalid_type(&visitor)), - } - } - - fn deserialize_enum( - self, - _name: &str, - _variants: &'static [&'static str], - visitor: V, - ) -> Result - where - V: Visitor<'de>, - { - let (variant, value) = match self.content { - Content::Map(value) => { - let mut iter = value.into_iter(); - let (variant, value) = match iter.next() { - Some(v) => v, - None => { - return Err(de::Error::invalid_value( - de::Unexpected::Map, - &"map with a single key", - )); - } - }; - // enums are encoded in json as maps with a single key:value pair - if iter.next().is_some() { - return 
Err(de::Error::invalid_value( - de::Unexpected::Map, - &"map with a single key", - )); - } - (variant, Some(value)) - } - s @ Content::String(_) | s @ Content::Str(_) => (s, None), - other => { - return Err(de::Error::invalid_type( - other.unexpected(), - &"string or map", - )); - } - }; - - visitor.visit_enum(EnumDeserializer::new(variant, value)) - } - - fn deserialize_identifier(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match self.content { - Content::String(v) => visitor.visit_string(v), - Content::Str(v) => visitor.visit_borrowed_str(v), - Content::ByteBuf(v) => visitor.visit_byte_buf(v), - Content::Bytes(v) => visitor.visit_borrowed_bytes(v), - Content::U8(v) => visitor.visit_u8(v), - Content::U64(v) => visitor.visit_u64(v), - _ => Err(self.invalid_type(&visitor)), - } - } - - fn deserialize_ignored_any(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - drop(self); - visitor.visit_unit() - } - } - - impl<'de, E> ContentDeserializer<'de, E> { - /// private API, don't use - pub fn new(content: Content<'de>) -> Self { - ContentDeserializer { - content: content, - err: PhantomData, - } - } - } - - pub struct EnumDeserializer<'de, E> - where - E: de::Error, - { - variant: Content<'de>, - value: Option>, - err: PhantomData, - } - - impl<'de, E> EnumDeserializer<'de, E> - where - E: de::Error, - { - pub fn new(variant: Content<'de>, value: Option>) -> EnumDeserializer<'de, E> { - EnumDeserializer { - variant: variant, - value: value, - err: PhantomData, - } - } - } - - impl<'de, E> de::EnumAccess<'de> for EnumDeserializer<'de, E> - where - E: de::Error, - { - type Error = E; - type Variant = VariantDeserializer<'de, Self::Error>; - - fn variant_seed(self, seed: V) -> Result<(V::Value, Self::Variant), E> - where - V: de::DeserializeSeed<'de>, - { - let visitor = VariantDeserializer { - value: self.value, - err: PhantomData, - }; - seed.deserialize(ContentDeserializer::new(self.variant)) - .map(|v| (v, visitor)) - } - } - - pub struct VariantDeserializer<'de, E> - where - E: de::Error, - { - value: Option>, - err: PhantomData, - } - - impl<'de, E> de::VariantAccess<'de> for VariantDeserializer<'de, E> - where - E: de::Error, - { - type Error = E; - - fn unit_variant(self) -> Result<(), E> { - match self.value { - Some(value) => de::Deserialize::deserialize(ContentDeserializer::new(value)), - None => Ok(()), - } - } - - fn newtype_variant_seed(self, seed: T) -> Result - where - T: de::DeserializeSeed<'de>, - { - match self.value { - Some(value) => seed.deserialize(ContentDeserializer::new(value)), - None => Err(de::Error::invalid_type( - de::Unexpected::UnitVariant, - &"newtype variant", - )), - } - } - - fn tuple_variant(self, _len: usize, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - match self.value { - Some(Content::Seq(v)) => { - de::Deserializer::deserialize_any(SeqDeserializer::new(v), visitor) - } - Some(other) => Err(de::Error::invalid_type( - other.unexpected(), - &"tuple variant", - )), - None => Err(de::Error::invalid_type( - de::Unexpected::UnitVariant, - &"tuple variant", - )), - } - } - - fn struct_variant( - self, - _fields: &'static [&'static str], - visitor: V, - ) -> Result - where - V: de::Visitor<'de>, - { - match self.value { - Some(Content::Map(v)) => { - de::Deserializer::deserialize_any(MapDeserializer::new(v), visitor) - } - Some(Content::Seq(v)) => { - de::Deserializer::deserialize_any(SeqDeserializer::new(v), visitor) - } - Some(other) => Err(de::Error::invalid_type( - other.unexpected(), - &"struct variant", - )), - None => 
Err(de::Error::invalid_type( - de::Unexpected::UnitVariant, - &"struct variant", - )), - } - } - } - - struct SeqDeserializer<'de, E> - where - E: de::Error, - { - iter: > as IntoIterator>::IntoIter, - err: PhantomData, - } - - impl<'de, E> SeqDeserializer<'de, E> - where - E: de::Error, - { - fn new(vec: Vec>) -> Self { - SeqDeserializer { - iter: vec.into_iter(), - err: PhantomData, - } - } - } - - impl<'de, E> de::Deserializer<'de> for SeqDeserializer<'de, E> - where - E: de::Error, - { - type Error = E; - - #[inline] - fn deserialize_any(mut self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - let len = self.iter.len(); - if len == 0 { - visitor.visit_unit() - } else { - let ret = try!(visitor.visit_seq(&mut self)); - let remaining = self.iter.len(); - if remaining == 0 { - Ok(ret) - } else { - Err(de::Error::invalid_length(len, &"fewer elements in array")) - } - } - } - - forward_to_deserialize_any! { - bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string - bytes byte_buf option unit unit_struct newtype_struct seq tuple - tuple_struct map struct enum identifier ignored_any - } - } - - impl<'de, E> de::SeqAccess<'de> for SeqDeserializer<'de, E> - where - E: de::Error, - { - type Error = E; - - fn next_element_seed(&mut self, seed: T) -> Result, Self::Error> - where - T: de::DeserializeSeed<'de>, - { - match self.iter.next() { - Some(value) => seed.deserialize(ContentDeserializer::new(value)).map(Some), - None => Ok(None), - } - } - - fn size_hint(&self) -> Option { - size_hint::from_bounds(&self.iter) - } - } - - struct MapDeserializer<'de, E> - where - E: de::Error, - { - iter: , Content<'de>)> as IntoIterator>::IntoIter, - value: Option>, - err: PhantomData, - } - - impl<'de, E> MapDeserializer<'de, E> - where - E: de::Error, - { - fn new(map: Vec<(Content<'de>, Content<'de>)>) -> Self { - MapDeserializer { - iter: map.into_iter(), - value: None, - err: PhantomData, - } - } - } - - impl<'de, E> de::MapAccess<'de> for MapDeserializer<'de, E> - where - E: de::Error, - { - type Error = E; - - fn next_key_seed(&mut self, seed: T) -> Result, Self::Error> - where - T: de::DeserializeSeed<'de>, - { - match self.iter.next() { - Some((key, value)) => { - self.value = Some(value); - seed.deserialize(ContentDeserializer::new(key)).map(Some) - } - None => Ok(None), - } - } - - fn next_value_seed(&mut self, seed: T) -> Result - where - T: de::DeserializeSeed<'de>, - { - match self.value.take() { - Some(value) => seed.deserialize(ContentDeserializer::new(value)), - None => Err(de::Error::custom("value is missing")), - } - } - - fn size_hint(&self) -> Option { - size_hint::from_bounds(&self.iter) - } - } - - impl<'de, E> de::Deserializer<'de> for MapDeserializer<'de, E> - where - E: de::Error, - { - type Error = E; - - #[inline] - fn deserialize_any(self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - visitor.visit_map(self) - } - - forward_to_deserialize_any! { - bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string - bytes byte_buf option unit unit_struct newtype_struct seq tuple - tuple_struct map struct enum identifier ignored_any - } - } - - /// Not public API. 
- pub struct ContentRefDeserializer<'a, 'de: 'a, E> { - content: &'a Content<'de>, - err: PhantomData, - } - - impl<'a, 'de, E> ContentRefDeserializer<'a, 'de, E> - where - E: de::Error, - { - #[cold] - fn invalid_type(self, exp: &Expected) -> E { - de::Error::invalid_type(self.content.unexpected(), exp) - } - - fn deserialize_integer(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match *self.content { - Content::U8(v) => visitor.visit_u8(v), - Content::U16(v) => visitor.visit_u16(v), - Content::U32(v) => visitor.visit_u32(v), - Content::U64(v) => visitor.visit_u64(v), - Content::I8(v) => visitor.visit_i8(v), - Content::I16(v) => visitor.visit_i16(v), - Content::I32(v) => visitor.visit_i32(v), - Content::I64(v) => visitor.visit_i64(v), - _ => Err(self.invalid_type(&visitor)), - } - } - - fn deserialize_float(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match *self.content { - Content::F32(v) => visitor.visit_f32(v), - Content::F64(v) => visitor.visit_f64(v), - Content::U8(v) => visitor.visit_u8(v), - Content::U16(v) => visitor.visit_u16(v), - Content::U32(v) => visitor.visit_u32(v), - Content::U64(v) => visitor.visit_u64(v), - Content::I8(v) => visitor.visit_i8(v), - Content::I16(v) => visitor.visit_i16(v), - Content::I32(v) => visitor.visit_i32(v), - Content::I64(v) => visitor.visit_i64(v), - _ => Err(self.invalid_type(&visitor)), - } - } - } - - fn visit_content_seq_ref<'a, 'de, V, E>( - content: &'a [Content<'de>], - visitor: V, - ) -> Result - where - V: Visitor<'de>, - E: de::Error, - { - let seq = content.iter().map(ContentRefDeserializer::new); - let mut seq_visitor = de::value::SeqDeserializer::new(seq); - let value = try!(visitor.visit_seq(&mut seq_visitor)); - try!(seq_visitor.end()); - Ok(value) - } - - fn visit_content_map_ref<'a, 'de, V, E>( - content: &'a [(Content<'de>, Content<'de>)], - visitor: V, - ) -> Result - where - V: Visitor<'de>, - E: de::Error, - { - let map = content.iter().map(|&(ref k, ref v)| { - ( - ContentRefDeserializer::new(k), - ContentRefDeserializer::new(v), - ) - }); - let mut map_visitor = de::value::MapDeserializer::new(map); - let value = try!(visitor.visit_map(&mut map_visitor)); - try!(map_visitor.end()); - Ok(value) - } - - /// Used when deserializing an untagged enum because the content may need - /// to be used more than once. 
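// A minimal sketch of the untagged layout that motivates borrowing here: the
// same buffered `Content` may be probed by several candidate variants before
// one succeeds. The enum and the use of serde_json are illustrative
// assumptions.
use serde::Deserialize;

#[derive(Deserialize, Debug, PartialEq)]
#[serde(untagged)]
enum IdOrName {
    Id(u64),
    Name(String),
}

fn parse_untagged() {
    // Each input is buffered once; variants are tried in declaration order
    // against a reference to that buffer, so a failed attempt loses nothing.
    assert_eq!(serde_json::from_str::<IdOrName>("7").unwrap(), IdOrName::Id(7));
    assert_eq!(
        serde_json::from_str::<IdOrName>(r#""alice""#).unwrap(),
        IdOrName::Name("alice".to_owned())
    );
}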
- impl<'de, 'a, E> Deserializer<'de> for ContentRefDeserializer<'a, 'de, E> - where - E: de::Error, - { - type Error = E; - - fn deserialize_any(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match *self.content { - Content::Bool(v) => visitor.visit_bool(v), - Content::U8(v) => visitor.visit_u8(v), - Content::U16(v) => visitor.visit_u16(v), - Content::U32(v) => visitor.visit_u32(v), - Content::U64(v) => visitor.visit_u64(v), - Content::I8(v) => visitor.visit_i8(v), - Content::I16(v) => visitor.visit_i16(v), - Content::I32(v) => visitor.visit_i32(v), - Content::I64(v) => visitor.visit_i64(v), - Content::F32(v) => visitor.visit_f32(v), - Content::F64(v) => visitor.visit_f64(v), - Content::Char(v) => visitor.visit_char(v), - Content::String(ref v) => visitor.visit_str(v), - Content::Str(v) => visitor.visit_borrowed_str(v), - Content::ByteBuf(ref v) => visitor.visit_bytes(v), - Content::Bytes(v) => visitor.visit_borrowed_bytes(v), - Content::Unit => visitor.visit_unit(), - Content::None => visitor.visit_none(), - Content::Some(ref v) => visitor.visit_some(ContentRefDeserializer::new(v)), - Content::Newtype(ref v) => { - visitor.visit_newtype_struct(ContentRefDeserializer::new(v)) - } - Content::Seq(ref v) => visit_content_seq_ref(v, visitor), - Content::Map(ref v) => visit_content_map_ref(v, visitor), - } - } - - fn deserialize_bool(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match *self.content { - Content::Bool(v) => visitor.visit_bool(v), - _ => Err(self.invalid_type(&visitor)), - } - } - - fn deserialize_i8(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_integer(visitor) - } - - fn deserialize_i16(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_integer(visitor) - } - - fn deserialize_i32(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_integer(visitor) - } - - fn deserialize_i64(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_integer(visitor) - } - - fn deserialize_u8(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_integer(visitor) - } - - fn deserialize_u16(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_integer(visitor) - } - - fn deserialize_u32(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_integer(visitor) - } - - fn deserialize_u64(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_integer(visitor) - } - - fn deserialize_f32(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_float(visitor) - } - - fn deserialize_f64(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_float(visitor) - } - - fn deserialize_char(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match *self.content { - Content::Char(v) => visitor.visit_char(v), - Content::String(ref v) => visitor.visit_str(v), - Content::Str(v) => visitor.visit_borrowed_str(v), - _ => Err(self.invalid_type(&visitor)), - } - } - - fn deserialize_str(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match *self.content { - Content::String(ref v) => visitor.visit_str(v), - Content::Str(v) => visitor.visit_borrowed_str(v), - Content::ByteBuf(ref v) => visitor.visit_bytes(v), - Content::Bytes(v) => visitor.visit_borrowed_bytes(v), - _ => Err(self.invalid_type(&visitor)), - } - } - - fn deserialize_string(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_str(visitor) - } - - fn 
deserialize_bytes(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match *self.content { - Content::String(ref v) => visitor.visit_str(v), - Content::Str(v) => visitor.visit_borrowed_str(v), - Content::ByteBuf(ref v) => visitor.visit_bytes(v), - Content::Bytes(v) => visitor.visit_borrowed_bytes(v), - Content::Seq(ref v) => visit_content_seq_ref(v, visitor), - _ => Err(self.invalid_type(&visitor)), - } - } - - fn deserialize_byte_buf(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_bytes(visitor) - } - - fn deserialize_option(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match *self.content { - Content::None => visitor.visit_none(), - Content::Some(ref v) => visitor.visit_some(ContentRefDeserializer::new(v)), - Content::Unit => visitor.visit_unit(), - _ => visitor.visit_some(self), - } - } - - fn deserialize_unit(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match *self.content { - Content::Unit => visitor.visit_unit(), - _ => Err(self.invalid_type(&visitor)), - } - } - - fn deserialize_unit_struct( - self, - _name: &'static str, - visitor: V, - ) -> Result - where - V: Visitor<'de>, - { - self.deserialize_unit(visitor) - } - - fn deserialize_newtype_struct(self, _name: &str, visitor: V) -> Result - where - V: Visitor<'de>, - { - match *self.content { - Content::Newtype(ref v) => { - visitor.visit_newtype_struct(ContentRefDeserializer::new(v)) - } - _ => visitor.visit_newtype_struct(self), - } - } - - fn deserialize_seq(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match *self.content { - Content::Seq(ref v) => visit_content_seq_ref(v, visitor), - _ => Err(self.invalid_type(&visitor)), - } - } - - fn deserialize_tuple(self, _len: usize, visitor: V) -> Result - where - V: Visitor<'de>, - { - self.deserialize_seq(visitor) - } - - fn deserialize_tuple_struct( - self, - _name: &'static str, - _len: usize, - visitor: V, - ) -> Result - where - V: Visitor<'de>, - { - self.deserialize_seq(visitor) - } - - fn deserialize_map(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match *self.content { - Content::Map(ref v) => visit_content_map_ref(v, visitor), - _ => Err(self.invalid_type(&visitor)), - } - } - - fn deserialize_struct( - self, - _name: &'static str, - _fields: &'static [&'static str], - visitor: V, - ) -> Result - where - V: Visitor<'de>, - { - match *self.content { - Content::Seq(ref v) => visit_content_seq_ref(v, visitor), - Content::Map(ref v) => visit_content_map_ref(v, visitor), - _ => Err(self.invalid_type(&visitor)), - } - } - - fn deserialize_enum( - self, - _name: &str, - _variants: &'static [&'static str], - visitor: V, - ) -> Result - where - V: Visitor<'de>, - { - let (variant, value) = match *self.content { - Content::Map(ref value) => { - let mut iter = value.iter(); - let &(ref variant, ref value) = match iter.next() { - Some(v) => v, - None => { - return Err(de::Error::invalid_value( - de::Unexpected::Map, - &"map with a single key", - )); - } - }; - // enums are encoded in json as maps with a single key:value pair - if iter.next().is_some() { - return Err(de::Error::invalid_value( - de::Unexpected::Map, - &"map with a single key", - )); - } - (variant, Some(value)) - } - ref s @ Content::String(_) | ref s @ Content::Str(_) => (s, None), - ref other => { - return Err(de::Error::invalid_type( - other.unexpected(), - &"string or map", - )); - } - }; - - visitor.visit_enum(EnumRefDeserializer { - variant: variant, - value: value, - err: PhantomData, - }) - } - - fn 
deserialize_identifier(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match *self.content { - Content::String(ref v) => visitor.visit_str(v), - Content::Str(v) => visitor.visit_borrowed_str(v), - Content::ByteBuf(ref v) => visitor.visit_bytes(v), - Content::Bytes(v) => visitor.visit_borrowed_bytes(v), - Content::U8(v) => visitor.visit_u8(v), - Content::U64(v) => visitor.visit_u64(v), - _ => Err(self.invalid_type(&visitor)), - } - } - - fn deserialize_ignored_any(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - visitor.visit_unit() - } - } - - impl<'a, 'de, E> ContentRefDeserializer<'a, 'de, E> { - /// private API, don't use - pub fn new(content: &'a Content<'de>) -> Self { - ContentRefDeserializer { - content: content, - err: PhantomData, - } - } - } - - struct EnumRefDeserializer<'a, 'de: 'a, E> - where - E: de::Error, - { - variant: &'a Content<'de>, - value: Option<&'a Content<'de>>, - err: PhantomData, - } - - impl<'de, 'a, E> de::EnumAccess<'de> for EnumRefDeserializer<'a, 'de, E> - where - E: de::Error, - { - type Error = E; - type Variant = VariantRefDeserializer<'a, 'de, Self::Error>; - - fn variant_seed(self, seed: V) -> Result<(V::Value, Self::Variant), Self::Error> - where - V: de::DeserializeSeed<'de>, - { - let visitor = VariantRefDeserializer { - value: self.value, - err: PhantomData, - }; - seed.deserialize(ContentRefDeserializer::new(self.variant)) - .map(|v| (v, visitor)) - } - } - - struct VariantRefDeserializer<'a, 'de: 'a, E> - where - E: de::Error, - { - value: Option<&'a Content<'de>>, - err: PhantomData, - } - - impl<'de, 'a, E> de::VariantAccess<'de> for VariantRefDeserializer<'a, 'de, E> - where - E: de::Error, - { - type Error = E; - - fn unit_variant(self) -> Result<(), E> { - match self.value { - Some(value) => de::Deserialize::deserialize(ContentRefDeserializer::new(value)), - None => Ok(()), - } - } - - fn newtype_variant_seed(self, seed: T) -> Result - where - T: de::DeserializeSeed<'de>, - { - match self.value { - Some(value) => seed.deserialize(ContentRefDeserializer::new(value)), - None => Err(de::Error::invalid_type( - de::Unexpected::UnitVariant, - &"newtype variant", - )), - } - } - - fn tuple_variant(self, _len: usize, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - match self.value { - Some(&Content::Seq(ref v)) => { - de::Deserializer::deserialize_any(SeqRefDeserializer::new(v), visitor) - } - Some(other) => Err(de::Error::invalid_type( - other.unexpected(), - &"tuple variant", - )), - None => Err(de::Error::invalid_type( - de::Unexpected::UnitVariant, - &"tuple variant", - )), - } - } - - fn struct_variant( - self, - _fields: &'static [&'static str], - visitor: V, - ) -> Result - where - V: de::Visitor<'de>, - { - match self.value { - Some(&Content::Map(ref v)) => { - de::Deserializer::deserialize_any(MapRefDeserializer::new(v), visitor) - } - Some(&Content::Seq(ref v)) => { - de::Deserializer::deserialize_any(SeqRefDeserializer::new(v), visitor) - } - Some(other) => Err(de::Error::invalid_type( - other.unexpected(), - &"struct variant", - )), - None => Err(de::Error::invalid_type( - de::Unexpected::UnitVariant, - &"struct variant", - )), - } - } - } - - struct SeqRefDeserializer<'a, 'de: 'a, E> - where - E: de::Error, - { - iter: <&'a [Content<'de>] as IntoIterator>::IntoIter, - err: PhantomData, - } - - impl<'a, 'de, E> SeqRefDeserializer<'a, 'de, E> - where - E: de::Error, - { - fn new(slice: &'a [Content<'de>]) -> Self { - SeqRefDeserializer { - iter: slice.iter(), - err: PhantomData, - } - } - } - - 
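// A minimal sketch of the case the seq/map replays below serve: once a
// variant has been chosen, a buffered Content::Seq or Content::Map is fed
// back into its tuple or struct fields. The enum and the use of serde_json
// are illustrative assumptions.
use serde::Deserialize;

#[derive(Deserialize, Debug)]
#[serde(untagged)]
enum Geometry {
    Point(f64, f64),           // filled from a buffered sequence
    Circle { x: f64, r: f64 }, // filled from a buffered map
}

fn parse_geometry() -> (Geometry, Geometry) {
    (
        serde_json::from_str("[1.0, 2.0]").unwrap(),
        serde_json::from_str(r#"{"x":0.0,"r":1.0}"#).unwrap(),
    )
}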
impl<'de, 'a, E> de::Deserializer<'de> for SeqRefDeserializer<'a, 'de, E> - where - E: de::Error, - { - type Error = E; - - #[inline] - fn deserialize_any(mut self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - let len = self.iter.len(); - if len == 0 { - visitor.visit_unit() - } else { - let ret = try!(visitor.visit_seq(&mut self)); - let remaining = self.iter.len(); - if remaining == 0 { - Ok(ret) - } else { - Err(de::Error::invalid_length(len, &"fewer elements in array")) - } - } - } - - forward_to_deserialize_any! { - bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string - bytes byte_buf option unit unit_struct newtype_struct seq tuple - tuple_struct map struct enum identifier ignored_any - } - } - - impl<'de, 'a, E> de::SeqAccess<'de> for SeqRefDeserializer<'a, 'de, E> - where - E: de::Error, - { - type Error = E; - - fn next_element_seed(&mut self, seed: T) -> Result, Self::Error> - where - T: de::DeserializeSeed<'de>, - { - match self.iter.next() { - Some(value) => seed - .deserialize(ContentRefDeserializer::new(value)) - .map(Some), - None => Ok(None), - } - } - - fn size_hint(&self) -> Option { - size_hint::from_bounds(&self.iter) - } - } - - struct MapRefDeserializer<'a, 'de: 'a, E> - where - E: de::Error, - { - iter: <&'a [(Content<'de>, Content<'de>)] as IntoIterator>::IntoIter, - value: Option<&'a Content<'de>>, - err: PhantomData, - } - - impl<'a, 'de, E> MapRefDeserializer<'a, 'de, E> - where - E: de::Error, - { - fn new(map: &'a [(Content<'de>, Content<'de>)]) -> Self { - MapRefDeserializer { - iter: map.iter(), - value: None, - err: PhantomData, - } - } - } - - impl<'de, 'a, E> de::MapAccess<'de> for MapRefDeserializer<'a, 'de, E> - where - E: de::Error, - { - type Error = E; - - fn next_key_seed(&mut self, seed: T) -> Result, Self::Error> - where - T: de::DeserializeSeed<'de>, - { - match self.iter.next() { - Some(&(ref key, ref value)) => { - self.value = Some(value); - seed.deserialize(ContentRefDeserializer::new(key)).map(Some) - } - None => Ok(None), - } - } - - fn next_value_seed(&mut self, seed: T) -> Result - where - T: de::DeserializeSeed<'de>, - { - match self.value.take() { - Some(value) => seed.deserialize(ContentRefDeserializer::new(value)), - None => Err(de::Error::custom("value is missing")), - } - } - - fn size_hint(&self) -> Option { - size_hint::from_bounds(&self.iter) - } - } - - impl<'de, 'a, E> de::Deserializer<'de> for MapRefDeserializer<'a, 'de, E> - where - E: de::Error, - { - type Error = E; - - #[inline] - fn deserialize_any(self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - visitor.visit_map(self) - } - - forward_to_deserialize_any! { - bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string - bytes byte_buf option unit unit_struct newtype_struct seq tuple - tuple_struct map struct enum identifier ignored_any - } - } - - impl<'de, E> de::IntoDeserializer<'de, E> for ContentDeserializer<'de, E> - where - E: de::Error, - { - type Deserializer = Self; - - fn into_deserializer(self) -> Self { - self - } - } - - impl<'de, 'a, E> de::IntoDeserializer<'de, E> for ContentRefDeserializer<'a, 'de, E> - where - E: de::Error, - { - type Deserializer = Self; - - fn into_deserializer(self) -> Self { - self - } - } - - /// Visitor for deserializing an internally tagged unit variant. - /// - /// Not public API. - pub struct InternallyTaggedUnitVisitor<'a> { - type_name: &'a str, - variant_name: &'a str, - } - - impl<'a> InternallyTaggedUnitVisitor<'a> { - /// Not public API. 
- pub fn new(type_name: &'a str, variant_name: &'a str) -> Self { - InternallyTaggedUnitVisitor { - type_name: type_name, - variant_name: variant_name, - } - } - } - - impl<'de, 'a> Visitor<'de> for InternallyTaggedUnitVisitor<'a> { - type Value = (); - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - write!( - formatter, - "unit variant {}::{}", - self.type_name, self.variant_name - ) - } - - fn visit_seq(self, _: S) -> Result<(), S::Error> - where - S: SeqAccess<'de>, - { - Ok(()) - } - - fn visit_map(self, mut access: M) -> Result<(), M::Error> - where - M: MapAccess<'de>, - { - while try!(access.next_entry::()).is_some() {} - Ok(()) - } - } - - /// Visitor for deserializing an untagged unit variant. - /// - /// Not public API. - pub struct UntaggedUnitVisitor<'a> { - type_name: &'a str, - variant_name: &'a str, - } - - impl<'a> UntaggedUnitVisitor<'a> { - /// Not public API. - pub fn new(type_name: &'a str, variant_name: &'a str) -> Self { - UntaggedUnitVisitor { - type_name: type_name, - variant_name: variant_name, - } - } - } - - impl<'de, 'a> Visitor<'de> for UntaggedUnitVisitor<'a> { - type Value = (); - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - write!( - formatter, - "unit variant {}::{}", - self.type_name, self.variant_name - ) - } - - fn visit_unit(self) -> Result<(), E> - where - E: de::Error, - { - Ok(()) - } - - fn visit_none(self) -> Result<(), E> - where - E: de::Error, - { - Ok(()) - } - } -} - -//////////////////////////////////////////////////////////////////////////////// - -// Like `IntoDeserializer` but also implemented for `&[u8]`. This is used for -// the newtype fallthrough case of `field_identifier`. -// -// #[derive(Deserialize)] -// #[serde(field_identifier)] -// enum F { -// A, -// B, -// Other(String), // deserialized using IdentifierDeserializer -// } -pub trait IdentifierDeserializer<'de, E: Error> { - type Deserializer: Deserializer<'de, Error = E>; - - fn from(self) -> Self::Deserializer; -} - -pub struct Borrowed<'de, T: 'de + ?Sized>(pub &'de T); - -impl<'de, E> IdentifierDeserializer<'de, E> for u64 -where - E: Error, -{ - type Deserializer = >::Deserializer; - - fn from(self) -> Self::Deserializer { - self.into_deserializer() - } -} - -pub struct StrDeserializer<'a, E> { - value: &'a str, - marker: PhantomData, -} - -impl<'de, 'a, E> Deserializer<'de> for StrDeserializer<'a, E> -where - E: Error, -{ - type Error = E; - - fn deserialize_any(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - visitor.visit_str(self.value) - } - - forward_to_deserialize_any! { - bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string - bytes byte_buf option unit unit_struct newtype_struct seq tuple - tuple_struct map struct enum identifier ignored_any - } -} - -pub struct BorrowedStrDeserializer<'de, E> { - value: &'de str, - marker: PhantomData, -} - -impl<'de, E> Deserializer<'de> for BorrowedStrDeserializer<'de, E> -where - E: Error, -{ - type Error = E; - - fn deserialize_any(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - visitor.visit_borrowed_str(self.value) - } - - forward_to_deserialize_any! 
{ - bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string - bytes byte_buf option unit unit_struct newtype_struct seq tuple - tuple_struct map struct enum identifier ignored_any - } -} - -impl<'a, E> IdentifierDeserializer<'a, E> for &'a str -where - E: Error, -{ - type Deserializer = StrDeserializer<'a, E>; - - fn from(self) -> Self::Deserializer { - StrDeserializer { - value: self, - marker: PhantomData, - } - } -} - -impl<'de, E> IdentifierDeserializer<'de, E> for Borrowed<'de, str> -where - E: Error, -{ - type Deserializer = BorrowedStrDeserializer<'de, E>; - - fn from(self) -> Self::Deserializer { - BorrowedStrDeserializer { - value: self.0, - marker: PhantomData, - } - } -} - -impl<'a, E> IdentifierDeserializer<'a, E> for &'a [u8] -where - E: Error, -{ - type Deserializer = BytesDeserializer<'a, E>; - - fn from(self) -> Self::Deserializer { - BytesDeserializer::new(self) - } -} - -impl<'de, E> IdentifierDeserializer<'de, E> for Borrowed<'de, [u8]> -where - E: Error, -{ - type Deserializer = BorrowedBytesDeserializer<'de, E>; - - fn from(self) -> Self::Deserializer { - BorrowedBytesDeserializer::new(self.0) - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -pub struct FlatMapDeserializer<'a, 'de: 'a, E>( - pub &'a mut Vec, Content<'de>)>>, - pub PhantomData, -); - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'a, 'de, E> FlatMapDeserializer<'a, 'de, E> -where - E: Error, -{ - fn deserialize_other() -> Result { - Err(Error::custom("can only flatten structs and maps")) - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -macro_rules! forward_to_deserialize_other { - ($($func:ident ( $($arg:ty),* ))*) => { - $( - fn $func(self, $(_: $arg,)* _visitor: V) -> Result - where - V: Visitor<'de>, - { - Self::deserialize_other() - } - )* - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'a, 'de, E> Deserializer<'de> for FlatMapDeserializer<'a, 'de, E> -where - E: Error, -{ - type Error = E; - - fn deserialize_any(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - visitor.visit_map(FlatInternallyTaggedAccess { - iter: self.0.iter_mut(), - pending: None, - _marker: PhantomData, - }) - } - - fn deserialize_enum( - self, - name: &'static str, - variants: &'static [&'static str], - visitor: V, - ) -> Result - where - V: Visitor<'de>, - { - for item in self.0.iter_mut() { - // items in the vector are nulled out when used. So we can only use - // an item if it's still filled in and if the field is one we care - // about. 
- let use_item = match *item { - None => false, - Some((ref c, _)) => c.as_str().map_or(false, |x| variants.contains(&x)), - }; - - if use_item { - let (key, value) = item.take().unwrap(); - return visitor.visit_enum(EnumDeserializer::new(key, Some(value))); - } - } - - Err(Error::custom(format_args!( - "no variant of enum {} found in flattened data", - name - ))) - } - - fn deserialize_map(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - visitor.visit_map(FlatMapAccess::new(self.0.iter())) - } - - fn deserialize_struct( - self, - _: &'static str, - fields: &'static [&'static str], - visitor: V, - ) -> Result - where - V: Visitor<'de>, - { - visitor.visit_map(FlatStructAccess::new(self.0.iter_mut(), fields)) - } - - fn deserialize_newtype_struct(self, _name: &str, visitor: V) -> Result - where - V: Visitor<'de>, - { - visitor.visit_newtype_struct(self) - } - - fn deserialize_option(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - match visitor.__private_visit_untagged_option(self) { - Ok(value) => Ok(value), - Err(()) => Self::deserialize_other(), - } - } - - fn deserialize_unit(self, visitor: V) -> Result - where - V: Visitor<'de>, - { - visitor.visit_unit() - } - - forward_to_deserialize_other! { - deserialize_bool() - deserialize_i8() - deserialize_i16() - deserialize_i32() - deserialize_i64() - deserialize_u8() - deserialize_u16() - deserialize_u32() - deserialize_u64() - deserialize_f32() - deserialize_f64() - deserialize_char() - deserialize_str() - deserialize_string() - deserialize_bytes() - deserialize_byte_buf() - deserialize_unit_struct(&'static str) - deserialize_seq() - deserialize_tuple(usize) - deserialize_tuple_struct(&'static str, usize) - deserialize_identifier() - deserialize_ignored_any() - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -pub struct FlatMapAccess<'a, 'de: 'a, E> { - iter: slice::Iter<'a, Option<(Content<'de>, Content<'de>)>>, - pending_content: Option<&'a Content<'de>>, - _marker: PhantomData, -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'a, 'de, E> FlatMapAccess<'a, 'de, E> { - fn new( - iter: slice::Iter<'a, Option<(Content<'de>, Content<'de>)>>, - ) -> FlatMapAccess<'a, 'de, E> { - FlatMapAccess { - iter: iter, - pending_content: None, - _marker: PhantomData, - } - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'a, 'de, E> MapAccess<'de> for FlatMapAccess<'a, 'de, E> -where - E: Error, -{ - type Error = E; - - fn next_key_seed(&mut self, seed: T) -> Result, Self::Error> - where - T: DeserializeSeed<'de>, - { - for item in &mut self.iter { - // Items in the vector are nulled out when used by a struct. 
- if let Some((ref key, ref content)) = *item { - self.pending_content = Some(content); - return seed.deserialize(ContentRefDeserializer::new(key)).map(Some); - } - } - Ok(None) - } - - fn next_value_seed(&mut self, seed: T) -> Result - where - T: DeserializeSeed<'de>, - { - match self.pending_content.take() { - Some(value) => seed.deserialize(ContentRefDeserializer::new(value)), - None => Err(Error::custom("value is missing")), - } - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -pub struct FlatStructAccess<'a, 'de: 'a, E> { - iter: slice::IterMut<'a, Option<(Content<'de>, Content<'de>)>>, - pending_content: Option>, - fields: &'static [&'static str], - _marker: PhantomData, -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'a, 'de, E> FlatStructAccess<'a, 'de, E> { - fn new( - iter: slice::IterMut<'a, Option<(Content<'de>, Content<'de>)>>, - fields: &'static [&'static str], - ) -> FlatStructAccess<'a, 'de, E> { - FlatStructAccess { - iter: iter, - pending_content: None, - fields: fields, - _marker: PhantomData, - } - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'a, 'de, E> MapAccess<'de> for FlatStructAccess<'a, 'de, E> -where - E: Error, -{ - type Error = E; - - fn next_key_seed(&mut self, seed: T) -> Result, Self::Error> - where - T: DeserializeSeed<'de>, - { - while let Some(item) = self.iter.next() { - // items in the vector are nulled out when used. So we can only use - // an item if it's still filled in and if the field is one we care - // about. In case we do not know which fields we want, we take them all. - let use_item = match *item { - None => false, - Some((ref c, _)) => c.as_str().map_or(false, |key| self.fields.contains(&key)), - }; - - if use_item { - let (key, content) = item.take().unwrap(); - self.pending_content = Some(content); - return seed.deserialize(ContentDeserializer::new(key)).map(Some); - } - } - Ok(None) - } - - fn next_value_seed(&mut self, seed: T) -> Result - where - T: DeserializeSeed<'de>, - { - match self.pending_content.take() { - Some(value) => seed.deserialize(ContentDeserializer::new(value)), - None => Err(Error::custom("value is missing")), - } - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -pub struct FlatInternallyTaggedAccess<'a, 'de: 'a, E> { - iter: slice::IterMut<'a, Option<(Content<'de>, Content<'de>)>>, - pending: Option<&'a Content<'de>>, - _marker: PhantomData, -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'a, 'de, E> MapAccess<'de> for FlatInternallyTaggedAccess<'a, 'de, E> -where - E: Error, -{ - type Error = E; - - fn next_key_seed(&mut self, seed: T) -> Result, Self::Error> - where - T: DeserializeSeed<'de>, - { - for item in &mut self.iter { - if let Some((ref key, ref content)) = *item { - // Do not take(), instead borrow this entry. The internally tagged - // enum does its own buffering so we can't tell whether this entry - // is going to be consumed. Borrowing here leaves the entry - // available for later flattened fields. 
- self.pending = Some(content); - return seed.deserialize(ContentRefDeserializer::new(key)).map(Some); - } - } - Ok(None) - } - - fn next_value_seed(&mut self, seed: T) -> Result - where - T: DeserializeSeed<'de>, - { - match self.pending.take() { - Some(value) => seed.deserialize(ContentRefDeserializer::new(value)), - None => panic!("value is missing"), - } - } -} diff --git a/vendor/serde/src/private/doc.rs b/vendor/serde/src/private/doc.rs deleted file mode 100644 index f597af84..00000000 --- a/vendor/serde/src/private/doc.rs +++ /dev/null @@ -1,159 +0,0 @@ -// Used only by Serde doc tests. Not public API. - -use lib::*; - -use ser; - -#[doc(hidden)] -#[derive(Debug)] -pub struct Error; - -impl ser::Error for Error { - fn custom(_: T) -> Self - where - T: Display, - { - unimplemented!() - } -} - -#[cfg(feature = "std")] -impl error::Error for Error { - fn description(&self) -> &str { - unimplemented!() - } -} - -impl Display for Error { - fn fmt(&self, _: &mut fmt::Formatter) -> fmt::Result { - unimplemented!() - } -} - -#[doc(hidden)] -#[macro_export] -macro_rules! __private_serialize { - () => { - trait Serialize { - fn serialize(&self, serializer: S) -> Result - where - S: $crate::Serializer; - } - }; -} - -#[doc(hidden)] -#[macro_export(local_inner_macros)] -macro_rules! __serialize_unimplemented { - ($($func:ident)*) => { - $( - __serialize_unimplemented_helper!($func); - )* - }; -} - -#[doc(hidden)] -#[macro_export] -macro_rules! __serialize_unimplemented_method { - ($func:ident $(<$t:ident>)* ($($arg:ty),*) -> $ret:ident) => { - fn $func $(<$t: ?Sized + $crate::Serialize>)* (self $(, _: $arg)*) -> $crate::__private::Result { - unimplemented!() - } - }; -} - -#[doc(hidden)] -#[macro_export(local_inner_macros)] -macro_rules! __serialize_unimplemented_helper { - (bool) => { - __serialize_unimplemented_method!(serialize_bool(bool) -> Ok); - }; - (i8) => { - __serialize_unimplemented_method!(serialize_i8(i8) -> Ok); - }; - (i16) => { - __serialize_unimplemented_method!(serialize_i16(i16) -> Ok); - }; - (i32) => { - __serialize_unimplemented_method!(serialize_i32(i32) -> Ok); - }; - (i64) => { - __serialize_unimplemented_method!(serialize_i64(i64) -> Ok); - }; - (u8) => { - __serialize_unimplemented_method!(serialize_u8(u8) -> Ok); - }; - (u16) => { - __serialize_unimplemented_method!(serialize_u16(u16) -> Ok); - }; - (u32) => { - __serialize_unimplemented_method!(serialize_u32(u32) -> Ok); - }; - (u64) => { - __serialize_unimplemented_method!(serialize_u64(u64) -> Ok); - }; - (f32) => { - __serialize_unimplemented_method!(serialize_f32(f32) -> Ok); - }; - (f64) => { - __serialize_unimplemented_method!(serialize_f64(f64) -> Ok); - }; - (char) => { - __serialize_unimplemented_method!(serialize_char(char) -> Ok); - }; - (str) => { - __serialize_unimplemented_method!(serialize_str(&str) -> Ok); - }; - (bytes) => { - __serialize_unimplemented_method!(serialize_bytes(&[u8]) -> Ok); - }; - (none) => { - __serialize_unimplemented_method!(serialize_none() -> Ok); - }; - (some) => { - __serialize_unimplemented_method!(serialize_some(&T) -> Ok); - }; - (unit) => { - __serialize_unimplemented_method!(serialize_unit() -> Ok); - }; - (unit_struct) => { - __serialize_unimplemented_method!(serialize_unit_struct(&str) -> Ok); - }; - (unit_variant) => { - __serialize_unimplemented_method!(serialize_unit_variant(&str, u32, &str) -> Ok); - }; - (newtype_struct) => { - __serialize_unimplemented_method!(serialize_newtype_struct(&str, &T) -> Ok); - }; - (newtype_variant) => { - 
__serialize_unimplemented_method!(serialize_newtype_variant(&str, u32, &str, &T) -> Ok); - }; - (seq) => { - type SerializeSeq = $crate::ser::Impossible; - __serialize_unimplemented_method!(serialize_seq(Option) -> SerializeSeq); - }; - (tuple) => { - type SerializeTuple = $crate::ser::Impossible; - __serialize_unimplemented_method!(serialize_tuple(usize) -> SerializeTuple); - }; - (tuple_struct) => { - type SerializeTupleStruct = $crate::ser::Impossible; - __serialize_unimplemented_method!(serialize_tuple_struct(&str, usize) -> SerializeTupleStruct); - }; - (tuple_variant) => { - type SerializeTupleVariant = $crate::ser::Impossible; - __serialize_unimplemented_method!(serialize_tuple_variant(&str, u32, &str, usize) -> SerializeTupleVariant); - }; - (map) => { - type SerializeMap = $crate::ser::Impossible; - __serialize_unimplemented_method!(serialize_map(Option) -> SerializeMap); - }; - (struct) => { - type SerializeStruct = $crate::ser::Impossible; - __serialize_unimplemented_method!(serialize_struct(&str, usize) -> SerializeStruct); - }; - (struct_variant) => { - type SerializeStructVariant = $crate::ser::Impossible; - __serialize_unimplemented_method!(serialize_struct_variant(&str, u32, &str, usize) -> SerializeStructVariant); - }; -} diff --git a/vendor/serde/src/private/mod.rs b/vendor/serde/src/private/mod.rs deleted file mode 100644 index 71e82a89..00000000 --- a/vendor/serde/src/private/mod.rs +++ /dev/null @@ -1,50 +0,0 @@ -#[cfg(serde_derive)] -pub mod de; -#[cfg(serde_derive)] -pub mod ser; - -pub mod size_hint; - -// FIXME: #[cfg(doctest)] once https://github.com/rust-lang/rust/issues/67295 is fixed. -pub mod doc; - -pub use lib::clone::Clone; -pub use lib::convert::{From, Into}; -pub use lib::default::Default; -pub use lib::fmt::{self, Formatter}; -pub use lib::marker::PhantomData; -pub use lib::option::Option::{self, None, Some}; -pub use lib::ptr; -pub use lib::result::Result::{self, Err, Ok}; - -pub use self::string::from_utf8_lossy; - -#[cfg(any(feature = "alloc", feature = "std"))] -pub use lib::{ToString, Vec}; - -#[cfg(core_try_from)] -pub use lib::convert::TryFrom; - -mod string { - use lib::*; - - #[cfg(any(feature = "std", feature = "alloc"))] - pub fn from_utf8_lossy(bytes: &[u8]) -> Cow { - String::from_utf8_lossy(bytes) - } - - // The generated code calls this like: - // - // let value = &_serde::__private::from_utf8_lossy(bytes); - // Err(_serde::de::Error::unknown_variant(value, VARIANTS)) - // - // so it is okay for the return type to be different from the std case as long - // as the above works. - #[cfg(not(any(feature = "std", feature = "alloc")))] - pub fn from_utf8_lossy(bytes: &[u8]) -> &str { - // Three unicode replacement characters if it fails. They look like a - // white-on-black question mark. The user will recognize it as invalid - // UTF-8. - str::from_utf8(bytes).unwrap_or("\u{fffd}\u{fffd}\u{fffd}") - } -} diff --git a/vendor/serde/src/private/ser.rs b/vendor/serde/src/private/ser.rs deleted file mode 100644 index 6ee99938..00000000 --- a/vendor/serde/src/private/ser.rs +++ /dev/null @@ -1,1310 +0,0 @@ -use lib::*; - -use ser::{self, Impossible, Serialize, SerializeMap, SerializeStruct, Serializer}; - -#[cfg(any(feature = "std", feature = "alloc"))] -use self::content::{ - Content, ContentSerializer, SerializeStructVariantAsMapValue, SerializeTupleVariantAsMapValue, -}; - -/// Used to check that serde(getter) attributes return the expected type. -/// Not public API. -pub fn constrain(t: &T) -> &T { - t -} - -/// Not public API. 
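// A minimal sketch of what the tagged-newtype path below produces: the tag
// field and the inner struct's own fields are emitted into one flat map. The
// names and the use of serde_json are illustrative assumptions.
use serde::Serialize;

#[derive(Serialize)]
struct Info {
    version: u32,
}

#[derive(Serialize)]
#[serde(tag = "topic")]
enum Message {
    Info(Info),
}

fn encode_message() -> String {
    // Serializes as {"topic":"Info","version":1}: the tag entry is written
    // first, then Info's fields follow in the same map.
    serde_json::to_string(&Message::Info(Info { version: 1 })).unwrap()
}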
-pub fn serialize_tagged_newtype( - serializer: S, - type_ident: &'static str, - variant_ident: &'static str, - tag: &'static str, - variant_name: &'static str, - value: &T, -) -> Result -where - S: Serializer, - T: Serialize, -{ - value.serialize(TaggedSerializer { - type_ident: type_ident, - variant_ident: variant_ident, - tag: tag, - variant_name: variant_name, - delegate: serializer, - }) -} - -struct TaggedSerializer { - type_ident: &'static str, - variant_ident: &'static str, - tag: &'static str, - variant_name: &'static str, - delegate: S, -} - -enum Unsupported { - Boolean, - Integer, - Float, - Char, - String, - ByteArray, - Optional, - Unit, - #[cfg(any(feature = "std", feature = "alloc"))] - UnitStruct, - Sequence, - Tuple, - TupleStruct, - Enum, -} - -impl Display for Unsupported { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - match *self { - Unsupported::Boolean => formatter.write_str("a boolean"), - Unsupported::Integer => formatter.write_str("an integer"), - Unsupported::Float => formatter.write_str("a float"), - Unsupported::Char => formatter.write_str("a char"), - Unsupported::String => formatter.write_str("a string"), - Unsupported::ByteArray => formatter.write_str("a byte array"), - Unsupported::Optional => formatter.write_str("an optional"), - Unsupported::Unit => formatter.write_str("unit"), - #[cfg(any(feature = "std", feature = "alloc"))] - Unsupported::UnitStruct => formatter.write_str("unit struct"), - Unsupported::Sequence => formatter.write_str("a sequence"), - Unsupported::Tuple => formatter.write_str("a tuple"), - Unsupported::TupleStruct => formatter.write_str("a tuple struct"), - Unsupported::Enum => formatter.write_str("an enum"), - } - } -} - -impl TaggedSerializer -where - S: Serializer, -{ - fn bad_type(self, what: Unsupported) -> S::Error { - ser::Error::custom(format_args!( - "cannot serialize tagged newtype variant {}::{} containing {}", - self.type_ident, self.variant_ident, what - )) - } -} - -impl Serializer for TaggedSerializer -where - S: Serializer, -{ - type Ok = S::Ok; - type Error = S::Error; - - type SerializeSeq = Impossible; - type SerializeTuple = Impossible; - type SerializeTupleStruct = Impossible; - type SerializeMap = S::SerializeMap; - type SerializeStruct = S::SerializeStruct; - - #[cfg(not(any(feature = "std", feature = "alloc")))] - type SerializeTupleVariant = Impossible; - #[cfg(any(feature = "std", feature = "alloc"))] - type SerializeTupleVariant = SerializeTupleVariantAsMapValue; - - #[cfg(not(any(feature = "std", feature = "alloc")))] - type SerializeStructVariant = Impossible; - #[cfg(any(feature = "std", feature = "alloc"))] - type SerializeStructVariant = SerializeStructVariantAsMapValue; - - fn serialize_bool(self, _: bool) -> Result { - Err(self.bad_type(Unsupported::Boolean)) - } - - fn serialize_i8(self, _: i8) -> Result { - Err(self.bad_type(Unsupported::Integer)) - } - - fn serialize_i16(self, _: i16) -> Result { - Err(self.bad_type(Unsupported::Integer)) - } - - fn serialize_i32(self, _: i32) -> Result { - Err(self.bad_type(Unsupported::Integer)) - } - - fn serialize_i64(self, _: i64) -> Result { - Err(self.bad_type(Unsupported::Integer)) - } - - fn serialize_u8(self, _: u8) -> Result { - Err(self.bad_type(Unsupported::Integer)) - } - - fn serialize_u16(self, _: u16) -> Result { - Err(self.bad_type(Unsupported::Integer)) - } - - fn serialize_u32(self, _: u32) -> Result { - Err(self.bad_type(Unsupported::Integer)) - } - - fn serialize_u64(self, _: u64) -> Result { - 
Err(self.bad_type(Unsupported::Integer)) - } - - fn serialize_f32(self, _: f32) -> Result { - Err(self.bad_type(Unsupported::Float)) - } - - fn serialize_f64(self, _: f64) -> Result { - Err(self.bad_type(Unsupported::Float)) - } - - fn serialize_char(self, _: char) -> Result { - Err(self.bad_type(Unsupported::Char)) - } - - fn serialize_str(self, _: &str) -> Result { - Err(self.bad_type(Unsupported::String)) - } - - fn serialize_bytes(self, _: &[u8]) -> Result { - Err(self.bad_type(Unsupported::ByteArray)) - } - - fn serialize_none(self) -> Result { - Err(self.bad_type(Unsupported::Optional)) - } - - fn serialize_some(self, _: &T) -> Result - where - T: Serialize, - { - Err(self.bad_type(Unsupported::Optional)) - } - - fn serialize_unit(self) -> Result { - Err(self.bad_type(Unsupported::Unit)) - } - - fn serialize_unit_struct(self, _: &'static str) -> Result { - let mut map = try!(self.delegate.serialize_map(Some(1))); - try!(map.serialize_entry(self.tag, self.variant_name)); - map.end() - } - - fn serialize_unit_variant( - self, - _: &'static str, - _: u32, - inner_variant: &'static str, - ) -> Result { - let mut map = try!(self.delegate.serialize_map(Some(2))); - try!(map.serialize_entry(self.tag, self.variant_name)); - try!(map.serialize_entry(inner_variant, &())); - map.end() - } - - fn serialize_newtype_struct( - self, - _: &'static str, - value: &T, - ) -> Result - where - T: Serialize, - { - value.serialize(self) - } - - fn serialize_newtype_variant( - self, - _: &'static str, - _: u32, - inner_variant: &'static str, - inner_value: &T, - ) -> Result - where - T: Serialize, - { - let mut map = try!(self.delegate.serialize_map(Some(2))); - try!(map.serialize_entry(self.tag, self.variant_name)); - try!(map.serialize_entry(inner_variant, inner_value)); - map.end() - } - - fn serialize_seq(self, _: Option) -> Result { - Err(self.bad_type(Unsupported::Sequence)) - } - - fn serialize_tuple(self, _: usize) -> Result { - Err(self.bad_type(Unsupported::Tuple)) - } - - fn serialize_tuple_struct( - self, - _: &'static str, - _: usize, - ) -> Result { - Err(self.bad_type(Unsupported::TupleStruct)) - } - - #[cfg(not(any(feature = "std", feature = "alloc")))] - fn serialize_tuple_variant( - self, - _: &'static str, - _: u32, - _: &'static str, - _: usize, - ) -> Result { - // Lack of push-based serialization means we need to buffer the content - // of the tuple variant, so it requires std. 
- Err(self.bad_type(Unsupported::Enum)) - } - - #[cfg(any(feature = "std", feature = "alloc"))] - fn serialize_tuple_variant( - self, - _: &'static str, - _: u32, - inner_variant: &'static str, - len: usize, - ) -> Result { - let mut map = try!(self.delegate.serialize_map(Some(2))); - try!(map.serialize_entry(self.tag, self.variant_name)); - try!(map.serialize_key(inner_variant)); - Ok(SerializeTupleVariantAsMapValue::new( - map, - inner_variant, - len, - )) - } - - fn serialize_map(self, len: Option) -> Result { - let mut map = try!(self.delegate.serialize_map(len.map(|len| len + 1))); - try!(map.serialize_entry(self.tag, self.variant_name)); - Ok(map) - } - - fn serialize_struct( - self, - name: &'static str, - len: usize, - ) -> Result { - let mut state = try!(self.delegate.serialize_struct(name, len + 1)); - try!(state.serialize_field(self.tag, self.variant_name)); - Ok(state) - } - - #[cfg(not(any(feature = "std", feature = "alloc")))] - fn serialize_struct_variant( - self, - _: &'static str, - _: u32, - _: &'static str, - _: usize, - ) -> Result { - // Lack of push-based serialization means we need to buffer the content - // of the struct variant, so it requires std. - Err(self.bad_type(Unsupported::Enum)) - } - - #[cfg(any(feature = "std", feature = "alloc"))] - fn serialize_struct_variant( - self, - _: &'static str, - _: u32, - inner_variant: &'static str, - len: usize, - ) -> Result { - let mut map = try!(self.delegate.serialize_map(Some(2))); - try!(map.serialize_entry(self.tag, self.variant_name)); - try!(map.serialize_key(inner_variant)); - Ok(SerializeStructVariantAsMapValue::new( - map, - inner_variant, - len, - )) - } - - #[cfg(not(any(feature = "std", feature = "alloc")))] - fn collect_str(self, _: &T) -> Result - where - T: Display, - { - Err(self.bad_type(Unsupported::String)) - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -mod content { - use lib::*; - - use ser::{self, Serialize, Serializer}; - - pub struct SerializeTupleVariantAsMapValue { - map: M, - name: &'static str, - fields: Vec, - } - - impl SerializeTupleVariantAsMapValue { - pub fn new(map: M, name: &'static str, len: usize) -> Self { - SerializeTupleVariantAsMapValue { - map: map, - name: name, - fields: Vec::with_capacity(len), - } - } - } - - impl ser::SerializeTupleVariant for SerializeTupleVariantAsMapValue - where - M: ser::SerializeMap, - { - type Ok = M::Ok; - type Error = M::Error; - - fn serialize_field(&mut self, value: &T) -> Result<(), M::Error> - where - T: Serialize, - { - let value = try!(value.serialize(ContentSerializer::::new())); - self.fields.push(value); - Ok(()) - } - - fn end(mut self) -> Result { - try!(self - .map - .serialize_value(&Content::TupleStruct(self.name, self.fields))); - self.map.end() - } - } - - pub struct SerializeStructVariantAsMapValue { - map: M, - name: &'static str, - fields: Vec<(&'static str, Content)>, - } - - impl SerializeStructVariantAsMapValue { - pub fn new(map: M, name: &'static str, len: usize) -> Self { - SerializeStructVariantAsMapValue { - map: map, - name: name, - fields: Vec::with_capacity(len), - } - } - } - - impl ser::SerializeStructVariant for SerializeStructVariantAsMapValue - where - M: ser::SerializeMap, - { - type Ok = M::Ok; - type Error = M::Error; - - fn serialize_field( - &mut self, - key: &'static str, - value: &T, - ) -> Result<(), M::Error> - where - T: Serialize, - { - let value = try!(value.serialize(ContentSerializer::::new())); - self.fields.push((key, value)); - Ok(()) - } - - fn end(mut self) -> Result { - try!(self - 
.map - .serialize_value(&Content::Struct(self.name, self.fields))); - self.map.end() - } - } - - pub enum Content { - Bool(bool), - - U8(u8), - U16(u16), - U32(u32), - U64(u64), - - I8(i8), - I16(i16), - I32(i32), - I64(i64), - - F32(f32), - F64(f64), - - Char(char), - String(String), - Bytes(Vec), - - None, - Some(Box), - - Unit, - UnitStruct(&'static str), - UnitVariant(&'static str, u32, &'static str), - NewtypeStruct(&'static str, Box), - NewtypeVariant(&'static str, u32, &'static str, Box), - - Seq(Vec), - Tuple(Vec), - TupleStruct(&'static str, Vec), - TupleVariant(&'static str, u32, &'static str, Vec), - Map(Vec<(Content, Content)>), - Struct(&'static str, Vec<(&'static str, Content)>), - StructVariant( - &'static str, - u32, - &'static str, - Vec<(&'static str, Content)>, - ), - } - - impl Serialize for Content { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - match *self { - Content::Bool(b) => serializer.serialize_bool(b), - Content::U8(u) => serializer.serialize_u8(u), - Content::U16(u) => serializer.serialize_u16(u), - Content::U32(u) => serializer.serialize_u32(u), - Content::U64(u) => serializer.serialize_u64(u), - Content::I8(i) => serializer.serialize_i8(i), - Content::I16(i) => serializer.serialize_i16(i), - Content::I32(i) => serializer.serialize_i32(i), - Content::I64(i) => serializer.serialize_i64(i), - Content::F32(f) => serializer.serialize_f32(f), - Content::F64(f) => serializer.serialize_f64(f), - Content::Char(c) => serializer.serialize_char(c), - Content::String(ref s) => serializer.serialize_str(s), - Content::Bytes(ref b) => serializer.serialize_bytes(b), - Content::None => serializer.serialize_none(), - Content::Some(ref c) => serializer.serialize_some(&**c), - Content::Unit => serializer.serialize_unit(), - Content::UnitStruct(n) => serializer.serialize_unit_struct(n), - Content::UnitVariant(n, i, v) => serializer.serialize_unit_variant(n, i, v), - Content::NewtypeStruct(n, ref c) => serializer.serialize_newtype_struct(n, &**c), - Content::NewtypeVariant(n, i, v, ref c) => { - serializer.serialize_newtype_variant(n, i, v, &**c) - } - Content::Seq(ref elements) => elements.serialize(serializer), - Content::Tuple(ref elements) => { - use ser::SerializeTuple; - let mut tuple = try!(serializer.serialize_tuple(elements.len())); - for e in elements { - try!(tuple.serialize_element(e)); - } - tuple.end() - } - Content::TupleStruct(n, ref fields) => { - use ser::SerializeTupleStruct; - let mut ts = try!(serializer.serialize_tuple_struct(n, fields.len())); - for f in fields { - try!(ts.serialize_field(f)); - } - ts.end() - } - Content::TupleVariant(n, i, v, ref fields) => { - use ser::SerializeTupleVariant; - let mut tv = try!(serializer.serialize_tuple_variant(n, i, v, fields.len())); - for f in fields { - try!(tv.serialize_field(f)); - } - tv.end() - } - Content::Map(ref entries) => { - use ser::SerializeMap; - let mut map = try!(serializer.serialize_map(Some(entries.len()))); - for &(ref k, ref v) in entries { - try!(map.serialize_entry(k, v)); - } - map.end() - } - Content::Struct(n, ref fields) => { - use ser::SerializeStruct; - let mut s = try!(serializer.serialize_struct(n, fields.len())); - for &(k, ref v) in fields { - try!(s.serialize_field(k, v)); - } - s.end() - } - Content::StructVariant(n, i, v, ref fields) => { - use ser::SerializeStructVariant; - let mut sv = try!(serializer.serialize_struct_variant(n, i, v, fields.len())); - for &(k, ref v) in fields { - try!(sv.serialize_field(k, v)); - } - sv.end() - } - } - } - } - - 
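
The `TaggedSerializer` and `Content` buffering deleted above exist so that internally tagged enums can inject the tag field next to a variant payload that itself serializes as a struct or map. A minimal user-side sketch of that case (not part of this patch; assumes the `serde` derive and `serde_json` crates, and that the derive routes this newtype variant through the deleted `serialize_tagged_newtype` helper):

```rust
use serde::Serialize;

#[derive(Serialize)]
struct Circle {
    radius: f64,
}

#[derive(Serialize)]
#[serde(tag = "type")]
enum Shape {
    // Newtype variant whose payload is a struct: the tag must be merged
    // into the payload's own map of fields.
    Circle(Circle),
}

fn main() {
    let shape = Shape::Circle(Circle { radius: 1.5 });
    // The tag is emitted alongside the inner struct's fields:
    // {"type":"Circle","radius":1.5}
    println!("{}", serde_json::to_string(&shape).unwrap());
}
```
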
pub struct ContentSerializer { - error: PhantomData, - } - - impl ContentSerializer { - pub fn new() -> Self { - ContentSerializer { error: PhantomData } - } - } - - impl Serializer for ContentSerializer - where - E: ser::Error, - { - type Ok = Content; - type Error = E; - - type SerializeSeq = SerializeSeq; - type SerializeTuple = SerializeTuple; - type SerializeTupleStruct = SerializeTupleStruct; - type SerializeTupleVariant = SerializeTupleVariant; - type SerializeMap = SerializeMap; - type SerializeStruct = SerializeStruct; - type SerializeStructVariant = SerializeStructVariant; - - fn serialize_bool(self, v: bool) -> Result { - Ok(Content::Bool(v)) - } - - fn serialize_i8(self, v: i8) -> Result { - Ok(Content::I8(v)) - } - - fn serialize_i16(self, v: i16) -> Result { - Ok(Content::I16(v)) - } - - fn serialize_i32(self, v: i32) -> Result { - Ok(Content::I32(v)) - } - - fn serialize_i64(self, v: i64) -> Result { - Ok(Content::I64(v)) - } - - fn serialize_u8(self, v: u8) -> Result { - Ok(Content::U8(v)) - } - - fn serialize_u16(self, v: u16) -> Result { - Ok(Content::U16(v)) - } - - fn serialize_u32(self, v: u32) -> Result { - Ok(Content::U32(v)) - } - - fn serialize_u64(self, v: u64) -> Result { - Ok(Content::U64(v)) - } - - fn serialize_f32(self, v: f32) -> Result { - Ok(Content::F32(v)) - } - - fn serialize_f64(self, v: f64) -> Result { - Ok(Content::F64(v)) - } - - fn serialize_char(self, v: char) -> Result { - Ok(Content::Char(v)) - } - - fn serialize_str(self, value: &str) -> Result { - Ok(Content::String(value.to_owned())) - } - - fn serialize_bytes(self, value: &[u8]) -> Result { - Ok(Content::Bytes(value.to_owned())) - } - - fn serialize_none(self) -> Result { - Ok(Content::None) - } - - fn serialize_some(self, value: &T) -> Result - where - T: Serialize, - { - Ok(Content::Some(Box::new(try!(value.serialize(self))))) - } - - fn serialize_unit(self) -> Result { - Ok(Content::Unit) - } - - fn serialize_unit_struct(self, name: &'static str) -> Result { - Ok(Content::UnitStruct(name)) - } - - fn serialize_unit_variant( - self, - name: &'static str, - variant_index: u32, - variant: &'static str, - ) -> Result { - Ok(Content::UnitVariant(name, variant_index, variant)) - } - - fn serialize_newtype_struct( - self, - name: &'static str, - value: &T, - ) -> Result - where - T: Serialize, - { - Ok(Content::NewtypeStruct( - name, - Box::new(try!(value.serialize(self))), - )) - } - - fn serialize_newtype_variant( - self, - name: &'static str, - variant_index: u32, - variant: &'static str, - value: &T, - ) -> Result - where - T: Serialize, - { - Ok(Content::NewtypeVariant( - name, - variant_index, - variant, - Box::new(try!(value.serialize(self))), - )) - } - - fn serialize_seq(self, len: Option) -> Result { - Ok(SerializeSeq { - elements: Vec::with_capacity(len.unwrap_or(0)), - error: PhantomData, - }) - } - - fn serialize_tuple(self, len: usize) -> Result { - Ok(SerializeTuple { - elements: Vec::with_capacity(len), - error: PhantomData, - }) - } - - fn serialize_tuple_struct( - self, - name: &'static str, - len: usize, - ) -> Result { - Ok(SerializeTupleStruct { - name: name, - fields: Vec::with_capacity(len), - error: PhantomData, - }) - } - - fn serialize_tuple_variant( - self, - name: &'static str, - variant_index: u32, - variant: &'static str, - len: usize, - ) -> Result { - Ok(SerializeTupleVariant { - name: name, - variant_index: variant_index, - variant: variant, - fields: Vec::with_capacity(len), - error: PhantomData, - }) - } - - fn serialize_map(self, len: Option) -> Result { - 
Ok(SerializeMap { - entries: Vec::with_capacity(len.unwrap_or(0)), - key: None, - error: PhantomData, - }) - } - - fn serialize_struct( - self, - name: &'static str, - len: usize, - ) -> Result { - Ok(SerializeStruct { - name: name, - fields: Vec::with_capacity(len), - error: PhantomData, - }) - } - - fn serialize_struct_variant( - self, - name: &'static str, - variant_index: u32, - variant: &'static str, - len: usize, - ) -> Result { - Ok(SerializeStructVariant { - name: name, - variant_index: variant_index, - variant: variant, - fields: Vec::with_capacity(len), - error: PhantomData, - }) - } - } - - pub struct SerializeSeq { - elements: Vec, - error: PhantomData, - } - - impl ser::SerializeSeq for SerializeSeq - where - E: ser::Error, - { - type Ok = Content; - type Error = E; - - fn serialize_element(&mut self, value: &T) -> Result<(), E> - where - T: Serialize, - { - let value = try!(value.serialize(ContentSerializer::::new())); - self.elements.push(value); - Ok(()) - } - - fn end(self) -> Result { - Ok(Content::Seq(self.elements)) - } - } - - pub struct SerializeTuple { - elements: Vec, - error: PhantomData, - } - - impl ser::SerializeTuple for SerializeTuple - where - E: ser::Error, - { - type Ok = Content; - type Error = E; - - fn serialize_element(&mut self, value: &T) -> Result<(), E> - where - T: Serialize, - { - let value = try!(value.serialize(ContentSerializer::::new())); - self.elements.push(value); - Ok(()) - } - - fn end(self) -> Result { - Ok(Content::Tuple(self.elements)) - } - } - - pub struct SerializeTupleStruct { - name: &'static str, - fields: Vec, - error: PhantomData, - } - - impl ser::SerializeTupleStruct for SerializeTupleStruct - where - E: ser::Error, - { - type Ok = Content; - type Error = E; - - fn serialize_field(&mut self, value: &T) -> Result<(), E> - where - T: Serialize, - { - let value = try!(value.serialize(ContentSerializer::::new())); - self.fields.push(value); - Ok(()) - } - - fn end(self) -> Result { - Ok(Content::TupleStruct(self.name, self.fields)) - } - } - - pub struct SerializeTupleVariant { - name: &'static str, - variant_index: u32, - variant: &'static str, - fields: Vec, - error: PhantomData, - } - - impl ser::SerializeTupleVariant for SerializeTupleVariant - where - E: ser::Error, - { - type Ok = Content; - type Error = E; - - fn serialize_field(&mut self, value: &T) -> Result<(), E> - where - T: Serialize, - { - let value = try!(value.serialize(ContentSerializer::::new())); - self.fields.push(value); - Ok(()) - } - - fn end(self) -> Result { - Ok(Content::TupleVariant( - self.name, - self.variant_index, - self.variant, - self.fields, - )) - } - } - - pub struct SerializeMap { - entries: Vec<(Content, Content)>, - key: Option, - error: PhantomData, - } - - impl ser::SerializeMap for SerializeMap - where - E: ser::Error, - { - type Ok = Content; - type Error = E; - - fn serialize_key(&mut self, key: &T) -> Result<(), E> - where - T: Serialize, - { - let key = try!(key.serialize(ContentSerializer::::new())); - self.key = Some(key); - Ok(()) - } - - fn serialize_value(&mut self, value: &T) -> Result<(), E> - where - T: Serialize, - { - let key = self - .key - .take() - .expect("serialize_value called before serialize_key"); - let value = try!(value.serialize(ContentSerializer::::new())); - self.entries.push((key, value)); - Ok(()) - } - - fn end(self) -> Result { - Ok(Content::Map(self.entries)) - } - - fn serialize_entry(&mut self, key: &K, value: &V) -> Result<(), E> - where - K: Serialize, - V: Serialize, - { - let key = 
try!(key.serialize(ContentSerializer::::new())); - let value = try!(value.serialize(ContentSerializer::::new())); - self.entries.push((key, value)); - Ok(()) - } - } - - pub struct SerializeStruct { - name: &'static str, - fields: Vec<(&'static str, Content)>, - error: PhantomData, - } - - impl ser::SerializeStruct for SerializeStruct - where - E: ser::Error, - { - type Ok = Content; - type Error = E; - - fn serialize_field(&mut self, key: &'static str, value: &T) -> Result<(), E> - where - T: Serialize, - { - let value = try!(value.serialize(ContentSerializer::::new())); - self.fields.push((key, value)); - Ok(()) - } - - fn end(self) -> Result { - Ok(Content::Struct(self.name, self.fields)) - } - } - - pub struct SerializeStructVariant { - name: &'static str, - variant_index: u32, - variant: &'static str, - fields: Vec<(&'static str, Content)>, - error: PhantomData, - } - - impl ser::SerializeStructVariant for SerializeStructVariant - where - E: ser::Error, - { - type Ok = Content; - type Error = E; - - fn serialize_field(&mut self, key: &'static str, value: &T) -> Result<(), E> - where - T: Serialize, - { - let value = try!(value.serialize(ContentSerializer::::new())); - self.fields.push((key, value)); - Ok(()) - } - - fn end(self) -> Result { - Ok(Content::StructVariant( - self.name, - self.variant_index, - self.variant, - self.fields, - )) - } - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -pub struct FlatMapSerializer<'a, M: 'a>(pub &'a mut M); - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'a, M> FlatMapSerializer<'a, M> -where - M: SerializeMap + 'a, -{ - fn bad_type(what: Unsupported) -> M::Error { - ser::Error::custom(format_args!( - "can only flatten structs and maps (got {})", - what - )) - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'a, M> Serializer for FlatMapSerializer<'a, M> -where - M: SerializeMap + 'a, -{ - type Ok = (); - type Error = M::Error; - - type SerializeSeq = Impossible; - type SerializeTuple = Impossible; - type SerializeTupleStruct = Impossible; - type SerializeMap = FlatMapSerializeMap<'a, M>; - type SerializeStruct = FlatMapSerializeStruct<'a, M>; - type SerializeTupleVariant = Impossible; - type SerializeStructVariant = FlatMapSerializeStructVariantAsMapValue<'a, M>; - - fn serialize_bool(self, _: bool) -> Result { - Err(Self::bad_type(Unsupported::Boolean)) - } - - fn serialize_i8(self, _: i8) -> Result { - Err(Self::bad_type(Unsupported::Integer)) - } - - fn serialize_i16(self, _: i16) -> Result { - Err(Self::bad_type(Unsupported::Integer)) - } - - fn serialize_i32(self, _: i32) -> Result { - Err(Self::bad_type(Unsupported::Integer)) - } - - fn serialize_i64(self, _: i64) -> Result { - Err(Self::bad_type(Unsupported::Integer)) - } - - fn serialize_u8(self, _: u8) -> Result { - Err(Self::bad_type(Unsupported::Integer)) - } - - fn serialize_u16(self, _: u16) -> Result { - Err(Self::bad_type(Unsupported::Integer)) - } - - fn serialize_u32(self, _: u32) -> Result { - Err(Self::bad_type(Unsupported::Integer)) - } - - fn serialize_u64(self, _: u64) -> Result { - Err(Self::bad_type(Unsupported::Integer)) - } - - fn serialize_f32(self, _: f32) -> Result { - Err(Self::bad_type(Unsupported::Float)) - } - - fn serialize_f64(self, _: f64) -> Result { - Err(Self::bad_type(Unsupported::Float)) - } - - fn serialize_char(self, _: char) -> Result { - Err(Self::bad_type(Unsupported::Char)) - } - - fn serialize_str(self, _: &str) -> Result { - Err(Self::bad_type(Unsupported::String)) - } - - fn serialize_bytes(self, _: &[u8]) -> 
Result { - Err(Self::bad_type(Unsupported::ByteArray)) - } - - fn serialize_none(self) -> Result { - Ok(()) - } - - fn serialize_some(self, value: &T) -> Result - where - T: Serialize, - { - value.serialize(self) - } - - fn serialize_unit(self) -> Result { - Ok(()) - } - - fn serialize_unit_struct(self, _: &'static str) -> Result { - Err(Self::bad_type(Unsupported::UnitStruct)) - } - - fn serialize_unit_variant( - self, - _: &'static str, - _: u32, - _: &'static str, - ) -> Result { - Err(Self::bad_type(Unsupported::Enum)) - } - - fn serialize_newtype_struct( - self, - _: &'static str, - value: &T, - ) -> Result - where - T: Serialize, - { - value.serialize(self) - } - - fn serialize_newtype_variant( - self, - _: &'static str, - _: u32, - variant: &'static str, - value: &T, - ) -> Result - where - T: Serialize, - { - try!(self.0.serialize_key(variant)); - self.0.serialize_value(value) - } - - fn serialize_seq(self, _: Option) -> Result { - Err(Self::bad_type(Unsupported::Sequence)) - } - - fn serialize_tuple(self, _: usize) -> Result { - Err(Self::bad_type(Unsupported::Tuple)) - } - - fn serialize_tuple_struct( - self, - _: &'static str, - _: usize, - ) -> Result { - Err(Self::bad_type(Unsupported::TupleStruct)) - } - - fn serialize_tuple_variant( - self, - _: &'static str, - _: u32, - _: &'static str, - _: usize, - ) -> Result { - Err(Self::bad_type(Unsupported::Enum)) - } - - fn serialize_map(self, _: Option) -> Result { - Ok(FlatMapSerializeMap(self.0)) - } - - fn serialize_struct( - self, - _: &'static str, - _: usize, - ) -> Result { - Ok(FlatMapSerializeStruct(self.0)) - } - - fn serialize_struct_variant( - self, - _: &'static str, - _: u32, - inner_variant: &'static str, - _: usize, - ) -> Result { - try!(self.0.serialize_key(inner_variant)); - Ok(FlatMapSerializeStructVariantAsMapValue::new( - self.0, - inner_variant, - )) - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -pub struct FlatMapSerializeMap<'a, M: 'a>(&'a mut M); - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'a, M> ser::SerializeMap for FlatMapSerializeMap<'a, M> -where - M: SerializeMap + 'a, -{ - type Ok = (); - type Error = M::Error; - - fn serialize_key(&mut self, key: &T) -> Result<(), Self::Error> - where - T: Serialize, - { - self.0.serialize_key(key) - } - - fn serialize_value(&mut self, value: &T) -> Result<(), Self::Error> - where - T: Serialize, - { - self.0.serialize_value(value) - } - - fn serialize_entry( - &mut self, - key: &K, - value: &V, - ) -> Result<(), Self::Error> - where - K: Serialize, - V: Serialize, - { - self.0.serialize_entry(key, value) - } - - fn end(self) -> Result<(), Self::Error> { - Ok(()) - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -pub struct FlatMapSerializeStruct<'a, M: 'a>(&'a mut M); - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'a, M> ser::SerializeStruct for FlatMapSerializeStruct<'a, M> -where - M: SerializeMap + 'a, -{ - type Ok = (); - type Error = M::Error; - - fn serialize_field( - &mut self, - key: &'static str, - value: &T, - ) -> Result<(), Self::Error> - where - T: Serialize, - { - self.0.serialize_entry(key, value) - } - - fn end(self) -> Result<(), Self::Error> { - Ok(()) - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -pub struct FlatMapSerializeStructVariantAsMapValue<'a, M: 'a> { - map: &'a mut M, - name: &'static str, - fields: Vec<(&'static str, Content)>, -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'a, M> FlatMapSerializeStructVariantAsMapValue<'a, M> -where - M: SerializeMap + 'a, -{ - fn 
new(map: &'a mut M, name: &'static str) -> FlatMapSerializeStructVariantAsMapValue<'a, M> { - FlatMapSerializeStructVariantAsMapValue { - map: map, - name: name, - fields: Vec::new(), - } - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl<'a, M> ser::SerializeStructVariant for FlatMapSerializeStructVariantAsMapValue<'a, M> -where - M: SerializeMap + 'a, -{ - type Ok = (); - type Error = M::Error; - - fn serialize_field( - &mut self, - key: &'static str, - value: &T, - ) -> Result<(), Self::Error> - where - T: Serialize, - { - let value = try!(value.serialize(ContentSerializer::::new())); - self.fields.push((key, value)); - Ok(()) - } - - fn end(self) -> Result<(), Self::Error> { - try!(self - .map - .serialize_value(&Content::Struct(self.name, self.fields))); - Ok(()) - } -} diff --git a/vendor/serde/src/private/size_hint.rs b/vendor/serde/src/private/size_hint.rs deleted file mode 100644 index ca71e616..00000000 --- a/vendor/serde/src/private/size_hint.rs +++ /dev/null @@ -1,21 +0,0 @@ -use lib::*; - -pub fn from_bounds(iter: &I) -> Option -where - I: Iterator, -{ - helper(iter.size_hint()) -} - -#[cfg(any(feature = "std", feature = "alloc"))] -#[inline] -pub fn cautious(hint: Option) -> usize { - cmp::min(hint.unwrap_or(0), 4096) -} - -fn helper(bounds: (usize, Option)) -> Option { - match bounds { - (lower, Some(upper)) if lower == upper => Some(upper), - _ => None, - } -} diff --git a/vendor/serde/src/ser/fmt.rs b/vendor/serde/src/ser/fmt.rs deleted file mode 100644 index e7e09a1b..00000000 --- a/vendor/serde/src/ser/fmt.rs +++ /dev/null @@ -1,174 +0,0 @@ -use lib::*; -use ser::{Error, Impossible, Serialize, Serializer}; - -impl Error for fmt::Error { - fn custom(_msg: T) -> Self { - fmt::Error - } -} - -macro_rules! fmt_primitives { - ($($f:ident: $t:ty,)*) => { - $( - fn $f(self, v: $t) -> fmt::Result { - Display::fmt(&v, self) - } - )* - }; -} - -/// ```edition2018 -/// use serde::Serialize; -/// use std::fmt::{self, Display}; -/// -/// #[derive(Serialize)] -/// #[serde(rename_all = "kebab-case")] -/// pub enum MessageType { -/// StartRequest, -/// EndRequest, -/// } -/// -/// impl Display for MessageType { -/// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { -/// self.serialize(f) -/// } -/// } -/// ``` -impl<'a, 'b> Serializer for &'a mut fmt::Formatter<'b> { - type Ok = (); - type Error = fmt::Error; - type SerializeSeq = Impossible<(), fmt::Error>; - type SerializeTuple = Impossible<(), fmt::Error>; - type SerializeTupleStruct = Impossible<(), fmt::Error>; - type SerializeTupleVariant = Impossible<(), fmt::Error>; - type SerializeMap = Impossible<(), fmt::Error>; - type SerializeStruct = Impossible<(), fmt::Error>; - type SerializeStructVariant = Impossible<(), fmt::Error>; - - fmt_primitives! { - serialize_bool: bool, - serialize_i8: i8, - serialize_i16: i16, - serialize_i32: i32, - serialize_i64: i64, - serialize_u8: u8, - serialize_u16: u16, - serialize_u32: u32, - serialize_u64: u64, - serialize_f32: f32, - serialize_f64: f64, - serialize_char: char, - serialize_str: &str, - serialize_unit_struct: &'static str, - } - - serde_if_integer128! { - fmt_primitives! 
{ - serialize_i128: i128, - serialize_u128: u128, - } - } - - fn serialize_unit_variant( - self, - _name: &'static str, - _variant_index: u32, - variant: &'static str, - ) -> fmt::Result { - Display::fmt(variant, self) - } - - fn serialize_newtype_struct(self, _name: &'static str, value: &T) -> fmt::Result - where - T: Serialize, - { - Serialize::serialize(value, self) - } - - fn serialize_bytes(self, _v: &[u8]) -> fmt::Result { - Err(fmt::Error) - } - - fn serialize_none(self) -> fmt::Result { - Err(fmt::Error) - } - - fn serialize_some(self, _value: &T) -> fmt::Result - where - T: Serialize, - { - Err(fmt::Error) - } - - fn serialize_unit(self) -> fmt::Result { - Err(fmt::Error) - } - - fn serialize_newtype_variant( - self, - _name: &'static str, - _variant_index: u32, - _variant: &'static str, - _value: &T, - ) -> fmt::Result - where - T: Serialize, - { - Err(fmt::Error) - } - - fn serialize_seq(self, _len: Option) -> Result { - Err(fmt::Error) - } - - fn serialize_tuple(self, _len: usize) -> Result { - Err(fmt::Error) - } - - fn serialize_tuple_struct( - self, - _name: &'static str, - _len: usize, - ) -> Result { - Err(fmt::Error) - } - - fn serialize_tuple_variant( - self, - _name: &'static str, - _variant_index: u32, - _variant: &'static str, - _len: usize, - ) -> Result { - Err(fmt::Error) - } - - fn serialize_map(self, _len: Option) -> Result { - Err(fmt::Error) - } - - fn serialize_struct( - self, - _name: &'static str, - _len: usize, - ) -> Result { - Err(fmt::Error) - } - - fn serialize_struct_variant( - self, - _name: &'static str, - _variant_index: u32, - _variant: &'static str, - _len: usize, - ) -> Result { - Err(fmt::Error) - } - - fn collect_str(self, value: &T) -> fmt::Result - where - T: Display, - { - Display::fmt(value, self) - } -} diff --git a/vendor/serde/src/ser/impls.rs b/vendor/serde/src/ser/impls.rs deleted file mode 100644 index c254ac65..00000000 --- a/vendor/serde/src/ser/impls.rs +++ /dev/null @@ -1,935 +0,0 @@ -use lib::*; - -use ser::{Error, Serialize, SerializeTuple, Serializer}; - -//////////////////////////////////////////////////////////////////////////////// - -macro_rules! primitive_impl { - ($ty:ident, $method:ident $($cast:tt)*) => { - impl Serialize for $ty { - #[inline] - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.$method(*self $($cast)*) - } - } - } -} - -primitive_impl!(bool, serialize_bool); -primitive_impl!(isize, serialize_i64 as i64); -primitive_impl!(i8, serialize_i8); -primitive_impl!(i16, serialize_i16); -primitive_impl!(i32, serialize_i32); -primitive_impl!(i64, serialize_i64); -primitive_impl!(usize, serialize_u64 as u64); -primitive_impl!(u8, serialize_u8); -primitive_impl!(u16, serialize_u16); -primitive_impl!(u32, serialize_u32); -primitive_impl!(u64, serialize_u64); -primitive_impl!(f32, serialize_f32); -primitive_impl!(f64, serialize_f64); -primitive_impl!(char, serialize_char); - -serde_if_integer128! 
{ - primitive_impl!(i128, serialize_i128); - primitive_impl!(u128, serialize_u128); -} - -//////////////////////////////////////////////////////////////////////////////// - -impl Serialize for str { - #[inline] - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_str(self) - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -impl Serialize for String { - #[inline] - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_str(self) - } -} - -impl<'a> Serialize for fmt::Arguments<'a> { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.collect_str(self) - } -} - -//////////////////////////////////////////////////////////////////////////////// - -#[cfg(feature = "std")] -impl Serialize for CStr { - #[inline] - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_bytes(self.to_bytes()) - } -} - -#[cfg(feature = "std")] -impl Serialize for CString { - #[inline] - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_bytes(self.to_bytes()) - } -} - -//////////////////////////////////////////////////////////////////////////////// - -impl Serialize for Option -where - T: Serialize, -{ - #[inline] - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - match *self { - Some(ref value) => serializer.serialize_some(value), - None => serializer.serialize_none(), - } - } -} - -//////////////////////////////////////////////////////////////////////////////// - -impl Serialize for PhantomData { - #[inline] - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_unit_struct("PhantomData") - } -} - -//////////////////////////////////////////////////////////////////////////////// - -// Does not require T: Serialize. -impl Serialize for [T; 0] { - #[inline] - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - try!(serializer.serialize_tuple(0)).end() - } -} - -macro_rules! array_impls { - ($($len:tt)+) => { - $( - impl Serialize for [T; $len] - where - T: Serialize, - { - #[inline] - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - let mut seq = try!(serializer.serialize_tuple($len)); - for e in self { - try!(seq.serialize_element(e)); - } - seq.end() - } - } - )+ - } -} - -array_impls! { - 01 02 03 04 05 06 07 08 09 10 - 11 12 13 14 15 16 17 18 19 20 - 21 22 23 24 25 26 27 28 29 30 - 31 32 -} - -//////////////////////////////////////////////////////////////////////////////// - -impl Serialize for [T] -where - T: Serialize, -{ - #[inline] - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.collect_seq(self) - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -macro_rules! 
seq_impl { - ($ty:ident < T $(: $tbound1:ident $(+ $tbound2:ident)*)* $(, $typaram:ident : $bound:ident)* >) => { - impl Serialize for $ty - where - T: Serialize $(+ $tbound1 $(+ $tbound2)*)*, - $($typaram: $bound,)* - { - #[inline] - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.collect_seq(self) - } - } - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -seq_impl!(BinaryHeap); - -#[cfg(any(feature = "std", feature = "alloc"))] -seq_impl!(BTreeSet); - -#[cfg(feature = "std")] -seq_impl!(HashSet); - -#[cfg(any(feature = "std", feature = "alloc"))] -seq_impl!(LinkedList); - -#[cfg(any(feature = "std", feature = "alloc"))] -seq_impl!(Vec); - -#[cfg(any(feature = "std", feature = "alloc"))] -seq_impl!(VecDeque); - -//////////////////////////////////////////////////////////////////////////////// - -impl Serialize for Range -where - Idx: Serialize, -{ - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - use super::SerializeStruct; - let mut state = try!(serializer.serialize_struct("Range", 2)); - try!(state.serialize_field("start", &self.start)); - try!(state.serialize_field("end", &self.end)); - state.end() - } -} - -//////////////////////////////////////////////////////////////////////////////// - -#[cfg(range_inclusive)] -impl Serialize for RangeInclusive -where - Idx: Serialize, -{ - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - use super::SerializeStruct; - let mut state = try!(serializer.serialize_struct("RangeInclusive", 2)); - try!(state.serialize_field("start", &self.start())); - try!(state.serialize_field("end", &self.end())); - state.end() - } -} - -//////////////////////////////////////////////////////////////////////////////// - -#[cfg(any(ops_bound, collections_bound))] -impl Serialize for Bound -where - T: Serialize, -{ - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - match *self { - Bound::Unbounded => serializer.serialize_unit_variant("Bound", 0, "Unbounded"), - Bound::Included(ref value) => { - serializer.serialize_newtype_variant("Bound", 1, "Included", value) - } - Bound::Excluded(ref value) => { - serializer.serialize_newtype_variant("Bound", 2, "Excluded", value) - } - } - } -} - -//////////////////////////////////////////////////////////////////////////////// - -impl Serialize for () { - #[inline] - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_unit() - } -} - -#[cfg(feature = "unstable")] -impl Serialize for ! { - fn serialize(&self, _serializer: S) -> Result - where - S: Serializer, - { - *self - } -} - -//////////////////////////////////////////////////////////////////////////////// - -macro_rules! tuple_impls { - ($($len:expr => ($($n:tt $name:ident)+))+) => { - $( - impl<$($name),+> Serialize for ($($name,)+) - where - $($name: Serialize,)+ - { - #[inline] - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - let mut tuple = try!(serializer.serialize_tuple($len)); - $( - try!(tuple.serialize_element(&self.$n)); - )+ - tuple.end() - } - } - )+ - } -} - -tuple_impls! 
{ - 1 => (0 T0) - 2 => (0 T0 1 T1) - 3 => (0 T0 1 T1 2 T2) - 4 => (0 T0 1 T1 2 T2 3 T3) - 5 => (0 T0 1 T1 2 T2 3 T3 4 T4) - 6 => (0 T0 1 T1 2 T2 3 T3 4 T4 5 T5) - 7 => (0 T0 1 T1 2 T2 3 T3 4 T4 5 T5 6 T6) - 8 => (0 T0 1 T1 2 T2 3 T3 4 T4 5 T5 6 T6 7 T7) - 9 => (0 T0 1 T1 2 T2 3 T3 4 T4 5 T5 6 T6 7 T7 8 T8) - 10 => (0 T0 1 T1 2 T2 3 T3 4 T4 5 T5 6 T6 7 T7 8 T8 9 T9) - 11 => (0 T0 1 T1 2 T2 3 T3 4 T4 5 T5 6 T6 7 T7 8 T8 9 T9 10 T10) - 12 => (0 T0 1 T1 2 T2 3 T3 4 T4 5 T5 6 T6 7 T7 8 T8 9 T9 10 T10 11 T11) - 13 => (0 T0 1 T1 2 T2 3 T3 4 T4 5 T5 6 T6 7 T7 8 T8 9 T9 10 T10 11 T11 12 T12) - 14 => (0 T0 1 T1 2 T2 3 T3 4 T4 5 T5 6 T6 7 T7 8 T8 9 T9 10 T10 11 T11 12 T12 13 T13) - 15 => (0 T0 1 T1 2 T2 3 T3 4 T4 5 T5 6 T6 7 T7 8 T8 9 T9 10 T10 11 T11 12 T12 13 T13 14 T14) - 16 => (0 T0 1 T1 2 T2 3 T3 4 T4 5 T5 6 T6 7 T7 8 T8 9 T9 10 T10 11 T11 12 T12 13 T13 14 T14 15 T15) -} - -//////////////////////////////////////////////////////////////////////////////// - -#[cfg(any(feature = "std", feature = "alloc"))] -macro_rules! map_impl { - ($ty:ident < K $(: $kbound1:ident $(+ $kbound2:ident)*)*, V $(, $typaram:ident : $bound:ident)* >) => { - impl Serialize for $ty - where - K: Serialize $(+ $kbound1 $(+ $kbound2)*)*, - V: Serialize, - $($typaram: $bound,)* - { - #[inline] - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.collect_map(self) - } - } - } -} - -#[cfg(any(feature = "std", feature = "alloc"))] -map_impl!(BTreeMap); - -#[cfg(feature = "std")] -map_impl!(HashMap); - -//////////////////////////////////////////////////////////////////////////////// - -macro_rules! deref_impl { - ( - $(#[doc = $doc:tt])* - <$($desc:tt)+ - ) => { - $(#[doc = $doc])* - impl <$($desc)+ { - #[inline] - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - (**self).serialize(serializer) - } - } - }; -} - -deref_impl!(<'a, T: ?Sized> Serialize for &'a T where T: Serialize); -deref_impl!(<'a, T: ?Sized> Serialize for &'a mut T where T: Serialize); - -#[cfg(any(feature = "std", feature = "alloc"))] -deref_impl!( Serialize for Box where T: Serialize); - -#[cfg(all(feature = "rc", any(feature = "std", feature = "alloc")))] -deref_impl! { - /// This impl requires the [`"rc"`] Cargo feature of Serde. - /// - /// Serializing a data structure containing `Rc` will serialize a copy of - /// the contents of the `Rc` each time the `Rc` is referenced within the - /// data structure. Serialization will not attempt to deduplicate these - /// repeated data. - /// - /// [`"rc"`]: https://serde.rs/feature-flags.html#-features-rc - Serialize for Rc where T: Serialize -} - -#[cfg(all(feature = "rc", any(feature = "std", feature = "alloc")))] -deref_impl! { - /// This impl requires the [`"rc"`] Cargo feature of Serde. - /// - /// Serializing a data structure containing `Arc` will serialize a copy of - /// the contents of the `Arc` each time the `Arc` is referenced within the - /// data structure. Serialization will not attempt to deduplicate these - /// repeated data. - /// - /// [`"rc"`]: https://serde.rs/feature-flags.html#-features-rc - Serialize for Arc where T: Serialize -} - -#[cfg(any(feature = "std", feature = "alloc"))] -deref_impl!(<'a, T: ?Sized> Serialize for Cow<'a, T> where T: Serialize + ToOwned); - -//////////////////////////////////////////////////////////////////////////////// - -/// This impl requires the [`"rc"`] Cargo feature of Serde. 
-/// -/// [`"rc"`]: https://serde.rs/feature-flags.html#-features-rc -#[cfg(all(feature = "rc", any(feature = "std", feature = "alloc")))] -impl Serialize for RcWeak -where - T: Serialize, -{ - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - self.upgrade().serialize(serializer) - } -} - -/// This impl requires the [`"rc"`] Cargo feature of Serde. -/// -/// [`"rc"`]: https://serde.rs/feature-flags.html#-features-rc -#[cfg(all(feature = "rc", any(feature = "std", feature = "alloc")))] -impl Serialize for ArcWeak -where - T: Serialize, -{ - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - self.upgrade().serialize(serializer) - } -} - -//////////////////////////////////////////////////////////////////////////////// - -macro_rules! nonzero_integers { - ( $( $T: ident, )+ ) => { - $( - #[cfg(num_nonzero)] - impl Serialize for num::$T { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - self.get().serialize(serializer) - } - } - )+ - } -} - -nonzero_integers! { - NonZeroU8, - NonZeroU16, - NonZeroU32, - NonZeroU64, - NonZeroUsize, -} - -#[cfg(num_nonzero_signed)] -nonzero_integers! { - NonZeroI8, - NonZeroI16, - NonZeroI32, - NonZeroI64, - NonZeroIsize, -} - -// Currently 128-bit integers do not work on Emscripten targets so we need an -// additional `#[cfg]` -serde_if_integer128! { - nonzero_integers! { - NonZeroU128, - } - - #[cfg(num_nonzero_signed)] - nonzero_integers! { - NonZeroI128, - } -} - -impl Serialize for Cell -where - T: Serialize + Copy, -{ - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - self.get().serialize(serializer) - } -} - -impl Serialize for RefCell -where - T: Serialize, -{ - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - match self.try_borrow() { - Ok(value) => value.serialize(serializer), - Err(_) => Err(S::Error::custom("already mutably borrowed")), - } - } -} - -#[cfg(feature = "std")] -impl Serialize for Mutex -where - T: Serialize, -{ - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - match self.lock() { - Ok(locked) => locked.serialize(serializer), - Err(_) => Err(S::Error::custom("lock poison error while serializing")), - } - } -} - -#[cfg(feature = "std")] -impl Serialize for RwLock -where - T: Serialize, -{ - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - match self.read() { - Ok(locked) => locked.serialize(serializer), - Err(_) => Err(S::Error::custom("lock poison error while serializing")), - } - } -} - -//////////////////////////////////////////////////////////////////////////////// - -impl Serialize for Result -where - T: Serialize, - E: Serialize, -{ - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - match *self { - Result::Ok(ref value) => serializer.serialize_newtype_variant("Result", 0, "Ok", value), - Result::Err(ref value) => { - serializer.serialize_newtype_variant("Result", 1, "Err", value) - } - } - } -} - -//////////////////////////////////////////////////////////////////////////////// - -#[cfg(any(core_duration, feature = "std"))] -impl Serialize for Duration { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - use super::SerializeStruct; - let mut state = try!(serializer.serialize_struct("Duration", 2)); - try!(state.serialize_field("secs", &self.as_secs())); - try!(state.serialize_field("nanos", &self.subsec_nanos())); - state.end() - } -} - 
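
The doc comment on the deleted `Rc<T>` impl states that shared pointers are serialized by value, with no attempt to deduplicate repeated references. A minimal sketch of that behavior (not part of this patch; assumes serde built with the `"rc"` feature plus `serde_json`):

```rust
use std::rc::Rc;

use serde::Serialize;

#[derive(Serialize)]
struct Node {
    left: Rc<String>,
    right: Rc<String>,
}

fn main() {
    let shared = Rc::new("shared".to_string());
    let node = Node {
        left: Rc::clone(&shared),
        right: shared,
    };
    // Each Rc reference serializes a fresh copy of the contents:
    // {"left":"shared","right":"shared"}
    println!("{}", serde_json::to_string(&node).unwrap());
}
```
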
-//////////////////////////////////////////////////////////////////////////////// - -#[cfg(feature = "std")] -impl Serialize for SystemTime { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - use super::SerializeStruct; - let duration_since_epoch = self - .duration_since(UNIX_EPOCH) - .map_err(|_| S::Error::custom("SystemTime must be later than UNIX_EPOCH"))?; - let mut state = try!(serializer.serialize_struct("SystemTime", 2)); - try!(state.serialize_field("secs_since_epoch", &duration_since_epoch.as_secs())); - try!(state.serialize_field("nanos_since_epoch", &duration_since_epoch.subsec_nanos())); - state.end() - } -} - -//////////////////////////////////////////////////////////////////////////////// - -/// Serialize a value that implements `Display` as a string, when that string is -/// statically known to never have more than a constant `MAX_LEN` bytes. -/// -/// Panics if the `Display` impl tries to write more than `MAX_LEN` bytes. -#[cfg(feature = "std")] -macro_rules! serialize_display_bounded_length { - ($value:expr, $max:expr, $serializer:expr) => {{ - let mut buffer = [0u8; $max]; - let remaining_len = { - let mut remaining = &mut buffer[..]; - write!(remaining, "{}", $value).unwrap(); - remaining.len() - }; - let written_len = buffer.len() - remaining_len; - let written = &buffer[..written_len]; - - // write! only provides fmt::Formatter to Display implementations, which - // has methods write_str and write_char but no method to write arbitrary - // bytes. Therefore `written` must be valid UTF-8. - let written_str = str::from_utf8(written).expect("must be valid UTF-8"); - $serializer.serialize_str(written_str) - }}; -} - -#[cfg(feature = "std")] -impl Serialize for net::IpAddr { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - if serializer.is_human_readable() { - match *self { - net::IpAddr::V4(ref a) => a.serialize(serializer), - net::IpAddr::V6(ref a) => a.serialize(serializer), - } - } else { - match *self { - net::IpAddr::V4(ref a) => { - serializer.serialize_newtype_variant("IpAddr", 0, "V4", a) - } - net::IpAddr::V6(ref a) => { - serializer.serialize_newtype_variant("IpAddr", 1, "V6", a) - } - } - } - } -} - -#[cfg(feature = "std")] -const DEC_DIGITS_LUT: &'static [u8] = b"\ - 0001020304050607080910111213141516171819\ - 2021222324252627282930313233343536373839\ - 4041424344454647484950515253545556575859\ - 6061626364656667686970717273747576777879\ - 8081828384858687888990919293949596979899"; - -#[cfg(feature = "std")] -#[inline] -fn format_u8(mut n: u8, out: &mut [u8]) -> usize { - if n >= 100 { - let d1 = ((n % 100) << 1) as usize; - n /= 100; - out[0] = b'0' + n; - out[1] = DEC_DIGITS_LUT[d1]; - out[2] = DEC_DIGITS_LUT[d1 + 1]; - 3 - } else if n >= 10 { - let d1 = (n << 1) as usize; - out[0] = DEC_DIGITS_LUT[d1]; - out[1] = DEC_DIGITS_LUT[d1 + 1]; - 2 - } else { - out[0] = b'0' + n; - 1 - } -} - -#[cfg(feature = "std")] -#[test] -fn test_format_u8() { - let mut i = 0u8; - - loop { - let mut buf = [0u8; 3]; - let written = format_u8(i, &mut buf); - assert_eq!(i.to_string().as_bytes(), &buf[..written]); - - match i.checked_add(1) { - Some(next) => i = next, - None => break, - } - } -} - -#[cfg(feature = "std")] -impl Serialize for net::Ipv4Addr { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - if serializer.is_human_readable() { - const MAX_LEN: usize = 15; - debug_assert_eq!(MAX_LEN, "101.102.103.104".len()); - let mut buf = [b'.'; MAX_LEN]; - let mut written = 
format_u8(self.octets()[0], &mut buf); - for oct in &self.octets()[1..] { - // Skip over delimiters that we initialized buf with - written += format_u8(*oct, &mut buf[written + 1..]) + 1; - } - // We've only written ASCII bytes to the buffer, so it is valid UTF-8 - serializer.serialize_str(unsafe { str::from_utf8_unchecked(&buf[..written]) }) - } else { - self.octets().serialize(serializer) - } - } -} - -#[cfg(feature = "std")] -impl Serialize for net::Ipv6Addr { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - if serializer.is_human_readable() { - const MAX_LEN: usize = 39; - debug_assert_eq!(MAX_LEN, "1001:1002:1003:1004:1005:1006:1007:1008".len()); - serialize_display_bounded_length!(self, MAX_LEN, serializer) - } else { - self.octets().serialize(serializer) - } - } -} - -#[cfg(feature = "std")] -impl Serialize for net::SocketAddr { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - if serializer.is_human_readable() { - match *self { - net::SocketAddr::V4(ref addr) => addr.serialize(serializer), - net::SocketAddr::V6(ref addr) => addr.serialize(serializer), - } - } else { - match *self { - net::SocketAddr::V4(ref addr) => { - serializer.serialize_newtype_variant("SocketAddr", 0, "V4", addr) - } - net::SocketAddr::V6(ref addr) => { - serializer.serialize_newtype_variant("SocketAddr", 1, "V6", addr) - } - } - } - } -} - -#[cfg(feature = "std")] -impl Serialize for net::SocketAddrV4 { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - if serializer.is_human_readable() { - const MAX_LEN: usize = 21; - debug_assert_eq!(MAX_LEN, "101.102.103.104:65000".len()); - serialize_display_bounded_length!(self, MAX_LEN, serializer) - } else { - (self.ip(), self.port()).serialize(serializer) - } - } -} - -#[cfg(feature = "std")] -impl Serialize for net::SocketAddrV6 { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - if serializer.is_human_readable() { - const MAX_LEN: usize = 58; - debug_assert_eq!( - MAX_LEN, - "[1001:1002:1003:1004:1005:1006:1007:1008%4294967295]:65000".len() - ); - serialize_display_bounded_length!(self, MAX_LEN, serializer) - } else { - (self.ip(), self.port()).serialize(serializer) - } - } -} - -//////////////////////////////////////////////////////////////////////////////// - -#[cfg(feature = "std")] -impl Serialize for Path { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - match self.to_str() { - Some(s) => s.serialize(serializer), - None => Err(Error::custom("path contains invalid UTF-8 characters")), - } - } -} - -#[cfg(feature = "std")] -impl Serialize for PathBuf { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - self.as_path().serialize(serializer) - } -} - -#[cfg(all(feature = "std", any(unix, windows)))] -impl Serialize for OsStr { - #[cfg(unix)] - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - use std::os::unix::ffi::OsStrExt; - serializer.serialize_newtype_variant("OsString", 0, "Unix", self.as_bytes()) - } - - #[cfg(windows)] - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - use std::os::windows::ffi::OsStrExt; - let val = self.encode_wide().collect::>(); - serializer.serialize_newtype_variant("OsString", 1, "Windows", &val) - } -} - -#[cfg(all(feature = "std", any(unix, windows)))] -impl Serialize for OsString { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - self.as_os_str().serialize(serializer) - } -} - 
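
The deleted `net::Ipv4Addr` impl branches on `Serializer::is_human_readable`: text formats receive the dotted-decimal string, while binary formats receive the raw octets. A minimal sketch of the human-readable side (not part of this patch; assumes `serde_json`):

```rust
use std::net::Ipv4Addr;

fn main() {
    let addr = Ipv4Addr::new(101, 102, 103, 104);
    // serde_json reports is_human_readable() == true, so the address is
    // serialized as a string; this prints "101.102.103.104" (quoted JSON).
    println!("{}", serde_json::to_string(&addr).unwrap());
    // A non-human-readable format (e.g. a compact binary one) would instead
    // be handed the four octets via `self.octets().serialize(serializer)`.
}
```
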
-//////////////////////////////////////////////////////////////////////////////// - -impl Serialize for Wrapping -where - T: Serialize, -{ - #[inline] - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - self.0.serialize(serializer) - } -} - -#[cfg(core_reverse)] -impl Serialize for Reverse -where - T: Serialize, -{ - #[inline] - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - self.0.serialize(serializer) - } -} - -//////////////////////////////////////////////////////////////////////////////// - -#[cfg(all(feature = "std", std_atomic))] -macro_rules! atomic_impl { - ($($ty:ident)*) => { - $( - impl Serialize for $ty { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - self.load(Ordering::SeqCst).serialize(serializer) - } - } - )* - } -} - -#[cfg(all(feature = "std", std_atomic))] -atomic_impl! { - AtomicBool - AtomicI8 AtomicI16 AtomicI32 AtomicIsize - AtomicU8 AtomicU16 AtomicU32 AtomicUsize -} - -#[cfg(all(feature = "std", std_atomic64))] -atomic_impl! { - AtomicI64 AtomicU64 -} diff --git a/vendor/serde/src/ser/impossible.rs b/vendor/serde/src/ser/impossible.rs deleted file mode 100644 index e8df9ca7..00000000 --- a/vendor/serde/src/ser/impossible.rs +++ /dev/null @@ -1,216 +0,0 @@ -//! This module contains `Impossible` serializer and its implementations. - -use lib::*; - -use ser::{ - self, Serialize, SerializeMap, SerializeSeq, SerializeStruct, SerializeStructVariant, - SerializeTuple, SerializeTupleStruct, SerializeTupleVariant, -}; - -/// Helper type for implementing a `Serializer` that does not support -/// serializing one of the compound types. -/// -/// This type cannot be instantiated, but implements every one of the traits -/// corresponding to the [`Serializer`] compound types: [`SerializeSeq`], -/// [`SerializeTuple`], [`SerializeTupleStruct`], [`SerializeTupleVariant`], -/// [`SerializeMap`], [`SerializeStruct`], and [`SerializeStructVariant`]. -/// -/// ```edition2018 -/// # use serde::ser::{Serializer, Impossible}; -/// # use serde::__private::doc::Error; -/// # -/// # struct MySerializer; -/// # -/// impl Serializer for MySerializer { -/// type Ok = (); -/// type Error = Error; -/// -/// type SerializeSeq = Impossible<(), Error>; -/// /* other associated types */ -/// -/// /// This data format does not support serializing sequences. -/// fn serialize_seq(self, -/// len: Option) -/// -> Result { -/// // Given Impossible cannot be instantiated, the only -/// // thing we can do here is to return an error. -/// # stringify! { -/// Err(...) -/// # }; -/// # unimplemented!() -/// } -/// -/// /* other Serializer methods */ -/// # serde::__serialize_unimplemented! 
{ -/// # bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str bytes none some -/// # unit unit_struct unit_variant newtype_struct newtype_variant -/// # tuple tuple_struct tuple_variant map struct struct_variant -/// # } -/// } -/// ``` -/// -/// [`Serializer`]: trait.Serializer.html -/// [`SerializeSeq`]: trait.SerializeSeq.html -/// [`SerializeTuple`]: trait.SerializeTuple.html -/// [`SerializeTupleStruct`]: trait.SerializeTupleStruct.html -/// [`SerializeTupleVariant`]: trait.SerializeTupleVariant.html -/// [`SerializeMap`]: trait.SerializeMap.html -/// [`SerializeStruct`]: trait.SerializeStruct.html -/// [`SerializeStructVariant`]: trait.SerializeStructVariant.html -pub struct Impossible { - void: Void, - ok: PhantomData, - error: PhantomData, -} - -enum Void {} - -impl SerializeSeq for Impossible -where - Error: ser::Error, -{ - type Ok = Ok; - type Error = Error; - - fn serialize_element(&mut self, value: &T) -> Result<(), Error> - where - T: Serialize, - { - let _ = value; - match self.void {} - } - - fn end(self) -> Result { - match self.void {} - } -} - -impl SerializeTuple for Impossible -where - Error: ser::Error, -{ - type Ok = Ok; - type Error = Error; - - fn serialize_element(&mut self, value: &T) -> Result<(), Error> - where - T: Serialize, - { - let _ = value; - match self.void {} - } - - fn end(self) -> Result { - match self.void {} - } -} - -impl SerializeTupleStruct for Impossible -where - Error: ser::Error, -{ - type Ok = Ok; - type Error = Error; - - fn serialize_field(&mut self, value: &T) -> Result<(), Error> - where - T: Serialize, - { - let _ = value; - match self.void {} - } - - fn end(self) -> Result { - match self.void {} - } -} - -impl SerializeTupleVariant for Impossible -where - Error: ser::Error, -{ - type Ok = Ok; - type Error = Error; - - fn serialize_field(&mut self, value: &T) -> Result<(), Error> - where - T: Serialize, - { - let _ = value; - match self.void {} - } - - fn end(self) -> Result { - match self.void {} - } -} - -impl SerializeMap for Impossible -where - Error: ser::Error, -{ - type Ok = Ok; - type Error = Error; - - fn serialize_key(&mut self, key: &T) -> Result<(), Error> - where - T: Serialize, - { - let _ = key; - match self.void {} - } - - fn serialize_value(&mut self, value: &T) -> Result<(), Error> - where - T: Serialize, - { - let _ = value; - match self.void {} - } - - fn end(self) -> Result { - match self.void {} - } -} - -impl SerializeStruct for Impossible -where - Error: ser::Error, -{ - type Ok = Ok; - type Error = Error; - - fn serialize_field(&mut self, key: &'static str, value: &T) -> Result<(), Error> - where - T: Serialize, - { - let _ = key; - let _ = value; - match self.void {} - } - - fn end(self) -> Result { - match self.void {} - } -} - -impl SerializeStructVariant for Impossible -where - Error: ser::Error, -{ - type Ok = Ok; - type Error = Error; - - fn serialize_field(&mut self, key: &'static str, value: &T) -> Result<(), Error> - where - T: Serialize, - { - let _ = key; - let _ = value; - match self.void {} - } - - fn end(self) -> Result { - match self.void {} - } -} diff --git a/vendor/serde/src/ser/mod.rs b/vendor/serde/src/ser/mod.rs deleted file mode 100644 index d5bca09f..00000000 --- a/vendor/serde/src/ser/mod.rs +++ /dev/null @@ -1,1986 +0,0 @@ -//! Generic data structure serialization framework. -//! -//! The two most important traits in this module are [`Serialize`] and -//! [`Serializer`]. -//! -//! - **A type that implements `Serialize` is a data structure** that can be -//! 
serialized to any data format supported by Serde, and conversely -//! - **A type that implements `Serializer` is a data format** that can -//! serialize any data structure supported by Serde. -//! -//! # The Serialize trait -//! -//! Serde provides [`Serialize`] implementations for many Rust primitive and -//! standard library types. The complete list is below. All of these can be -//! serialized using Serde out of the box. -//! -//! Additionally, Serde provides a procedural macro called [`serde_derive`] to -//! automatically generate [`Serialize`] implementations for structs and enums -//! in your program. See the [derive section of the manual] for how to use this. -//! -//! In rare cases it may be necessary to implement [`Serialize`] manually for -//! some type in your program. See the [Implementing `Serialize`] section of the -//! manual for more about this. -//! -//! Third-party crates may provide [`Serialize`] implementations for types that -//! they expose. For example the [`linked-hash-map`] crate provides a -//! [`LinkedHashMap`] type that is serializable by Serde because the crate -//! provides an implementation of [`Serialize`] for it. -//! -//! # The Serializer trait -//! -//! [`Serializer`] implementations are provided by third-party crates, for -//! example [`serde_json`], [`serde_yaml`] and [`bincode`]. -//! -//! A partial list of well-maintained formats is given on the [Serde -//! website][data formats]. -//! -//! # Implementations of Serialize provided by Serde -//! -//! - **Primitive types**: -//! - bool -//! - i8, i16, i32, i64, i128, isize -//! - u8, u16, u32, u64, u128, usize -//! - f32, f64 -//! - char -//! - str -//! - &T and &mut T -//! - **Compound types**: -//! - \[T\] -//! - \[T; 0\] through \[T; 32\] -//! - tuples up to size 16 -//! - **Common standard library types**: -//! - String -//! - Option\ -//! - Result\ -//! - PhantomData\ -//! - **Wrapper types**: -//! - Box\ -//! - Cow\<'a, T\> -//! - Cell\ -//! - RefCell\ -//! - Mutex\ -//! - RwLock\ -//! - Rc\ *(if* features = ["rc"] *is enabled)* -//! - Arc\ *(if* features = ["rc"] *is enabled)* -//! - **Collection types**: -//! - BTreeMap\ -//! - BTreeSet\ -//! - BinaryHeap\ -//! - HashMap\ -//! - HashSet\ -//! - LinkedList\ -//! - VecDeque\ -//! - Vec\ -//! - **FFI types**: -//! - CStr -//! - CString -//! - OsStr -//! - OsString -//! - **Miscellaneous standard library types**: -//! - Duration -//! - SystemTime -//! - Path -//! - PathBuf -//! - Range\ -//! - RangeInclusive\ -//! - Bound\ -//! - num::NonZero* -//! - `!` *(unstable)* -//! - **Net types**: -//! - IpAddr -//! - Ipv4Addr -//! - Ipv6Addr -//! - SocketAddr -//! - SocketAddrV4 -//! - SocketAddrV6 -//! -//! [Implementing `Serialize`]: https://serde.rs/impl-serialize.html -//! [`LinkedHashMap`]: https://docs.rs/linked-hash-map/*/linked_hash_map/struct.LinkedHashMap.html -//! [`Serialize`]: ../trait.Serialize.html -//! [`Serializer`]: ../trait.Serializer.html -//! [`bincode`]: https://github.com/servo/bincode -//! [`linked-hash-map`]: https://crates.io/crates/linked-hash-map -//! [`serde_derive`]: https://crates.io/crates/serde_derive -//! [`serde_json`]: https://github.com/serde-rs/json -//! [`serde_yaml`]: https://github.com/dtolnay/serde-yaml -//! [derive section of the manual]: https://serde.rs/derive.html -//! 
[data formats]: https://serde.rs/#data-formats - -use lib::*; - -mod fmt; -mod impls; -mod impossible; - -pub use self::impossible::Impossible; - -#[cfg(feature = "std")] -#[doc(no_inline)] -pub use std::error::Error as StdError; -#[cfg(not(feature = "std"))] -#[doc(no_inline)] -pub use std_error::Error as StdError; - -//////////////////////////////////////////////////////////////////////////////// - -macro_rules! declare_error_trait { - (Error: Sized $(+ $($supertrait:ident)::+)*) => { - /// Trait used by `Serialize` implementations to generically construct - /// errors belonging to the `Serializer` against which they are - /// currently running. - /// - /// # Example implementation - /// - /// The [example data format] presented on the website shows an error - /// type appropriate for a basic JSON data format. - /// - /// [example data format]: https://serde.rs/data-format.html - pub trait Error: Sized $(+ $($supertrait)::+)* { - /// Used when a [`Serialize`] implementation encounters any error - /// while serializing a type. - /// - /// The message should not be capitalized and should not end with a - /// period. - /// - /// For example, a filesystem [`Path`] may refuse to serialize - /// itself if it contains invalid UTF-8 data. - /// - /// ```edition2018 - /// # struct Path; - /// # - /// # impl Path { - /// # fn to_str(&self) -> Option<&str> { - /// # unimplemented!() - /// # } - /// # } - /// # - /// use serde::ser::{self, Serialize, Serializer}; - /// - /// impl Serialize for Path { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// match self.to_str() { - /// Some(s) => serializer.serialize_str(s), - /// None => Err(ser::Error::custom("path contains invalid UTF-8 characters")), - /// } - /// } - /// } - /// ``` - /// - /// [`Path`]: https://doc.rust-lang.org/std/path/struct.Path.html - /// [`Serialize`]: ../trait.Serialize.html - fn custom(msg: T) -> Self - where - T: Display; - } - } -} - -#[cfg(feature = "std")] -declare_error_trait!(Error: Sized + StdError); - -#[cfg(not(feature = "std"))] -declare_error_trait!(Error: Sized + Debug + Display); - -//////////////////////////////////////////////////////////////////////////////// - -/// A **data structure** that can be serialized into any data format supported -/// by Serde. -/// -/// Serde provides `Serialize` implementations for many Rust primitive and -/// standard library types. The complete list is [here][ser]. All of these can -/// be serialized using Serde out of the box. -/// -/// Additionally, Serde provides a procedural macro called [`serde_derive`] to -/// automatically generate `Serialize` implementations for structs and enums in -/// your program. See the [derive section of the manual] for how to use this. -/// -/// In rare cases it may be necessary to implement `Serialize` manually for some -/// type in your program. See the [Implementing `Serialize`] section of the -/// manual for more about this. -/// -/// Third-party crates may provide `Serialize` implementations for types that -/// they expose. For example the [`linked-hash-map`] crate provides a -/// [`LinkedHashMap`] type that is serializable by Serde because the crate -/// provides an implementation of `Serialize` for it. 
-/// -/// [Implementing `Serialize`]: https://serde.rs/impl-serialize.html -/// [`LinkedHashMap`]: https://docs.rs/linked-hash-map/*/linked_hash_map/struct.LinkedHashMap.html -/// [`linked-hash-map`]: https://crates.io/crates/linked-hash-map -/// [`serde_derive`]: https://crates.io/crates/serde_derive -/// [derive section of the manual]: https://serde.rs/derive.html -/// [ser]: https://docs.serde.rs/serde/ser/index.html -pub trait Serialize { - /// Serialize this value into the given Serde serializer. - /// - /// See the [Implementing `Serialize`] section of the manual for more - /// information about how to implement this method. - /// - /// ```edition2018 - /// use serde::ser::{Serialize, SerializeStruct, Serializer}; - /// - /// struct Person { - /// name: String, - /// age: u8, - /// phones: Vec, - /// } - /// - /// // This is what #[derive(Serialize)] would generate. - /// impl Serialize for Person { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// let mut s = serializer.serialize_struct("Person", 3)?; - /// s.serialize_field("name", &self.name)?; - /// s.serialize_field("age", &self.age)?; - /// s.serialize_field("phones", &self.phones)?; - /// s.end() - /// } - /// } - /// ``` - /// - /// [Implementing `Serialize`]: https://serde.rs/impl-serialize.html - fn serialize(&self, serializer: S) -> Result - where - S: Serializer; -} - -//////////////////////////////////////////////////////////////////////////////// - -/// A **data format** that can serialize any data structure supported by Serde. -/// -/// The role of this trait is to define the serialization half of the [Serde -/// data model], which is a way to categorize every Rust data structure into one -/// of 29 possible types. Each method of the `Serializer` trait corresponds to -/// one of the types of the data model. -/// -/// Implementations of `Serialize` map themselves into this data model by -/// invoking exactly one of the `Serializer` methods. -/// -/// The types that make up the Serde data model are: -/// -/// - **14 primitive types** -/// - bool -/// - i8, i16, i32, i64, i128 -/// - u8, u16, u32, u64, u128 -/// - f32, f64 -/// - char -/// - **string** -/// - UTF-8 bytes with a length and no null terminator. -/// - When serializing, all strings are handled equally. When deserializing, -/// there are three flavors of strings: transient, owned, and borrowed. -/// - **byte array** - \[u8\] -/// - Similar to strings, during deserialization byte arrays can be -/// transient, owned, or borrowed. -/// - **option** -/// - Either none or some value. -/// - **unit** -/// - The type of `()` in Rust. It represents an anonymous value containing -/// no data. -/// - **unit_struct** -/// - For example `struct Unit` or `PhantomData`. It represents a named -/// value containing no data. -/// - **unit_variant** -/// - For example the `E::A` and `E::B` in `enum E { A, B }`. -/// - **newtype_struct** -/// - For example `struct Millimeters(u8)`. -/// - **newtype_variant** -/// - For example the `E::N` in `enum E { N(u8) }`. -/// - **seq** -/// - A variably sized heterogeneous sequence of values, for example -/// `Vec` or `HashSet`. When serializing, the length may or may not -/// be known before iterating through all the data. When deserializing, -/// the length is determined by looking at the serialized data. 
-/// - **tuple** -/// - A statically sized heterogeneous sequence of values for which the -/// length will be known at deserialization time without looking at the -/// serialized data, for example `(u8,)` or `(String, u64, Vec)` or -/// `[u64; 10]`. -/// - **tuple_struct** -/// - A named tuple, for example `struct Rgb(u8, u8, u8)`. -/// - **tuple_variant** -/// - For example the `E::T` in `enum E { T(u8, u8) }`. -/// - **map** -/// - A heterogeneous key-value pairing, for example `BTreeMap`. -/// - **struct** -/// - A heterogeneous key-value pairing in which the keys are strings and -/// will be known at deserialization time without looking at the -/// serialized data, for example `struct S { r: u8, g: u8, b: u8 }`. -/// - **struct_variant** -/// - For example the `E::S` in `enum E { S { r: u8, g: u8, b: u8 } }`. -/// -/// Many Serde serializers produce text or binary data as output, for example -/// JSON or Bincode. This is not a requirement of the `Serializer` trait, and -/// there are serializers that do not produce text or binary output. One example -/// is the `serde_json::value::Serializer` (distinct from the main `serde_json` -/// serializer) that produces a `serde_json::Value` data structure in memory as -/// output. -/// -/// [Serde data model]: https://serde.rs/data-model.html -/// -/// # Example implementation -/// -/// The [example data format] presented on the website contains example code for -/// a basic JSON `Serializer`. -/// -/// [example data format]: https://serde.rs/data-format.html -pub trait Serializer: Sized { - /// The output type produced by this `Serializer` during successful - /// serialization. Most serializers that produce text or binary output - /// should set `Ok = ()` and serialize into an [`io::Write`] or buffer - /// contained within the `Serializer` instance. Serializers that build - /// in-memory data structures may be simplified by using `Ok` to propagate - /// the data structure around. - /// - /// [`io::Write`]: https://doc.rust-lang.org/std/io/trait.Write.html - type Ok; - - /// The error type when some error occurs during serialization. - type Error: Error; - - /// Type returned from [`serialize_seq`] for serializing the content of the - /// sequence. - /// - /// [`serialize_seq`]: #tymethod.serialize_seq - type SerializeSeq: SerializeSeq; - - /// Type returned from [`serialize_tuple`] for serializing the content of - /// the tuple. - /// - /// [`serialize_tuple`]: #tymethod.serialize_tuple - type SerializeTuple: SerializeTuple; - - /// Type returned from [`serialize_tuple_struct`] for serializing the - /// content of the tuple struct. - /// - /// [`serialize_tuple_struct`]: #tymethod.serialize_tuple_struct - type SerializeTupleStruct: SerializeTupleStruct; - - /// Type returned from [`serialize_tuple_variant`] for serializing the - /// content of the tuple variant. - /// - /// [`serialize_tuple_variant`]: #tymethod.serialize_tuple_variant - type SerializeTupleVariant: SerializeTupleVariant; - - /// Type returned from [`serialize_map`] for serializing the content of the - /// map. - /// - /// [`serialize_map`]: #tymethod.serialize_map - type SerializeMap: SerializeMap; - - /// Type returned from [`serialize_struct`] for serializing the content of - /// the struct. - /// - /// [`serialize_struct`]: #tymethod.serialize_struct - type SerializeStruct: SerializeStruct; - - /// Type returned from [`serialize_struct_variant`] for serializing the - /// content of the struct variant. 
- /// - /// [`serialize_struct_variant`]: #tymethod.serialize_struct_variant - type SerializeStructVariant: SerializeStructVariant; - - /// Serialize a `bool` value. - /// - /// ```edition2018 - /// # use serde::Serializer; - /// # - /// # serde::__private_serialize!(); - /// # - /// impl Serialize for bool { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// serializer.serialize_bool(*self) - /// } - /// } - /// ``` - fn serialize_bool(self, v: bool) -> Result; - - /// Serialize an `i8` value. - /// - /// If the format does not differentiate between `i8` and `i64`, a - /// reasonable implementation would be to cast the value to `i64` and - /// forward to `serialize_i64`. - /// - /// ```edition2018 - /// # use serde::Serializer; - /// # - /// # serde::__private_serialize!(); - /// # - /// impl Serialize for i8 { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// serializer.serialize_i8(*self) - /// } - /// } - /// ``` - fn serialize_i8(self, v: i8) -> Result; - - /// Serialize an `i16` value. - /// - /// If the format does not differentiate between `i16` and `i64`, a - /// reasonable implementation would be to cast the value to `i64` and - /// forward to `serialize_i64`. - /// - /// ```edition2018 - /// # use serde::Serializer; - /// # - /// # serde::__private_serialize!(); - /// # - /// impl Serialize for i16 { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// serializer.serialize_i16(*self) - /// } - /// } - /// ``` - fn serialize_i16(self, v: i16) -> Result; - - /// Serialize an `i32` value. - /// - /// If the format does not differentiate between `i32` and `i64`, a - /// reasonable implementation would be to cast the value to `i64` and - /// forward to `serialize_i64`. - /// - /// ```edition2018 - /// # use serde::Serializer; - /// # - /// # serde::__private_serialize!(); - /// # - /// impl Serialize for i32 { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// serializer.serialize_i32(*self) - /// } - /// } - /// ``` - fn serialize_i32(self, v: i32) -> Result; - - /// Serialize an `i64` value. - /// - /// ```edition2018 - /// # use serde::Serializer; - /// # - /// # serde::__private_serialize!(); - /// # - /// impl Serialize for i64 { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// serializer.serialize_i64(*self) - /// } - /// } - /// ``` - fn serialize_i64(self, v: i64) -> Result; - - serde_if_integer128! { - /// Serialize an `i128` value. - /// - /// ```edition2018 - /// # use serde::Serializer; - /// # - /// # serde::__private_serialize!(); - /// # - /// impl Serialize for i128 { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// serializer.serialize_i128(*self) - /// } - /// } - /// ``` - /// - /// This method is available only on Rust compiler versions >=1.26. The - /// default behavior unconditionally returns an error. - fn serialize_i128(self, v: i128) -> Result { - let _ = v; - Err(Error::custom("i128 is not supported")) - } - } - - /// Serialize a `u8` value. - /// - /// If the format does not differentiate between `u8` and `u64`, a - /// reasonable implementation would be to cast the value to `u64` and - /// forward to `serialize_u64`. 
- /// - /// ```edition2018 - /// # use serde::Serializer; - /// # - /// # serde::__private_serialize!(); - /// # - /// impl Serialize for u8 { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// serializer.serialize_u8(*self) - /// } - /// } - /// ``` - fn serialize_u8(self, v: u8) -> Result; - - /// Serialize a `u16` value. - /// - /// If the format does not differentiate between `u16` and `u64`, a - /// reasonable implementation would be to cast the value to `u64` and - /// forward to `serialize_u64`. - /// - /// ```edition2018 - /// # use serde::Serializer; - /// # - /// # serde::__private_serialize!(); - /// # - /// impl Serialize for u16 { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// serializer.serialize_u16(*self) - /// } - /// } - /// ``` - fn serialize_u16(self, v: u16) -> Result; - - /// Serialize a `u32` value. - /// - /// If the format does not differentiate between `u32` and `u64`, a - /// reasonable implementation would be to cast the value to `u64` and - /// forward to `serialize_u64`. - /// - /// ```edition2018 - /// # use serde::Serializer; - /// # - /// # serde::__private_serialize!(); - /// # - /// impl Serialize for u32 { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// serializer.serialize_u32(*self) - /// } - /// } - /// ``` - fn serialize_u32(self, v: u32) -> Result; - - /// Serialize a `u64` value. - /// - /// ```edition2018 - /// # use serde::Serializer; - /// # - /// # serde::__private_serialize!(); - /// # - /// impl Serialize for u64 { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// serializer.serialize_u64(*self) - /// } - /// } - /// ``` - fn serialize_u64(self, v: u64) -> Result; - - serde_if_integer128! { - /// Serialize a `u128` value. - /// - /// ```edition2018 - /// # use serde::Serializer; - /// # - /// # serde::__private_serialize!(); - /// # - /// impl Serialize for u128 { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// serializer.serialize_u128(*self) - /// } - /// } - /// ``` - /// - /// This method is available only on Rust compiler versions >=1.26. The - /// default behavior unconditionally returns an error. - fn serialize_u128(self, v: u128) -> Result { - let _ = v; - Err(Error::custom("u128 is not supported")) - } - } - - /// Serialize an `f32` value. - /// - /// If the format does not differentiate between `f32` and `f64`, a - /// reasonable implementation would be to cast the value to `f64` and - /// forward to `serialize_f64`. - /// - /// ```edition2018 - /// # use serde::Serializer; - /// # - /// # serde::__private_serialize!(); - /// # - /// impl Serialize for f32 { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// serializer.serialize_f32(*self) - /// } - /// } - /// ``` - fn serialize_f32(self, v: f32) -> Result; - - /// Serialize an `f64` value. - /// - /// ```edition2018 - /// # use serde::Serializer; - /// # - /// # serde::__private_serialize!(); - /// # - /// impl Serialize for f64 { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// serializer.serialize_f64(*self) - /// } - /// } - /// ``` - fn serialize_f64(self, v: f64) -> Result; - - /// Serialize a character. - /// - /// If the format does not support characters, it is reasonable to serialize - /// it as a single element `str` or a `u32`. 
- /// - /// ```edition2018 - /// # use serde::Serializer; - /// # - /// # serde::__private_serialize!(); - /// # - /// impl Serialize for char { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// serializer.serialize_char(*self) - /// } - /// } - /// ``` - fn serialize_char(self, v: char) -> Result; - - /// Serialize a `&str`. - /// - /// ```edition2018 - /// # use serde::Serializer; - /// # - /// # serde::__private_serialize!(); - /// # - /// impl Serialize for str { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// serializer.serialize_str(self) - /// } - /// } - /// ``` - fn serialize_str(self, v: &str) -> Result; - - /// Serialize a chunk of raw byte data. - /// - /// Enables serializers to serialize byte slices more compactly or more - /// efficiently than other types of slices. If no efficient implementation - /// is available, a reasonable implementation would be to forward to - /// `serialize_seq`. If forwarded, the implementation looks usually just - /// like this: - /// - /// ```edition2018 - /// # use serde::ser::{Serializer, SerializeSeq}; - /// # use serde::__private::doc::Error; - /// # - /// # struct MySerializer; - /// # - /// # impl Serializer for MySerializer { - /// # type Ok = (); - /// # type Error = Error; - /// # - /// fn serialize_bytes(self, v: &[u8]) -> Result { - /// let mut seq = self.serialize_seq(Some(v.len()))?; - /// for b in v { - /// seq.serialize_element(b)?; - /// } - /// seq.end() - /// } - /// # - /// # serde::__serialize_unimplemented! { - /// # bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str none some - /// # unit unit_struct unit_variant newtype_struct newtype_variant - /// # seq tuple tuple_struct tuple_variant map struct struct_variant - /// # } - /// # } - /// ``` - fn serialize_bytes(self, v: &[u8]) -> Result; - - /// Serialize a [`None`] value. - /// - /// ```edition2018 - /// # use serde::{Serialize, Serializer}; - /// # - /// # enum Option { - /// # Some(T), - /// # None, - /// # } - /// # - /// # use self::Option::{Some, None}; - /// # - /// impl Serialize for Option - /// where - /// T: Serialize, - /// { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// match *self { - /// Some(ref value) => serializer.serialize_some(value), - /// None => serializer.serialize_none(), - /// } - /// } - /// } - /// # - /// # fn main() {} - /// ``` - /// - /// [`None`]: https://doc.rust-lang.org/std/option/enum.Option.html#variant.None - fn serialize_none(self) -> Result; - - /// Serialize a [`Some(T)`] value. - /// - /// ```edition2018 - /// # use serde::{Serialize, Serializer}; - /// # - /// # enum Option { - /// # Some(T), - /// # None, - /// # } - /// # - /// # use self::Option::{Some, None}; - /// # - /// impl Serialize for Option - /// where - /// T: Serialize, - /// { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// match *self { - /// Some(ref value) => serializer.serialize_some(value), - /// None => serializer.serialize_none(), - /// } - /// } - /// } - /// # - /// # fn main() {} - /// ``` - /// - /// [`Some(T)`]: https://doc.rust-lang.org/std/option/enum.Option.html#variant.Some - fn serialize_some(self, value: &T) -> Result - where - T: Serialize; - - /// Serialize a `()` value. 
- /// - /// ```edition2018 - /// # use serde::Serializer; - /// # - /// # serde::__private_serialize!(); - /// # - /// impl Serialize for () { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// serializer.serialize_unit() - /// } - /// } - /// ``` - fn serialize_unit(self) -> Result; - - /// Serialize a unit struct like `struct Unit` or `PhantomData`. - /// - /// A reasonable implementation would be to forward to `serialize_unit`. - /// - /// ```edition2018 - /// use serde::{Serialize, Serializer}; - /// - /// struct Nothing; - /// - /// impl Serialize for Nothing { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// serializer.serialize_unit_struct("Nothing") - /// } - /// } - /// ``` - fn serialize_unit_struct(self, name: &'static str) -> Result; - - /// Serialize a unit variant like `E::A` in `enum E { A, B }`. - /// - /// The `name` is the name of the enum, the `variant_index` is the index of - /// this variant within the enum, and the `variant` is the name of the - /// variant. - /// - /// ```edition2018 - /// use serde::{Serialize, Serializer}; - /// - /// enum E { - /// A, - /// B, - /// } - /// - /// impl Serialize for E { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// match *self { - /// E::A => serializer.serialize_unit_variant("E", 0, "A"), - /// E::B => serializer.serialize_unit_variant("E", 1, "B"), - /// } - /// } - /// } - /// ``` - fn serialize_unit_variant( - self, - name: &'static str, - variant_index: u32, - variant: &'static str, - ) -> Result; - - /// Serialize a newtype struct like `struct Millimeters(u8)`. - /// - /// Serializers are encouraged to treat newtype structs as insignificant - /// wrappers around the data they contain. A reasonable implementation would - /// be to forward to `value.serialize(self)`. - /// - /// ```edition2018 - /// use serde::{Serialize, Serializer}; - /// - /// struct Millimeters(u8); - /// - /// impl Serialize for Millimeters { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// serializer.serialize_newtype_struct("Millimeters", &self.0) - /// } - /// } - /// ``` - fn serialize_newtype_struct( - self, - name: &'static str, - value: &T, - ) -> Result - where - T: Serialize; - - /// Serialize a newtype variant like `E::N` in `enum E { N(u8) }`. - /// - /// The `name` is the name of the enum, the `variant_index` is the index of - /// this variant within the enum, and the `variant` is the name of the - /// variant. The `value` is the data contained within this newtype variant. - /// - /// ```edition2018 - /// use serde::{Serialize, Serializer}; - /// - /// enum E { - /// M(String), - /// N(u8), - /// } - /// - /// impl Serialize for E { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// match *self { - /// E::M(ref s) => serializer.serialize_newtype_variant("E", 0, "M", s), - /// E::N(n) => serializer.serialize_newtype_variant("E", 1, "N", &n), - /// } - /// } - /// } - /// ``` - fn serialize_newtype_variant( - self, - name: &'static str, - variant_index: u32, - variant: &'static str, - value: &T, - ) -> Result - where - T: Serialize; - - /// Begin to serialize a variably sized sequence. This call must be - /// followed by zero or more calls to `serialize_element`, then a call to - /// `end`. 
- /// - /// The argument is the number of elements in the sequence, which may or may - /// not be computable before the sequence is iterated. Some serializers only - /// support sequences whose length is known up front. - /// - /// ```edition2018 - /// # use std::marker::PhantomData; - /// # - /// # struct Vec(PhantomData); - /// # - /// # impl Vec { - /// # fn len(&self) -> usize { - /// # unimplemented!() - /// # } - /// # } - /// # - /// # impl<'a, T> IntoIterator for &'a Vec { - /// # type Item = &'a T; - /// # type IntoIter = Box>; - /// # - /// # fn into_iter(self) -> Self::IntoIter { - /// # unimplemented!() - /// # } - /// # } - /// # - /// use serde::ser::{Serialize, Serializer, SerializeSeq}; - /// - /// impl Serialize for Vec - /// where - /// T: Serialize, - /// { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// let mut seq = serializer.serialize_seq(Some(self.len()))?; - /// for element in self { - /// seq.serialize_element(element)?; - /// } - /// seq.end() - /// } - /// } - /// ``` - fn serialize_seq(self, len: Option) -> Result; - - /// Begin to serialize a statically sized sequence whose length will be - /// known at deserialization time without looking at the serialized data. - /// This call must be followed by zero or more calls to `serialize_element`, - /// then a call to `end`. - /// - /// ```edition2018 - /// use serde::ser::{Serialize, Serializer, SerializeTuple}; - /// - /// # mod fool { - /// # trait Serialize {} - /// impl Serialize for (A, B, C) - /// # {} - /// # } - /// # - /// # struct Tuple3(A, B, C); - /// # - /// # impl Serialize for Tuple3 - /// where - /// A: Serialize, - /// B: Serialize, - /// C: Serialize, - /// { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// let mut tup = serializer.serialize_tuple(3)?; - /// tup.serialize_element(&self.0)?; - /// tup.serialize_element(&self.1)?; - /// tup.serialize_element(&self.2)?; - /// tup.end() - /// } - /// } - /// ``` - /// - /// ```edition2018 - /// use serde::ser::{Serialize, SerializeTuple, Serializer}; - /// - /// const VRAM_SIZE: usize = 386; - /// struct Vram([u16; VRAM_SIZE]); - /// - /// impl Serialize for Vram { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// let mut seq = serializer.serialize_tuple(VRAM_SIZE)?; - /// for element in &self.0[..] { - /// seq.serialize_element(element)?; - /// } - /// seq.end() - /// } - /// } - /// ``` - fn serialize_tuple(self, len: usize) -> Result; - - /// Begin to serialize a tuple struct like `struct Rgb(u8, u8, u8)`. This - /// call must be followed by zero or more calls to `serialize_field`, then a - /// call to `end`. - /// - /// The `name` is the name of the tuple struct and the `len` is the number - /// of data fields that will be serialized. - /// - /// ```edition2018 - /// use serde::ser::{Serialize, SerializeTupleStruct, Serializer}; - /// - /// struct Rgb(u8, u8, u8); - /// - /// impl Serialize for Rgb { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// let mut ts = serializer.serialize_tuple_struct("Rgb", 3)?; - /// ts.serialize_field(&self.0)?; - /// ts.serialize_field(&self.1)?; - /// ts.serialize_field(&self.2)?; - /// ts.end() - /// } - /// } - /// ``` - fn serialize_tuple_struct( - self, - name: &'static str, - len: usize, - ) -> Result; - - /// Begin to serialize a tuple variant like `E::T` in `enum E { T(u8, u8) - /// }`. 
This call must be followed by zero or more calls to - /// `serialize_field`, then a call to `end`. - /// - /// The `name` is the name of the enum, the `variant_index` is the index of - /// this variant within the enum, the `variant` is the name of the variant, - /// and the `len` is the number of data fields that will be serialized. - /// - /// ```edition2018 - /// use serde::ser::{Serialize, SerializeTupleVariant, Serializer}; - /// - /// enum E { - /// T(u8, u8), - /// U(String, u32, u32), - /// } - /// - /// impl Serialize for E { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// match *self { - /// E::T(ref a, ref b) => { - /// let mut tv = serializer.serialize_tuple_variant("E", 0, "T", 2)?; - /// tv.serialize_field(a)?; - /// tv.serialize_field(b)?; - /// tv.end() - /// } - /// E::U(ref a, ref b, ref c) => { - /// let mut tv = serializer.serialize_tuple_variant("E", 1, "U", 3)?; - /// tv.serialize_field(a)?; - /// tv.serialize_field(b)?; - /// tv.serialize_field(c)?; - /// tv.end() - /// } - /// } - /// } - /// } - /// ``` - fn serialize_tuple_variant( - self, - name: &'static str, - variant_index: u32, - variant: &'static str, - len: usize, - ) -> Result; - - /// Begin to serialize a map. This call must be followed by zero or more - /// calls to `serialize_key` and `serialize_value`, then a call to `end`. - /// - /// The argument is the number of elements in the map, which may or may not - /// be computable before the map is iterated. Some serializers only support - /// maps whose length is known up front. - /// - /// ```edition2018 - /// # use std::marker::PhantomData; - /// # - /// # struct HashMap(PhantomData, PhantomData); - /// # - /// # impl HashMap { - /// # fn len(&self) -> usize { - /// # unimplemented!() - /// # } - /// # } - /// # - /// # impl<'a, K, V> IntoIterator for &'a HashMap { - /// # type Item = (&'a K, &'a V); - /// # type IntoIter = Box>; - /// # - /// # fn into_iter(self) -> Self::IntoIter { - /// # unimplemented!() - /// # } - /// # } - /// # - /// use serde::ser::{Serialize, Serializer, SerializeMap}; - /// - /// impl Serialize for HashMap - /// where - /// K: Serialize, - /// V: Serialize, - /// { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// let mut map = serializer.serialize_map(Some(self.len()))?; - /// for (k, v) in self { - /// map.serialize_entry(k, v)?; - /// } - /// map.end() - /// } - /// } - /// ``` - fn serialize_map(self, len: Option) -> Result; - - /// Begin to serialize a struct like `struct Rgb { r: u8, g: u8, b: u8 }`. - /// This call must be followed by zero or more calls to `serialize_field`, - /// then a call to `end`. - /// - /// The `name` is the name of the struct and the `len` is the number of - /// data fields that will be serialized. - /// - /// ```edition2018 - /// use serde::ser::{Serialize, SerializeStruct, Serializer}; - /// - /// struct Rgb { - /// r: u8, - /// g: u8, - /// b: u8, - /// } - /// - /// impl Serialize for Rgb { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// let mut rgb = serializer.serialize_struct("Rgb", 3)?; - /// rgb.serialize_field("r", &self.r)?; - /// rgb.serialize_field("g", &self.g)?; - /// rgb.serialize_field("b", &self.b)?; - /// rgb.end() - /// } - /// } - /// ``` - fn serialize_struct( - self, - name: &'static str, - len: usize, - ) -> Result; - - /// Begin to serialize a struct variant like `E::S` in `enum E { S { r: u8, - /// g: u8, b: u8 } }`. 
This call must be followed by zero or more calls to - /// `serialize_field`, then a call to `end`. - /// - /// The `name` is the name of the enum, the `variant_index` is the index of - /// this variant within the enum, the `variant` is the name of the variant, - /// and the `len` is the number of data fields that will be serialized. - /// - /// ```edition2018 - /// use serde::ser::{Serialize, SerializeStructVariant, Serializer}; - /// - /// enum E { - /// S { r: u8, g: u8, b: u8 }, - /// } - /// - /// impl Serialize for E { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// match *self { - /// E::S { - /// ref r, - /// ref g, - /// ref b, - /// } => { - /// let mut sv = serializer.serialize_struct_variant("E", 0, "S", 3)?; - /// sv.serialize_field("r", r)?; - /// sv.serialize_field("g", g)?; - /// sv.serialize_field("b", b)?; - /// sv.end() - /// } - /// } - /// } - /// } - /// ``` - fn serialize_struct_variant( - self, - name: &'static str, - variant_index: u32, - variant: &'static str, - len: usize, - ) -> Result; - - /// Collect an iterator as a sequence. - /// - /// The default implementation serializes each item yielded by the iterator - /// using [`serialize_seq`]. Implementors should not need to override this - /// method. - /// - /// ```edition2018 - /// use serde::{Serialize, Serializer}; - /// - /// struct SecretlyOneHigher { - /// data: Vec, - /// } - /// - /// impl Serialize for SecretlyOneHigher { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// serializer.collect_seq(self.data.iter().map(|x| x + 1)) - /// } - /// } - /// ``` - /// - /// [`serialize_seq`]: #tymethod.serialize_seq - fn collect_seq(self, iter: I) -> Result - where - I: IntoIterator, - ::Item: Serialize, - { - let iter = iter.into_iter(); - let mut serializer = try!(self.serialize_seq(iterator_len_hint(&iter))); - - #[cfg(iterator_try_fold)] - { - let mut iter = iter; - try!(iter.try_for_each(|item| serializer.serialize_element(&item))); - } - - #[cfg(not(iterator_try_fold))] - { - for item in iter { - try!(serializer.serialize_element(&item)); - } - } - - serializer.end() - } - - /// Collect an iterator as a map. - /// - /// The default implementation serializes each pair yielded by the iterator - /// using [`serialize_map`]. Implementors should not need to override this - /// method. - /// - /// ```edition2018 - /// use serde::{Serialize, Serializer}; - /// use std::collections::BTreeSet; - /// - /// struct MapToUnit { - /// keys: BTreeSet, - /// } - /// - /// // Serializes as a map in which the values are all unit. - /// impl Serialize for MapToUnit { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// serializer.collect_map(self.keys.iter().map(|k| (k, ()))) - /// } - /// } - /// ``` - /// - /// [`serialize_map`]: #tymethod.serialize_map - fn collect_map(self, iter: I) -> Result - where - K: Serialize, - V: Serialize, - I: IntoIterator, - { - let iter = iter.into_iter(); - let mut serializer = try!(self.serialize_map(iterator_len_hint(&iter))); - - #[cfg(iterator_try_fold)] - { - let mut iter = iter; - try!(iter.try_for_each(|(key, value)| serializer.serialize_entry(&key, &value))); - } - - #[cfg(not(iterator_try_fold))] - { - for (key, value) in iter { - try!(serializer.serialize_entry(&key, &value)); - } - } - - serializer.end() - } - - /// Serialize a string produced by an implementation of `Display`. 
- /// - /// The default implementation builds a heap-allocated [`String`] and - /// delegates to [`serialize_str`]. Serializers are encouraged to provide a - /// more efficient implementation if possible. - /// - /// ```edition2018 - /// # struct DateTime; - /// # - /// # impl DateTime { - /// # fn naive_local(&self) -> () { () } - /// # fn offset(&self) -> () { () } - /// # } - /// # - /// use serde::{Serialize, Serializer}; - /// - /// impl Serialize for DateTime { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// serializer.collect_str(&format_args!("{:?}{:?}", - /// self.naive_local(), - /// self.offset())) - /// } - /// } - /// ``` - /// - /// [`String`]: https://doc.rust-lang.org/std/string/struct.String.html - /// [`serialize_str`]: #tymethod.serialize_str - #[cfg(any(feature = "std", feature = "alloc"))] - fn collect_str(self, value: &T) -> Result - where - T: Display, - { - self.serialize_str(&value.to_string()) - } - - /// Serialize a string produced by an implementation of `Display`. - /// - /// Serializers that use `no_std` are required to provide an implementation - /// of this method. If no more sensible behavior is possible, the - /// implementation is expected to return an error. - /// - /// ```edition2018 - /// # struct DateTime; - /// # - /// # impl DateTime { - /// # fn naive_local(&self) -> () { () } - /// # fn offset(&self) -> () { () } - /// # } - /// # - /// use serde::{Serialize, Serializer}; - /// - /// impl Serialize for DateTime { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// serializer.collect_str(&format_args!("{:?}{:?}", - /// self.naive_local(), - /// self.offset())) - /// } - /// } - /// ``` - #[cfg(not(any(feature = "std", feature = "alloc")))] - fn collect_str(self, value: &T) -> Result - where - T: Display; - - /// Determine whether `Serialize` implementations should serialize in - /// human-readable form. - /// - /// Some types have a human-readable form that may be somewhat expensive to - /// construct, as well as a binary form that is compact and efficient. - /// Generally text-based formats like JSON and YAML will prefer to use the - /// human-readable one and binary formats like Bincode will prefer the - /// compact one. - /// - /// ```edition2018 - /// # use std::fmt::{self, Display}; - /// # - /// # struct Timestamp; - /// # - /// # impl Timestamp { - /// # fn seconds_since_epoch(&self) -> u64 { unimplemented!() } - /// # } - /// # - /// # impl Display for Timestamp { - /// # fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - /// # unimplemented!() - /// # } - /// # } - /// # - /// use serde::{Serialize, Serializer}; - /// - /// impl Serialize for Timestamp { - /// fn serialize(&self, serializer: S) -> Result - /// where - /// S: Serializer, - /// { - /// if serializer.is_human_readable() { - /// // Serialize to a human-readable string "2015-05-15T17:01:00Z". - /// self.to_string().serialize(serializer) - /// } else { - /// // Serialize to a compact binary representation. - /// self.seconds_since_epoch().serialize(serializer) - /// } - /// } - /// } - /// ``` - /// - /// The default implementation of this method returns `true`. Data formats - /// may override this to `false` to request a compact form for types that - /// support one. 
Note that modifying this method to change a format from - /// human-readable to compact or vice versa should be regarded as a breaking - /// change, as a value serialized in human-readable mode is not required to - /// deserialize from the same data in compact mode. - #[inline] - fn is_human_readable(&self) -> bool { - true - } -} - -/// Returned from `Serializer::serialize_seq`. -/// -/// # Example use -/// -/// ```edition2018 -/// # use std::marker::PhantomData; -/// # -/// # struct Vec(PhantomData); -/// # -/// # impl Vec { -/// # fn len(&self) -> usize { -/// # unimplemented!() -/// # } -/// # } -/// # -/// # impl<'a, T> IntoIterator for &'a Vec { -/// # type Item = &'a T; -/// # type IntoIter = Box>; -/// # fn into_iter(self) -> Self::IntoIter { -/// # unimplemented!() -/// # } -/// # } -/// # -/// use serde::ser::{Serialize, Serializer, SerializeSeq}; -/// -/// impl Serialize for Vec -/// where -/// T: Serialize, -/// { -/// fn serialize(&self, serializer: S) -> Result -/// where -/// S: Serializer, -/// { -/// let mut seq = serializer.serialize_seq(Some(self.len()))?; -/// for element in self { -/// seq.serialize_element(element)?; -/// } -/// seq.end() -/// } -/// } -/// ``` -/// -/// # Example implementation -/// -/// The [example data format] presented on the website demonstrates an -/// implementation of `SerializeSeq` for a basic JSON data format. -/// -/// [example data format]: https://serde.rs/data-format.html -pub trait SerializeSeq { - /// Must match the `Ok` type of our `Serializer`. - type Ok; - - /// Must match the `Error` type of our `Serializer`. - type Error: Error; - - /// Serialize a sequence element. - fn serialize_element(&mut self, value: &T) -> Result<(), Self::Error> - where - T: Serialize; - - /// Finish serializing a sequence. - fn end(self) -> Result; -} - -/// Returned from `Serializer::serialize_tuple`. 
-/// -/// # Example use -/// -/// ```edition2018 -/// use serde::ser::{Serialize, Serializer, SerializeTuple}; -/// -/// # mod fool { -/// # trait Serialize {} -/// impl Serialize for (A, B, C) -/// # {} -/// # } -/// # -/// # struct Tuple3(A, B, C); -/// # -/// # impl Serialize for Tuple3 -/// where -/// A: Serialize, -/// B: Serialize, -/// C: Serialize, -/// { -/// fn serialize(&self, serializer: S) -> Result -/// where -/// S: Serializer, -/// { -/// let mut tup = serializer.serialize_tuple(3)?; -/// tup.serialize_element(&self.0)?; -/// tup.serialize_element(&self.1)?; -/// tup.serialize_element(&self.2)?; -/// tup.end() -/// } -/// } -/// ``` -/// -/// ```edition2018 -/// # use std::marker::PhantomData; -/// # -/// # struct Array(PhantomData); -/// # -/// # impl Array { -/// # fn len(&self) -> usize { -/// # unimplemented!() -/// # } -/// # } -/// # -/// # impl<'a, T> IntoIterator for &'a Array { -/// # type Item = &'a T; -/// # type IntoIter = Box>; -/// # fn into_iter(self) -> Self::IntoIter { -/// # unimplemented!() -/// # } -/// # } -/// # -/// use serde::ser::{Serialize, Serializer, SerializeTuple}; -/// -/// # mod fool { -/// # trait Serialize {} -/// impl Serialize for [T; 16] -/// # {} -/// # } -/// # -/// # impl Serialize for Array -/// where -/// T: Serialize, -/// { -/// fn serialize(&self, serializer: S) -> Result -/// where -/// S: Serializer, -/// { -/// let mut seq = serializer.serialize_tuple(16)?; -/// for element in self { -/// seq.serialize_element(element)?; -/// } -/// seq.end() -/// } -/// } -/// ``` -/// -/// # Example implementation -/// -/// The [example data format] presented on the website demonstrates an -/// implementation of `SerializeTuple` for a basic JSON data format. -/// -/// [example data format]: https://serde.rs/data-format.html -pub trait SerializeTuple { - /// Must match the `Ok` type of our `Serializer`. - type Ok; - - /// Must match the `Error` type of our `Serializer`. - type Error: Error; - - /// Serialize a tuple element. - fn serialize_element(&mut self, value: &T) -> Result<(), Self::Error> - where - T: Serialize; - - /// Finish serializing a tuple. - fn end(self) -> Result; -} - -/// Returned from `Serializer::serialize_tuple_struct`. -/// -/// # Example use -/// -/// ```edition2018 -/// use serde::ser::{Serialize, SerializeTupleStruct, Serializer}; -/// -/// struct Rgb(u8, u8, u8); -/// -/// impl Serialize for Rgb { -/// fn serialize(&self, serializer: S) -> Result -/// where -/// S: Serializer, -/// { -/// let mut ts = serializer.serialize_tuple_struct("Rgb", 3)?; -/// ts.serialize_field(&self.0)?; -/// ts.serialize_field(&self.1)?; -/// ts.serialize_field(&self.2)?; -/// ts.end() -/// } -/// } -/// ``` -/// -/// # Example implementation -/// -/// The [example data format] presented on the website demonstrates an -/// implementation of `SerializeTupleStruct` for a basic JSON data format. -/// -/// [example data format]: https://serde.rs/data-format.html -pub trait SerializeTupleStruct { - /// Must match the `Ok` type of our `Serializer`. - type Ok; - - /// Must match the `Error` type of our `Serializer`. - type Error: Error; - - /// Serialize a tuple struct field. - fn serialize_field(&mut self, value: &T) -> Result<(), Self::Error> - where - T: Serialize; - - /// Finish serializing a tuple struct. - fn end(self) -> Result; -} - -/// Returned from `Serializer::serialize_tuple_variant`. 
-/// -/// # Example use -/// -/// ```edition2018 -/// use serde::ser::{Serialize, SerializeTupleVariant, Serializer}; -/// -/// enum E { -/// T(u8, u8), -/// U(String, u32, u32), -/// } -/// -/// impl Serialize for E { -/// fn serialize(&self, serializer: S) -> Result -/// where -/// S: Serializer, -/// { -/// match *self { -/// E::T(ref a, ref b) => { -/// let mut tv = serializer.serialize_tuple_variant("E", 0, "T", 2)?; -/// tv.serialize_field(a)?; -/// tv.serialize_field(b)?; -/// tv.end() -/// } -/// E::U(ref a, ref b, ref c) => { -/// let mut tv = serializer.serialize_tuple_variant("E", 1, "U", 3)?; -/// tv.serialize_field(a)?; -/// tv.serialize_field(b)?; -/// tv.serialize_field(c)?; -/// tv.end() -/// } -/// } -/// } -/// } -/// ``` -/// -/// # Example implementation -/// -/// The [example data format] presented on the website demonstrates an -/// implementation of `SerializeTupleVariant` for a basic JSON data format. -/// -/// [example data format]: https://serde.rs/data-format.html -pub trait SerializeTupleVariant { - /// Must match the `Ok` type of our `Serializer`. - type Ok; - - /// Must match the `Error` type of our `Serializer`. - type Error: Error; - - /// Serialize a tuple variant field. - fn serialize_field(&mut self, value: &T) -> Result<(), Self::Error> - where - T: Serialize; - - /// Finish serializing a tuple variant. - fn end(self) -> Result; -} - -/// Returned from `Serializer::serialize_map`. -/// -/// # Example use -/// -/// ```edition2018 -/// # use std::marker::PhantomData; -/// # -/// # struct HashMap(PhantomData, PhantomData); -/// # -/// # impl HashMap { -/// # fn len(&self) -> usize { -/// # unimplemented!() -/// # } -/// # } -/// # -/// # impl<'a, K, V> IntoIterator for &'a HashMap { -/// # type Item = (&'a K, &'a V); -/// # type IntoIter = Box>; -/// # -/// # fn into_iter(self) -> Self::IntoIter { -/// # unimplemented!() -/// # } -/// # } -/// # -/// use serde::ser::{Serialize, Serializer, SerializeMap}; -/// -/// impl Serialize for HashMap -/// where -/// K: Serialize, -/// V: Serialize, -/// { -/// fn serialize(&self, serializer: S) -> Result -/// where -/// S: Serializer, -/// { -/// let mut map = serializer.serialize_map(Some(self.len()))?; -/// for (k, v) in self { -/// map.serialize_entry(k, v)?; -/// } -/// map.end() -/// } -/// } -/// ``` -/// -/// # Example implementation -/// -/// The [example data format] presented on the website demonstrates an -/// implementation of `SerializeMap` for a basic JSON data format. -/// -/// [example data format]: https://serde.rs/data-format.html -pub trait SerializeMap { - /// Must match the `Ok` type of our `Serializer`. - type Ok; - - /// Must match the `Error` type of our `Serializer`. - type Error: Error; - - /// Serialize a map key. - /// - /// If possible, `Serialize` implementations are encouraged to use - /// `serialize_entry` instead as it may be implemented more efficiently in - /// some formats compared to a pair of calls to `serialize_key` and - /// `serialize_value`. - fn serialize_key(&mut self, key: &T) -> Result<(), Self::Error> - where - T: Serialize; - - /// Serialize a map value. - /// - /// # Panics - /// - /// Calling `serialize_value` before `serialize_key` is incorrect and is - /// allowed to panic or produce bogus results. - fn serialize_value(&mut self, value: &T) -> Result<(), Self::Error> - where - T: Serialize; - - /// Serialize a map entry consisting of a key and a value. 
- /// - /// Some [`Serialize`] types are not able to hold a key and value in memory - /// at the same time so `SerializeMap` implementations are required to - /// support [`serialize_key`] and [`serialize_value`] individually. The - /// `serialize_entry` method allows serializers to optimize for the case - /// where key and value are both available. [`Serialize`] implementations - /// are encouraged to use `serialize_entry` if possible. - /// - /// The default implementation delegates to [`serialize_key`] and - /// [`serialize_value`]. This is appropriate for serializers that do not - /// care about performance or are not able to optimize `serialize_entry` any - /// better than this. - /// - /// [`Serialize`]: ../trait.Serialize.html - /// [`serialize_key`]: #tymethod.serialize_key - /// [`serialize_value`]: #tymethod.serialize_value - fn serialize_entry( - &mut self, - key: &K, - value: &V, - ) -> Result<(), Self::Error> - where - K: Serialize, - V: Serialize, - { - try!(self.serialize_key(key)); - self.serialize_value(value) - } - - /// Finish serializing a map. - fn end(self) -> Result; -} - -/// Returned from `Serializer::serialize_struct`. -/// -/// # Example use -/// -/// ```edition2018 -/// use serde::ser::{Serialize, SerializeStruct, Serializer}; -/// -/// struct Rgb { -/// r: u8, -/// g: u8, -/// b: u8, -/// } -/// -/// impl Serialize for Rgb { -/// fn serialize(&self, serializer: S) -> Result -/// where -/// S: Serializer, -/// { -/// let mut rgb = serializer.serialize_struct("Rgb", 3)?; -/// rgb.serialize_field("r", &self.r)?; -/// rgb.serialize_field("g", &self.g)?; -/// rgb.serialize_field("b", &self.b)?; -/// rgb.end() -/// } -/// } -/// ``` -/// -/// # Example implementation -/// -/// The [example data format] presented on the website demonstrates an -/// implementation of `SerializeStruct` for a basic JSON data format. -/// -/// [example data format]: https://serde.rs/data-format.html -pub trait SerializeStruct { - /// Must match the `Ok` type of our `Serializer`. - type Ok; - - /// Must match the `Error` type of our `Serializer`. - type Error: Error; - - /// Serialize a struct field. - fn serialize_field( - &mut self, - key: &'static str, - value: &T, - ) -> Result<(), Self::Error> - where - T: Serialize; - - /// Indicate that a struct field has been skipped. - #[inline] - fn skip_field(&mut self, key: &'static str) -> Result<(), Self::Error> { - let _ = key; - Ok(()) - } - - /// Finish serializing a struct. - fn end(self) -> Result; -} - -/// Returned from `Serializer::serialize_struct_variant`. -/// -/// # Example use -/// -/// ```edition2018 -/// use serde::ser::{Serialize, SerializeStructVariant, Serializer}; -/// -/// enum E { -/// S { r: u8, g: u8, b: u8 }, -/// } -/// -/// impl Serialize for E { -/// fn serialize(&self, serializer: S) -> Result -/// where -/// S: Serializer, -/// { -/// match *self { -/// E::S { -/// ref r, -/// ref g, -/// ref b, -/// } => { -/// let mut sv = serializer.serialize_struct_variant("E", 0, "S", 3)?; -/// sv.serialize_field("r", r)?; -/// sv.serialize_field("g", g)?; -/// sv.serialize_field("b", b)?; -/// sv.end() -/// } -/// } -/// } -/// } -/// ``` -/// -/// # Example implementation -/// -/// The [example data format] presented on the website demonstrates an -/// implementation of `SerializeStructVariant` for a basic JSON data format. -/// -/// [example data format]: https://serde.rs/data-format.html -pub trait SerializeStructVariant { - /// Must match the `Ok` type of our `Serializer`. 
-    type Ok;
-
-    /// Must match the `Error` type of our `Serializer`.
-    type Error: Error;
-
-    /// Serialize a struct variant field.
-    fn serialize_field<T: ?Sized>(
-        &mut self,
-        key: &'static str,
-        value: &T,
-    ) -> Result<(), Self::Error>
-    where
-        T: Serialize;
-
-    /// Indicate that a struct variant field has been skipped.
-    #[inline]
-    fn skip_field(&mut self, key: &'static str) -> Result<(), Self::Error> {
-        let _ = key;
-        Ok(())
-    }
-
-    /// Finish serializing a struct variant.
-    fn end(self) -> Result<Self::Ok, Self::Error>;
-}
-
-fn iterator_len_hint<I>(iter: &I) -> Option<usize>
-where
-    I: Iterator,
-{
-    match iter.size_hint() {
-        (lo, Some(hi)) if lo == hi => Some(lo),
-        _ => None,
-    }
-}
diff --git a/vendor/serde/src/std_error.rs b/vendor/serde/src/std_error.rs
deleted file mode 100644
index 1055e0ff..00000000
--- a/vendor/serde/src/std_error.rs
+++ /dev/null
@@ -1,48 +0,0 @@
-use lib::{Debug, Display};
-
-/// Either a re-export of std::error::Error or a new identical trait, depending
-/// on whether Serde's "std" feature is enabled.
-///
-/// Serde's error traits [`serde::ser::Error`] and [`serde::de::Error`] require
-/// [`std::error::Error`] as a supertrait, but only when Serde is built with
-/// "std" enabled. Data formats that don't care about no\_std support should
-/// generally provide their error types with a `std::error::Error` impl
-/// directly:
-///
-/// ```edition2018
-/// #[derive(Debug)]
-/// struct MySerError {...}
-///
-/// impl serde::ser::Error for MySerError {...}
-///
-/// impl std::fmt::Display for MySerError {...}
-///
-/// // We don't support no_std!
-/// impl std::error::Error for MySerError {}
-/// ```
-///
-/// Data formats that *do* support no\_std may either have a "std" feature of
-/// their own:
-///
-/// ```toml
-/// [features]
-/// std = ["serde/std"]
-/// ```
-///
-/// ```edition2018
-/// #[cfg(feature = "std")]
-/// impl std::error::Error for MySerError {}
-/// ```
-///
-/// ... or else provide the std Error impl unconditionally via Serde's
-/// re-export:
-///
-/// ```edition2018
-/// impl serde::ser::StdError for MySerError {}
-/// ```
-pub trait Error: Debug + Display {
-    /// The underlying cause of this error, if any.
- fn source(&self) -> Option<&(Error + 'static)> { - None - } -} diff --git a/vendor/serde_derive/.cargo-checksum.json b/vendor/serde_derive/.cargo-checksum.json deleted file mode 100644 index 445a5fcf..00000000 --- a/vendor/serde_derive/.cargo-checksum.json +++ /dev/null @@ -1 +0,0 @@ -{"files":{"Cargo.toml":"6e9e3ab6251f5cea4496386e3fe0d3b43e495316494837573b49fa6d1cda65e5","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"5cf9d2158d70048a2916360ad59d9079f6233c6f68781a7a792e70f8b772d8ce","build.rs":"d0fc2047a792e90b95df7be8d1e75732b231b3a60cb481feebfca46b7edb1209","crates-io.md":"25ed421fe25d0f6f74c4b78674144bef2843a5f78bf552d0a8ec633be69d282b","src/bound.rs":"f3a1a7b76426921ee435e9963c02c2fc7663c15aecfdf6dc1198aacf3f4c2e70","src/de.rs":"c5a41016ce15f8176a2d7a8445ba06d2eb8de0863c1fea0dab51c395dd7dccff","src/dummy.rs":"ad78556876053e74be976e91032200666ffbeeb6f7e92f3a7a8463fea1f60ac5","src/fragment.rs":"5548ba65a53d90a296f60c1328a7a7fb040db467f59c2f5210b2fb320457145d","src/internals/ast.rs":"b019865eef92c1ddbb9029423ac22179f132dc655a51c09fb2a42f4aaef172fd","src/internals/attr.rs":"d3ae0ad0e7d40fbf3c8a5f86c8a8c5b5f289c3318dd79afc6c5c2ce9b3523931","src/internals/case.rs":"9492f0c5142d7b7e8cd39c86d13a855e5ce4489425adb2b96aed89e1b7851ac0","src/internals/check.rs":"11ea94257d2a2ee2276938a6beb4ae11b74c39225c1e342e6df1e7d2b2924496","src/internals/ctxt.rs":"6fa544ae52914498a62a395818ebdc1b36ac2fb5903c60afb741a864ad559f1c","src/internals/mod.rs":"f32138ff19d57eb00f88ba11f6b015efab2102657804f71ebbf386a3698dad91","src/internals/receiver.rs":"ad30c3e4583ef07f74c7905f22a6580af25ebd431fdf1e04878b9a770ca4ede6","src/internals/respan.rs":"899753859c58ce5f532a3ec4584796a52f13ed5a0533191e48c953ba5c1b52ff","src/internals/symbol.rs":"3c9ce461773b7df3bb64d82aa5a0d93052c3bb0e60209db6c0b5c10ee9cfc9cf","src/lib.rs":"441a27da40134a10aad6c11c55e22da62d7b9586fe68381e899a00147194f804","src/pretend.rs":"73fe121ced5804e77d37512bd2c7548be249dcab3eeb0bad59f82e64188f9ace","src/ser.rs":"0d99c841f6c7bc9751ab225fe42d1f8b7fe56e36903efcb4ff10bf6e35c390ba","src/try.rs":"b171b0088c23ebf4bfa07ba457881b41ac5e547d55dd16f737ea988d34badf61"},"package":"ecc0db5cb2556c0e558887d9bbdcf6ac4471e83ff66cf696e5419024d1606276"} \ No newline at end of file diff --git a/vendor/serde_derive/Cargo.toml b/vendor/serde_derive/Cargo.toml deleted file mode 100644 index 6e848460..00000000 --- a/vendor/serde_derive/Cargo.toml +++ /dev/null @@ -1,44 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies. -# -# If you are reading this file be aware that the original Cargo.toml -# will likely look very different (and much more reasonable). -# See Cargo.toml.orig for the original contents. 
- -[package] -rust-version = "1.31" -name = "serde_derive" -version = "1.0.132" -authors = ["Erick Tryzelaar ", "David Tolnay "] -include = ["build.rs", "src/**/*.rs", "crates-io.md", "README.md", "LICENSE-APACHE", "LICENSE-MIT"] -description = "Macros 1.1 implementation of #[derive(Serialize, Deserialize)]" -homepage = "https://serde.rs" -documentation = "https://serde.rs/derive.html" -readme = "crates-io.md" -keywords = ["serde", "serialization", "no_std"] -license = "MIT OR Apache-2.0" -repository = "https://github.com/serde-rs/serde" -[package.metadata.docs.rs] -targets = ["x86_64-unknown-linux-gnu"] - -[lib] -name = "serde_derive" -proc-macro = true -[dependencies.proc-macro2] -version = "1.0" - -[dependencies.quote] -version = "1.0" - -[dependencies.syn] -version = "1.0.60" -[dev-dependencies.serde] -version = "1.0" - -[features] -default = [] -deserialize_in_place = [] diff --git a/vendor/serde_derive/LICENSE-APACHE b/vendor/serde_derive/LICENSE-APACHE deleted file mode 100644 index 16fe87b0..00000000 --- a/vendor/serde_derive/LICENSE-APACHE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/vendor/serde_derive/LICENSE-MIT b/vendor/serde_derive/LICENSE-MIT deleted file mode 100644 index 31aa7938..00000000 --- a/vendor/serde_derive/LICENSE-MIT +++ /dev/null @@ -1,23 +0,0 @@ -Permission is hereby granted, free of charge, to any -person obtaining a copy of this software and associated -documentation files (the "Software"), to deal in the -Software without restriction, including without -limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software -is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice -shall be included in all copies or substantial portions -of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. diff --git a/vendor/serde_derive/README.md b/vendor/serde_derive/README.md deleted file mode 100644 index 14b00982..00000000 --- a/vendor/serde_derive/README.md +++ /dev/null @@ -1,111 +0,0 @@ -# Serde   [![Build Status]][actions] [![Latest Version]][crates.io] [![serde: rustc 1.13+]][Rust 1.13] [![serde_derive: rustc 1.31+]][Rust 1.31] - -[Build Status]: https://img.shields.io/github/workflow/status/serde-rs/serde/CI/master -[actions]: https://github.com/serde-rs/serde/actions?query=branch%3Amaster -[Latest Version]: https://img.shields.io/crates/v/serde.svg -[crates.io]: https://crates.io/crates/serde -[serde: rustc 1.13+]: https://img.shields.io/badge/serde-rustc_1.13+-lightgray.svg -[serde_derive: rustc 1.31+]: https://img.shields.io/badge/serde_derive-rustc_1.31+-lightgray.svg -[Rust 1.13]: https://blog.rust-lang.org/2016/11/10/Rust-1.13.html -[Rust 1.31]: https://blog.rust-lang.org/2018/12/06/Rust-1.31-and-rust-2018.html - -**Serde is a framework for *ser*ializing and *de*serializing Rust data structures efficiently and generically.** - ---- - -You may be looking for: - -- [An overview of Serde](https://serde.rs/) -- [Data formats supported by Serde](https://serde.rs/#data-formats) -- [Setting up `#[derive(Serialize, Deserialize)]`](https://serde.rs/derive.html) -- [Examples](https://serde.rs/examples.html) -- [API documentation](https://docs.serde.rs/serde/) -- [Release notes](https://github.com/serde-rs/serde/releases) - -## Serde in action - - -
- -```rust -use serde::{Serialize, Deserialize}; - -#[derive(Serialize, Deserialize, Debug)] -struct Point { - x: i32, - y: i32, -} - -fn main() { - let point = Point { x: 1, y: 2 }; - - // Convert the Point to a JSON string. - let serialized = serde_json::to_string(&point).unwrap(); - - // Prints serialized = {"x":1,"y":2} - println!("serialized = {}", serialized); - - // Convert the JSON string back to a Point. - let deserialized: Point = serde_json::from_str(&serialized).unwrap(); - - // Prints deserialized = Point { x: 1, y: 2 } - println!("deserialized = {:?}", deserialized); -} -``` - -## Getting help - -Serde is one of the most widely used Rust libraries so any place that Rustaceans -congregate will be able to help you out. For chat, consider trying the -[#general] or [#beginners] channels of the unofficial community Discord, the -[#rust-usage] channel of the official Rust Project Discord, or the -[#general][zulip] stream in Zulip. For asynchronous, consider the [\[rust\] tag -on StackOverflow][stackoverflow], the [/r/rust] subreddit which has a pinned -weekly easy questions post, or the Rust [Discourse forum][discourse]. It's -acceptable to file a support issue in this repo but they tend not to get as many -eyes as any of the above and may get closed without a response after some time. - -[#general]: https://discord.com/channels/273534239310479360/274215136414400513 -[#beginners]: https://discord.com/channels/273534239310479360/273541522815713281 -[#rust-usage]: https://discord.com/channels/442252698964721669/443150878111694848 -[zulip]: https://rust-lang.zulipchat.com/#narrow/stream/122651-general -[stackoverflow]: https://stackoverflow.com/questions/tagged/rust -[/r/rust]: https://www.reddit.com/r/rust -[discourse]: https://users.rust-lang.org - -
- -#### License - - -Licensed under either of Apache License, Version -2.0 or MIT license at your option. - - -
- - -Unless you explicitly state otherwise, any contribution intentionally submitted -for inclusion in Serde by you, as defined in the Apache-2.0 license, shall be -dual licensed as above, without any additional terms or conditions. - diff --git a/vendor/serde_derive/build.rs b/vendor/serde_derive/build.rs deleted file mode 100644 index d0c827a6..00000000 --- a/vendor/serde_derive/build.rs +++ /dev/null @@ -1,36 +0,0 @@ -use std::env; -use std::process::Command; -use std::str; - -// The rustc-cfg strings below are *not* public API. Please let us know by -// opening a GitHub issue if your build environment requires some way to enable -// these cfgs other than by executing our build script. -fn main() { - let minor = match rustc_minor_version() { - Some(minor) => minor, - None => return, - }; - - // Underscore const names stabilized in Rust 1.37: - // https://blog.rust-lang.org/2019/08/15/Rust-1.37.0.html#using-unnamed-const-items-for-macros - if minor >= 37 { - println!("cargo:rustc-cfg=underscore_consts"); - } - - // The ptr::addr_of! macro stabilized in Rust 1.51: - // https://blog.rust-lang.org/2021/03/25/Rust-1.51.0.html#stabilized-apis - if minor >= 51 { - println!("cargo:rustc-cfg=ptr_addr_of"); - } -} - -fn rustc_minor_version() -> Option { - let rustc = env::var_os("RUSTC")?; - let output = Command::new(rustc).arg("--version").output().ok()?; - let version = str::from_utf8(&output.stdout).ok()?; - let mut pieces = version.split('.'); - if pieces.next() != Some("rustc 1") { - return None; - } - pieces.next()?.parse().ok() -} diff --git a/vendor/serde_derive/crates-io.md b/vendor/serde_derive/crates-io.md deleted file mode 100644 index 07757614..00000000 --- a/vendor/serde_derive/crates-io.md +++ /dev/null @@ -1,62 +0,0 @@ - - -**Serde is a framework for *ser*ializing and *de*serializing Rust data structures efficiently and generically.** - ---- - -You may be looking for: - -- [An overview of Serde](https://serde.rs/) -- [Data formats supported by Serde](https://serde.rs/#data-formats) -- [Setting up `#[derive(Serialize, Deserialize)]`](https://serde.rs/derive.html) -- [Examples](https://serde.rs/examples.html) -- [API documentation](https://docs.serde.rs/serde/) -- [Release notes](https://github.com/serde-rs/serde/releases) - -## Serde in action - -```rust -use serde::{Serialize, Deserialize}; - -#[derive(Serialize, Deserialize, Debug)] -struct Point { - x: i32, - y: i32, -} - -fn main() { - let point = Point { x: 1, y: 2 }; - - // Convert the Point to a JSON string. - let serialized = serde_json::to_string(&point).unwrap(); - - // Prints serialized = {"x":1,"y":2} - println!("serialized = {}", serialized); - - // Convert the JSON string back to a Point. - let deserialized: Point = serde_json::from_str(&serialized).unwrap(); - - // Prints deserialized = Point { x: 1, y: 2 } - println!("deserialized = {:?}", deserialized); -} -``` - -## Getting help - -Serde is one of the most widely used Rust libraries so any place that Rustaceans -congregate will be able to help you out. For chat, consider trying the -[#general] or [#beginners] channels of the unofficial community Discord, the -[#rust-usage] channel of the official Rust Project Discord, or the -[#general][zulip] stream in Zulip. For asynchronous, consider the [\[rust\] tag -on StackOverflow][stackoverflow], the [/r/rust] subreddit which has a pinned -weekly easy questions post, or the Rust [Discourse forum][discourse]. 
It's -acceptable to file a support issue in this repo but they tend not to get as many -eyes as any of the above and may get closed without a response after some time. - -[#general]: https://discord.com/channels/273534239310479360/274215136414400513 -[#beginners]: https://discord.com/channels/273534239310479360/273541522815713281 -[#rust-usage]: https://discord.com/channels/442252698964721669/443150878111694848 -[zulip]: https://rust-lang.zulipchat.com/#narrow/stream/122651-general -[stackoverflow]: https://stackoverflow.com/questions/tagged/rust -[/r/rust]: https://www.reddit.com/r/rust -[discourse]: https://users.rust-lang.org diff --git a/vendor/serde_derive/src/bound.rs b/vendor/serde_derive/src/bound.rs deleted file mode 100644 index abca467b..00000000 --- a/vendor/serde_derive/src/bound.rs +++ /dev/null @@ -1,408 +0,0 @@ -use std::collections::HashSet; - -use syn; -use syn::punctuated::{Pair, Punctuated}; - -use internals::ast::{Container, Data}; -use internals::{attr, ungroup}; - -use proc_macro2::Span; - -// Remove the default from every type parameter because in the generated impls -// they look like associated types: "error: associated type bindings are not -// allowed here". -pub fn without_defaults(generics: &syn::Generics) -> syn::Generics { - syn::Generics { - params: generics - .params - .iter() - .map(|param| match param { - syn::GenericParam::Type(param) => syn::GenericParam::Type(syn::TypeParam { - eq_token: None, - default: None, - ..param.clone() - }), - _ => param.clone(), - }) - .collect(), - ..generics.clone() - } -} - -pub fn with_where_predicates( - generics: &syn::Generics, - predicates: &[syn::WherePredicate], -) -> syn::Generics { - let mut generics = generics.clone(); - generics - .make_where_clause() - .predicates - .extend(predicates.iter().cloned()); - generics -} - -pub fn with_where_predicates_from_fields( - cont: &Container, - generics: &syn::Generics, - from_field: fn(&attr::Field) -> Option<&[syn::WherePredicate]>, -) -> syn::Generics { - let predicates = cont - .data - .all_fields() - .filter_map(|field| from_field(&field.attrs)) - .flat_map(<[syn::WherePredicate]>::to_vec); - - let mut generics = generics.clone(); - generics.make_where_clause().predicates.extend(predicates); - generics -} - -pub fn with_where_predicates_from_variants( - cont: &Container, - generics: &syn::Generics, - from_variant: fn(&attr::Variant) -> Option<&[syn::WherePredicate]>, -) -> syn::Generics { - let variants = match &cont.data { - Data::Enum(variants) => variants, - Data::Struct(_, _) => { - return generics.clone(); - } - }; - - let predicates = variants - .iter() - .filter_map(|variant| from_variant(&variant.attrs)) - .flat_map(<[syn::WherePredicate]>::to_vec); - - let mut generics = generics.clone(); - generics.make_where_clause().predicates.extend(predicates); - generics -} - -// Puts the given bound on any generic type parameters that are used in fields -// for which filter returns true. -// -// For example, the following struct needs the bound `A: Serialize, B: -// Serialize`. -// -// struct S<'b, A, B: 'b, C> { -// a: A, -// b: Option<&'b B> -// #[serde(skip_serializing)] -// c: C, -// } -pub fn with_bound( - cont: &Container, - generics: &syn::Generics, - filter: fn(&attr::Field, Option<&attr::Variant>) -> bool, - bound: &syn::Path, -) -> syn::Generics { - struct FindTyParams<'ast> { - // Set of all generic type parameters on the current struct (A, B, C in - // the example). Initialized up front. 
- all_type_params: HashSet, - - // Set of generic type parameters used in fields for which filter - // returns true (A and B in the example). Filled in as the visitor sees - // them. - relevant_type_params: HashSet, - - // Fields whose type is an associated type of one of the generic type - // parameters. - associated_type_usage: Vec<&'ast syn::TypePath>, - } - - impl<'ast> FindTyParams<'ast> { - fn visit_field(&mut self, field: &'ast syn::Field) { - if let syn::Type::Path(ty) = ungroup(&field.ty) { - if let Some(Pair::Punctuated(t, _)) = ty.path.segments.pairs().next() { - if self.all_type_params.contains(&t.ident) { - self.associated_type_usage.push(ty); - } - } - } - self.visit_type(&field.ty); - } - - fn visit_path(&mut self, path: &'ast syn::Path) { - if let Some(seg) = path.segments.last() { - if seg.ident == "PhantomData" { - // Hardcoded exception, because PhantomData implements - // Serialize and Deserialize whether or not T implements it. - return; - } - } - if path.leading_colon.is_none() && path.segments.len() == 1 { - let id = &path.segments[0].ident; - if self.all_type_params.contains(id) { - self.relevant_type_params.insert(id.clone()); - } - } - for segment in &path.segments { - self.visit_path_segment(segment); - } - } - - // Everything below is simply traversing the syntax tree. - - fn visit_type(&mut self, ty: &'ast syn::Type) { - match ty { - syn::Type::Array(ty) => self.visit_type(&ty.elem), - syn::Type::BareFn(ty) => { - for arg in &ty.inputs { - self.visit_type(&arg.ty); - } - self.visit_return_type(&ty.output); - } - syn::Type::Group(ty) => self.visit_type(&ty.elem), - syn::Type::ImplTrait(ty) => { - for bound in &ty.bounds { - self.visit_type_param_bound(bound); - } - } - syn::Type::Macro(ty) => self.visit_macro(&ty.mac), - syn::Type::Paren(ty) => self.visit_type(&ty.elem), - syn::Type::Path(ty) => { - if let Some(qself) = &ty.qself { - self.visit_type(&qself.ty); - } - self.visit_path(&ty.path); - } - syn::Type::Ptr(ty) => self.visit_type(&ty.elem), - syn::Type::Reference(ty) => self.visit_type(&ty.elem), - syn::Type::Slice(ty) => self.visit_type(&ty.elem), - syn::Type::TraitObject(ty) => { - for bound in &ty.bounds { - self.visit_type_param_bound(bound); - } - } - syn::Type::Tuple(ty) => { - for elem in &ty.elems { - self.visit_type(elem); - } - } - - syn::Type::Infer(_) | syn::Type::Never(_) | syn::Type::Verbatim(_) => {} - - #[cfg(test)] - syn::Type::__TestExhaustive(_) => unimplemented!(), - #[cfg(not(test))] - _ => {} - } - } - - fn visit_path_segment(&mut self, segment: &'ast syn::PathSegment) { - self.visit_path_arguments(&segment.arguments); - } - - fn visit_path_arguments(&mut self, arguments: &'ast syn::PathArguments) { - match arguments { - syn::PathArguments::None => {} - syn::PathArguments::AngleBracketed(arguments) => { - for arg in &arguments.args { - match arg { - syn::GenericArgument::Type(arg) => self.visit_type(arg), - syn::GenericArgument::Binding(arg) => self.visit_type(&arg.ty), - syn::GenericArgument::Lifetime(_) - | syn::GenericArgument::Constraint(_) - | syn::GenericArgument::Const(_) => {} - } - } - } - syn::PathArguments::Parenthesized(arguments) => { - for argument in &arguments.inputs { - self.visit_type(argument); - } - self.visit_return_type(&arguments.output); - } - } - } - - fn visit_return_type(&mut self, return_type: &'ast syn::ReturnType) { - match return_type { - syn::ReturnType::Default => {} - syn::ReturnType::Type(_, output) => self.visit_type(output), - } - } - - fn visit_type_param_bound(&mut self, bound: &'ast 
syn::TypeParamBound) { - match bound { - syn::TypeParamBound::Trait(bound) => self.visit_path(&bound.path), - syn::TypeParamBound::Lifetime(_) => {} - } - } - - // Type parameter should not be considered used by a macro path. - // - // struct TypeMacro { - // mac: T!(), - // marker: PhantomData, - // } - fn visit_macro(&mut self, _mac: &'ast syn::Macro) {} - } - - let all_type_params = generics - .type_params() - .map(|param| param.ident.clone()) - .collect(); - - let mut visitor = FindTyParams { - all_type_params, - relevant_type_params: HashSet::new(), - associated_type_usage: Vec::new(), - }; - match &cont.data { - Data::Enum(variants) => { - for variant in variants.iter() { - let relevant_fields = variant - .fields - .iter() - .filter(|field| filter(&field.attrs, Some(&variant.attrs))); - for field in relevant_fields { - visitor.visit_field(field.original); - } - } - } - Data::Struct(_, fields) => { - for field in fields.iter().filter(|field| filter(&field.attrs, None)) { - visitor.visit_field(field.original); - } - } - } - - let relevant_type_params = visitor.relevant_type_params; - let associated_type_usage = visitor.associated_type_usage; - let new_predicates = generics - .type_params() - .map(|param| param.ident.clone()) - .filter(|id| relevant_type_params.contains(id)) - .map(|id| syn::TypePath { - qself: None, - path: id.into(), - }) - .chain(associated_type_usage.into_iter().cloned()) - .map(|bounded_ty| { - syn::WherePredicate::Type(syn::PredicateType { - lifetimes: None, - // the type parameter that is being bounded e.g. T - bounded_ty: syn::Type::Path(bounded_ty), - colon_token: ::default(), - // the bound e.g. Serialize - bounds: vec![syn::TypeParamBound::Trait(syn::TraitBound { - paren_token: None, - modifier: syn::TraitBoundModifier::None, - lifetimes: None, - path: bound.clone(), - })] - .into_iter() - .collect(), - }) - }); - - let mut generics = generics.clone(); - generics - .make_where_clause() - .predicates - .extend(new_predicates); - generics -} - -pub fn with_self_bound( - cont: &Container, - generics: &syn::Generics, - bound: &syn::Path, -) -> syn::Generics { - let mut generics = generics.clone(); - generics - .make_where_clause() - .predicates - .push(syn::WherePredicate::Type(syn::PredicateType { - lifetimes: None, - // the type that is being bounded e.g. MyStruct<'a, T> - bounded_ty: type_of_item(cont), - colon_token: ::default(), - // the bound e.g. 
Default - bounds: vec![syn::TypeParamBound::Trait(syn::TraitBound { - paren_token: None, - modifier: syn::TraitBoundModifier::None, - lifetimes: None, - path: bound.clone(), - })] - .into_iter() - .collect(), - })); - generics -} - -pub fn with_lifetime_bound(generics: &syn::Generics, lifetime: &str) -> syn::Generics { - let bound = syn::Lifetime::new(lifetime, Span::call_site()); - let def = syn::LifetimeDef { - attrs: Vec::new(), - lifetime: bound.clone(), - colon_token: None, - bounds: Punctuated::new(), - }; - - let params = Some(syn::GenericParam::Lifetime(def)) - .into_iter() - .chain(generics.params.iter().cloned().map(|mut param| { - match &mut param { - syn::GenericParam::Lifetime(param) => { - param.bounds.push(bound.clone()); - } - syn::GenericParam::Type(param) => { - param - .bounds - .push(syn::TypeParamBound::Lifetime(bound.clone())); - } - syn::GenericParam::Const(_) => {} - } - param - })) - .collect(); - - syn::Generics { - params, - ..generics.clone() - } -} - -fn type_of_item(cont: &Container) -> syn::Type { - syn::Type::Path(syn::TypePath { - qself: None, - path: syn::Path { - leading_colon: None, - segments: vec![syn::PathSegment { - ident: cont.ident.clone(), - arguments: syn::PathArguments::AngleBracketed( - syn::AngleBracketedGenericArguments { - colon2_token: None, - lt_token: ::default(), - args: cont - .generics - .params - .iter() - .map(|param| match param { - syn::GenericParam::Type(param) => { - syn::GenericArgument::Type(syn::Type::Path(syn::TypePath { - qself: None, - path: param.ident.clone().into(), - })) - } - syn::GenericParam::Lifetime(param) => { - syn::GenericArgument::Lifetime(param.lifetime.clone()) - } - syn::GenericParam::Const(_) => { - panic!("Serde does not support const generics yet"); - } - }) - .collect(), - gt_token: ]>::default(), - }, - ), - }] - .into_iter() - .collect(), - }, - }) -} diff --git a/vendor/serde_derive/src/de.rs b/vendor/serde_derive/src/de.rs deleted file mode 100644 index ff7bc42f..00000000 --- a/vendor/serde_derive/src/de.rs +++ /dev/null @@ -1,3132 +0,0 @@ -use proc_macro2::{Literal, Span, TokenStream}; -use quote::ToTokens; -use syn::punctuated::Punctuated; -use syn::spanned::Spanned; -use syn::{self, Ident, Index, Member}; - -use bound; -use dummy; -use fragment::{Expr, Fragment, Match, Stmts}; -use internals::ast::{Container, Data, Field, Style, Variant}; -use internals::{attr, replace_receiver, ungroup, Ctxt, Derive}; -use pretend; - -use std::collections::BTreeSet; -use std::ptr; - -pub fn expand_derive_deserialize( - input: &mut syn::DeriveInput, -) -> Result> { - replace_receiver(input); - - let ctxt = Ctxt::new(); - let cont = match Container::from_ast(&ctxt, input, Derive::Deserialize) { - Some(cont) => cont, - None => return Err(ctxt.check().unwrap_err()), - }; - precondition(&ctxt, &cont); - ctxt.check()?; - - let ident = &cont.ident; - let params = Parameters::new(&cont); - let (de_impl_generics, _, ty_generics, where_clause) = split_with_de_lifetime(¶ms); - let body = Stmts(deserialize_body(&cont, ¶ms)); - let delife = params.borrowed.de_lifetime(); - let serde = cont.attrs.serde_path(); - - let impl_block = if let Some(remote) = cont.attrs.remote() { - let vis = &input.vis; - let used = pretend::pretend_used(&cont, params.is_packed); - quote! 
{ - impl #de_impl_generics #ident #ty_generics #where_clause { - #vis fn deserialize<__D>(__deserializer: __D) -> #serde::__private::Result<#remote #ty_generics, __D::Error> - where - __D: #serde::Deserializer<#delife>, - { - #used - #body - } - } - } - } else { - let fn_deserialize_in_place = deserialize_in_place_body(&cont, ¶ms); - - quote! { - #[automatically_derived] - impl #de_impl_generics #serde::Deserialize<#delife> for #ident #ty_generics #where_clause { - fn deserialize<__D>(__deserializer: __D) -> #serde::__private::Result - where - __D: #serde::Deserializer<#delife>, - { - #body - } - - #fn_deserialize_in_place - } - } - }; - - Ok(dummy::wrap_in_const( - cont.attrs.custom_serde_path(), - "DESERIALIZE", - ident, - impl_block, - )) -} - -fn precondition(cx: &Ctxt, cont: &Container) { - precondition_sized(cx, cont); - precondition_no_de_lifetime(cx, cont); -} - -fn precondition_sized(cx: &Ctxt, cont: &Container) { - if let Data::Struct(_, fields) = &cont.data { - if let Some(last) = fields.last() { - if let syn::Type::Slice(_) = ungroup(last.ty) { - cx.error_spanned_by( - cont.original, - "cannot deserialize a dynamically sized struct", - ); - } - } - } -} - -fn precondition_no_de_lifetime(cx: &Ctxt, cont: &Container) { - if let BorrowedLifetimes::Borrowed(_) = borrowed_lifetimes(cont) { - for param in cont.generics.lifetimes() { - if param.lifetime.to_string() == "'de" { - cx.error_spanned_by( - ¶m.lifetime, - "cannot deserialize when there is a lifetime parameter called 'de", - ); - return; - } - } - } -} - -struct Parameters { - /// Name of the type the `derive` is on. - local: syn::Ident, - - /// Path to the type the impl is for. Either a single `Ident` for local - /// types or `some::remote::Ident` for remote types. Does not include - /// generic parameters. - this: syn::Path, - - /// Generics including any explicit and inferred bounds for the impl. - generics: syn::Generics, - - /// Lifetimes borrowed from the deserializer. These will become bounds on - /// the `'de` lifetime of the deserializer. - borrowed: BorrowedLifetimes, - - /// At least one field has a serde(getter) attribute, implying that the - /// remote type has a private field. - has_getter: bool, - - /// Type has a repr(packed) attribute. - is_packed: bool, -} - -impl Parameters { - fn new(cont: &Container) -> Self { - let local = cont.ident.clone(); - let this = match cont.attrs.remote() { - Some(remote) => remote.clone(), - None => cont.ident.clone().into(), - }; - let borrowed = borrowed_lifetimes(cont); - let generics = build_generics(cont, &borrowed); - let has_getter = cont.data.has_getter(); - let is_packed = cont.attrs.is_packed(); - - Parameters { - local, - this, - generics, - borrowed, - has_getter, - is_packed, - } - } - - /// Type name to use in error messages and `&'static str` arguments to - /// various Deserializer methods. - fn type_name(&self) -> String { - self.this.segments.last().unwrap().ident.to_string() - } -} - -// All the generics in the input, plus a bound `T: Deserialize` for each generic -// field type that will be deserialized by us, plus a bound `T: Default` for -// each generic field type that will be set to a default value. 
-fn build_generics(cont: &Container, borrowed: &BorrowedLifetimes) -> syn::Generics { - let generics = bound::without_defaults(cont.generics); - - let generics = bound::with_where_predicates_from_fields(cont, &generics, attr::Field::de_bound); - - let generics = - bound::with_where_predicates_from_variants(cont, &generics, attr::Variant::de_bound); - - match cont.attrs.de_bound() { - Some(predicates) => bound::with_where_predicates(&generics, predicates), - None => { - let generics = match *cont.attrs.default() { - attr::Default::Default => bound::with_self_bound( - cont, - &generics, - &parse_quote!(_serde::__private::Default), - ), - attr::Default::None | attr::Default::Path(_) => generics, - }; - - let delife = borrowed.de_lifetime(); - let generics = bound::with_bound( - cont, - &generics, - needs_deserialize_bound, - &parse_quote!(_serde::Deserialize<#delife>), - ); - - bound::with_bound( - cont, - &generics, - requires_default, - &parse_quote!(_serde::__private::Default), - ) - } - } -} - -// Fields with a `skip_deserializing` or `deserialize_with` attribute, or which -// belong to a variant with a `skip_deserializing` or `deserialize_with` -// attribute, are not deserialized by us so we do not generate a bound. Fields -// with a `bound` attribute specify their own bound so we do not generate one. -// All other fields may need a `T: Deserialize` bound where T is the type of the -// field. -fn needs_deserialize_bound(field: &attr::Field, variant: Option<&attr::Variant>) -> bool { - !field.skip_deserializing() - && field.deserialize_with().is_none() - && field.de_bound().is_none() - && variant.map_or(true, |variant| { - !variant.skip_deserializing() - && variant.deserialize_with().is_none() - && variant.de_bound().is_none() - }) -} - -// Fields with a `default` attribute (not `default=...`), and fields with a -// `skip_deserializing` attribute that do not also have `default=...`. -fn requires_default(field: &attr::Field, _variant: Option<&attr::Variant>) -> bool { - if let attr::Default::Default = *field.default() { - true - } else { - false - } -} - -enum BorrowedLifetimes { - Borrowed(BTreeSet), - Static, -} - -impl BorrowedLifetimes { - fn de_lifetime(&self) -> syn::Lifetime { - match *self { - BorrowedLifetimes::Borrowed(_) => syn::Lifetime::new("'de", Span::call_site()), - BorrowedLifetimes::Static => syn::Lifetime::new("'static", Span::call_site()), - } - } - - fn de_lifetime_def(&self) -> Option { - match self { - BorrowedLifetimes::Borrowed(bounds) => Some(syn::LifetimeDef { - attrs: Vec::new(), - lifetime: syn::Lifetime::new("'de", Span::call_site()), - colon_token: None, - bounds: bounds.iter().cloned().collect(), - }), - BorrowedLifetimes::Static => None, - } - } -} - -// The union of lifetimes borrowed by each field of the container. -// -// These turn into bounds on the `'de` lifetime of the Deserialize impl. If -// lifetimes `'a` and `'b` are borrowed but `'c` is not, the impl is: -// -// impl<'de: 'a + 'b, 'a, 'b, 'c> Deserialize<'de> for S<'a, 'b, 'c> -// -// If any borrowed lifetime is `'static`, then `'de: 'static` would be redundant -// and we use plain `'static` instead of `'de`. 
-fn borrowed_lifetimes(cont: &Container) -> BorrowedLifetimes { - let mut lifetimes = BTreeSet::new(); - for field in cont.data.all_fields() { - if !field.attrs.skip_deserializing() { - lifetimes.extend(field.attrs.borrowed_lifetimes().iter().cloned()); - } - } - if lifetimes.iter().any(|b| b.to_string() == "'static") { - BorrowedLifetimes::Static - } else { - BorrowedLifetimes::Borrowed(lifetimes) - } -} - -fn deserialize_body(cont: &Container, params: &Parameters) -> Fragment { - if cont.attrs.transparent() { - deserialize_transparent(cont, params) - } else if let Some(type_from) = cont.attrs.type_from() { - deserialize_from(type_from) - } else if let Some(type_try_from) = cont.attrs.type_try_from() { - deserialize_try_from(type_try_from) - } else if let attr::Identifier::No = cont.attrs.identifier() { - match &cont.data { - Data::Enum(variants) => deserialize_enum(params, variants, &cont.attrs), - Data::Struct(Style::Struct, fields) => { - deserialize_struct(None, params, fields, &cont.attrs, None, &Untagged::No) - } - Data::Struct(Style::Tuple, fields) | Data::Struct(Style::Newtype, fields) => { - deserialize_tuple(None, params, fields, &cont.attrs, None) - } - Data::Struct(Style::Unit, _) => deserialize_unit_struct(params, &cont.attrs), - } - } else { - match &cont.data { - Data::Enum(variants) => deserialize_custom_identifier(params, variants, &cont.attrs), - Data::Struct(_, _) => unreachable!("checked in serde_derive_internals"), - } - } -} - -#[cfg(feature = "deserialize_in_place")] -fn deserialize_in_place_body(cont: &Container, params: &Parameters) -> Option { - // Only remote derives have getters, and we do not generate - // deserialize_in_place for remote derives. - assert!(!params.has_getter); - - if cont.attrs.transparent() - || cont.attrs.type_from().is_some() - || cont.attrs.type_try_from().is_some() - || cont.attrs.identifier().is_some() - || cont - .data - .all_fields() - .all(|f| f.attrs.deserialize_with().is_some()) - { - return None; - } - - let code = match &cont.data { - Data::Struct(Style::Struct, fields) => { - deserialize_struct_in_place(None, params, fields, &cont.attrs, None)? - } - Data::Struct(Style::Tuple, fields) | Data::Struct(Style::Newtype, fields) => { - deserialize_tuple_in_place(None, params, fields, &cont.attrs, None) - } - Data::Enum(_) | Data::Struct(Style::Unit, _) => { - return None; - } - }; - - let delife = params.borrowed.de_lifetime(); - let stmts = Stmts(code); - - let fn_deserialize_in_place = quote_block! 
{ - fn deserialize_in_place<__D>(__deserializer: __D, __place: &mut Self) -> _serde::__private::Result<(), __D::Error> - where - __D: _serde::Deserializer<#delife>, - { - #stmts - } - }; - - Some(Stmts(fn_deserialize_in_place)) -} - -#[cfg(not(feature = "deserialize_in_place"))] -fn deserialize_in_place_body(_cont: &Container, _params: &Parameters) -> Option { - None -} - -fn deserialize_transparent(cont: &Container, params: &Parameters) -> Fragment { - let fields = match &cont.data { - Data::Struct(_, fields) => fields, - Data::Enum(_) => unreachable!(), - }; - - let this = ¶ms.this; - let transparent_field = fields.iter().find(|f| f.attrs.transparent()).unwrap(); - - let path = match transparent_field.attrs.deserialize_with() { - Some(path) => quote!(#path), - None => { - let span = transparent_field.original.span(); - quote_spanned!(span=> _serde::Deserialize::deserialize) - } - }; - - let assign = fields.iter().map(|field| { - let member = &field.member; - if ptr::eq(field, transparent_field) { - quote!(#member: __transparent) - } else { - let value = match field.attrs.default() { - attr::Default::Default => quote!(_serde::__private::Default::default()), - attr::Default::Path(path) => quote!(#path()), - attr::Default::None => quote!(_serde::__private::PhantomData), - }; - quote!(#member: #value) - } - }); - - quote_block! { - _serde::__private::Result::map( - #path(__deserializer), - |__transparent| #this { #(#assign),* }) - } -} - -fn deserialize_from(type_from: &syn::Type) -> Fragment { - quote_block! { - _serde::__private::Result::map( - <#type_from as _serde::Deserialize>::deserialize(__deserializer), - _serde::__private::From::from) - } -} - -fn deserialize_try_from(type_try_from: &syn::Type) -> Fragment { - quote_block! { - _serde::__private::Result::and_then( - <#type_try_from as _serde::Deserialize>::deserialize(__deserializer), - |v| _serde::__private::TryFrom::try_from(v).map_err(_serde::de::Error::custom)) - } -} - -fn deserialize_unit_struct(params: &Parameters, cattrs: &attr::Container) -> Fragment { - let this = ¶ms.this; - let type_name = cattrs.name().deserialize_name(); - - let expecting = format!("unit struct {}", params.type_name()); - let expecting = cattrs.expecting().unwrap_or(&expecting); - - quote_block! { - struct __Visitor; - - impl<'de> _serde::de::Visitor<'de> for __Visitor { - type Value = #this; - - fn expecting(&self, __formatter: &mut _serde::__private::Formatter) -> _serde::__private::fmt::Result { - _serde::__private::Formatter::write_str(__formatter, #expecting) - } - - #[inline] - fn visit_unit<__E>(self) -> _serde::__private::Result - where - __E: _serde::de::Error, - { - _serde::__private::Ok(#this) - } - } - - _serde::Deserializer::deserialize_unit_struct(__deserializer, #type_name, __Visitor) - } -} - -fn deserialize_tuple( - variant_ident: Option<&syn::Ident>, - params: &Parameters, - fields: &[Field], - cattrs: &attr::Container, - deserializer: Option, -) -> Fragment { - let this = ¶ms.this; - let (de_impl_generics, de_ty_generics, ty_generics, where_clause) = - split_with_de_lifetime(params); - let delife = params.borrowed.de_lifetime(); - - assert!(!cattrs.has_flatten()); - - // If there are getters (implying private fields), construct the local type - // and use an `Into` conversion to get the remote type. If there are no - // getters then construct the target type directly. 
- let construct = if params.has_getter { - let local = ¶ms.local; - quote!(#local) - } else { - quote!(#this) - }; - - let is_enum = variant_ident.is_some(); - let type_path = match variant_ident { - Some(variant_ident) => quote!(#construct::#variant_ident), - None => construct, - }; - let expecting = match variant_ident { - Some(variant_ident) => format!("tuple variant {}::{}", params.type_name(), variant_ident), - None => format!("tuple struct {}", params.type_name()), - }; - let expecting = cattrs.expecting().unwrap_or(&expecting); - - let nfields = fields.len(); - - let visit_newtype_struct = if !is_enum && nfields == 1 { - Some(deserialize_newtype_struct(&type_path, params, &fields[0])) - } else { - None - }; - - let visit_seq = Stmts(deserialize_seq( - &type_path, params, fields, false, cattrs, expecting, - )); - - let visitor_expr = quote! { - __Visitor { - marker: _serde::__private::PhantomData::<#this #ty_generics>, - lifetime: _serde::__private::PhantomData, - } - }; - let dispatch = if let Some(deserializer) = deserializer { - quote!(_serde::Deserializer::deserialize_tuple(#deserializer, #nfields, #visitor_expr)) - } else if is_enum { - quote!(_serde::de::VariantAccess::tuple_variant(__variant, #nfields, #visitor_expr)) - } else if nfields == 1 { - let type_name = cattrs.name().deserialize_name(); - quote!(_serde::Deserializer::deserialize_newtype_struct(__deserializer, #type_name, #visitor_expr)) - } else { - let type_name = cattrs.name().deserialize_name(); - quote!(_serde::Deserializer::deserialize_tuple_struct(__deserializer, #type_name, #nfields, #visitor_expr)) - }; - - let all_skipped = fields.iter().all(|field| field.attrs.skip_deserializing()); - let visitor_var = if all_skipped { - quote!(_) - } else { - quote!(mut __seq) - }; - - quote_block! { - struct __Visitor #de_impl_generics #where_clause { - marker: _serde::__private::PhantomData<#this #ty_generics>, - lifetime: _serde::__private::PhantomData<&#delife ()>, - } - - impl #de_impl_generics _serde::de::Visitor<#delife> for __Visitor #de_ty_generics #where_clause { - type Value = #this #ty_generics; - - fn expecting(&self, __formatter: &mut _serde::__private::Formatter) -> _serde::__private::fmt::Result { - _serde::__private::Formatter::write_str(__formatter, #expecting) - } - - #visit_newtype_struct - - #[inline] - fn visit_seq<__A>(self, #visitor_var: __A) -> _serde::__private::Result - where - __A: _serde::de::SeqAccess<#delife>, - { - #visit_seq - } - } - - #dispatch - } -} - -#[cfg(feature = "deserialize_in_place")] -fn deserialize_tuple_in_place( - variant_ident: Option, - params: &Parameters, - fields: &[Field], - cattrs: &attr::Container, - deserializer: Option, -) -> Fragment { - let this = ¶ms.this; - let (de_impl_generics, de_ty_generics, ty_generics, where_clause) = - split_with_de_lifetime(params); - let delife = params.borrowed.de_lifetime(); - - assert!(!cattrs.has_flatten()); - - let is_enum = variant_ident.is_some(); - let expecting = match variant_ident { - Some(variant_ident) => format!("tuple variant {}::{}", params.type_name(), variant_ident), - None => format!("tuple struct {}", params.type_name()), - }; - let expecting = cattrs.expecting().unwrap_or(&expecting); - - let nfields = fields.len(); - - let visit_newtype_struct = if !is_enum && nfields == 1 { - Some(deserialize_newtype_struct_in_place(params, &fields[0])) - } else { - None - }; - - let visit_seq = Stmts(deserialize_seq_in_place(params, fields, cattrs, expecting)); - - let visitor_expr = quote! 
{ - __Visitor { - place: __place, - lifetime: _serde::__private::PhantomData, - } - }; - - let dispatch = if let Some(deserializer) = deserializer { - quote!(_serde::Deserializer::deserialize_tuple(#deserializer, #nfields, #visitor_expr)) - } else if is_enum { - quote!(_serde::de::VariantAccess::tuple_variant(__variant, #nfields, #visitor_expr)) - } else if nfields == 1 { - let type_name = cattrs.name().deserialize_name(); - quote!(_serde::Deserializer::deserialize_newtype_struct(__deserializer, #type_name, #visitor_expr)) - } else { - let type_name = cattrs.name().deserialize_name(); - quote!(_serde::Deserializer::deserialize_tuple_struct(__deserializer, #type_name, #nfields, #visitor_expr)) - }; - - let all_skipped = fields.iter().all(|field| field.attrs.skip_deserializing()); - let visitor_var = if all_skipped { - quote!(_) - } else { - quote!(mut __seq) - }; - - let in_place_impl_generics = de_impl_generics.in_place(); - let in_place_ty_generics = de_ty_generics.in_place(); - let place_life = place_lifetime(); - - quote_block! { - struct __Visitor #in_place_impl_generics #where_clause { - place: &#place_life mut #this #ty_generics, - lifetime: _serde::__private::PhantomData<&#delife ()>, - } - - impl #in_place_impl_generics _serde::de::Visitor<#delife> for __Visitor #in_place_ty_generics #where_clause { - type Value = (); - - fn expecting(&self, __formatter: &mut _serde::__private::Formatter) -> _serde::__private::fmt::Result { - _serde::__private::Formatter::write_str(__formatter, #expecting) - } - - #visit_newtype_struct - - #[inline] - fn visit_seq<__A>(self, #visitor_var: __A) -> _serde::__private::Result - where - __A: _serde::de::SeqAccess<#delife>, - { - #visit_seq - } - } - - #dispatch - } -} - -fn deserialize_seq( - type_path: &TokenStream, - params: &Parameters, - fields: &[Field], - is_struct: bool, - cattrs: &attr::Container, - expecting: &str, -) -> Fragment { - let vars = (0..fields.len()).map(field_i as fn(_) -> _); - - let deserialized_count = fields - .iter() - .filter(|field| !field.attrs.skip_deserializing()) - .count(); - let expecting = if deserialized_count == 1 { - format!("{} with 1 element", expecting) - } else { - format!("{} with {} elements", expecting, deserialized_count) - }; - let expecting = cattrs.expecting().unwrap_or(&expecting); - - let mut index_in_seq = 0_usize; - let let_values = vars.clone().zip(fields).map(|(var, field)| { - if field.attrs.skip_deserializing() { - let default = Expr(expr_is_missing(field, cattrs)); - quote! { - let #var = #default; - } - } else { - let visit = match field.attrs.deserialize_with() { - None => { - let field_ty = field.ty; - let span = field.original.span(); - let func = - quote_spanned!(span=> _serde::de::SeqAccess::next_element::<#field_ty>); - quote!(try!(#func(&mut __seq))) - } - Some(path) => { - let (wrapper, wrapper_ty) = wrap_deserialize_field_with(params, field.ty, path); - quote!({ - #wrapper - _serde::__private::Option::map( - try!(_serde::de::SeqAccess::next_element::<#wrapper_ty>(&mut __seq)), - |__wrap| __wrap.value) - }) - } - }; - let value_if_none = match field.attrs.default() { - attr::Default::Default => quote!(_serde::__private::Default::default()), - attr::Default::Path(path) => quote!(#path()), - attr::Default::None => quote!( - return _serde::__private::Err(_serde::de::Error::invalid_length(#index_in_seq, &#expecting)); - ), - }; - let assign = quote! 
{ - let #var = match #visit { - _serde::__private::Some(__value) => __value, - _serde::__private::None => { - #value_if_none - } - }; - }; - index_in_seq += 1; - assign - } - }); - - let mut result = if is_struct { - let names = fields.iter().map(|f| &f.member); - quote! { - #type_path { #( #names: #vars ),* } - } - } else { - quote! { - #type_path ( #(#vars),* ) - } - }; - - if params.has_getter { - let this = ¶ms.this; - result = quote! { - _serde::__private::Into::<#this>::into(#result) - }; - } - - let let_default = match cattrs.default() { - attr::Default::Default => Some(quote!( - let __default: Self::Value = _serde::__private::Default::default(); - )), - attr::Default::Path(path) => Some(quote!( - let __default: Self::Value = #path(); - )), - attr::Default::None => { - // We don't need the default value, to prevent an unused variable warning - // we'll leave the line empty. - None - } - }; - - quote_block! { - #let_default - #(#let_values)* - _serde::__private::Ok(#result) - } -} - -#[cfg(feature = "deserialize_in_place")] -fn deserialize_seq_in_place( - params: &Parameters, - fields: &[Field], - cattrs: &attr::Container, - expecting: &str, -) -> Fragment { - let deserialized_count = fields - .iter() - .filter(|field| !field.attrs.skip_deserializing()) - .count(); - let expecting = if deserialized_count == 1 { - format!("{} with 1 element", expecting) - } else { - format!("{} with {} elements", expecting, deserialized_count) - }; - let expecting = cattrs.expecting().unwrap_or(&expecting); - - let mut index_in_seq = 0usize; - let write_values = fields.iter().map(|field| { - let member = &field.member; - - if field.attrs.skip_deserializing() { - let default = Expr(expr_is_missing(field, cattrs)); - quote! { - self.place.#member = #default; - } - } else { - let value_if_none = match field.attrs.default() { - attr::Default::Default => quote!( - self.place.#member = _serde::__private::Default::default(); - ), - attr::Default::Path(path) => quote!( - self.place.#member = #path(); - ), - attr::Default::None => quote!( - return _serde::__private::Err(_serde::de::Error::invalid_length(#index_in_seq, &#expecting)); - ), - }; - let write = match field.attrs.deserialize_with() { - None => { - quote! { - if let _serde::__private::None = try!(_serde::de::SeqAccess::next_element_seed(&mut __seq, - _serde::__private::de::InPlaceSeed(&mut self.place.#member))) - { - #value_if_none - } - } - } - Some(path) => { - let (wrapper, wrapper_ty) = wrap_deserialize_field_with(params, field.ty, path); - quote!({ - #wrapper - match try!(_serde::de::SeqAccess::next_element::<#wrapper_ty>(&mut __seq)) { - _serde::__private::Some(__wrap) => { - self.place.#member = __wrap.value; - } - _serde::__private::None => { - #value_if_none - } - } - }) - } - }; - index_in_seq += 1; - write - } - }); - - let this = ¶ms.this; - let (_, ty_generics, _) = params.generics.split_for_impl(); - let let_default = match cattrs.default() { - attr::Default::Default => Some(quote!( - let __default: #this #ty_generics = _serde::__private::Default::default(); - )), - attr::Default::Path(path) => Some(quote!( - let __default: #this #ty_generics = #path(); - )), - attr::Default::None => { - // We don't need the default value, to prevent an unused variable warning - // we'll leave the line empty. - None - } - }; - - quote_block! 
{ - #let_default - #(#write_values)* - _serde::__private::Ok(()) - } -} - -fn deserialize_newtype_struct( - type_path: &TokenStream, - params: &Parameters, - field: &Field, -) -> TokenStream { - let delife = params.borrowed.de_lifetime(); - let field_ty = field.ty; - - let value = match field.attrs.deserialize_with() { - None => { - let span = field.original.span(); - let func = quote_spanned!(span=> <#field_ty as _serde::Deserialize>::deserialize); - quote! { - try!(#func(__e)) - } - } - Some(path) => { - quote! { - try!(#path(__e)) - } - } - }; - - let mut result = quote!(#type_path(__field0)); - if params.has_getter { - let this = ¶ms.this; - result = quote! { - _serde::__private::Into::<#this>::into(#result) - }; - } - - quote! { - #[inline] - fn visit_newtype_struct<__E>(self, __e: __E) -> _serde::__private::Result - where - __E: _serde::Deserializer<#delife>, - { - let __field0: #field_ty = #value; - _serde::__private::Ok(#result) - } - } -} - -#[cfg(feature = "deserialize_in_place")] -fn deserialize_newtype_struct_in_place(params: &Parameters, field: &Field) -> TokenStream { - // We do not generate deserialize_in_place if every field has a - // deserialize_with. - assert!(field.attrs.deserialize_with().is_none()); - - let delife = params.borrowed.de_lifetime(); - - quote! { - #[inline] - fn visit_newtype_struct<__E>(self, __e: __E) -> _serde::__private::Result - where - __E: _serde::Deserializer<#delife>, - { - _serde::Deserialize::deserialize_in_place(__e, &mut self.place.0) - } - } -} - -enum Untagged { - Yes, - No, -} - -fn deserialize_struct( - variant_ident: Option<&syn::Ident>, - params: &Parameters, - fields: &[Field], - cattrs: &attr::Container, - deserializer: Option, - untagged: &Untagged, -) -> Fragment { - let is_enum = variant_ident.is_some(); - - let this = ¶ms.this; - let (de_impl_generics, de_ty_generics, ty_generics, where_clause) = - split_with_de_lifetime(params); - let delife = params.borrowed.de_lifetime(); - - // If there are getters (implying private fields), construct the local type - // and use an `Into` conversion to get the remote type. If there are no - // getters then construct the target type directly. - let construct = if params.has_getter { - let local = ¶ms.local; - quote!(#local) - } else { - quote!(#this) - }; - - let type_path = match variant_ident { - Some(variant_ident) => quote!(#construct::#variant_ident), - None => construct, - }; - let expecting = match variant_ident { - Some(variant_ident) => format!("struct variant {}::{}", params.type_name(), variant_ident), - None => format!("struct {}", params.type_name()), - }; - let expecting = cattrs.expecting().unwrap_or(&expecting); - - let visit_seq = Stmts(deserialize_seq( - &type_path, params, fields, true, cattrs, expecting, - )); - - let (field_visitor, fields_stmt, visit_map) = if cattrs.has_flatten() { - deserialize_struct_as_map_visitor(&type_path, params, fields, cattrs) - } else { - deserialize_struct_as_struct_visitor(&type_path, params, fields, cattrs) - }; - let field_visitor = Stmts(field_visitor); - let fields_stmt = fields_stmt.map(Stmts); - let visit_map = Stmts(visit_map); - - let visitor_expr = quote! { - __Visitor { - marker: _serde::__private::PhantomData::<#this #ty_generics>, - lifetime: _serde::__private::PhantomData, - } - }; - let dispatch = if let Some(deserializer) = deserializer { - quote! { - _serde::Deserializer::deserialize_any(#deserializer, #visitor_expr) - } - } else if is_enum && cattrs.has_flatten() { - quote! 
{ - _serde::de::VariantAccess::newtype_variant_seed(__variant, #visitor_expr) - } - } else if is_enum { - quote! { - _serde::de::VariantAccess::struct_variant(__variant, FIELDS, #visitor_expr) - } - } else if cattrs.has_flatten() { - quote! { - _serde::Deserializer::deserialize_map(__deserializer, #visitor_expr) - } - } else { - let type_name = cattrs.name().deserialize_name(); - quote! { - _serde::Deserializer::deserialize_struct(__deserializer, #type_name, FIELDS, #visitor_expr) - } - }; - - let all_skipped = fields.iter().all(|field| field.attrs.skip_deserializing()); - let visitor_var = if all_skipped { - quote!(_) - } else { - quote!(mut __seq) - }; - - // untagged struct variants do not get a visit_seq method. The same applies to - // structs that only have a map representation. - let visit_seq = match *untagged { - Untagged::No if !cattrs.has_flatten() => Some(quote! { - #[inline] - fn visit_seq<__A>(self, #visitor_var: __A) -> _serde::__private::Result - where - __A: _serde::de::SeqAccess<#delife>, - { - #visit_seq - } - }), - _ => None, - }; - - let visitor_seed = if is_enum && cattrs.has_flatten() { - Some(quote! { - impl #de_impl_generics _serde::de::DeserializeSeed<#delife> for __Visitor #de_ty_generics #where_clause { - type Value = #this #ty_generics; - - fn deserialize<__D>(self, __deserializer: __D) -> _serde::__private::Result - where - __D: _serde::Deserializer<'de>, - { - _serde::Deserializer::deserialize_map(__deserializer, self) - } - } - }) - } else { - None - }; - - quote_block! { - #field_visitor - - struct __Visitor #de_impl_generics #where_clause { - marker: _serde::__private::PhantomData<#this #ty_generics>, - lifetime: _serde::__private::PhantomData<&#delife ()>, - } - - impl #de_impl_generics _serde::de::Visitor<#delife> for __Visitor #de_ty_generics #where_clause { - type Value = #this #ty_generics; - - fn expecting(&self, __formatter: &mut _serde::__private::Formatter) -> _serde::__private::fmt::Result { - _serde::__private::Formatter::write_str(__formatter, #expecting) - } - - #visit_seq - - #[inline] - fn visit_map<__A>(self, mut __map: __A) -> _serde::__private::Result - where - __A: _serde::de::MapAccess<#delife>, - { - #visit_map - } - } - - #visitor_seed - - #fields_stmt - - #dispatch - } -} - -#[cfg(feature = "deserialize_in_place")] -fn deserialize_struct_in_place( - variant_ident: Option, - params: &Parameters, - fields: &[Field], - cattrs: &attr::Container, - deserializer: Option, -) -> Option { - let is_enum = variant_ident.is_some(); - - // for now we do not support in_place deserialization for structs that - // are represented as map. - if cattrs.has_flatten() { - return None; - } - - let this = ¶ms.this; - let (de_impl_generics, de_ty_generics, ty_generics, where_clause) = - split_with_de_lifetime(params); - let delife = params.borrowed.de_lifetime(); - - let expecting = match variant_ident { - Some(variant_ident) => format!("struct variant {}::{}", params.type_name(), variant_ident), - None => format!("struct {}", params.type_name()), - }; - let expecting = cattrs.expecting().unwrap_or(&expecting); - - let visit_seq = Stmts(deserialize_seq_in_place(params, fields, cattrs, expecting)); - - let (field_visitor, fields_stmt, visit_map) = - deserialize_struct_as_struct_in_place_visitor(params, fields, cattrs); - - let field_visitor = Stmts(field_visitor); - let fields_stmt = Stmts(fields_stmt); - let visit_map = Stmts(visit_map); - - let visitor_expr = quote! 
{ - __Visitor { - place: __place, - lifetime: _serde::__private::PhantomData, - } - }; - let dispatch = if let Some(deserializer) = deserializer { - quote! { - _serde::Deserializer::deserialize_any(#deserializer, #visitor_expr) - } - } else if is_enum { - quote! { - _serde::de::VariantAccess::struct_variant(__variant, FIELDS, #visitor_expr) - } - } else { - let type_name = cattrs.name().deserialize_name(); - quote! { - _serde::Deserializer::deserialize_struct(__deserializer, #type_name, FIELDS, #visitor_expr) - } - }; - - let all_skipped = fields.iter().all(|field| field.attrs.skip_deserializing()); - let visitor_var = if all_skipped { - quote!(_) - } else { - quote!(mut __seq) - }; - - let visit_seq = quote! { - #[inline] - fn visit_seq<__A>(self, #visitor_var: __A) -> _serde::__private::Result - where - __A: _serde::de::SeqAccess<#delife>, - { - #visit_seq - } - }; - - let in_place_impl_generics = de_impl_generics.in_place(); - let in_place_ty_generics = de_ty_generics.in_place(); - let place_life = place_lifetime(); - - Some(quote_block! { - #field_visitor - - struct __Visitor #in_place_impl_generics #where_clause { - place: &#place_life mut #this #ty_generics, - lifetime: _serde::__private::PhantomData<&#delife ()>, - } - - impl #in_place_impl_generics _serde::de::Visitor<#delife> for __Visitor #in_place_ty_generics #where_clause { - type Value = (); - - fn expecting(&self, __formatter: &mut _serde::__private::Formatter) -> _serde::__private::fmt::Result { - _serde::__private::Formatter::write_str(__formatter, #expecting) - } - - #visit_seq - - #[inline] - fn visit_map<__A>(self, mut __map: __A) -> _serde::__private::Result - where - __A: _serde::de::MapAccess<#delife>, - { - #visit_map - } - } - - #fields_stmt - - #dispatch - }) -} - -fn deserialize_enum( - params: &Parameters, - variants: &[Variant], - cattrs: &attr::Container, -) -> Fragment { - match cattrs.tag() { - attr::TagType::External => deserialize_externally_tagged_enum(params, variants, cattrs), - attr::TagType::Internal { tag } => { - deserialize_internally_tagged_enum(params, variants, cattrs, tag) - } - attr::TagType::Adjacent { tag, content } => { - deserialize_adjacently_tagged_enum(params, variants, cattrs, tag, content) - } - attr::TagType::None => deserialize_untagged_enum(params, variants, cattrs), - } -} - -fn prepare_enum_variant_enum( - variants: &[Variant], - cattrs: &attr::Container, -) -> (TokenStream, Stmts) { - let mut deserialized_variants = variants - .iter() - .enumerate() - .filter(|&(_, variant)| !variant.attrs.skip_deserializing()); - - let variant_names_idents: Vec<_> = deserialized_variants - .clone() - .map(|(i, variant)| { - ( - variant.attrs.name().deserialize_name(), - field_i(i), - variant.attrs.aliases(), - ) - }) - .collect(); - - let other_idx = deserialized_variants.position(|(_, variant)| variant.attrs.other()); - - let variants_stmt = { - let variant_names = variant_names_idents.iter().map(|(name, _, _)| name); - quote! 
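The TagType dispatch above maps one-to-one onto serde's four documented enum representations. A brief illustration of the attribute spellings and the JSON shape each one accepts (the enum names are invented):

use serde::Deserialize;

// Externally tagged (the default): {"Circle":{"radius":1.0}}
#[derive(Deserialize)]
enum External {
    Circle { radius: f64 },
}

// Internally tagged: {"kind":"Circle","radius":1.0}
#[derive(Deserialize)]
#[serde(tag = "kind")]
enum Internal {
    Circle { radius: f64 },
}

// Adjacently tagged: {"kind":"Circle","data":{"radius":1.0}}
#[derive(Deserialize)]
#[serde(tag = "kind", content = "data")]
enum Adjacent {
    Circle { radius: f64 },
}

// Untagged: {"radius":1.0}; each variant is tried in declaration order.
#[derive(Deserialize)]
#[serde(untagged)]
enum Untagged {
    Circle { radius: f64 },
}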
{ - const VARIANTS: &'static [&'static str] = &[ #(#variant_names),* ]; - } - }; - - let variant_visitor = Stmts(deserialize_generated_identifier( - &variant_names_idents, - cattrs, - true, - other_idx, - )); - - (variants_stmt, variant_visitor) -} - -fn deserialize_externally_tagged_enum( - params: &Parameters, - variants: &[Variant], - cattrs: &attr::Container, -) -> Fragment { - let this = ¶ms.this; - let (de_impl_generics, de_ty_generics, ty_generics, where_clause) = - split_with_de_lifetime(params); - let delife = params.borrowed.de_lifetime(); - - let type_name = cattrs.name().deserialize_name(); - let expecting = format!("enum {}", params.type_name()); - let expecting = cattrs.expecting().unwrap_or(&expecting); - - let (variants_stmt, variant_visitor) = prepare_enum_variant_enum(variants, cattrs); - - // Match arms to extract a variant from a string - let variant_arms = variants - .iter() - .enumerate() - .filter(|&(_, variant)| !variant.attrs.skip_deserializing()) - .map(|(i, variant)| { - let variant_name = field_i(i); - - let block = Match(deserialize_externally_tagged_variant( - params, variant, cattrs, - )); - - quote! { - (__Field::#variant_name, __variant) => #block - } - }); - - let all_skipped = variants - .iter() - .all(|variant| variant.attrs.skip_deserializing()); - let match_variant = if all_skipped { - // This is an empty enum like `enum Impossible {}` or an enum in which - // all variants have `#[serde(skip_deserializing)]`. - quote! { - // FIXME: Once we drop support for Rust 1.15: - // let _serde::__private::Err(__err) = _serde::de::EnumAccess::variant::<__Field>(__data); - // _serde::__private::Err(__err) - _serde::__private::Result::map( - _serde::de::EnumAccess::variant::<__Field>(__data), - |(__impossible, _)| match __impossible {}) - } - } else { - quote! { - match try!(_serde::de::EnumAccess::variant(__data)) { - #(#variant_arms)* - } - } - }; - - quote_block! { - #variant_visitor - - struct __Visitor #de_impl_generics #where_clause { - marker: _serde::__private::PhantomData<#this #ty_generics>, - lifetime: _serde::__private::PhantomData<&#delife ()>, - } - - impl #de_impl_generics _serde::de::Visitor<#delife> for __Visitor #de_ty_generics #where_clause { - type Value = #this #ty_generics; - - fn expecting(&self, __formatter: &mut _serde::__private::Formatter) -> _serde::__private::fmt::Result { - _serde::__private::Formatter::write_str(__formatter, #expecting) - } - - fn visit_enum<__A>(self, __data: __A) -> _serde::__private::Result - where - __A: _serde::de::EnumAccess<#delife>, - { - #match_variant - } - } - - #variants_stmt - - _serde::Deserializer::deserialize_enum( - __deserializer, - #type_name, - VARIANTS, - __Visitor { - marker: _serde::__private::PhantomData::<#this #ty_generics>, - lifetime: _serde::__private::PhantomData, - }, - ) - } -} - -fn deserialize_internally_tagged_enum( - params: &Parameters, - variants: &[Variant], - cattrs: &attr::Container, - tag: &str, -) -> Fragment { - let (variants_stmt, variant_visitor) = prepare_enum_variant_enum(variants, cattrs); - - // Match arms to extract a variant from a string - let variant_arms = variants - .iter() - .enumerate() - .filter(|&(_, variant)| !variant.attrs.skip_deserializing()) - .map(|(i, variant)| { - let variant_name = field_i(i); - - let block = Match(deserialize_internally_tagged_variant( - params, - variant, - cattrs, - quote! { - _serde::__private::de::ContentDeserializer::<__D::Error>::new(__tagged.content) - }, - )); - - quote! 
{ - __Field::#variant_name => #block - } - }); - - let expecting = format!("internally tagged enum {}", params.type_name()); - let expecting = cattrs.expecting().unwrap_or(&expecting); - - quote_block! { - #variant_visitor - - #variants_stmt - - let __tagged = try!(_serde::Deserializer::deserialize_any( - __deserializer, - _serde::__private::de::TaggedContentVisitor::<__Field>::new(#tag, #expecting))); - - match __tagged.tag { - #(#variant_arms)* - } - } -} - -fn deserialize_adjacently_tagged_enum( - params: &Parameters, - variants: &[Variant], - cattrs: &attr::Container, - tag: &str, - content: &str, -) -> Fragment { - let this = ¶ms.this; - let (de_impl_generics, de_ty_generics, ty_generics, where_clause) = - split_with_de_lifetime(params); - let delife = params.borrowed.de_lifetime(); - - let (variants_stmt, variant_visitor) = prepare_enum_variant_enum(variants, cattrs); - - let variant_arms: &Vec<_> = &variants - .iter() - .enumerate() - .filter(|&(_, variant)| !variant.attrs.skip_deserializing()) - .map(|(i, variant)| { - let variant_index = field_i(i); - - let block = Match(deserialize_untagged_variant( - params, - variant, - cattrs, - quote!(__deserializer), - )); - - quote! { - __Field::#variant_index => #block - } - }) - .collect(); - - let expecting = format!("adjacently tagged enum {}", params.type_name()); - let expecting = cattrs.expecting().unwrap_or(&expecting); - let type_name = cattrs.name().deserialize_name(); - let deny_unknown_fields = cattrs.deny_unknown_fields(); - - // If unknown fields are allowed, we pick the visitor that can step over - // those. Otherwise we pick the visitor that fails on unknown keys. - let field_visitor_ty = if deny_unknown_fields { - quote! { _serde::__private::de::TagOrContentFieldVisitor } - } else { - quote! { _serde::__private::de::TagContentOtherFieldVisitor } - }; - - let tag_or_content = quote! { - #field_visitor_ty { - tag: #tag, - content: #content, - } - }; - - let mut missing_content = quote! { - _serde::__private::Err(<__A::Error as _serde::de::Error>::missing_field(#content)) - }; - let mut missing_content_fallthrough = quote!(); - let missing_content_arms = variants - .iter() - .enumerate() - .filter(|&(_, variant)| !variant.attrs.skip_deserializing()) - .filter_map(|(i, variant)| { - let variant_index = field_i(i); - let variant_ident = &variant.ident; - - let arm = match variant.style { - Style::Unit => quote! { - _serde::__private::Ok(#this::#variant_ident) - }, - Style::Newtype if variant.attrs.deserialize_with().is_none() => { - let span = variant.original.span(); - let func = quote_spanned!(span=> _serde::__private::de::missing_field); - quote! { - #func(#content).map(#this::#variant_ident) - } - } - _ => { - missing_content_fallthrough = quote!(_ => #missing_content); - return None; - } - }; - Some(quote! { - __Field::#variant_index => #arm, - }) - }) - .collect::>(); - if !missing_content_arms.is_empty() { - missing_content = quote! { - match __field { - #(#missing_content_arms)* - #missing_content_fallthrough - } - }; - } - - // Advance the map by one key, returning early in case of error. - let next_key = quote! { - try!(_serde::de::MapAccess::next_key_seed(&mut __map, #tag_or_content)) - }; - - // When allowing unknown fields, we want to transparently step through keys - // we don't care about until we find `tag`, `content`, or run out of keys. 
- let next_relevant_key = if deny_unknown_fields { - next_key - } else { - quote!({ - let mut __rk : _serde::__private::Option<_serde::__private::de::TagOrContentField> = _serde::__private::None; - while let _serde::__private::Some(__k) = #next_key { - match __k { - _serde::__private::de::TagContentOtherField::Other => { - let _ = try!(_serde::de::MapAccess::next_value::<_serde::de::IgnoredAny>(&mut __map)); - continue; - }, - _serde::__private::de::TagContentOtherField::Tag => { - __rk = _serde::__private::Some(_serde::__private::de::TagOrContentField::Tag); - break; - } - _serde::__private::de::TagContentOtherField::Content => { - __rk = _serde::__private::Some(_serde::__private::de::TagOrContentField::Content); - break; - } - } - } - - __rk - }) - }; - - // Step through remaining keys, looking for duplicates of previously-seen - // keys. When unknown fields are denied, any key that isn't a duplicate will - // at this point immediately produce an error. - let visit_remaining_keys = quote! { - match #next_relevant_key { - _serde::__private::Some(_serde::__private::de::TagOrContentField::Tag) => { - _serde::__private::Err(<__A::Error as _serde::de::Error>::duplicate_field(#tag)) - } - _serde::__private::Some(_serde::__private::de::TagOrContentField::Content) => { - _serde::__private::Err(<__A::Error as _serde::de::Error>::duplicate_field(#content)) - } - _serde::__private::None => _serde::__private::Ok(__ret), - } - }; - - let finish_content_then_tag = if variant_arms.is_empty() { - quote! { - match try!(_serde::de::MapAccess::next_value::<__Field>(&mut __map)) {} - } - } else { - quote! { - let __ret = try!(match try!(_serde::de::MapAccess::next_value(&mut __map)) { - // Deserialize the buffered content now that we know the variant. - #(#variant_arms)* - }); - // Visit remaining keys, looking for duplicates. - #visit_remaining_keys - } - }; - - quote_block! { - #variant_visitor - - #variants_stmt - - struct __Seed #de_impl_generics #where_clause { - field: __Field, - marker: _serde::__private::PhantomData<#this #ty_generics>, - lifetime: _serde::__private::PhantomData<&#delife ()>, - } - - impl #de_impl_generics _serde::de::DeserializeSeed<#delife> for __Seed #de_ty_generics #where_clause { - type Value = #this #ty_generics; - - fn deserialize<__D>(self, __deserializer: __D) -> _serde::__private::Result - where - __D: _serde::Deserializer<#delife>, - { - match self.field { - #(#variant_arms)* - } - } - } - - struct __Visitor #de_impl_generics #where_clause { - marker: _serde::__private::PhantomData<#this #ty_generics>, - lifetime: _serde::__private::PhantomData<&#delife ()>, - } - - impl #de_impl_generics _serde::de::Visitor<#delife> for __Visitor #de_ty_generics #where_clause { - type Value = #this #ty_generics; - - fn expecting(&self, __formatter: &mut _serde::__private::Formatter) -> _serde::__private::fmt::Result { - _serde::__private::Formatter::write_str(__formatter, #expecting) - } - - fn visit_map<__A>(self, mut __map: __A) -> _serde::__private::Result - where - __A: _serde::de::MapAccess<#delife>, - { - // Visit the first relevant key. - match #next_relevant_key { - // First key is the tag. - _serde::__private::Some(_serde::__private::de::TagOrContentField::Tag) => { - // Parse the tag. - let __field = try!(_serde::de::MapAccess::next_value(&mut __map)); - // Visit the second key. - match #next_relevant_key { - // Second key is a duplicate of the tag. 
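The visit_map generated here accepts the tag and content keys in either order, buffers the content when it arrives first, and reports duplicate_field for repeated keys. A small usage sketch of that behaviour, assuming serde_json as the format and an invented adjacently tagged enum:

use serde::Deserialize;

#[derive(Deserialize)]
#[serde(tag = "kind", content = "data")]
enum Adjacent {
    Circle { radius: f64 },
}

fn main() {
    // Tag first, then content.
    let _a: Adjacent =
        serde_json::from_str(r#"{"kind":"Circle","data":{"radius":1.0}}"#).unwrap();
    // Content first: it is buffered as Content until the tag arrives.
    let _b: Adjacent =
        serde_json::from_str(r#"{"data":{"radius":1.0},"kind":"Circle"}"#).unwrap();
    // A repeated tag key is rejected with a duplicate_field error.
    assert!(serde_json::from_str::<Adjacent>(
        r#"{"kind":"Circle","kind":"Circle","data":{"radius":1.0}}"#
    )
    .is_err());
}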
- _serde::__private::Some(_serde::__private::de::TagOrContentField::Tag) => { - _serde::__private::Err(<__A::Error as _serde::de::Error>::duplicate_field(#tag)) - } - // Second key is the content. - _serde::__private::Some(_serde::__private::de::TagOrContentField::Content) => { - let __ret = try!(_serde::de::MapAccess::next_value_seed(&mut __map, - __Seed { - field: __field, - marker: _serde::__private::PhantomData, - lifetime: _serde::__private::PhantomData, - })); - // Visit remaining keys, looking for duplicates. - #visit_remaining_keys - } - // There is no second key; might be okay if the we have a unit variant. - _serde::__private::None => #missing_content - } - } - // First key is the content. - _serde::__private::Some(_serde::__private::de::TagOrContentField::Content) => { - // Buffer up the content. - let __content = try!(_serde::de::MapAccess::next_value::<_serde::__private::de::Content>(&mut __map)); - // Visit the second key. - match #next_relevant_key { - // Second key is the tag. - _serde::__private::Some(_serde::__private::de::TagOrContentField::Tag) => { - let __deserializer = _serde::__private::de::ContentDeserializer::<__A::Error>::new(__content); - #finish_content_then_tag - } - // Second key is a duplicate of the content. - _serde::__private::Some(_serde::__private::de::TagOrContentField::Content) => { - _serde::__private::Err(<__A::Error as _serde::de::Error>::duplicate_field(#content)) - } - // There is no second key. - _serde::__private::None => { - _serde::__private::Err(<__A::Error as _serde::de::Error>::missing_field(#tag)) - } - } - } - // There is no first key. - _serde::__private::None => { - _serde::__private::Err(<__A::Error as _serde::de::Error>::missing_field(#tag)) - } - } - } - - fn visit_seq<__A>(self, mut __seq: __A) -> _serde::__private::Result - where - __A: _serde::de::SeqAccess<#delife>, - { - // Visit the first element - the tag. - match try!(_serde::de::SeqAccess::next_element(&mut __seq)) { - _serde::__private::Some(__field) => { - // Visit the second element - the content. - match try!(_serde::de::SeqAccess::next_element_seed( - &mut __seq, - __Seed { - field: __field, - marker: _serde::__private::PhantomData, - lifetime: _serde::__private::PhantomData, - }, - )) { - _serde::__private::Some(__ret) => _serde::__private::Ok(__ret), - // There is no second element. - _serde::__private::None => { - _serde::__private::Err(_serde::de::Error::invalid_length(1, &self)) - } - } - } - // There is no first element. - _serde::__private::None => { - _serde::__private::Err(_serde::de::Error::invalid_length(0, &self)) - } - } - } - } - - const FIELDS: &'static [&'static str] = &[#tag, #content]; - _serde::Deserializer::deserialize_struct( - __deserializer, - #type_name, - FIELDS, - __Visitor { - marker: _serde::__private::PhantomData::<#this #ty_generics>, - lifetime: _serde::__private::PhantomData, - }, - ) - } -} - -fn deserialize_untagged_enum( - params: &Parameters, - variants: &[Variant], - cattrs: &attr::Container, -) -> Fragment { - let attempts = variants - .iter() - .filter(|variant| !variant.attrs.skip_deserializing()) - .map(|variant| { - Expr(deserialize_untagged_variant( - params, - variant, - cattrs, - quote!( - _serde::__private::de::ContentRefDeserializer::<__D::Error>::new(&__content) - ), - )) - }); - - // TODO this message could be better by saving the errors from the failed - // attempts. 
The heuristic used by TOML was to count the number of fields - // processed before an error, and use the error that happened after the - // largest number of fields. I'm not sure I like that. Maybe it would be - // better to save all the errors and combine them into one message that - // explains why none of the variants matched. - let fallthrough_msg = format!( - "data did not match any variant of untagged enum {}", - params.type_name() - ); - let fallthrough_msg = cattrs.expecting().unwrap_or(&fallthrough_msg); - - quote_block! { - let __content = try!(<_serde::__private::de::Content as _serde::Deserialize>::deserialize(__deserializer)); - - #( - if let _serde::__private::Ok(__ok) = #attempts { - return _serde::__private::Ok(__ok); - } - )* - - _serde::__private::Err(_serde::de::Error::custom(#fallthrough_msg)) - } -} - -fn deserialize_externally_tagged_variant( - params: &Parameters, - variant: &Variant, - cattrs: &attr::Container, -) -> Fragment { - if let Some(path) = variant.attrs.deserialize_with() { - let (wrapper, wrapper_ty, unwrap_fn) = wrap_deserialize_variant_with(params, variant, path); - return quote_block! { - #wrapper - _serde::__private::Result::map( - _serde::de::VariantAccess::newtype_variant::<#wrapper_ty>(__variant), #unwrap_fn) - }; - } - - let variant_ident = &variant.ident; - - match variant.style { - Style::Unit => { - let this = ¶ms.this; - quote_block! { - try!(_serde::de::VariantAccess::unit_variant(__variant)); - _serde::__private::Ok(#this::#variant_ident) - } - } - Style::Newtype => deserialize_externally_tagged_newtype_variant( - variant_ident, - params, - &variant.fields[0], - cattrs, - ), - Style::Tuple => { - deserialize_tuple(Some(variant_ident), params, &variant.fields, cattrs, None) - } - Style::Struct => deserialize_struct( - Some(variant_ident), - params, - &variant.fields, - cattrs, - None, - &Untagged::No, - ), - } -} - -// Generates significant part of the visit_seq and visit_map bodies of visitors -// for the variants of internally tagged enum. -fn deserialize_internally_tagged_variant( - params: &Parameters, - variant: &Variant, - cattrs: &attr::Container, - deserializer: TokenStream, -) -> Fragment { - if variant.attrs.deserialize_with().is_some() { - return deserialize_untagged_variant(params, variant, cattrs, deserializer); - } - - let variant_ident = &variant.ident; - - match effective_style(variant) { - Style::Unit => { - let this = ¶ms.this; - let type_name = params.type_name(); - let variant_name = variant.ident.to_string(); - let default = variant.fields.get(0).map(|field| { - let default = Expr(expr_is_missing(field, cattrs)); - quote!((#default)) - }); - quote_block! { - try!(_serde::Deserializer::deserialize_any(#deserializer, _serde::__private::de::InternallyTaggedUnitVisitor::new(#type_name, #variant_name))); - _serde::__private::Ok(#this::#variant_ident #default) - } - } - Style::Newtype => deserialize_untagged_newtype_variant( - variant_ident, - params, - &variant.fields[0], - &deserializer, - ), - Style::Struct => deserialize_struct( - Some(variant_ident), - params, - &variant.fields, - cattrs, - Some(deserializer), - &Untagged::No, - ), - Style::Tuple => unreachable!("checked in serde_derive_internals"), - } -} - -fn deserialize_untagged_variant( - params: &Parameters, - variant: &Variant, - cattrs: &attr::Container, - deserializer: TokenStream, -) -> Fragment { - if let Some(path) = variant.attrs.deserialize_with() { - let unwrap_fn = unwrap_to_variant_closure(params, variant, false); - return quote_block! 
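The buffered-Content retry loop above is what the documented #[serde(untagged)] representation compiles down to; a short usage sketch (the enum is invented and serde_json is assumed as the format):

use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum IdOrName {
    Id(u64),
    Name(String),
}

fn main() {
    // The input is buffered once as Content, then each variant is tried in
    // declaration order against a ContentRefDeserializer over that buffer.
    let a: IdOrName = serde_json::from_str("7").unwrap();
    let b: IdOrName = serde_json::from_str(r#""seven""#).unwrap();
    println!("{:?} {:?}", a, b);
}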
{ - _serde::__private::Result::map(#path(#deserializer), #unwrap_fn) - }; - } - - let variant_ident = &variant.ident; - - match effective_style(variant) { - Style::Unit => { - let this = ¶ms.this; - let type_name = params.type_name(); - let variant_name = variant.ident.to_string(); - let default = variant.fields.get(0).map(|field| { - let default = Expr(expr_is_missing(field, cattrs)); - quote!((#default)) - }); - quote_expr! { - match _serde::Deserializer::deserialize_any( - #deserializer, - _serde::__private::de::UntaggedUnitVisitor::new(#type_name, #variant_name) - ) { - _serde::__private::Ok(()) => _serde::__private::Ok(#this::#variant_ident #default), - _serde::__private::Err(__err) => _serde::__private::Err(__err), - } - } - } - Style::Newtype => deserialize_untagged_newtype_variant( - variant_ident, - params, - &variant.fields[0], - &deserializer, - ), - Style::Tuple => deserialize_tuple( - Some(variant_ident), - params, - &variant.fields, - cattrs, - Some(deserializer), - ), - Style::Struct => deserialize_struct( - Some(variant_ident), - params, - &variant.fields, - cattrs, - Some(deserializer), - &Untagged::Yes, - ), - } -} - -fn deserialize_externally_tagged_newtype_variant( - variant_ident: &syn::Ident, - params: &Parameters, - field: &Field, - cattrs: &attr::Container, -) -> Fragment { - let this = ¶ms.this; - - if field.attrs.skip_deserializing() { - let this = ¶ms.this; - let default = Expr(expr_is_missing(field, cattrs)); - return quote_block! { - try!(_serde::de::VariantAccess::unit_variant(__variant)); - _serde::__private::Ok(#this::#variant_ident(#default)) - }; - } - - match field.attrs.deserialize_with() { - None => { - let field_ty = field.ty; - let span = field.original.span(); - let func = - quote_spanned!(span=> _serde::de::VariantAccess::newtype_variant::<#field_ty>); - quote_expr! { - _serde::__private::Result::map(#func(__variant), #this::#variant_ident) - } - } - Some(path) => { - let (wrapper, wrapper_ty) = wrap_deserialize_field_with(params, field.ty, path); - quote_block! { - #wrapper - _serde::__private::Result::map( - _serde::de::VariantAccess::newtype_variant::<#wrapper_ty>(__variant), - |__wrapper| #this::#variant_ident(__wrapper.value)) - } - } - } -} - -fn deserialize_untagged_newtype_variant( - variant_ident: &syn::Ident, - params: &Parameters, - field: &Field, - deserializer: &TokenStream, -) -> Fragment { - let this = ¶ms.this; - let field_ty = field.ty; - match field.attrs.deserialize_with() { - None => { - let span = field.original.span(); - let func = quote_spanned!(span=> <#field_ty as _serde::Deserialize>::deserialize); - quote_expr! { - _serde::__private::Result::map(#func(#deserializer), #this::#variant_ident) - } - } - Some(path) => { - quote_block! 
{ - let __value: _serde::__private::Result<#field_ty, _> = #path(#deserializer); - _serde::__private::Result::map(__value, #this::#variant_ident) - } - } - } -} - -fn deserialize_generated_identifier( - fields: &[(String, Ident, Vec)], - cattrs: &attr::Container, - is_variant: bool, - other_idx: Option, -) -> Fragment { - let this = quote!(__Field); - let field_idents: &Vec<_> = &fields.iter().map(|(_, ident, _)| ident).collect(); - - let (ignore_variant, fallthrough) = if !is_variant && cattrs.has_flatten() { - let ignore_variant = quote!(__other(_serde::__private::de::Content<'de>),); - let fallthrough = quote!(_serde::__private::Ok(__Field::__other(__value))); - (Some(ignore_variant), Some(fallthrough)) - } else if let Some(other_idx) = other_idx { - let ignore_variant = fields[other_idx].1.clone(); - let fallthrough = quote!(_serde::__private::Ok(__Field::#ignore_variant)); - (None, Some(fallthrough)) - } else if is_variant || cattrs.deny_unknown_fields() { - (None, None) - } else { - let ignore_variant = quote!(__ignore,); - let fallthrough = quote!(_serde::__private::Ok(__Field::__ignore)); - (Some(ignore_variant), Some(fallthrough)) - }; - - let visitor_impl = Stmts(deserialize_identifier( - &this, - fields, - is_variant, - fallthrough, - None, - !is_variant && cattrs.has_flatten(), - None, - )); - - let lifetime = if !is_variant && cattrs.has_flatten() { - Some(quote!(<'de>)) - } else { - None - }; - - quote_block! { - #[allow(non_camel_case_types)] - enum __Field #lifetime { - #(#field_idents,)* - #ignore_variant - } - - struct __FieldVisitor; - - impl<'de> _serde::de::Visitor<'de> for __FieldVisitor { - type Value = __Field #lifetime; - - #visitor_impl - } - - impl<'de> _serde::Deserialize<'de> for __Field #lifetime { - #[inline] - fn deserialize<__D>(__deserializer: __D) -> _serde::__private::Result - where - __D: _serde::Deserializer<'de>, - { - _serde::Deserializer::deserialize_identifier(__deserializer, __FieldVisitor) - } - } - } -} - -// Generates `Deserialize::deserialize` body for an enum with -// `serde(field_identifier)` or `serde(variant_identifier)` attribute. -fn deserialize_custom_identifier( - params: &Parameters, - variants: &[Variant], - cattrs: &attr::Container, -) -> Fragment { - let is_variant = match cattrs.identifier() { - attr::Identifier::Variant => true, - attr::Identifier::Field => false, - attr::Identifier::No => unreachable!(), - }; - - let this = ¶ms.this; - let this = quote!(#this); - - let (ordinary, fallthrough, fallthrough_borrowed) = if let Some(last) = variants.last() { - let last_ident = &last.ident; - if last.attrs.other() { - // Process `serde(other)` attribute. It would always be found on the - // last variant (checked in `check_identifier`), so all preceding - // are ordinary variants. - let ordinary = &variants[..variants.len() - 1]; - let fallthrough = quote!(_serde::__private::Ok(#this::#last_ident)); - (ordinary, Some(fallthrough), None) - } else if let Style::Newtype = last.style { - let ordinary = &variants[..variants.len() - 1]; - let fallthrough = |value| { - quote! 
{ - _serde::__private::Result::map( - _serde::Deserialize::deserialize( - _serde::__private::de::IdentifierDeserializer::from(#value) - ), - #this::#last_ident) - } - }; - ( - ordinary, - Some(fallthrough(quote!(__value))), - Some(fallthrough(quote!(_serde::__private::de::Borrowed( - __value - )))), - ) - } else { - (variants, None, None) - } - } else { - (variants, None, None) - }; - - let names_idents: Vec<_> = ordinary - .iter() - .map(|variant| { - ( - variant.attrs.name().deserialize_name(), - variant.ident.clone(), - variant.attrs.aliases(), - ) - }) - .collect(); - - let names = names_idents.iter().map(|(name, _, _)| name); - - let names_const = if fallthrough.is_some() { - None - } else if is_variant { - let variants = quote! { - const VARIANTS: &'static [&'static str] = &[ #(#names),* ]; - }; - Some(variants) - } else { - let fields = quote! { - const FIELDS: &'static [&'static str] = &[ #(#names),* ]; - }; - Some(fields) - }; - - let (de_impl_generics, de_ty_generics, ty_generics, where_clause) = - split_with_de_lifetime(params); - let delife = params.borrowed.de_lifetime(); - let visitor_impl = Stmts(deserialize_identifier( - &this, - &names_idents, - is_variant, - fallthrough, - fallthrough_borrowed, - false, - cattrs.expecting(), - )); - - quote_block! { - #names_const - - struct __FieldVisitor #de_impl_generics #where_clause { - marker: _serde::__private::PhantomData<#this #ty_generics>, - lifetime: _serde::__private::PhantomData<&#delife ()>, - } - - impl #de_impl_generics _serde::de::Visitor<#delife> for __FieldVisitor #de_ty_generics #where_clause { - type Value = #this #ty_generics; - - #visitor_impl - } - - let __visitor = __FieldVisitor { - marker: _serde::__private::PhantomData::<#this #ty_generics>, - lifetime: _serde::__private::PhantomData, - }; - _serde::Deserializer::deserialize_identifier(__deserializer, __visitor) - } -} - -fn deserialize_identifier( - this: &TokenStream, - fields: &[(String, Ident, Vec)], - is_variant: bool, - fallthrough: Option, - fallthrough_borrowed: Option, - collect_other_fields: bool, - expecting: Option<&str>, -) -> Fragment { - let mut flat_fields = Vec::new(); - for (_, ident, aliases) in fields { - flat_fields.extend(aliases.iter().map(|alias| (alias, ident))); - } - - let field_strs: &Vec<_> = &flat_fields.iter().map(|(name, _)| name).collect(); - let field_bytes: &Vec<_> = &flat_fields - .iter() - .map(|(name, _)| Literal::byte_string(name.as_bytes())) - .collect(); - - let constructors: &Vec<_> = &flat_fields - .iter() - .map(|(_, ident)| quote!(#this::#ident)) - .collect(); - let main_constructors: &Vec<_> = &fields - .iter() - .map(|(_, ident, _)| quote!(#this::#ident)) - .collect(); - - let expecting = expecting.unwrap_or(if is_variant { - "variant identifier" - } else { - "field identifier" - }); - - let index_expecting = if is_variant { "variant" } else { "field" }; - - let bytes_to_str = if fallthrough.is_some() || collect_other_fields { - None - } else { - Some(quote! { - let __value = &_serde::__private::from_utf8_lossy(__value); - }) - }; - - let ( - value_as_str_content, - value_as_borrowed_str_content, - value_as_bytes_content, - value_as_borrowed_bytes_content, - ) = if collect_other_fields { - ( - Some(quote! { - let __value = _serde::__private::de::Content::String(_serde::__private::ToString::to_string(__value)); - }), - Some(quote! { - let __value = _serde::__private::de::Content::Str(__value); - }), - Some(quote! { - let __value = _serde::__private::de::Content::ByteBuf(__value.to_vec()); - }), - Some(quote! 
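The fallthrough and alias plumbing in these identifier visitors is driven by the documented #[serde(other)] and #[serde(alias = "...")] attributes; a hedged illustration (type and variant names invented):

use serde::Deserialize;

#[derive(Deserialize)]
#[serde(tag = "kind")]
enum Message {
    // Accepted under either name in the input.
    #[serde(alias = "req")]
    Request { id: u64 },
    // Any unrecognised tag falls through here instead of an unknown_variant
    // error; #[serde(other)] requires a unit variant.
    #[serde(other)]
    Unknown,
}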
{ - let __value = _serde::__private::de::Content::Bytes(__value); - }), - ) - } else { - (None, None, None, None) - }; - - let fallthrough_arm_tokens; - let fallthrough_arm = if let Some(fallthrough) = &fallthrough { - fallthrough - } else if is_variant { - fallthrough_arm_tokens = quote! { - _serde::__private::Err(_serde::de::Error::unknown_variant(__value, VARIANTS)) - }; - &fallthrough_arm_tokens - } else { - fallthrough_arm_tokens = quote! { - _serde::__private::Err(_serde::de::Error::unknown_field(__value, FIELDS)) - }; - &fallthrough_arm_tokens - }; - - let u64_fallthrough_arm_tokens; - let u64_fallthrough_arm = if let Some(fallthrough) = &fallthrough { - fallthrough - } else { - let fallthrough_msg = format!("{} index 0 <= i < {}", index_expecting, fields.len()); - u64_fallthrough_arm_tokens = quote! { - _serde::__private::Err(_serde::de::Error::invalid_value( - _serde::de::Unexpected::Unsigned(__value), - &#fallthrough_msg, - )) - }; - &u64_fallthrough_arm_tokens - }; - - let variant_indices = 0_u64..; - let visit_other = if collect_other_fields { - quote! { - fn visit_bool<__E>(self, __value: bool) -> _serde::__private::Result - where - __E: _serde::de::Error, - { - _serde::__private::Ok(__Field::__other(_serde::__private::de::Content::Bool(__value))) - } - - fn visit_i8<__E>(self, __value: i8) -> _serde::__private::Result - where - __E: _serde::de::Error, - { - _serde::__private::Ok(__Field::__other(_serde::__private::de::Content::I8(__value))) - } - - fn visit_i16<__E>(self, __value: i16) -> _serde::__private::Result - where - __E: _serde::de::Error, - { - _serde::__private::Ok(__Field::__other(_serde::__private::de::Content::I16(__value))) - } - - fn visit_i32<__E>(self, __value: i32) -> _serde::__private::Result - where - __E: _serde::de::Error, - { - _serde::__private::Ok(__Field::__other(_serde::__private::de::Content::I32(__value))) - } - - fn visit_i64<__E>(self, __value: i64) -> _serde::__private::Result - where - __E: _serde::de::Error, - { - _serde::__private::Ok(__Field::__other(_serde::__private::de::Content::I64(__value))) - } - - fn visit_u8<__E>(self, __value: u8) -> _serde::__private::Result - where - __E: _serde::de::Error, - { - _serde::__private::Ok(__Field::__other(_serde::__private::de::Content::U8(__value))) - } - - fn visit_u16<__E>(self, __value: u16) -> _serde::__private::Result - where - __E: _serde::de::Error, - { - _serde::__private::Ok(__Field::__other(_serde::__private::de::Content::U16(__value))) - } - - fn visit_u32<__E>(self, __value: u32) -> _serde::__private::Result - where - __E: _serde::de::Error, - { - _serde::__private::Ok(__Field::__other(_serde::__private::de::Content::U32(__value))) - } - - fn visit_u64<__E>(self, __value: u64) -> _serde::__private::Result - where - __E: _serde::de::Error, - { - _serde::__private::Ok(__Field::__other(_serde::__private::de::Content::U64(__value))) - } - - fn visit_f32<__E>(self, __value: f32) -> _serde::__private::Result - where - __E: _serde::de::Error, - { - _serde::__private::Ok(__Field::__other(_serde::__private::de::Content::F32(__value))) - } - - fn visit_f64<__E>(self, __value: f64) -> _serde::__private::Result - where - __E: _serde::de::Error, - { - _serde::__private::Ok(__Field::__other(_serde::__private::de::Content::F64(__value))) - } - - fn visit_char<__E>(self, __value: char) -> _serde::__private::Result - where - __E: _serde::de::Error, - { - _serde::__private::Ok(__Field::__other(_serde::__private::de::Content::Char(__value))) - } - - fn visit_unit<__E>(self) -> _serde::__private::Result - 
where - __E: _serde::de::Error, - { - _serde::__private::Ok(__Field::__other(_serde::__private::de::Content::Unit)) - } - } - } else { - quote! { - fn visit_u64<__E>(self, __value: u64) -> _serde::__private::Result - where - __E: _serde::de::Error, - { - match __value { - #( - #variant_indices => _serde::__private::Ok(#main_constructors), - )* - _ => #u64_fallthrough_arm, - } - } - } - }; - - let visit_borrowed = if fallthrough_borrowed.is_some() || collect_other_fields { - let fallthrough_borrowed_arm = fallthrough_borrowed.as_ref().unwrap_or(fallthrough_arm); - Some(quote! { - fn visit_borrowed_str<__E>(self, __value: &'de str) -> _serde::__private::Result - where - __E: _serde::de::Error, - { - match __value { - #( - #field_strs => _serde::__private::Ok(#constructors), - )* - _ => { - #value_as_borrowed_str_content - #fallthrough_borrowed_arm - } - } - } - - fn visit_borrowed_bytes<__E>(self, __value: &'de [u8]) -> _serde::__private::Result - where - __E: _serde::de::Error, - { - match __value { - #( - #field_bytes => _serde::__private::Ok(#constructors), - )* - _ => { - #bytes_to_str - #value_as_borrowed_bytes_content - #fallthrough_borrowed_arm - } - } - } - }) - } else { - None - }; - - quote_block! { - fn expecting(&self, __formatter: &mut _serde::__private::Formatter) -> _serde::__private::fmt::Result { - _serde::__private::Formatter::write_str(__formatter, #expecting) - } - - #visit_other - - fn visit_str<__E>(self, __value: &str) -> _serde::__private::Result - where - __E: _serde::de::Error, - { - match __value { - #( - #field_strs => _serde::__private::Ok(#constructors), - )* - _ => { - #value_as_str_content - #fallthrough_arm - } - } - } - - fn visit_bytes<__E>(self, __value: &[u8]) -> _serde::__private::Result - where - __E: _serde::de::Error, - { - match __value { - #( - #field_bytes => _serde::__private::Ok(#constructors), - )* - _ => { - #bytes_to_str - #value_as_bytes_content - #fallthrough_arm - } - } - } - - #visit_borrowed - } -} - -fn deserialize_struct_as_struct_visitor( - struct_path: &TokenStream, - params: &Parameters, - fields: &[Field], - cattrs: &attr::Container, -) -> (Fragment, Option, Fragment) { - assert!(!cattrs.has_flatten()); - - let field_names_idents: Vec<_> = fields - .iter() - .enumerate() - .filter(|&(_, field)| !field.attrs.skip_deserializing()) - .map(|(i, field)| { - ( - field.attrs.name().deserialize_name(), - field_i(i), - field.attrs.aliases(), - ) - }) - .collect(); - - let fields_stmt = { - let field_names = field_names_idents.iter().map(|(name, _, _)| name); - quote_block! 
{ - const FIELDS: &'static [&'static str] = &[ #(#field_names),* ]; - } - }; - - let field_visitor = deserialize_generated_identifier(&field_names_idents, cattrs, false, None); - - let visit_map = deserialize_map(struct_path, params, fields, cattrs); - - (field_visitor, Some(fields_stmt), visit_map) -} - -fn deserialize_struct_as_map_visitor( - struct_path: &TokenStream, - params: &Parameters, - fields: &[Field], - cattrs: &attr::Container, -) -> (Fragment, Option, Fragment) { - let field_names_idents: Vec<_> = fields - .iter() - .enumerate() - .filter(|&(_, field)| !field.attrs.skip_deserializing() && !field.attrs.flatten()) - .map(|(i, field)| { - ( - field.attrs.name().deserialize_name(), - field_i(i), - field.attrs.aliases(), - ) - }) - .collect(); - - let field_visitor = deserialize_generated_identifier(&field_names_idents, cattrs, false, None); - - let visit_map = deserialize_map(struct_path, params, fields, cattrs); - - (field_visitor, None, visit_map) -} - -fn deserialize_map( - struct_path: &TokenStream, - params: &Parameters, - fields: &[Field], - cattrs: &attr::Container, -) -> Fragment { - // Create the field names for the fields. - let fields_names: Vec<_> = fields - .iter() - .enumerate() - .map(|(i, field)| (field, field_i(i))) - .collect(); - - // Declare each field that will be deserialized. - let let_values = fields_names - .iter() - .filter(|&&(field, _)| !field.attrs.skip_deserializing() && !field.attrs.flatten()) - .map(|(field, name)| { - let field_ty = field.ty; - quote! { - let mut #name: _serde::__private::Option<#field_ty> = _serde::__private::None; - } - }); - - // Collect contents for flatten fields into a buffer - let let_collect = if cattrs.has_flatten() { - Some(quote! { - let mut __collect = _serde::__private::Vec::<_serde::__private::Option<( - _serde::__private::de::Content, - _serde::__private::de::Content - )>>::new(); - }) - } else { - None - }; - - // Match arms to extract a value for a field. - let value_arms = fields_names - .iter() - .filter(|&&(field, _)| !field.attrs.skip_deserializing() && !field.attrs.flatten()) - .map(|(field, name)| { - let deser_name = field.attrs.name().deserialize_name(); - - let visit = match field.attrs.deserialize_with() { - None => { - let field_ty = field.ty; - let span = field.original.span(); - let func = - quote_spanned!(span=> _serde::de::MapAccess::next_value::<#field_ty>); - quote! { - try!(#func(&mut __map)) - } - } - Some(path) => { - let (wrapper, wrapper_ty) = wrap_deserialize_field_with(params, field.ty, path); - quote!({ - #wrapper - match _serde::de::MapAccess::next_value::<#wrapper_ty>(&mut __map) { - _serde::__private::Ok(__wrapper) => __wrapper.value, - _serde::__private::Err(__err) => { - return _serde::__private::Err(__err); - } - } - }) - } - }; - quote! { - __Field::#name => { - if _serde::__private::Option::is_some(&#name) { - return _serde::__private::Err(<__A::Error as _serde::de::Error>::duplicate_field(#deser_name)); - } - #name = _serde::__private::Some(#visit); - } - } - }); - - // Visit ignored values to consume them - let ignored_arm = if cattrs.has_flatten() { - Some(quote! { - __Field::__other(__name) => { - __collect.push(_serde::__private::Some(( - __name, - try!(_serde::de::MapAccess::next_value(&mut __map))))); - } - }) - } else if cattrs.deny_unknown_fields() { - None - } else { - Some(quote! 
{ - _ => { let _ = try!(_serde::de::MapAccess::next_value::<_serde::de::IgnoredAny>(&mut __map)); } - }) - }; - - let all_skipped = fields.iter().all(|field| field.attrs.skip_deserializing()); - let match_keys = if cattrs.deny_unknown_fields() && all_skipped { - quote! { - // FIXME: Once we drop support for Rust 1.15: - // let _serde::__private::None::<__Field> = try!(_serde::de::MapAccess::next_key(&mut __map)); - _serde::__private::Option::map( - try!(_serde::de::MapAccess::next_key::<__Field>(&mut __map)), - |__impossible| match __impossible {}); - } - } else { - quote! { - while let _serde::__private::Some(__key) = try!(_serde::de::MapAccess::next_key::<__Field>(&mut __map)) { - match __key { - #(#value_arms)* - #ignored_arm - } - } - } - }; - - let extract_values = fields_names - .iter() - .filter(|&&(field, _)| !field.attrs.skip_deserializing() && !field.attrs.flatten()) - .map(|(field, name)| { - let missing_expr = Match(expr_is_missing(field, cattrs)); - - quote! { - let #name = match #name { - _serde::__private::Some(#name) => #name, - _serde::__private::None => #missing_expr - }; - } - }); - - let extract_collected = fields_names - .iter() - .filter(|&&(field, _)| field.attrs.flatten() && !field.attrs.skip_deserializing()) - .map(|(field, name)| { - let field_ty = field.ty; - let func = match field.attrs.deserialize_with() { - None => { - let span = field.original.span(); - quote_spanned!(span=> _serde::de::Deserialize::deserialize) - } - Some(path) => quote!(#path), - }; - quote! { - let #name: #field_ty = try!(#func( - _serde::__private::de::FlatMapDeserializer( - &mut __collect, - _serde::__private::PhantomData))); - } - }); - - let collected_deny_unknown_fields = if cattrs.has_flatten() && cattrs.deny_unknown_fields() { - Some(quote! { - if let _serde::__private::Some(_serde::__private::Some((__key, _))) = - __collect.into_iter().filter(_serde::__private::Option::is_some).next() - { - if let _serde::__private::Some(__key) = __key.as_str() { - return _serde::__private::Err( - _serde::de::Error::custom(format_args!("unknown field `{}`", &__key))); - } else { - return _serde::__private::Err( - _serde::de::Error::custom(format_args!("unexpected map key"))); - } - } - }) - } else { - None - }; - - let result = fields_names.iter().map(|(field, name)| { - let member = &field.member; - if field.attrs.skip_deserializing() { - let value = Expr(expr_is_missing(field, cattrs)); - quote!(#member: #value) - } else { - quote!(#member: #name) - } - }); - - let let_default = match cattrs.default() { - attr::Default::Default => Some(quote!( - let __default: Self::Value = _serde::__private::Default::default(); - )), - attr::Default::Path(path) => Some(quote!( - let __default: Self::Value = #path(); - )), - attr::Default::None => { - // We don't need the default value, to prevent an unused variable warning - // we'll leave the line empty. - None - } - }; - - let mut result = quote!(#struct_path { #(#result),* }); - if params.has_getter { - let this = ¶ms.this; - result = quote! { - _serde::__private::Into::<#this>::into(#result) - }; - } - - quote_block! 
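Whether the map visitor above ignores, rejects, or buffers unmatched keys depends on the container and field attributes; a small sketch of the user-facing side (struct names are invented, and serde_json::Value is only one possible flatten target):

use serde::Deserialize;
use std::collections::HashMap;

// Unmatched keys produce an unknown_field error.
#[derive(Deserialize)]
#[serde(deny_unknown_fields)]
struct Strict {
    id: u64,
}

// Unmatched keys are buffered (the __collect vector above) and replayed into
// the flattened map.
#[derive(Deserialize)]
struct WithExtras {
    id: u64,
    #[serde(flatten)]
    extra: HashMap<String, serde_json::Value>,
}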
{ - #(#let_values)* - - #let_collect - - #match_keys - - #let_default - - #(#extract_values)* - - #(#extract_collected)* - - #collected_deny_unknown_fields - - _serde::__private::Ok(#result) - } -} - -#[cfg(feature = "deserialize_in_place")] -fn deserialize_struct_as_struct_in_place_visitor( - params: &Parameters, - fields: &[Field], - cattrs: &attr::Container, -) -> (Fragment, Fragment, Fragment) { - assert!(!cattrs.has_flatten()); - - let field_names_idents: Vec<_> = fields - .iter() - .enumerate() - .filter(|&(_, field)| !field.attrs.skip_deserializing()) - .map(|(i, field)| { - ( - field.attrs.name().deserialize_name(), - field_i(i), - field.attrs.aliases(), - ) - }) - .collect(); - - let fields_stmt = { - let field_names = field_names_idents.iter().map(|(name, _, _)| name); - quote_block! { - const FIELDS: &'static [&'static str] = &[ #(#field_names),* ]; - } - }; - - let field_visitor = deserialize_generated_identifier(&field_names_idents, cattrs, false, None); - - let visit_map = deserialize_map_in_place(params, fields, cattrs); - - (field_visitor, fields_stmt, visit_map) -} - -#[cfg(feature = "deserialize_in_place")] -fn deserialize_map_in_place( - params: &Parameters, - fields: &[Field], - cattrs: &attr::Container, -) -> Fragment { - assert!(!cattrs.has_flatten()); - - // Create the field names for the fields. - let fields_names: Vec<_> = fields - .iter() - .enumerate() - .map(|(i, field)| (field, field_i(i))) - .collect(); - - // For deserialize_in_place, declare booleans for each field that will be - // deserialized. - let let_flags = fields_names - .iter() - .filter(|&&(field, _)| !field.attrs.skip_deserializing()) - .map(|(_, name)| { - quote! { - let mut #name: bool = false; - } - }); - - // Match arms to extract a value for a field. - let value_arms_from = fields_names - .iter() - .filter(|&&(field, _)| !field.attrs.skip_deserializing()) - .map(|(field, name)| { - let deser_name = field.attrs.name().deserialize_name(); - let member = &field.member; - - let visit = match field.attrs.deserialize_with() { - None => { - quote! { - try!(_serde::de::MapAccess::next_value_seed(&mut __map, _serde::__private::de::InPlaceSeed(&mut self.place.#member))) - } - } - Some(path) => { - let (wrapper, wrapper_ty) = wrap_deserialize_field_with(params, field.ty, path); - quote!({ - #wrapper - self.place.#member = match _serde::de::MapAccess::next_value::<#wrapper_ty>(&mut __map) { - _serde::__private::Ok(__wrapper) => __wrapper.value, - _serde::__private::Err(__err) => { - return _serde::__private::Err(__err); - } - }; - }) - } - }; - quote! { - __Field::#name => { - if #name { - return _serde::__private::Err(<__A::Error as _serde::de::Error>::duplicate_field(#deser_name)); - } - #visit; - #name = true; - } - } - }); - - // Visit ignored values to consume them - let ignored_arm = if cattrs.deny_unknown_fields() { - None - } else { - Some(quote! { - _ => { let _ = try!(_serde::de::MapAccess::next_value::<_serde::de::IgnoredAny>(&mut __map)); } - }) - }; - - let all_skipped = fields.iter().all(|field| field.attrs.skip_deserializing()); - - let match_keys = if cattrs.deny_unknown_fields() && all_skipped { - quote! { - // FIXME: Once we drop support for Rust 1.15: - // let _serde::__private::None::<__Field> = try!(_serde::de::MapAccess::next_key(&mut __map)); - _serde::__private::Option::map( - try!(_serde::de::MapAccess::next_key::<__Field>(&mut __map)), - |__impossible| match __impossible {}); - } - } else { - quote! 
{ - while let _serde::__private::Some(__key) = try!(_serde::de::MapAccess::next_key::<__Field>(&mut __map)) { - match __key { - #(#value_arms_from)* - #ignored_arm - } - } - } - }; - - let check_flags = fields_names - .iter() - .filter(|&&(field, _)| !field.attrs.skip_deserializing()) - .map(|(field, name)| { - let missing_expr = expr_is_missing(field, cattrs); - // If missing_expr unconditionally returns an error, don't try - // to assign its value to self.place. - if field.attrs.default().is_none() - && cattrs.default().is_none() - && field.attrs.deserialize_with().is_some() - { - let missing_expr = Stmts(missing_expr); - quote! { - if !#name { - #missing_expr; - } - } - } else { - let member = &field.member; - let missing_expr = Expr(missing_expr); - quote! { - if !#name { - self.place.#member = #missing_expr; - }; - } - } - }); - - let this = ¶ms.this; - let (_, _, ty_generics, _) = split_with_de_lifetime(params); - - let let_default = match cattrs.default() { - attr::Default::Default => Some(quote!( - let __default: #this #ty_generics = _serde::__private::Default::default(); - )), - attr::Default::Path(path) => Some(quote!( - let __default: #this #ty_generics = #path(); - )), - attr::Default::None => { - // We don't need the default value, to prevent an unused variable warning - // we'll leave the line empty. - None - } - }; - - quote_block! { - #(#let_flags)* - - #match_keys - - #let_default - - #(#check_flags)* - - _serde::__private::Ok(()) - } -} - -fn field_i(i: usize) -> Ident { - Ident::new(&format!("__field{}", i), Span::call_site()) -} - -/// This function wraps the expression in `#[serde(deserialize_with = "...")]` -/// in a trait to prevent it from accessing the internal `Deserialize` state. -fn wrap_deserialize_with( - params: &Parameters, - value_ty: &TokenStream, - deserialize_with: &syn::ExprPath, -) -> (TokenStream, TokenStream) { - let this = ¶ms.this; - let (de_impl_generics, de_ty_generics, ty_generics, where_clause) = - split_with_de_lifetime(params); - let delife = params.borrowed.de_lifetime(); - - let wrapper = quote! 
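A function named in #[serde(deserialize_with = "...")] must have the free-standing signature shown below; the derive then feeds it through the private __DeserializeWith wrapper built here. Sketch only, with invented names, and parsing-from-string chosen purely as an example:

use serde::{Deserialize, Deserializer};

fn de_from_str<'de, D>(deserializer: D) -> Result<u64, D::Error>
where
    D: Deserializer<'de>,
{
    let s = String::deserialize(deserializer)?;
    s.parse().map_err(serde::de::Error::custom)
}

#[derive(Deserialize)]
struct Record {
    #[serde(deserialize_with = "de_from_str")]
    id: u64,
}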
{ - struct __DeserializeWith #de_impl_generics #where_clause { - value: #value_ty, - phantom: _serde::__private::PhantomData<#this #ty_generics>, - lifetime: _serde::__private::PhantomData<&#delife ()>, - } - - impl #de_impl_generics _serde::Deserialize<#delife> for __DeserializeWith #de_ty_generics #where_clause { - fn deserialize<__D>(__deserializer: __D) -> _serde::__private::Result - where - __D: _serde::Deserializer<#delife>, - { - _serde::__private::Ok(__DeserializeWith { - value: try!(#deserialize_with(__deserializer)), - phantom: _serde::__private::PhantomData, - lifetime: _serde::__private::PhantomData, - }) - } - } - }; - - let wrapper_ty = quote!(__DeserializeWith #de_ty_generics); - - (wrapper, wrapper_ty) -} - -fn wrap_deserialize_field_with( - params: &Parameters, - field_ty: &syn::Type, - deserialize_with: &syn::ExprPath, -) -> (TokenStream, TokenStream) { - wrap_deserialize_with(params, "e!(#field_ty), deserialize_with) -} - -fn wrap_deserialize_variant_with( - params: &Parameters, - variant: &Variant, - deserialize_with: &syn::ExprPath, -) -> (TokenStream, TokenStream, TokenStream) { - let field_tys = variant.fields.iter().map(|field| field.ty); - let (wrapper, wrapper_ty) = - wrap_deserialize_with(params, "e!((#(#field_tys),*)), deserialize_with); - - let unwrap_fn = unwrap_to_variant_closure(params, variant, true); - - (wrapper, wrapper_ty, unwrap_fn) -} - -// Generates closure that converts single input parameter to the final value. -fn unwrap_to_variant_closure( - params: &Parameters, - variant: &Variant, - with_wrapper: bool, -) -> TokenStream { - let this = ¶ms.this; - let variant_ident = &variant.ident; - - let (arg, wrapper) = if with_wrapper { - (quote! { __wrap }, quote! { __wrap.value }) - } else { - let field_tys = variant.fields.iter().map(|field| field.ty); - (quote! { __wrap: (#(#field_tys),*) }, quote! { __wrap }) - }; - - let field_access = (0..variant.fields.len()).map(|n| { - Member::Unnamed(Index { - index: n as u32, - span: Span::call_site(), - }) - }); - - match variant.style { - Style::Struct if variant.fields.len() == 1 => { - let member = &variant.fields[0].member; - quote! { - |#arg| #this::#variant_ident { #member: #wrapper } - } - } - Style::Struct => { - let members = variant.fields.iter().map(|field| &field.member); - quote! { - |#arg| #this::#variant_ident { #(#members: #wrapper.#field_access),* } - } - } - Style::Tuple => quote! { - |#arg| #this::#variant_ident(#(#wrapper.#field_access),*) - }, - Style::Newtype => quote! { - |#arg| #this::#variant_ident(#wrapper) - }, - Style::Unit => quote! { - |#arg| #this::#variant_ident - }, - } -} - -fn expr_is_missing(field: &Field, cattrs: &attr::Container) -> Fragment { - match field.attrs.default() { - attr::Default::Default => { - let span = field.original.span(); - let func = quote_spanned!(span=> _serde::__private::Default::default); - return quote_expr!(#func()); - } - attr::Default::Path(path) => { - return quote_expr!(#path()); - } - attr::Default::None => { /* below */ } - } - - match *cattrs.default() { - attr::Default::Default | attr::Default::Path(_) => { - let member = &field.member; - return quote_expr!(__default.#member); - } - attr::Default::None => { /* below */ } - } - - let name = field.attrs.name().deserialize_name(); - match field.attrs.deserialize_with() { - None => { - let span = field.original.span(); - let func = quote_spanned!(span=> _serde::__private::de::missing_field); - quote_expr! { - try!(#func(#name)) - } - } - Some(_) => { - quote_expr! 
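expr_is_missing above is also what backs the documented #[serde(default)] and #[serde(default = "path")] attributes when a key is absent from the input; a hedged illustration (names invented):

use serde::Deserialize;

fn default_port() -> u16 {
    8080
}

#[derive(Deserialize)]
struct Config {
    host: String,
    // A missing `port` key calls the named function instead of raising a
    // missing_field error.
    #[serde(default = "default_port")]
    port: u16,
    // A missing `verbose` key uses Default::default(), i.e. false.
    #[serde(default)]
    verbose: bool,
}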
{ - return _serde::__private::Err(<__A::Error as _serde::de::Error>::missing_field(#name)) - } - } - } -} - -fn effective_style(variant: &Variant) -> Style { - match variant.style { - Style::Newtype if variant.fields[0].attrs.skip_deserializing() => Style::Unit, - other => other, - } -} - -struct DeImplGenerics<'a>(&'a Parameters); -#[cfg(feature = "deserialize_in_place")] -struct InPlaceImplGenerics<'a>(&'a Parameters); - -impl<'a> ToTokens for DeImplGenerics<'a> { - fn to_tokens(&self, tokens: &mut TokenStream) { - let mut generics = self.0.generics.clone(); - if let Some(de_lifetime) = self.0.borrowed.de_lifetime_def() { - generics.params = Some(syn::GenericParam::Lifetime(de_lifetime)) - .into_iter() - .chain(generics.params) - .collect(); - } - let (impl_generics, _, _) = generics.split_for_impl(); - impl_generics.to_tokens(tokens); - } -} - -#[cfg(feature = "deserialize_in_place")] -impl<'a> ToTokens for InPlaceImplGenerics<'a> { - fn to_tokens(&self, tokens: &mut TokenStream) { - let place_lifetime = place_lifetime(); - let mut generics = self.0.generics.clone(); - - // Add lifetime for `&'place mut Self, and `'a: 'place` - for param in &mut generics.params { - match param { - syn::GenericParam::Lifetime(param) => { - param.bounds.push(place_lifetime.lifetime.clone()); - } - syn::GenericParam::Type(param) => { - param.bounds.push(syn::TypeParamBound::Lifetime( - place_lifetime.lifetime.clone(), - )); - } - syn::GenericParam::Const(_) => {} - } - } - generics.params = Some(syn::GenericParam::Lifetime(place_lifetime)) - .into_iter() - .chain(generics.params) - .collect(); - if let Some(de_lifetime) = self.0.borrowed.de_lifetime_def() { - generics.params = Some(syn::GenericParam::Lifetime(de_lifetime)) - .into_iter() - .chain(generics.params) - .collect(); - } - let (impl_generics, _, _) = generics.split_for_impl(); - impl_generics.to_tokens(tokens); - } -} - -#[cfg(feature = "deserialize_in_place")] -impl<'a> DeImplGenerics<'a> { - fn in_place(self) -> InPlaceImplGenerics<'a> { - InPlaceImplGenerics(self.0) - } -} - -struct DeTypeGenerics<'a>(&'a Parameters); -#[cfg(feature = "deserialize_in_place")] -struct InPlaceTypeGenerics<'a>(&'a Parameters); - -impl<'a> ToTokens for DeTypeGenerics<'a> { - fn to_tokens(&self, tokens: &mut TokenStream) { - let mut generics = self.0.generics.clone(); - if self.0.borrowed.de_lifetime_def().is_some() { - let def = syn::LifetimeDef { - attrs: Vec::new(), - lifetime: syn::Lifetime::new("'de", Span::call_site()), - colon_token: None, - bounds: Punctuated::new(), - }; - generics.params = Some(syn::GenericParam::Lifetime(def)) - .into_iter() - .chain(generics.params) - .collect(); - } - let (_, ty_generics, _) = generics.split_for_impl(); - ty_generics.to_tokens(tokens); - } -} - -#[cfg(feature = "deserialize_in_place")] -impl<'a> ToTokens for InPlaceTypeGenerics<'a> { - fn to_tokens(&self, tokens: &mut TokenStream) { - let mut generics = self.0.generics.clone(); - generics.params = Some(syn::GenericParam::Lifetime(place_lifetime())) - .into_iter() - .chain(generics.params) - .collect(); - - if self.0.borrowed.de_lifetime_def().is_some() { - let def = syn::LifetimeDef { - attrs: Vec::new(), - lifetime: syn::Lifetime::new("'de", Span::call_site()), - colon_token: None, - bounds: Punctuated::new(), - }; - generics.params = Some(syn::GenericParam::Lifetime(def)) - .into_iter() - .chain(generics.params) - .collect(); - } - let (_, ty_generics, _) = generics.split_for_impl(); - ty_generics.to_tokens(tokens); - } -} - -#[cfg(feature = "deserialize_in_place")] 
-impl<'a> DeTypeGenerics<'a> { - fn in_place(self) -> InPlaceTypeGenerics<'a> { - InPlaceTypeGenerics(self.0) - } -} - -#[cfg(feature = "deserialize_in_place")] -fn place_lifetime() -> syn::LifetimeDef { - syn::LifetimeDef { - attrs: Vec::new(), - lifetime: syn::Lifetime::new("'place", Span::call_site()), - colon_token: None, - bounds: Punctuated::new(), - } -} - -fn split_with_de_lifetime( - params: &Parameters, -) -> ( - DeImplGenerics, - DeTypeGenerics, - syn::TypeGenerics, - Option<&syn::WhereClause>, -) { - let de_impl_generics = DeImplGenerics(params); - let de_ty_generics = DeTypeGenerics(params); - let (_, ty_generics, where_clause) = params.generics.split_for_impl(); - (de_impl_generics, de_ty_generics, ty_generics, where_clause) -} diff --git a/vendor/serde_derive/src/dummy.rs b/vendor/serde_derive/src/dummy.rs deleted file mode 100644 index 29de2601..00000000 --- a/vendor/serde_derive/src/dummy.rs +++ /dev/null @@ -1,48 +0,0 @@ -use proc_macro2::{Ident, TokenStream}; -use quote::format_ident; - -use syn; -use try; - -pub fn wrap_in_const( - serde_path: Option<&syn::Path>, - trait_: &str, - ty: &Ident, - code: TokenStream, -) -> TokenStream { - let try_replacement = try::replacement(); - - let dummy_const = if cfg!(underscore_consts) { - format_ident!("_") - } else { - format_ident!("_IMPL_{}_FOR_{}", trait_, unraw(ty)) - }; - - let use_serde = match serde_path { - Some(path) => quote! { - use #path as _serde; - }, - None => quote! { - #[allow(unused_extern_crates, clippy::useless_attribute)] - extern crate serde as _serde; - }, - }; - - quote! { - #[doc(hidden)] - #[allow(non_upper_case_globals, unused_attributes, unused_qualifications)] - const #dummy_const: () = { - #use_serde - #try_replacement - #code - }; - } -} - -#[allow(deprecated)] -fn unraw(ident: &Ident) -> String { - // str::trim_start_matches was added in 1.30, trim_left_matches deprecated - // in 1.33. We currently support rustc back to 1.15 so we need to continue - // to use the deprecated one. - ident.to_string().trim_left_matches("r#").to_owned() -} diff --git a/vendor/serde_derive/src/fragment.rs b/vendor/serde_derive/src/fragment.rs deleted file mode 100644 index 324504aa..00000000 --- a/vendor/serde_derive/src/fragment.rs +++ /dev/null @@ -1,74 +0,0 @@ -use proc_macro2::TokenStream; -use quote::ToTokens; -use syn::token; - -pub enum Fragment { - /// Tokens that can be used as an expression. - Expr(TokenStream), - /// Tokens that can be used inside a block. The surrounding curly braces are - /// not part of these tokens. - Block(TokenStream), -} - -macro_rules! quote_expr { - ($($tt:tt)*) => { - $crate::fragment::Fragment::Expr(quote!($($tt)*)) - } -} - -macro_rules! quote_block { - ($($tt:tt)*) => { - $crate::fragment::Fragment::Block(quote!($($tt)*)) - } -} - -/// Interpolate a fragment in place of an expression. This involves surrounding -/// Block fragments in curly braces. -pub struct Expr(pub Fragment); -impl ToTokens for Expr { - fn to_tokens(&self, out: &mut TokenStream) { - match &self.0 { - Fragment::Expr(expr) => expr.to_tokens(out), - Fragment::Block(block) => { - token::Brace::default().surround(out, |out| block.to_tokens(out)); - } - } - } -} - -/// Interpolate a fragment as the statements of a block. 
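wrap_in_const above is why `cargo expand` shows a serde impl nested inside a const item. Roughly the shape it produces, simplified, with an invented type, and assuming the serde crate is available (the real body drives the generated field visitor rather than the stand-in shown here):

struct Point {
    x: i32,
}

// With the underscore_consts cfg the const is anonymous; older toolchains get a
// named _IMPL_DESERIALIZE_FOR_Point instead. Body simplified for illustration.
const _: () = {
    extern crate serde as _serde;

    impl<'de> _serde::Deserialize<'de> for Point {
        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where
            D: _serde::Deserializer<'de>,
        {
            // Stand-in body; the actual expansion builds a __Visitor as in de.rs.
            let x: i32 = _serde::Deserialize::deserialize(deserializer)?;
            Ok(Point { x })
        }
    }
};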
-pub struct Stmts(pub Fragment); -impl ToTokens for Stmts { - fn to_tokens(&self, out: &mut TokenStream) { - match &self.0 { - Fragment::Expr(expr) => expr.to_tokens(out), - Fragment::Block(block) => block.to_tokens(out), - } - } -} - -/// Interpolate a fragment as the value part of a `match` expression. This -/// involves putting a comma after expressions and curly braces around blocks. -pub struct Match(pub Fragment); -impl ToTokens for Match { - fn to_tokens(&self, out: &mut TokenStream) { - match &self.0 { - Fragment::Expr(expr) => { - expr.to_tokens(out); - ::default().to_tokens(out); - } - Fragment::Block(block) => { - token::Brace::default().surround(out, |out| block.to_tokens(out)); - } - } - } -} - -impl AsRef for Fragment { - fn as_ref(&self) -> &TokenStream { - match self { - Fragment::Expr(expr) => expr, - Fragment::Block(block) => block, - } - } -} diff --git a/vendor/serde_derive/src/internals/ast.rs b/vendor/serde_derive/src/internals/ast.rs deleted file mode 100644 index 2a6950b2..00000000 --- a/vendor/serde_derive/src/internals/ast.rs +++ /dev/null @@ -1,202 +0,0 @@ -//! A Serde ast, parsed from the Syn ast and ready to generate Rust code. - -use internals::attr; -use internals::check; -use internals::{Ctxt, Derive}; -use syn; -use syn::punctuated::Punctuated; - -/// A source data structure annotated with `#[derive(Serialize)]` and/or `#[derive(Deserialize)]`, -/// parsed into an internal representation. -pub struct Container<'a> { - /// The struct or enum name (without generics). - pub ident: syn::Ident, - /// Attributes on the structure, parsed for Serde. - pub attrs: attr::Container, - /// The contents of the struct or enum. - pub data: Data<'a>, - /// Any generics on the struct or enum. - pub generics: &'a syn::Generics, - /// Original input. - pub original: &'a syn::DeriveInput, -} - -/// The fields of a struct or enum. -/// -/// Analogous to `syn::Data`. -pub enum Data<'a> { - Enum(Vec>), - Struct(Style, Vec>), -} - -/// A variant of an enum. -pub struct Variant<'a> { - pub ident: syn::Ident, - pub attrs: attr::Variant, - pub style: Style, - pub fields: Vec>, - pub original: &'a syn::Variant, -} - -/// A field of a struct. -pub struct Field<'a> { - pub member: syn::Member, - pub attrs: attr::Field, - pub ty: &'a syn::Type, - pub original: &'a syn::Field, -} - -#[derive(Copy, Clone)] -pub enum Style { - /// Named fields. - Struct, - /// Many unnamed fields. - Tuple, - /// One unnamed field. - Newtype, - /// No fields. - Unit, -} - -impl<'a> Container<'a> { - /// Convert the raw Syn ast into a parsed container object, collecting errors in `cx`. 
- pub fn from_ast( - cx: &Ctxt, - item: &'a syn::DeriveInput, - derive: Derive, - ) -> Option> { - let mut attrs = attr::Container::from_ast(cx, item); - - let mut data = match &item.data { - syn::Data::Enum(data) => Data::Enum(enum_from_ast(cx, &data.variants, attrs.default())), - syn::Data::Struct(data) => { - let (style, fields) = struct_from_ast(cx, &data.fields, None, attrs.default()); - Data::Struct(style, fields) - } - syn::Data::Union(_) => { - cx.error_spanned_by(item, "Serde does not support derive for unions"); - return None; - } - }; - - let mut has_flatten = false; - match &mut data { - Data::Enum(variants) => { - for variant in variants { - variant.attrs.rename_by_rules(attrs.rename_all_rules()); - for field in &mut variant.fields { - if field.attrs.flatten() { - has_flatten = true; - } - field - .attrs - .rename_by_rules(variant.attrs.rename_all_rules()); - } - } - } - Data::Struct(_, fields) => { - for field in fields { - if field.attrs.flatten() { - has_flatten = true; - } - field.attrs.rename_by_rules(attrs.rename_all_rules()); - } - } - } - - if has_flatten { - attrs.mark_has_flatten(); - } - - let mut item = Container { - ident: item.ident.clone(), - attrs, - data, - generics: &item.generics, - original: item, - }; - check::check(cx, &mut item, derive); - Some(item) - } -} - -impl<'a> Data<'a> { - pub fn all_fields(&'a self) -> Box> + 'a> { - match self { - Data::Enum(variants) => { - Box::new(variants.iter().flat_map(|variant| variant.fields.iter())) - } - Data::Struct(_, fields) => Box::new(fields.iter()), - } - } - - pub fn has_getter(&self) -> bool { - self.all_fields().any(|f| f.attrs.getter().is_some()) - } -} - -fn enum_from_ast<'a>( - cx: &Ctxt, - variants: &'a Punctuated, - container_default: &attr::Default, -) -> Vec> { - variants - .iter() - .map(|variant| { - let attrs = attr::Variant::from_ast(cx, variant); - let (style, fields) = - struct_from_ast(cx, &variant.fields, Some(&attrs), container_default); - Variant { - ident: variant.ident.clone(), - attrs, - style, - fields, - original: variant, - } - }) - .collect() -} - -fn struct_from_ast<'a>( - cx: &Ctxt, - fields: &'a syn::Fields, - attrs: Option<&attr::Variant>, - container_default: &attr::Default, -) -> (Style, Vec>) { - match fields { - syn::Fields::Named(fields) => ( - Style::Struct, - fields_from_ast(cx, &fields.named, attrs, container_default), - ), - syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => ( - Style::Newtype, - fields_from_ast(cx, &fields.unnamed, attrs, container_default), - ), - syn::Fields::Unnamed(fields) => ( - Style::Tuple, - fields_from_ast(cx, &fields.unnamed, attrs, container_default), - ), - syn::Fields::Unit => (Style::Unit, Vec::new()), - } -} - -fn fields_from_ast<'a>( - cx: &Ctxt, - fields: &'a Punctuated, - attrs: Option<&attr::Variant>, - container_default: &attr::Default, -) -> Vec> { - fields - .iter() - .enumerate() - .map(|(i, field)| Field { - member: match &field.ident { - Some(ident) => syn::Member::Named(ident.clone()), - None => syn::Member::Unnamed(i.into()), - }, - attrs: attr::Field::from_ast(cx, i, field, attrs, container_default), - ty: &field.ty, - original: field, - }) - .collect() -} diff --git a/vendor/serde_derive/src/internals/attr.rs b/vendor/serde_derive/src/internals/attr.rs deleted file mode 100644 index 13f55250..00000000 --- a/vendor/serde_derive/src/internals/attr.rs +++ /dev/null @@ -1,1954 +0,0 @@ -use internals::respan::respan; -use internals::symbol::*; -use internals::{ungroup, Ctxt}; -use proc_macro2::{Spacing, Span, 
TokenStream, TokenTree}; -use quote::ToTokens; -use std::borrow::Cow; -use std::collections::BTreeSet; -use syn; -use syn::parse::{self, Parse, ParseStream}; -use syn::punctuated::Punctuated; -use syn::Ident; -use syn::Meta::{List, NameValue, Path}; -use syn::NestedMeta::{Lit, Meta}; - -// This module handles parsing of `#[serde(...)]` attributes. The entrypoints -// are `attr::Container::from_ast`, `attr::Variant::from_ast`, and -// `attr::Field::from_ast`. Each returns an instance of the corresponding -// struct. Note that none of them return a Result. Unrecognized, malformed, or -// duplicated attributes result in a span_err but otherwise are ignored. The -// user will see errors simultaneously for all bad attributes in the crate -// rather than just the first. - -pub use internals::case::RenameRule; - -struct Attr<'c, T> { - cx: &'c Ctxt, - name: Symbol, - tokens: TokenStream, - value: Option, -} - -impl<'c, T> Attr<'c, T> { - fn none(cx: &'c Ctxt, name: Symbol) -> Self { - Attr { - cx, - name, - tokens: TokenStream::new(), - value: None, - } - } - - fn set(&mut self, obj: A, value: T) { - let tokens = obj.into_token_stream(); - - if self.value.is_some() { - self.cx - .error_spanned_by(tokens, format!("duplicate serde attribute `{}`", self.name)); - } else { - self.tokens = tokens; - self.value = Some(value); - } - } - - fn set_opt(&mut self, obj: A, value: Option) { - if let Some(value) = value { - self.set(obj, value); - } - } - - fn set_if_none(&mut self, value: T) { - if self.value.is_none() { - self.value = Some(value); - } - } - - fn get(self) -> Option { - self.value - } - - fn get_with_tokens(self) -> Option<(TokenStream, T)> { - match self.value { - Some(v) => Some((self.tokens, v)), - None => None, - } - } -} - -struct BoolAttr<'c>(Attr<'c, ()>); - -impl<'c> BoolAttr<'c> { - fn none(cx: &'c Ctxt, name: Symbol) -> Self { - BoolAttr(Attr::none(cx, name)) - } - - fn set_true(&mut self, obj: A) { - self.0.set(obj, ()); - } - - fn get(&self) -> bool { - self.0.value.is_some() - } -} - -struct VecAttr<'c, T> { - cx: &'c Ctxt, - name: Symbol, - first_dup_tokens: TokenStream, - values: Vec, -} - -impl<'c, T> VecAttr<'c, T> { - fn none(cx: &'c Ctxt, name: Symbol) -> Self { - VecAttr { - cx, - name, - first_dup_tokens: TokenStream::new(), - values: Vec::new(), - } - } - - fn insert(&mut self, obj: A, value: T) { - if self.values.len() == 1 { - self.first_dup_tokens = obj.into_token_stream(); - } - self.values.push(value); - } - - fn at_most_one(mut self) -> Result, ()> { - if self.values.len() > 1 { - let dup_token = self.first_dup_tokens; - self.cx.error_spanned_by( - dup_token, - format!("duplicate serde attribute `{}`", self.name), - ); - Err(()) - } else { - Ok(self.values.pop()) - } - } - - fn get(self) -> Vec { - self.values - } -} - -pub struct Name { - serialize: String, - serialize_renamed: bool, - deserialize: String, - deserialize_renamed: bool, - deserialize_aliases: Vec, -} - -#[allow(deprecated)] -fn unraw(ident: &Ident) -> String { - // str::trim_start_matches was added in 1.30, trim_left_matches deprecated - // in 1.33. We currently support rustc back to 1.15 so we need to continue - // to use the deprecated one. 
- ident.to_string().trim_left_matches("r#").to_owned() -} - -impl Name { - fn from_attrs( - source_name: String, - ser_name: Attr, - de_name: Attr, - de_aliases: Option>, - ) -> Name { - let deserialize_aliases = match de_aliases { - Some(de_aliases) => { - let mut alias_list = BTreeSet::new(); - for alias_name in de_aliases.get() { - alias_list.insert(alias_name); - } - alias_list.into_iter().collect() - } - None => Vec::new(), - }; - - let ser_name = ser_name.get(); - let ser_renamed = ser_name.is_some(); - let de_name = de_name.get(); - let de_renamed = de_name.is_some(); - Name { - serialize: ser_name.unwrap_or_else(|| source_name.clone()), - serialize_renamed: ser_renamed, - deserialize: de_name.unwrap_or(source_name), - deserialize_renamed: de_renamed, - deserialize_aliases, - } - } - - /// Return the container name for the container when serializing. - pub fn serialize_name(&self) -> String { - self.serialize.clone() - } - - /// Return the container name for the container when deserializing. - pub fn deserialize_name(&self) -> String { - self.deserialize.clone() - } - - fn deserialize_aliases(&self) -> Vec { - let mut aliases = self.deserialize_aliases.clone(); - let main_name = self.deserialize_name(); - if !aliases.contains(&main_name) { - aliases.push(main_name); - } - aliases - } -} - -pub struct RenameAllRules { - serialize: RenameRule, - deserialize: RenameRule, -} - -/// Represents struct or enum attribute information. -pub struct Container { - name: Name, - transparent: bool, - deny_unknown_fields: bool, - default: Default, - rename_all_rules: RenameAllRules, - ser_bound: Option>, - de_bound: Option>, - tag: TagType, - type_from: Option, - type_try_from: Option, - type_into: Option, - remote: Option, - identifier: Identifier, - has_flatten: bool, - serde_path: Option, - is_packed: bool, - /// Error message generated when type can't be deserialized - expecting: Option, -} - -/// Styles of representing an enum. -pub enum TagType { - /// The default. - /// - /// ```json - /// {"variant1": {"key1": "value1", "key2": "value2"}} - /// ``` - External, - - /// `#[serde(tag = "type")]` - /// - /// ```json - /// {"type": "variant1", "key1": "value1", "key2": "value2"} - /// ``` - Internal { tag: String }, - - /// `#[serde(tag = "t", content = "c")]` - /// - /// ```json - /// {"t": "variant1", "c": {"key1": "value1", "key2": "value2"}} - /// ``` - Adjacent { tag: String, content: String }, - - /// `#[serde(untagged)]` - /// - /// ```json - /// {"key1": "value1", "key2": "value2"} - /// ``` - None, -} - -/// Whether this enum represents the fields of a struct or the variants of an -/// enum. -#[derive(Copy, Clone)] -pub enum Identifier { - /// It does not. - No, - - /// This enum represents the fields of a struct. All of the variants must be - /// unit variants, except possibly one which is annotated with - /// `#[serde(other)]` and is a newtype variant. - Field, - - /// This enum represents the variants of an enum. All of the variants must - /// be unit variants. - Variant, -} - -impl Identifier { - #[cfg(feature = "deserialize_in_place")] - pub fn is_some(self) -> bool { - match self { - Identifier::No => false, - Identifier::Field | Identifier::Variant => true, - } - } -} - -impl Container { - /// Extract out the `#[serde(...)]` attributes from an item. 
- pub fn from_ast(cx: &Ctxt, item: &syn::DeriveInput) -> Self { - let mut ser_name = Attr::none(cx, RENAME); - let mut de_name = Attr::none(cx, RENAME); - let mut transparent = BoolAttr::none(cx, TRANSPARENT); - let mut deny_unknown_fields = BoolAttr::none(cx, DENY_UNKNOWN_FIELDS); - let mut default = Attr::none(cx, DEFAULT); - let mut rename_all_ser_rule = Attr::none(cx, RENAME_ALL); - let mut rename_all_de_rule = Attr::none(cx, RENAME_ALL); - let mut ser_bound = Attr::none(cx, BOUND); - let mut de_bound = Attr::none(cx, BOUND); - let mut untagged = BoolAttr::none(cx, UNTAGGED); - let mut internal_tag = Attr::none(cx, TAG); - let mut content = Attr::none(cx, CONTENT); - let mut type_from = Attr::none(cx, FROM); - let mut type_try_from = Attr::none(cx, TRY_FROM); - let mut type_into = Attr::none(cx, INTO); - let mut remote = Attr::none(cx, REMOTE); - let mut field_identifier = BoolAttr::none(cx, FIELD_IDENTIFIER); - let mut variant_identifier = BoolAttr::none(cx, VARIANT_IDENTIFIER); - let mut serde_path = Attr::none(cx, CRATE); - let mut expecting = Attr::none(cx, EXPECTING); - - for meta_item in item - .attrs - .iter() - .flat_map(|attr| get_serde_meta_items(cx, attr)) - .flatten() - { - match &meta_item { - // Parse `#[serde(rename = "foo")]` - Meta(NameValue(m)) if m.path == RENAME => { - if let Ok(s) = get_lit_str(cx, RENAME, &m.lit) { - ser_name.set(&m.path, s.value()); - de_name.set(&m.path, s.value()); - } - } - - // Parse `#[serde(rename(serialize = "foo", deserialize = "bar"))]` - Meta(List(m)) if m.path == RENAME => { - if let Ok((ser, de)) = get_renames(cx, &m.nested) { - ser_name.set_opt(&m.path, ser.map(syn::LitStr::value)); - de_name.set_opt(&m.path, de.map(syn::LitStr::value)); - } - } - - // Parse `#[serde(rename_all = "foo")]` - Meta(NameValue(m)) if m.path == RENAME_ALL => { - if let Ok(s) = get_lit_str(cx, RENAME_ALL, &m.lit) { - match RenameRule::from_str(&s.value()) { - Ok(rename_rule) => { - rename_all_ser_rule.set(&m.path, rename_rule); - rename_all_de_rule.set(&m.path, rename_rule); - } - Err(err) => cx.error_spanned_by(s, err), - } - } - } - - // Parse `#[serde(rename_all(serialize = "foo", deserialize = "bar"))]` - Meta(List(m)) if m.path == RENAME_ALL => { - if let Ok((ser, de)) = get_renames(cx, &m.nested) { - if let Some(ser) = ser { - match RenameRule::from_str(&ser.value()) { - Ok(rename_rule) => rename_all_ser_rule.set(&m.path, rename_rule), - Err(err) => cx.error_spanned_by(ser, err), - } - } - if let Some(de) = de { - match RenameRule::from_str(&de.value()) { - Ok(rename_rule) => rename_all_de_rule.set(&m.path, rename_rule), - Err(err) => cx.error_spanned_by(de, err), - } - } - } - } - - // Parse `#[serde(transparent)]` - Meta(Path(word)) if word == TRANSPARENT => { - transparent.set_true(word); - } - - // Parse `#[serde(deny_unknown_fields)]` - Meta(Path(word)) if word == DENY_UNKNOWN_FIELDS => { - deny_unknown_fields.set_true(word); - } - - // Parse `#[serde(default)]` - Meta(Path(word)) if word == DEFAULT => match &item.data { - syn::Data::Struct(syn::DataStruct { fields, .. }) => match fields { - syn::Fields::Named(_) => { - default.set(word, Default::Default); - } - syn::Fields::Unnamed(_) | syn::Fields::Unit => cx.error_spanned_by( - fields, - "#[serde(default)] can only be used on structs with named fields", - ), - }, - syn::Data::Enum(syn::DataEnum { enum_token, .. }) => cx.error_spanned_by( - enum_token, - "#[serde(default)] can only be used on structs with named fields", - ), - syn::Data::Union(syn::DataUnion { union_token, .. 
}) => cx.error_spanned_by( - union_token, - "#[serde(default)] can only be used on structs with named fields", - ), - }, - - // Parse `#[serde(default = "...")]` - Meta(NameValue(m)) if m.path == DEFAULT => { - if let Ok(path) = parse_lit_into_expr_path(cx, DEFAULT, &m.lit) { - match &item.data { - syn::Data::Struct(syn::DataStruct { fields, .. }) => { - match fields { - syn::Fields::Named(_) => { - default.set(&m.path, Default::Path(path)); - } - syn::Fields::Unnamed(_) | syn::Fields::Unit => cx - .error_spanned_by( - fields, - "#[serde(default = \"...\")] can only be used on structs with named fields", - ), - } - } - syn::Data::Enum(syn::DataEnum { enum_token, .. }) => cx - .error_spanned_by( - enum_token, - "#[serde(default = \"...\")] can only be used on structs with named fields", - ), - syn::Data::Union(syn::DataUnion { - union_token, .. - }) => cx.error_spanned_by( - union_token, - "#[serde(default = \"...\")] can only be used on structs with named fields", - ), - } - } - } - - // Parse `#[serde(bound = "T: SomeBound")]` - Meta(NameValue(m)) if m.path == BOUND => { - if let Ok(where_predicates) = parse_lit_into_where(cx, BOUND, BOUND, &m.lit) { - ser_bound.set(&m.path, where_predicates.clone()); - de_bound.set(&m.path, where_predicates); - } - } - - // Parse `#[serde(bound(serialize = "...", deserialize = "..."))]` - Meta(List(m)) if m.path == BOUND => { - if let Ok((ser, de)) = get_where_predicates(cx, &m.nested) { - ser_bound.set_opt(&m.path, ser); - de_bound.set_opt(&m.path, de); - } - } - - // Parse `#[serde(untagged)]` - Meta(Path(word)) if word == UNTAGGED => match item.data { - syn::Data::Enum(_) => { - untagged.set_true(word); - } - syn::Data::Struct(syn::DataStruct { struct_token, .. }) => { - cx.error_spanned_by( - struct_token, - "#[serde(untagged)] can only be used on enums", - ); - } - syn::Data::Union(syn::DataUnion { union_token, .. }) => { - cx.error_spanned_by( - union_token, - "#[serde(untagged)] can only be used on enums", - ); - } - }, - - // Parse `#[serde(tag = "type")]` - Meta(NameValue(m)) if m.path == TAG => { - if let Ok(s) = get_lit_str(cx, TAG, &m.lit) { - match &item.data { - syn::Data::Enum(_) => { - internal_tag.set(&m.path, s.value()); - } - syn::Data::Struct(syn::DataStruct { fields, .. }) => match fields { - syn::Fields::Named(_) => { - internal_tag.set(&m.path, s.value()); - } - syn::Fields::Unnamed(_) | syn::Fields::Unit => { - cx.error_spanned_by( - fields, - "#[serde(tag = \"...\")] can only be used on enums and structs with named fields", - ); - } - }, - syn::Data::Union(syn::DataUnion { union_token, .. }) => { - cx.error_spanned_by( - union_token, - "#[serde(tag = \"...\")] can only be used on enums and structs with named fields", - ); - } - } - } - } - - // Parse `#[serde(content = "c")]` - Meta(NameValue(m)) if m.path == CONTENT => { - if let Ok(s) = get_lit_str(cx, CONTENT, &m.lit) { - match &item.data { - syn::Data::Enum(_) => { - content.set(&m.path, s.value()); - } - syn::Data::Struct(syn::DataStruct { struct_token, .. }) => { - cx.error_spanned_by( - struct_token, - "#[serde(content = \"...\")] can only be used on enums", - ); - } - syn::Data::Union(syn::DataUnion { union_token, .. 
}) => { - cx.error_spanned_by( - union_token, - "#[serde(content = \"...\")] can only be used on enums", - ); - } - } - } - } - - // Parse `#[serde(from = "Type")] - Meta(NameValue(m)) if m.path == FROM => { - if let Ok(from_ty) = parse_lit_into_ty(cx, FROM, &m.lit) { - type_from.set_opt(&m.path, Some(from_ty)); - } - } - - // Parse `#[serde(try_from = "Type")] - Meta(NameValue(m)) if m.path == TRY_FROM => { - if let Ok(try_from_ty) = parse_lit_into_ty(cx, TRY_FROM, &m.lit) { - type_try_from.set_opt(&m.path, Some(try_from_ty)); - } - } - - // Parse `#[serde(into = "Type")] - Meta(NameValue(m)) if m.path == INTO => { - if let Ok(into_ty) = parse_lit_into_ty(cx, INTO, &m.lit) { - type_into.set_opt(&m.path, Some(into_ty)); - } - } - - // Parse `#[serde(remote = "...")]` - Meta(NameValue(m)) if m.path == REMOTE => { - if let Ok(path) = parse_lit_into_path(cx, REMOTE, &m.lit) { - if is_primitive_path(&path, "Self") { - remote.set(&m.path, item.ident.clone().into()); - } else { - remote.set(&m.path, path); - } - } - } - - // Parse `#[serde(field_identifier)]` - Meta(Path(word)) if word == FIELD_IDENTIFIER => { - field_identifier.set_true(word); - } - - // Parse `#[serde(variant_identifier)]` - Meta(Path(word)) if word == VARIANT_IDENTIFIER => { - variant_identifier.set_true(word); - } - - // Parse `#[serde(crate = "foo")]` - Meta(NameValue(m)) if m.path == CRATE => { - if let Ok(path) = parse_lit_into_path(cx, CRATE, &m.lit) { - serde_path.set(&m.path, path); - } - } - - // Parse `#[serde(expecting = "a message")]` - Meta(NameValue(m)) if m.path == EXPECTING => { - if let Ok(s) = get_lit_str(cx, EXPECTING, &m.lit) { - expecting.set(&m.path, s.value()); - } - } - - Meta(meta_item) => { - let path = meta_item - .path() - .into_token_stream() - .to_string() - .replace(' ', ""); - cx.error_spanned_by( - meta_item.path(), - format!("unknown serde container attribute `{}`", path), - ); - } - - Lit(lit) => { - cx.error_spanned_by(lit, "unexpected literal in serde container attribute"); - } - } - } - - let mut is_packed = false; - for attr in &item.attrs { - if attr.path.is_ident("repr") { - let _ = attr.parse_args_with(|input: ParseStream| { - while let Some(token) = input.parse()? 
{ - if let TokenTree::Ident(ident) = token { - is_packed |= ident == "packed"; - } - } - Ok(()) - }); - } - } - - Container { - name: Name::from_attrs(unraw(&item.ident), ser_name, de_name, None), - transparent: transparent.get(), - deny_unknown_fields: deny_unknown_fields.get(), - default: default.get().unwrap_or(Default::None), - rename_all_rules: RenameAllRules { - serialize: rename_all_ser_rule.get().unwrap_or(RenameRule::None), - deserialize: rename_all_de_rule.get().unwrap_or(RenameRule::None), - }, - ser_bound: ser_bound.get(), - de_bound: de_bound.get(), - tag: decide_tag(cx, item, untagged, internal_tag, content), - type_from: type_from.get(), - type_try_from: type_try_from.get(), - type_into: type_into.get(), - remote: remote.get(), - identifier: decide_identifier(cx, item, field_identifier, variant_identifier), - has_flatten: false, - serde_path: serde_path.get(), - is_packed, - expecting: expecting.get(), - } - } - - pub fn name(&self) -> &Name { - &self.name - } - - pub fn rename_all_rules(&self) -> &RenameAllRules { - &self.rename_all_rules - } - - pub fn transparent(&self) -> bool { - self.transparent - } - - pub fn deny_unknown_fields(&self) -> bool { - self.deny_unknown_fields - } - - pub fn default(&self) -> &Default { - &self.default - } - - pub fn ser_bound(&self) -> Option<&[syn::WherePredicate]> { - self.ser_bound.as_ref().map(|vec| &vec[..]) - } - - pub fn de_bound(&self) -> Option<&[syn::WherePredicate]> { - self.de_bound.as_ref().map(|vec| &vec[..]) - } - - pub fn tag(&self) -> &TagType { - &self.tag - } - - pub fn type_from(&self) -> Option<&syn::Type> { - self.type_from.as_ref() - } - - pub fn type_try_from(&self) -> Option<&syn::Type> { - self.type_try_from.as_ref() - } - - pub fn type_into(&self) -> Option<&syn::Type> { - self.type_into.as_ref() - } - - pub fn remote(&self) -> Option<&syn::Path> { - self.remote.as_ref() - } - - pub fn is_packed(&self) -> bool { - self.is_packed - } - - pub fn identifier(&self) -> Identifier { - self.identifier - } - - pub fn has_flatten(&self) -> bool { - self.has_flatten - } - - pub fn mark_has_flatten(&mut self) { - self.has_flatten = true; - } - - pub fn custom_serde_path(&self) -> Option<&syn::Path> { - self.serde_path.as_ref() - } - - pub fn serde_path(&self) -> Cow { - self.custom_serde_path() - .map_or_else(|| Cow::Owned(parse_quote!(_serde)), Cow::Borrowed) - } - - /// Error message generated when type can't be deserialized. - /// If `None`, default message will be used - pub fn expecting(&self) -> Option<&str> { - self.expecting.as_ref().map(String::as_ref) - } -} - -fn decide_tag( - cx: &Ctxt, - item: &syn::DeriveInput, - untagged: BoolAttr, - internal_tag: Attr, - content: Attr, -) -> TagType { - match ( - untagged.0.get_with_tokens(), - internal_tag.get_with_tokens(), - content.get_with_tokens(), - ) { - (None, None, None) => TagType::External, - (Some(_), None, None) => TagType::None, - (None, Some((_, tag)), None) => { - // Check that there are no tuple variants. 
- if let syn::Data::Enum(data) = &item.data { - for variant in &data.variants { - match &variant.fields { - syn::Fields::Named(_) | syn::Fields::Unit => {} - syn::Fields::Unnamed(fields) => { - if fields.unnamed.len() != 1 { - cx.error_spanned_by( - variant, - "#[serde(tag = \"...\")] cannot be used with tuple variants", - ); - break; - } - } - } - } - } - TagType::Internal { tag } - } - (Some((untagged_tokens, _)), Some((tag_tokens, _)), None) => { - cx.error_spanned_by( - untagged_tokens, - "enum cannot be both untagged and internally tagged", - ); - cx.error_spanned_by( - tag_tokens, - "enum cannot be both untagged and internally tagged", - ); - TagType::External // doesn't matter, will error - } - (None, None, Some((content_tokens, _))) => { - cx.error_spanned_by( - content_tokens, - "#[serde(tag = \"...\", content = \"...\")] must be used together", - ); - TagType::External - } - (Some((untagged_tokens, _)), None, Some((content_tokens, _))) => { - cx.error_spanned_by( - untagged_tokens, - "untagged enum cannot have #[serde(content = \"...\")]", - ); - cx.error_spanned_by( - content_tokens, - "untagged enum cannot have #[serde(content = \"...\")]", - ); - TagType::External - } - (None, Some((_, tag)), Some((_, content))) => TagType::Adjacent { tag, content }, - (Some((untagged_tokens, _)), Some((tag_tokens, _)), Some((content_tokens, _))) => { - cx.error_spanned_by( - untagged_tokens, - "untagged enum cannot have #[serde(tag = \"...\", content = \"...\")]", - ); - cx.error_spanned_by( - tag_tokens, - "untagged enum cannot have #[serde(tag = \"...\", content = \"...\")]", - ); - cx.error_spanned_by( - content_tokens, - "untagged enum cannot have #[serde(tag = \"...\", content = \"...\")]", - ); - TagType::External - } - } -} - -fn decide_identifier( - cx: &Ctxt, - item: &syn::DeriveInput, - field_identifier: BoolAttr, - variant_identifier: BoolAttr, -) -> Identifier { - match ( - &item.data, - field_identifier.0.get_with_tokens(), - variant_identifier.0.get_with_tokens(), - ) { - (_, None, None) => Identifier::No, - (_, Some((field_identifier_tokens, _)), Some((variant_identifier_tokens, _))) => { - cx.error_spanned_by( - field_identifier_tokens, - "#[serde(field_identifier)] and #[serde(variant_identifier)] cannot both be set", - ); - cx.error_spanned_by( - variant_identifier_tokens, - "#[serde(field_identifier)] and #[serde(variant_identifier)] cannot both be set", - ); - Identifier::No - } - (syn::Data::Enum(_), Some(_), None) => Identifier::Field, - (syn::Data::Enum(_), None, Some(_)) => Identifier::Variant, - (syn::Data::Struct(syn::DataStruct { struct_token, .. }), Some(_), None) => { - cx.error_spanned_by( - struct_token, - "#[serde(field_identifier)] can only be used on an enum", - ); - Identifier::No - } - (syn::Data::Union(syn::DataUnion { union_token, .. }), Some(_), None) => { - cx.error_spanned_by( - union_token, - "#[serde(field_identifier)] can only be used on an enum", - ); - Identifier::No - } - (syn::Data::Struct(syn::DataStruct { struct_token, .. }), None, Some(_)) => { - cx.error_spanned_by( - struct_token, - "#[serde(variant_identifier)] can only be used on an enum", - ); - Identifier::No - } - (syn::Data::Union(syn::DataUnion { union_token, .. 
}), None, Some(_)) => { - cx.error_spanned_by( - union_token, - "#[serde(variant_identifier)] can only be used on an enum", - ); - Identifier::No - } - } -} - -/// Represents variant attribute information -pub struct Variant { - name: Name, - rename_all_rules: RenameAllRules, - ser_bound: Option>, - de_bound: Option>, - skip_deserializing: bool, - skip_serializing: bool, - other: bool, - serialize_with: Option, - deserialize_with: Option, - borrow: Option, -} - -impl Variant { - pub fn from_ast(cx: &Ctxt, variant: &syn::Variant) -> Self { - let mut ser_name = Attr::none(cx, RENAME); - let mut de_name = Attr::none(cx, RENAME); - let mut de_aliases = VecAttr::none(cx, RENAME); - let mut skip_deserializing = BoolAttr::none(cx, SKIP_DESERIALIZING); - let mut skip_serializing = BoolAttr::none(cx, SKIP_SERIALIZING); - let mut rename_all_ser_rule = Attr::none(cx, RENAME_ALL); - let mut rename_all_de_rule = Attr::none(cx, RENAME_ALL); - let mut ser_bound = Attr::none(cx, BOUND); - let mut de_bound = Attr::none(cx, BOUND); - let mut other = BoolAttr::none(cx, OTHER); - let mut serialize_with = Attr::none(cx, SERIALIZE_WITH); - let mut deserialize_with = Attr::none(cx, DESERIALIZE_WITH); - let mut borrow = Attr::none(cx, BORROW); - - for meta_item in variant - .attrs - .iter() - .flat_map(|attr| get_serde_meta_items(cx, attr)) - .flatten() - { - match &meta_item { - // Parse `#[serde(rename = "foo")]` - Meta(NameValue(m)) if m.path == RENAME => { - if let Ok(s) = get_lit_str(cx, RENAME, &m.lit) { - ser_name.set(&m.path, s.value()); - de_name.set_if_none(s.value()); - de_aliases.insert(&m.path, s.value()); - } - } - - // Parse `#[serde(rename(serialize = "foo", deserialize = "bar"))]` - Meta(List(m)) if m.path == RENAME => { - if let Ok((ser, de)) = get_multiple_renames(cx, &m.nested) { - ser_name.set_opt(&m.path, ser.map(syn::LitStr::value)); - for de_value in de { - de_name.set_if_none(de_value.value()); - de_aliases.insert(&m.path, de_value.value()); - } - } - } - - // Parse `#[serde(alias = "foo")]` - Meta(NameValue(m)) if m.path == ALIAS => { - if let Ok(s) = get_lit_str(cx, ALIAS, &m.lit) { - de_aliases.insert(&m.path, s.value()); - } - } - - // Parse `#[serde(rename_all = "foo")]` - Meta(NameValue(m)) if m.path == RENAME_ALL => { - if let Ok(s) = get_lit_str(cx, RENAME_ALL, &m.lit) { - match RenameRule::from_str(&s.value()) { - Ok(rename_rule) => { - rename_all_ser_rule.set(&m.path, rename_rule); - rename_all_de_rule.set(&m.path, rename_rule); - } - Err(err) => cx.error_spanned_by(s, err), - } - } - } - - // Parse `#[serde(rename_all(serialize = "foo", deserialize = "bar"))]` - Meta(List(m)) if m.path == RENAME_ALL => { - if let Ok((ser, de)) = get_renames(cx, &m.nested) { - if let Some(ser) = ser { - match RenameRule::from_str(&ser.value()) { - Ok(rename_rule) => rename_all_ser_rule.set(&m.path, rename_rule), - Err(err) => cx.error_spanned_by(ser, err), - } - } - if let Some(de) = de { - match RenameRule::from_str(&de.value()) { - Ok(rename_rule) => rename_all_de_rule.set(&m.path, rename_rule), - Err(err) => cx.error_spanned_by(de, err), - } - } - } - } - - // Parse `#[serde(skip)]` - Meta(Path(word)) if word == SKIP => { - skip_serializing.set_true(word); - skip_deserializing.set_true(word); - } - - // Parse `#[serde(skip_deserializing)]` - Meta(Path(word)) if word == SKIP_DESERIALIZING => { - skip_deserializing.set_true(word); - } - - // Parse `#[serde(skip_serializing)]` - Meta(Path(word)) if word == SKIP_SERIALIZING => { - skip_serializing.set_true(word); - } - - // Parse 
`#[serde(other)]` - Meta(Path(word)) if word == OTHER => { - other.set_true(word); - } - - // Parse `#[serde(bound = "T: SomeBound")]` - Meta(NameValue(m)) if m.path == BOUND => { - if let Ok(where_predicates) = parse_lit_into_where(cx, BOUND, BOUND, &m.lit) { - ser_bound.set(&m.path, where_predicates.clone()); - de_bound.set(&m.path, where_predicates); - } - } - - // Parse `#[serde(bound(serialize = "...", deserialize = "..."))]` - Meta(List(m)) if m.path == BOUND => { - if let Ok((ser, de)) = get_where_predicates(cx, &m.nested) { - ser_bound.set_opt(&m.path, ser); - de_bound.set_opt(&m.path, de); - } - } - - // Parse `#[serde(with = "...")]` - Meta(NameValue(m)) if m.path == WITH => { - if let Ok(path) = parse_lit_into_expr_path(cx, WITH, &m.lit) { - let mut ser_path = path.clone(); - ser_path - .path - .segments - .push(Ident::new("serialize", Span::call_site()).into()); - serialize_with.set(&m.path, ser_path); - let mut de_path = path; - de_path - .path - .segments - .push(Ident::new("deserialize", Span::call_site()).into()); - deserialize_with.set(&m.path, de_path); - } - } - - // Parse `#[serde(serialize_with = "...")]` - Meta(NameValue(m)) if m.path == SERIALIZE_WITH => { - if let Ok(path) = parse_lit_into_expr_path(cx, SERIALIZE_WITH, &m.lit) { - serialize_with.set(&m.path, path); - } - } - - // Parse `#[serde(deserialize_with = "...")]` - Meta(NameValue(m)) if m.path == DESERIALIZE_WITH => { - if let Ok(path) = parse_lit_into_expr_path(cx, DESERIALIZE_WITH, &m.lit) { - deserialize_with.set(&m.path, path); - } - } - - // Defer `#[serde(borrow)]` and `#[serde(borrow = "'a + 'b")]` - Meta(m) if m.path() == BORROW => match &variant.fields { - syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => { - borrow.set(m.path(), m.clone()); - } - _ => { - cx.error_spanned_by( - variant, - "#[serde(borrow)] may only be used on newtype variants", - ); - } - }, - - Meta(meta_item) => { - let path = meta_item - .path() - .into_token_stream() - .to_string() - .replace(' ', ""); - cx.error_spanned_by( - meta_item.path(), - format!("unknown serde variant attribute `{}`", path), - ); - } - - Lit(lit) => { - cx.error_spanned_by(lit, "unexpected literal in serde variant attribute"); - } - } - } - - Variant { - name: Name::from_attrs(unraw(&variant.ident), ser_name, de_name, Some(de_aliases)), - rename_all_rules: RenameAllRules { - serialize: rename_all_ser_rule.get().unwrap_or(RenameRule::None), - deserialize: rename_all_de_rule.get().unwrap_or(RenameRule::None), - }, - ser_bound: ser_bound.get(), - de_bound: de_bound.get(), - skip_deserializing: skip_deserializing.get(), - skip_serializing: skip_serializing.get(), - other: other.get(), - serialize_with: serialize_with.get(), - deserialize_with: deserialize_with.get(), - borrow: borrow.get(), - } - } - - pub fn name(&self) -> &Name { - &self.name - } - - pub fn aliases(&self) -> Vec { - self.name.deserialize_aliases() - } - - pub fn rename_by_rules(&mut self, rules: &RenameAllRules) { - if !self.name.serialize_renamed { - self.name.serialize = rules.serialize.apply_to_variant(&self.name.serialize); - } - if !self.name.deserialize_renamed { - self.name.deserialize = rules.deserialize.apply_to_variant(&self.name.deserialize); - } - } - - pub fn rename_all_rules(&self) -> &RenameAllRules { - &self.rename_all_rules - } - - pub fn ser_bound(&self) -> Option<&[syn::WherePredicate]> { - self.ser_bound.as_ref().map(|vec| &vec[..]) - } - - pub fn de_bound(&self) -> Option<&[syn::WherePredicate]> { - self.de_bound.as_ref().map(|vec| &vec[..]) - } - - pub 
fn skip_deserializing(&self) -> bool { - self.skip_deserializing - } - - pub fn skip_serializing(&self) -> bool { - self.skip_serializing - } - - pub fn other(&self) -> bool { - self.other - } - - pub fn serialize_with(&self) -> Option<&syn::ExprPath> { - self.serialize_with.as_ref() - } - - pub fn deserialize_with(&self) -> Option<&syn::ExprPath> { - self.deserialize_with.as_ref() - } -} - -/// Represents field attribute information -pub struct Field { - name: Name, - skip_serializing: bool, - skip_deserializing: bool, - skip_serializing_if: Option, - default: Default, - serialize_with: Option, - deserialize_with: Option, - ser_bound: Option>, - de_bound: Option>, - borrowed_lifetimes: BTreeSet, - getter: Option, - flatten: bool, - transparent: bool, -} - -/// Represents the default to use for a field when deserializing. -pub enum Default { - /// Field must always be specified because it does not have a default. - None, - /// The default is given by `std::default::Default::default()`. - Default, - /// The default is given by this function. - Path(syn::ExprPath), -} - -impl Default { - pub fn is_none(&self) -> bool { - match self { - Default::None => true, - Default::Default | Default::Path(_) => false, - } - } -} - -impl Field { - /// Extract out the `#[serde(...)]` attributes from a struct field. - pub fn from_ast( - cx: &Ctxt, - index: usize, - field: &syn::Field, - attrs: Option<&Variant>, - container_default: &Default, - ) -> Self { - let mut ser_name = Attr::none(cx, RENAME); - let mut de_name = Attr::none(cx, RENAME); - let mut de_aliases = VecAttr::none(cx, RENAME); - let mut skip_serializing = BoolAttr::none(cx, SKIP_SERIALIZING); - let mut skip_deserializing = BoolAttr::none(cx, SKIP_DESERIALIZING); - let mut skip_serializing_if = Attr::none(cx, SKIP_SERIALIZING_IF); - let mut default = Attr::none(cx, DEFAULT); - let mut serialize_with = Attr::none(cx, SERIALIZE_WITH); - let mut deserialize_with = Attr::none(cx, DESERIALIZE_WITH); - let mut ser_bound = Attr::none(cx, BOUND); - let mut de_bound = Attr::none(cx, BOUND); - let mut borrowed_lifetimes = Attr::none(cx, BORROW); - let mut getter = Attr::none(cx, GETTER); - let mut flatten = BoolAttr::none(cx, FLATTEN); - - let ident = match &field.ident { - Some(ident) => unraw(ident), - None => index.to_string(), - }; - - let variant_borrow = attrs - .and_then(|variant| variant.borrow.as_ref()) - .map(|borrow| Meta(borrow.clone())); - - for meta_item in field - .attrs - .iter() - .flat_map(|attr| get_serde_meta_items(cx, attr)) - .flatten() - .chain(variant_borrow) - { - match &meta_item { - // Parse `#[serde(rename = "foo")]` - Meta(NameValue(m)) if m.path == RENAME => { - if let Ok(s) = get_lit_str(cx, RENAME, &m.lit) { - ser_name.set(&m.path, s.value()); - de_name.set_if_none(s.value()); - de_aliases.insert(&m.path, s.value()); - } - } - - // Parse `#[serde(rename(serialize = "foo", deserialize = "bar"))]` - Meta(List(m)) if m.path == RENAME => { - if let Ok((ser, de)) = get_multiple_renames(cx, &m.nested) { - ser_name.set_opt(&m.path, ser.map(syn::LitStr::value)); - for de_value in de { - de_name.set_if_none(de_value.value()); - de_aliases.insert(&m.path, de_value.value()); - } - } - } - - // Parse `#[serde(alias = "foo")]` - Meta(NameValue(m)) if m.path == ALIAS => { - if let Ok(s) = get_lit_str(cx, ALIAS, &m.lit) { - de_aliases.insert(&m.path, s.value()); - } - } - - // Parse `#[serde(default)]` - Meta(Path(word)) if word == DEFAULT => { - default.set(word, Default::Default); - } - - // Parse `#[serde(default = "...")]` - 
Meta(NameValue(m)) if m.path == DEFAULT => { - if let Ok(path) = parse_lit_into_expr_path(cx, DEFAULT, &m.lit) { - default.set(&m.path, Default::Path(path)); - } - } - - // Parse `#[serde(skip_serializing)]` - Meta(Path(word)) if word == SKIP_SERIALIZING => { - skip_serializing.set_true(word); - } - - // Parse `#[serde(skip_deserializing)]` - Meta(Path(word)) if word == SKIP_DESERIALIZING => { - skip_deserializing.set_true(word); - } - - // Parse `#[serde(skip)]` - Meta(Path(word)) if word == SKIP => { - skip_serializing.set_true(word); - skip_deserializing.set_true(word); - } - - // Parse `#[serde(skip_serializing_if = "...")]` - Meta(NameValue(m)) if m.path == SKIP_SERIALIZING_IF => { - if let Ok(path) = parse_lit_into_expr_path(cx, SKIP_SERIALIZING_IF, &m.lit) { - skip_serializing_if.set(&m.path, path); - } - } - - // Parse `#[serde(serialize_with = "...")]` - Meta(NameValue(m)) if m.path == SERIALIZE_WITH => { - if let Ok(path) = parse_lit_into_expr_path(cx, SERIALIZE_WITH, &m.lit) { - serialize_with.set(&m.path, path); - } - } - - // Parse `#[serde(deserialize_with = "...")]` - Meta(NameValue(m)) if m.path == DESERIALIZE_WITH => { - if let Ok(path) = parse_lit_into_expr_path(cx, DESERIALIZE_WITH, &m.lit) { - deserialize_with.set(&m.path, path); - } - } - - // Parse `#[serde(with = "...")]` - Meta(NameValue(m)) if m.path == WITH => { - if let Ok(path) = parse_lit_into_expr_path(cx, WITH, &m.lit) { - let mut ser_path = path.clone(); - ser_path - .path - .segments - .push(Ident::new("serialize", Span::call_site()).into()); - serialize_with.set(&m.path, ser_path); - let mut de_path = path; - de_path - .path - .segments - .push(Ident::new("deserialize", Span::call_site()).into()); - deserialize_with.set(&m.path, de_path); - } - } - - // Parse `#[serde(bound = "T: SomeBound")]` - Meta(NameValue(m)) if m.path == BOUND => { - if let Ok(where_predicates) = parse_lit_into_where(cx, BOUND, BOUND, &m.lit) { - ser_bound.set(&m.path, where_predicates.clone()); - de_bound.set(&m.path, where_predicates); - } - } - - // Parse `#[serde(bound(serialize = "...", deserialize = "..."))]` - Meta(List(m)) if m.path == BOUND => { - if let Ok((ser, de)) = get_where_predicates(cx, &m.nested) { - ser_bound.set_opt(&m.path, ser); - de_bound.set_opt(&m.path, de); - } - } - - // Parse `#[serde(borrow)]` - Meta(Path(word)) if word == BORROW => { - if let Ok(borrowable) = borrowable_lifetimes(cx, &ident, field) { - borrowed_lifetimes.set(word, borrowable); - } - } - - // Parse `#[serde(borrow = "'a + 'b")]` - Meta(NameValue(m)) if m.path == BORROW => { - if let Ok(lifetimes) = parse_lit_into_lifetimes(cx, BORROW, &m.lit) { - if let Ok(borrowable) = borrowable_lifetimes(cx, &ident, field) { - for lifetime in &lifetimes { - if !borrowable.contains(lifetime) { - cx.error_spanned_by( - field, - format!( - "field `{}` does not have lifetime {}", - ident, lifetime - ), - ); - } - } - borrowed_lifetimes.set(&m.path, lifetimes); - } - } - } - - // Parse `#[serde(getter = "...")]` - Meta(NameValue(m)) if m.path == GETTER => { - if let Ok(path) = parse_lit_into_expr_path(cx, GETTER, &m.lit) { - getter.set(&m.path, path); - } - } - - // Parse `#[serde(flatten)]` - Meta(Path(word)) if word == FLATTEN => { - flatten.set_true(word); - } - - Meta(meta_item) => { - let path = meta_item - .path() - .into_token_stream() - .to_string() - .replace(' ', ""); - cx.error_spanned_by( - meta_item.path(), - format!("unknown serde field attribute `{}`", path), - ); - } - - Lit(lit) => { - cx.error_spanned_by(lit, "unexpected literal in serde 
field attribute"); - } - } - } - - // Is skip_deserializing, initialize the field to Default::default() unless a - // different default is specified by `#[serde(default = "...")]` on - // ourselves or our container (e.g. the struct we are in). - if let Default::None = *container_default { - if skip_deserializing.0.value.is_some() { - default.set_if_none(Default::Default); - } - } - - let mut borrowed_lifetimes = borrowed_lifetimes.get().unwrap_or_default(); - if !borrowed_lifetimes.is_empty() { - // Cow and Cow<[u8]> never borrow by default: - // - // impl<'de, 'a, T: ?Sized> Deserialize<'de> for Cow<'a, T> - // - // A #[serde(borrow)] attribute enables borrowing that corresponds - // roughly to these impls: - // - // impl<'de: 'a, 'a> Deserialize<'de> for Cow<'a, str> - // impl<'de: 'a, 'a> Deserialize<'de> for Cow<'a, [u8]> - if is_cow(&field.ty, is_str) { - let mut path = syn::Path { - leading_colon: None, - segments: Punctuated::new(), - }; - let span = Span::call_site(); - path.segments.push(Ident::new("_serde", span).into()); - path.segments.push(Ident::new("__private", span).into()); - path.segments.push(Ident::new("de", span).into()); - path.segments - .push(Ident::new("borrow_cow_str", span).into()); - let expr = syn::ExprPath { - attrs: Vec::new(), - qself: None, - path, - }; - deserialize_with.set_if_none(expr); - } else if is_cow(&field.ty, is_slice_u8) { - let mut path = syn::Path { - leading_colon: None, - segments: Punctuated::new(), - }; - let span = Span::call_site(); - path.segments.push(Ident::new("_serde", span).into()); - path.segments.push(Ident::new("__private", span).into()); - path.segments.push(Ident::new("de", span).into()); - path.segments - .push(Ident::new("borrow_cow_bytes", span).into()); - let expr = syn::ExprPath { - attrs: Vec::new(), - qself: None, - path, - }; - deserialize_with.set_if_none(expr); - } - } else if is_implicitly_borrowed(&field.ty) { - // Types &str and &[u8] are always implicitly borrowed. No need for - // a #[serde(borrow)]. 
- collect_lifetimes(&field.ty, &mut borrowed_lifetimes); - } - - Field { - name: Name::from_attrs(ident, ser_name, de_name, Some(de_aliases)), - skip_serializing: skip_serializing.get(), - skip_deserializing: skip_deserializing.get(), - skip_serializing_if: skip_serializing_if.get(), - default: default.get().unwrap_or(Default::None), - serialize_with: serialize_with.get(), - deserialize_with: deserialize_with.get(), - ser_bound: ser_bound.get(), - de_bound: de_bound.get(), - borrowed_lifetimes, - getter: getter.get(), - flatten: flatten.get(), - transparent: false, - } - } - - pub fn name(&self) -> &Name { - &self.name - } - - pub fn aliases(&self) -> Vec { - self.name.deserialize_aliases() - } - - pub fn rename_by_rules(&mut self, rules: &RenameAllRules) { - if !self.name.serialize_renamed { - self.name.serialize = rules.serialize.apply_to_field(&self.name.serialize); - } - if !self.name.deserialize_renamed { - self.name.deserialize = rules.deserialize.apply_to_field(&self.name.deserialize); - } - } - - pub fn skip_serializing(&self) -> bool { - self.skip_serializing - } - - pub fn skip_deserializing(&self) -> bool { - self.skip_deserializing - } - - pub fn skip_serializing_if(&self) -> Option<&syn::ExprPath> { - self.skip_serializing_if.as_ref() - } - - pub fn default(&self) -> &Default { - &self.default - } - - pub fn serialize_with(&self) -> Option<&syn::ExprPath> { - self.serialize_with.as_ref() - } - - pub fn deserialize_with(&self) -> Option<&syn::ExprPath> { - self.deserialize_with.as_ref() - } - - pub fn ser_bound(&self) -> Option<&[syn::WherePredicate]> { - self.ser_bound.as_ref().map(|vec| &vec[..]) - } - - pub fn de_bound(&self) -> Option<&[syn::WherePredicate]> { - self.de_bound.as_ref().map(|vec| &vec[..]) - } - - pub fn borrowed_lifetimes(&self) -> &BTreeSet { - &self.borrowed_lifetimes - } - - pub fn getter(&self) -> Option<&syn::ExprPath> { - self.getter.as_ref() - } - - pub fn flatten(&self) -> bool { - self.flatten - } - - pub fn transparent(&self) -> bool { - self.transparent - } - - pub fn mark_transparent(&mut self) { - self.transparent = true; - } -} - -type SerAndDe = (Option, Option); - -fn get_ser_and_de<'a, 'b, T, F>( - cx: &'b Ctxt, - attr_name: Symbol, - metas: &'a Punctuated, - f: F, -) -> Result<(VecAttr<'b, T>, VecAttr<'b, T>), ()> -where - T: 'a, - F: Fn(&Ctxt, Symbol, Symbol, &'a syn::Lit) -> Result, -{ - let mut ser_meta = VecAttr::none(cx, attr_name); - let mut de_meta = VecAttr::none(cx, attr_name); - - for meta in metas { - match meta { - Meta(NameValue(meta)) if meta.path == SERIALIZE => { - if let Ok(v) = f(cx, attr_name, SERIALIZE, &meta.lit) { - ser_meta.insert(&meta.path, v); - } - } - - Meta(NameValue(meta)) if meta.path == DESERIALIZE => { - if let Ok(v) = f(cx, attr_name, DESERIALIZE, &meta.lit) { - de_meta.insert(&meta.path, v); - } - } - - _ => { - cx.error_spanned_by( - meta, - format!( - "malformed {0} attribute, expected `{0}(serialize = ..., deserialize = ...)`", - attr_name - ), - ); - return Err(()); - } - } - } - - Ok((ser_meta, de_meta)) -} - -fn get_renames<'a>( - cx: &Ctxt, - items: &'a Punctuated, -) -> Result, ()> { - let (ser, de) = get_ser_and_de(cx, RENAME, items, get_lit_str2)?; - Ok((ser.at_most_one()?, de.at_most_one()?)) -} - -fn get_multiple_renames<'a>( - cx: &Ctxt, - items: &'a Punctuated, -) -> Result<(Option<&'a syn::LitStr>, Vec<&'a syn::LitStr>), ()> { - let (ser, de) = get_ser_and_de(cx, RENAME, items, get_lit_str2)?; - Ok((ser.at_most_one()?, de.get())) -} - -fn get_where_predicates( - cx: &Ctxt, - items: 
&Punctuated, -) -> Result>, ()> { - let (ser, de) = get_ser_and_de(cx, BOUND, items, parse_lit_into_where)?; - Ok((ser.at_most_one()?, de.at_most_one()?)) -} - -pub fn get_serde_meta_items(cx: &Ctxt, attr: &syn::Attribute) -> Result, ()> { - if attr.path != SERDE { - return Ok(Vec::new()); - } - - match attr.parse_meta() { - Ok(List(meta)) => Ok(meta.nested.into_iter().collect()), - Ok(other) => { - cx.error_spanned_by(other, "expected #[serde(...)]"); - Err(()) - } - Err(err) => { - cx.syn_error(err); - Err(()) - } - } -} - -fn get_lit_str<'a>(cx: &Ctxt, attr_name: Symbol, lit: &'a syn::Lit) -> Result<&'a syn::LitStr, ()> { - get_lit_str2(cx, attr_name, attr_name, lit) -} - -fn get_lit_str2<'a>( - cx: &Ctxt, - attr_name: Symbol, - meta_item_name: Symbol, - lit: &'a syn::Lit, -) -> Result<&'a syn::LitStr, ()> { - if let syn::Lit::Str(lit) = lit { - Ok(lit) - } else { - cx.error_spanned_by( - lit, - format!( - "expected serde {} attribute to be a string: `{} = \"...\"`", - attr_name, meta_item_name - ), - ); - Err(()) - } -} - -fn parse_lit_into_path(cx: &Ctxt, attr_name: Symbol, lit: &syn::Lit) -> Result { - let string = get_lit_str(cx, attr_name, lit)?; - parse_lit_str(string).map_err(|_| { - cx.error_spanned_by(lit, format!("failed to parse path: {:?}", string.value())); - }) -} - -fn parse_lit_into_expr_path( - cx: &Ctxt, - attr_name: Symbol, - lit: &syn::Lit, -) -> Result { - let string = get_lit_str(cx, attr_name, lit)?; - parse_lit_str(string).map_err(|_| { - cx.error_spanned_by(lit, format!("failed to parse path: {:?}", string.value())); - }) -} - -fn parse_lit_into_where( - cx: &Ctxt, - attr_name: Symbol, - meta_item_name: Symbol, - lit: &syn::Lit, -) -> Result, ()> { - let string = get_lit_str2(cx, attr_name, meta_item_name, lit)?; - if string.value().is_empty() { - return Ok(Vec::new()); - } - - let where_string = syn::LitStr::new(&format!("where {}", string.value()), string.span()); - - parse_lit_str::(&where_string) - .map(|wh| wh.predicates.into_iter().collect()) - .map_err(|err| cx.error_spanned_by(lit, err)) -} - -fn parse_lit_into_ty(cx: &Ctxt, attr_name: Symbol, lit: &syn::Lit) -> Result { - let string = get_lit_str(cx, attr_name, lit)?; - - parse_lit_str(string).map_err(|_| { - cx.error_spanned_by( - lit, - format!("failed to parse type: {} = {:?}", attr_name, string.value()), - ); - }) -} - -// Parses a string literal like "'a + 'b + 'c" containing a nonempty list of -// lifetimes separated by `+`. 
-fn parse_lit_into_lifetimes( - cx: &Ctxt, - attr_name: Symbol, - lit: &syn::Lit, -) -> Result, ()> { - let string = get_lit_str(cx, attr_name, lit)?; - if string.value().is_empty() { - cx.error_spanned_by(lit, "at least one lifetime must be borrowed"); - return Err(()); - } - - struct BorrowedLifetimes(Punctuated); - - impl Parse for BorrowedLifetimes { - fn parse(input: ParseStream) -> parse::Result { - Punctuated::parse_separated_nonempty(input).map(BorrowedLifetimes) - } - } - - if let Ok(BorrowedLifetimes(lifetimes)) = parse_lit_str(string) { - let mut set = BTreeSet::new(); - for lifetime in lifetimes { - if !set.insert(lifetime.clone()) { - cx.error_spanned_by(lit, format!("duplicate borrowed lifetime `{}`", lifetime)); - } - } - return Ok(set); - } - - cx.error_spanned_by( - lit, - format!("failed to parse borrowed lifetimes: {:?}", string.value()), - ); - Err(()) -} - -fn is_implicitly_borrowed(ty: &syn::Type) -> bool { - is_implicitly_borrowed_reference(ty) || is_option(ty, is_implicitly_borrowed_reference) -} - -fn is_implicitly_borrowed_reference(ty: &syn::Type) -> bool { - is_reference(ty, is_str) || is_reference(ty, is_slice_u8) -} - -// Whether the type looks like it might be `std::borrow::Cow` where elem="T". -// This can have false negatives and false positives. -// -// False negative: -// -// use std::borrow::Cow as Pig; -// -// #[derive(Deserialize)] -// struct S<'a> { -// #[serde(borrow)] -// pig: Pig<'a, str>, -// } -// -// False positive: -// -// type str = [i16]; -// -// #[derive(Deserialize)] -// struct S<'a> { -// #[serde(borrow)] -// cow: Cow<'a, str>, -// } -fn is_cow(ty: &syn::Type, elem: fn(&syn::Type) -> bool) -> bool { - let path = match ungroup(ty) { - syn::Type::Path(ty) => &ty.path, - _ => { - return false; - } - }; - let seg = match path.segments.last() { - Some(seg) => seg, - None => { - return false; - } - }; - let args = match &seg.arguments { - syn::PathArguments::AngleBracketed(bracketed) => &bracketed.args, - _ => { - return false; - } - }; - seg.ident == "Cow" - && args.len() == 2 - && match (&args[0], &args[1]) { - (syn::GenericArgument::Lifetime(_), syn::GenericArgument::Type(arg)) => elem(arg), - _ => false, - } -} - -fn is_option(ty: &syn::Type, elem: fn(&syn::Type) -> bool) -> bool { - let path = match ungroup(ty) { - syn::Type::Path(ty) => &ty.path, - _ => { - return false; - } - }; - let seg = match path.segments.last() { - Some(seg) => seg, - None => { - return false; - } - }; - let args = match &seg.arguments { - syn::PathArguments::AngleBracketed(bracketed) => &bracketed.args, - _ => { - return false; - } - }; - seg.ident == "Option" - && args.len() == 1 - && match &args[0] { - syn::GenericArgument::Type(arg) => elem(arg), - _ => false, - } -} - -// Whether the type looks like it might be `&T` where elem="T". This can have -// false negatives and false positives. 
-// -// False negative: -// -// type Yarn = str; -// -// #[derive(Deserialize)] -// struct S<'a> { -// r: &'a Yarn, -// } -// -// False positive: -// -// type str = [i16]; -// -// #[derive(Deserialize)] -// struct S<'a> { -// r: &'a str, -// } -fn is_reference(ty: &syn::Type, elem: fn(&syn::Type) -> bool) -> bool { - match ungroup(ty) { - syn::Type::Reference(ty) => ty.mutability.is_none() && elem(&ty.elem), - _ => false, - } -} - -fn is_str(ty: &syn::Type) -> bool { - is_primitive_type(ty, "str") -} - -fn is_slice_u8(ty: &syn::Type) -> bool { - match ungroup(ty) { - syn::Type::Slice(ty) => is_primitive_type(&ty.elem, "u8"), - _ => false, - } -} - -fn is_primitive_type(ty: &syn::Type, primitive: &str) -> bool { - match ungroup(ty) { - syn::Type::Path(ty) => ty.qself.is_none() && is_primitive_path(&ty.path, primitive), - _ => false, - } -} - -fn is_primitive_path(path: &syn::Path, primitive: &str) -> bool { - path.leading_colon.is_none() - && path.segments.len() == 1 - && path.segments[0].ident == primitive - && path.segments[0].arguments.is_empty() -} - -// All lifetimes that this type could borrow from a Deserializer. -// -// For example a type `S<'a, 'b>` could borrow `'a` and `'b`. On the other hand -// a type `for<'a> fn(&'a str)` could not borrow `'a` from the Deserializer. -// -// This is used when there is an explicit or implicit `#[serde(borrow)]` -// attribute on the field so there must be at least one borrowable lifetime. -fn borrowable_lifetimes( - cx: &Ctxt, - name: &str, - field: &syn::Field, -) -> Result, ()> { - let mut lifetimes = BTreeSet::new(); - collect_lifetimes(&field.ty, &mut lifetimes); - if lifetimes.is_empty() { - cx.error_spanned_by( - field, - format!("field `{}` has no lifetimes to borrow", name), - ); - Err(()) - } else { - Ok(lifetimes) - } -} - -fn collect_lifetimes(ty: &syn::Type, out: &mut BTreeSet) { - match ty { - syn::Type::Slice(ty) => { - collect_lifetimes(&ty.elem, out); - } - syn::Type::Array(ty) => { - collect_lifetimes(&ty.elem, out); - } - syn::Type::Ptr(ty) => { - collect_lifetimes(&ty.elem, out); - } - syn::Type::Reference(ty) => { - out.extend(ty.lifetime.iter().cloned()); - collect_lifetimes(&ty.elem, out); - } - syn::Type::Tuple(ty) => { - for elem in &ty.elems { - collect_lifetimes(elem, out); - } - } - syn::Type::Path(ty) => { - if let Some(qself) = &ty.qself { - collect_lifetimes(&qself.ty, out); - } - for seg in &ty.path.segments { - if let syn::PathArguments::AngleBracketed(bracketed) = &seg.arguments { - for arg in &bracketed.args { - match arg { - syn::GenericArgument::Lifetime(lifetime) => { - out.insert(lifetime.clone()); - } - syn::GenericArgument::Type(ty) => { - collect_lifetimes(ty, out); - } - syn::GenericArgument::Binding(binding) => { - collect_lifetimes(&binding.ty, out); - } - syn::GenericArgument::Constraint(_) - | syn::GenericArgument::Const(_) => {} - } - } - } - } - } - syn::Type::Paren(ty) => { - collect_lifetimes(&ty.elem, out); - } - syn::Type::Group(ty) => { - collect_lifetimes(&ty.elem, out); - } - syn::Type::Macro(ty) => { - collect_lifetimes_from_tokens(ty.mac.tokens.clone(), out); - } - syn::Type::BareFn(_) - | syn::Type::Never(_) - | syn::Type::TraitObject(_) - | syn::Type::ImplTrait(_) - | syn::Type::Infer(_) - | syn::Type::Verbatim(_) => {} - - #[cfg(test)] - syn::Type::__TestExhaustive(_) => unimplemented!(), - #[cfg(not(test))] - _ => {} - } -} - -fn collect_lifetimes_from_tokens(tokens: TokenStream, out: &mut BTreeSet) { - let mut iter = tokens.into_iter(); - while let Some(tt) = iter.next() { - match &tt 
{ - TokenTree::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => { - if let Some(TokenTree::Ident(ident)) = iter.next() { - out.insert(syn::Lifetime { - apostrophe: op.span(), - ident, - }); - } - } - TokenTree::Group(group) => { - let tokens = group.stream(); - collect_lifetimes_from_tokens(tokens, out); - } - _ => {} - } - } -} - -fn parse_lit_str(s: &syn::LitStr) -> parse::Result -where - T: Parse, -{ - let tokens = spanned_tokens(s)?; - syn::parse2(tokens) -} - -fn spanned_tokens(s: &syn::LitStr) -> parse::Result { - let stream = syn::parse_str(&s.value())?; - Ok(respan(stream, s.span())) -} diff --git a/vendor/serde_derive/src/internals/case.rs b/vendor/serde_derive/src/internals/case.rs deleted file mode 100644 index 55450516..00000000 --- a/vendor/serde_derive/src/internals/case.rs +++ /dev/null @@ -1,197 +0,0 @@ -//! Code to convert the Rust-styled field/variant (e.g. `my_field`, `MyType`) to the -//! case of the source (e.g. `my-field`, `MY_FIELD`). - -// See https://users.rust-lang.org/t/psa-dealing-with-warning-unused-import-std-ascii-asciiext-in-today-s-nightly/13726 -#[allow(deprecated, unused_imports)] -use std::ascii::AsciiExt; - -use std::fmt::{self, Debug, Display}; - -use self::RenameRule::*; - -/// The different possible ways to change case of fields in a struct, or variants in an enum. -#[derive(Copy, Clone, PartialEq)] -pub enum RenameRule { - /// Don't apply a default rename rule. - None, - /// Rename direct children to "lowercase" style. - LowerCase, - /// Rename direct children to "UPPERCASE" style. - UpperCase, - /// Rename direct children to "PascalCase" style, as typically used for - /// enum variants. - PascalCase, - /// Rename direct children to "camelCase" style. - CamelCase, - /// Rename direct children to "snake_case" style, as commonly used for - /// fields. - SnakeCase, - /// Rename direct children to "SCREAMING_SNAKE_CASE" style, as commonly - /// used for constants. - ScreamingSnakeCase, - /// Rename direct children to "kebab-case" style. - KebabCase, - /// Rename direct children to "SCREAMING-KEBAB-CASE" style. - ScreamingKebabCase, -} - -static RENAME_RULES: &[(&str, RenameRule)] = &[ - ("lowercase", LowerCase), - ("UPPERCASE", UpperCase), - ("PascalCase", PascalCase), - ("camelCase", CamelCase), - ("snake_case", SnakeCase), - ("SCREAMING_SNAKE_CASE", ScreamingSnakeCase), - ("kebab-case", KebabCase), - ("SCREAMING-KEBAB-CASE", ScreamingKebabCase), -]; - -impl RenameRule { - pub fn from_str(rename_all_str: &str) -> Result { - for (name, rule) in RENAME_RULES { - if rename_all_str == *name { - return Ok(*rule); - } - } - Err(ParseError { - unknown: rename_all_str, - }) - } - - /// Apply a renaming rule to an enum variant, returning the version expected in the source. 
- pub fn apply_to_variant(&self, variant: &str) -> String { - match *self { - None | PascalCase => variant.to_owned(), - LowerCase => variant.to_ascii_lowercase(), - UpperCase => variant.to_ascii_uppercase(), - CamelCase => variant[..1].to_ascii_lowercase() + &variant[1..], - SnakeCase => { - let mut snake = String::new(); - for (i, ch) in variant.char_indices() { - if i > 0 && ch.is_uppercase() { - snake.push('_'); - } - snake.push(ch.to_ascii_lowercase()); - } - snake - } - ScreamingSnakeCase => SnakeCase.apply_to_variant(variant).to_ascii_uppercase(), - KebabCase => SnakeCase.apply_to_variant(variant).replace('_', "-"), - ScreamingKebabCase => ScreamingSnakeCase - .apply_to_variant(variant) - .replace('_', "-"), - } - } - - /// Apply a renaming rule to a struct field, returning the version expected in the source. - pub fn apply_to_field(&self, field: &str) -> String { - match *self { - None | LowerCase | SnakeCase => field.to_owned(), - UpperCase => field.to_ascii_uppercase(), - PascalCase => { - let mut pascal = String::new(); - let mut capitalize = true; - for ch in field.chars() { - if ch == '_' { - capitalize = true; - } else if capitalize { - pascal.push(ch.to_ascii_uppercase()); - capitalize = false; - } else { - pascal.push(ch); - } - } - pascal - } - CamelCase => { - let pascal = PascalCase.apply_to_field(field); - pascal[..1].to_ascii_lowercase() + &pascal[1..] - } - ScreamingSnakeCase => field.to_ascii_uppercase(), - KebabCase => field.replace('_', "-"), - ScreamingKebabCase => ScreamingSnakeCase.apply_to_field(field).replace('_', "-"), - } - } -} - -pub struct ParseError<'a> { - unknown: &'a str, -} - -impl<'a> Display for ParseError<'a> { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str("unknown rename rule `rename_all = ")?; - Debug::fmt(self.unknown, f)?; - f.write_str("`, expected one of ")?; - for (i, (name, _rule)) in RENAME_RULES.iter().enumerate() { - if i > 0 { - f.write_str(", ")?; - } - Debug::fmt(name, f)?; - } - Ok(()) - } -} - -#[test] -fn rename_variants() { - for &(original, lower, upper, camel, snake, screaming, kebab, screaming_kebab) in &[ - ( - "Outcome", "outcome", "OUTCOME", "outcome", "outcome", "OUTCOME", "outcome", "OUTCOME", - ), - ( - "VeryTasty", - "verytasty", - "VERYTASTY", - "veryTasty", - "very_tasty", - "VERY_TASTY", - "very-tasty", - "VERY-TASTY", - ), - ("A", "a", "A", "a", "a", "A", "a", "A"), - ("Z42", "z42", "Z42", "z42", "z42", "Z42", "z42", "Z42"), - ] { - assert_eq!(None.apply_to_variant(original), original); - assert_eq!(LowerCase.apply_to_variant(original), lower); - assert_eq!(UpperCase.apply_to_variant(original), upper); - assert_eq!(PascalCase.apply_to_variant(original), original); - assert_eq!(CamelCase.apply_to_variant(original), camel); - assert_eq!(SnakeCase.apply_to_variant(original), snake); - assert_eq!(ScreamingSnakeCase.apply_to_variant(original), screaming); - assert_eq!(KebabCase.apply_to_variant(original), kebab); - assert_eq!( - ScreamingKebabCase.apply_to_variant(original), - screaming_kebab - ); - } -} - -#[test] -fn rename_fields() { - for &(original, upper, pascal, camel, screaming, kebab, screaming_kebab) in &[ - ( - "outcome", "OUTCOME", "Outcome", "outcome", "OUTCOME", "outcome", "OUTCOME", - ), - ( - "very_tasty", - "VERY_TASTY", - "VeryTasty", - "veryTasty", - "VERY_TASTY", - "very-tasty", - "VERY-TASTY", - ), - ("a", "A", "A", "a", "A", "a", "A"), - ("z42", "Z42", "Z42", "z42", "Z42", "z42", "Z42"), - ] { - assert_eq!(None.apply_to_field(original), original); - 
assert_eq!(UpperCase.apply_to_field(original), upper); - assert_eq!(PascalCase.apply_to_field(original), pascal); - assert_eq!(CamelCase.apply_to_field(original), camel); - assert_eq!(SnakeCase.apply_to_field(original), original); - assert_eq!(ScreamingSnakeCase.apply_to_field(original), screaming); - assert_eq!(KebabCase.apply_to_field(original), kebab); - assert_eq!(ScreamingKebabCase.apply_to_field(original), screaming_kebab); - } -} diff --git a/vendor/serde_derive/src/internals/check.rs b/vendor/serde_derive/src/internals/check.rs deleted file mode 100644 index 0e2484a7..00000000 --- a/vendor/serde_derive/src/internals/check.rs +++ /dev/null @@ -1,420 +0,0 @@ -use internals::ast::{Container, Data, Field, Style}; -use internals::attr::{Identifier, TagType}; -use internals::{ungroup, Ctxt, Derive}; -use syn::{Member, Type}; - -/// Cross-cutting checks that require looking at more than a single attrs -/// object. Simpler checks should happen when parsing and building the attrs. -pub fn check(cx: &Ctxt, cont: &mut Container, derive: Derive) { - check_getter(cx, cont); - check_flatten(cx, cont); - check_identifier(cx, cont); - check_variant_skip_attrs(cx, cont); - check_internal_tag_field_name_conflict(cx, cont); - check_adjacent_tag_conflict(cx, cont); - check_transparent(cx, cont, derive); - check_from_and_try_from(cx, cont); -} - -/// Getters are only allowed inside structs (not enums) with the `remote` -/// attribute. -fn check_getter(cx: &Ctxt, cont: &Container) { - match cont.data { - Data::Enum(_) => { - if cont.data.has_getter() { - cx.error_spanned_by( - cont.original, - "#[serde(getter = \"...\")] is not allowed in an enum", - ); - } - } - Data::Struct(_, _) => { - if cont.data.has_getter() && cont.attrs.remote().is_none() { - cx.error_spanned_by( - cont.original, - "#[serde(getter = \"...\")] can only be used in structs that have #[serde(remote = \"...\")]", - ); - } - } - } -} - -/// Flattening has some restrictions we can test. -fn check_flatten(cx: &Ctxt, cont: &Container) { - match &cont.data { - Data::Enum(variants) => { - for variant in variants { - for field in &variant.fields { - check_flatten_field(cx, variant.style, field); - } - } - } - Data::Struct(style, fields) => { - for field in fields { - check_flatten_field(cx, *style, field); - } - } - } -} - -fn check_flatten_field(cx: &Ctxt, style: Style, field: &Field) { - if !field.attrs.flatten() { - return; - } - match style { - Style::Tuple => { - cx.error_spanned_by( - field.original, - "#[serde(flatten)] cannot be used on tuple structs", - ); - } - Style::Newtype => { - cx.error_spanned_by( - field.original, - "#[serde(flatten)] cannot be used on newtype structs", - ); - } - _ => {} - } -} - -/// The `other` attribute must be used at most once and it must be the last -/// variant of an enum. -/// -/// Inside a `variant_identifier` all variants must be unit variants. Inside a -/// `field_identifier` all but possibly one variant must be unit variants. The -/// last variant may be a newtype variant which is an implicit "other" case. -fn check_identifier(cx: &Ctxt, cont: &Container) { - let variants = match &cont.data { - Data::Enum(variants) => variants, - Data::Struct(_, _) => { - return; - } - }; - - for (i, variant) in variants.iter().enumerate() { - match ( - variant.style, - cont.attrs.identifier(), - variant.attrs.other(), - cont.attrs.tag(), - ) { - // The `other` attribute may not be used in a variant_identifier. 
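As editorial context for `check_getter` above (an illustration only, not part of this patch; the module and type names are hypothetical): getters are only accepted on structs that also carry `#[serde(remote = "...")]`, the remote-derive pattern sketched here.

```rust
use serde::Serialize;

// A hypothetical foreign type whose fields are private, so the remote derive
// has to read them through getter functions.
mod other_crate {
    pub struct Duration {
        secs: u64,
    }

    impl Duration {
        pub fn new(secs: u64) -> Self {
            Duration { secs }
        }

        pub fn secs(&self) -> u64 {
            self.secs
        }
    }
}

// Serialize-only remote definition: each private field is read through a
// getter, which is exactly the combination check_getter permits.
#[derive(Serialize)]
#[serde(remote = "other_crate::Duration")]
struct DurationDef {
    #[serde(getter = "other_crate::Duration::secs")]
    secs: u64,
}
```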
- (_, Identifier::Variant, true, _) => { - cx.error_spanned_by( - variant.original, - "#[serde(other)] may not be used on a variant identifier", - ); - } - - // Variant with `other` attribute cannot appear in untagged enum - (_, Identifier::No, true, &TagType::None) => { - cx.error_spanned_by( - variant.original, - "#[serde(other)] cannot appear on untagged enum", - ); - } - - // Variant with `other` attribute must be the last one. - (Style::Unit, Identifier::Field, true, _) | (Style::Unit, Identifier::No, true, _) => { - if i < variants.len() - 1 { - cx.error_spanned_by( - variant.original, - "#[serde(other)] must be on the last variant", - ); - } - } - - // Variant with `other` attribute must be a unit variant. - (_, Identifier::Field, true, _) | (_, Identifier::No, true, _) => { - cx.error_spanned_by( - variant.original, - "#[serde(other)] must be on a unit variant", - ); - } - - // Any sort of variant is allowed if this is not an identifier. - (_, Identifier::No, false, _) => {} - - // Unit variant without `other` attribute is always fine. - (Style::Unit, _, false, _) => {} - - // The last field is allowed to be a newtype catch-all. - (Style::Newtype, Identifier::Field, false, _) => { - if i < variants.len() - 1 { - cx.error_spanned_by( - variant.original, - format!("`{}` must be the last variant", variant.ident), - ); - } - } - - (_, Identifier::Field, false, _) => { - cx.error_spanned_by( - variant.original, - "#[serde(field_identifier)] may only contain unit variants", - ); - } - - (_, Identifier::Variant, false, _) => { - cx.error_spanned_by( - variant.original, - "#[serde(variant_identifier)] may only contain unit variants", - ); - } - } - } -} - -/// Skip-(de)serializing attributes are not allowed on variants marked -/// (de)serialize_with. 
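An editorial illustration of the identifier rules enforced above (not part of this patch): in a `field_identifier` enum the `#[serde(other)]` catch-all must be a unit variant and must come last.

```rust
use serde::Deserialize;

// Accepted by check_identifier: every variant is a unit variant and the
// #[serde(other)] fallback is the final one.
#[derive(Deserialize)]
#[serde(field_identifier, rename_all = "lowercase")]
enum Field {
    Secs,
    Nanos,
    #[serde(other)]
    Other,
}
```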
-fn check_variant_skip_attrs(cx: &Ctxt, cont: &Container) { - let variants = match &cont.data { - Data::Enum(variants) => variants, - Data::Struct(_, _) => { - return; - } - }; - - for variant in variants.iter() { - if variant.attrs.serialize_with().is_some() { - if variant.attrs.skip_serializing() { - cx.error_spanned_by( - variant.original, - format!( - "variant `{}` cannot have both #[serde(serialize_with)] and #[serde(skip_serializing)]", - variant.ident - ), - ); - } - - for field in &variant.fields { - let member = member_message(&field.member); - - if field.attrs.skip_serializing() { - cx.error_spanned_by( - variant.original, - format!( - "variant `{}` cannot have both #[serde(serialize_with)] and a field {} marked with #[serde(skip_serializing)]", - variant.ident, member - ), - ); - } - - if field.attrs.skip_serializing_if().is_some() { - cx.error_spanned_by( - variant.original, - format!( - "variant `{}` cannot have both #[serde(serialize_with)] and a field {} marked with #[serde(skip_serializing_if)]", - variant.ident, member - ), - ); - } - } - } - - if variant.attrs.deserialize_with().is_some() { - if variant.attrs.skip_deserializing() { - cx.error_spanned_by( - variant.original, - format!( - "variant `{}` cannot have both #[serde(deserialize_with)] and #[serde(skip_deserializing)]", - variant.ident - ), - ); - } - - for field in &variant.fields { - if field.attrs.skip_deserializing() { - let member = member_message(&field.member); - - cx.error_spanned_by( - variant.original, - format!( - "variant `{}` cannot have both #[serde(deserialize_with)] and a field {} marked with #[serde(skip_deserializing)]", - variant.ident, member - ), - ); - } - } - } - } -} - -/// The tag of an internally-tagged struct variant must not be -/// the same as either one of its fields, as this would result in -/// duplicate keys in the serialized output and/or ambiguity in -/// the to-be-deserialized input. -fn check_internal_tag_field_name_conflict(cx: &Ctxt, cont: &Container) { - let variants = match &cont.data { - Data::Enum(variants) => variants, - Data::Struct(_, _) => return, - }; - - let tag = match cont.attrs.tag() { - TagType::Internal { tag } => tag.as_str(), - TagType::External | TagType::Adjacent { .. } | TagType::None => return, - }; - - let diagnose_conflict = || { - cx.error_spanned_by( - cont.original, - format!("variant field name `{}` conflicts with internal tag", tag), - ); - }; - - for variant in variants { - match variant.style { - Style::Struct => { - for field in &variant.fields { - let check_ser = !field.attrs.skip_serializing(); - let check_de = !field.attrs.skip_deserializing(); - let name = field.attrs.name(); - let ser_name = name.serialize_name(); - - if check_ser && ser_name == tag { - diagnose_conflict(); - return; - } - - for de_name in field.attrs.aliases() { - if check_de && de_name == tag { - diagnose_conflict(); - return; - } - } - } - } - Style::Unit | Style::Newtype | Style::Tuple => {} - } - } -} - -/// In the case of adjacently-tagged enums, the type and the -/// contents tag must differ, for the same reason. -fn check_adjacent_tag_conflict(cx: &Ctxt, cont: &Container) { - let (type_tag, content_tag) = match cont.attrs.tag() { - TagType::Adjacent { tag, content } => (tag, content), - TagType::Internal { .. 
} | TagType::External | TagType::None => return, - }; - - if type_tag == content_tag { - cx.error_spanned_by( - cont.original, - format!( - "enum tags `{}` for type and content conflict with each other", - type_tag - ), - ); - } -} - -/// Enums and unit structs cannot be transparent. -fn check_transparent(cx: &Ctxt, cont: &mut Container, derive: Derive) { - if !cont.attrs.transparent() { - return; - } - - if cont.attrs.type_from().is_some() { - cx.error_spanned_by( - cont.original, - "#[serde(transparent)] is not allowed with #[serde(from = \"...\")]", - ); - } - - if cont.attrs.type_try_from().is_some() { - cx.error_spanned_by( - cont.original, - "#[serde(transparent)] is not allowed with #[serde(try_from = \"...\")]", - ); - } - - if cont.attrs.type_into().is_some() { - cx.error_spanned_by( - cont.original, - "#[serde(transparent)] is not allowed with #[serde(into = \"...\")]", - ); - } - - let fields = match &mut cont.data { - Data::Enum(_) => { - cx.error_spanned_by( - cont.original, - "#[serde(transparent)] is not allowed on an enum", - ); - return; - } - Data::Struct(Style::Unit, _) => { - cx.error_spanned_by( - cont.original, - "#[serde(transparent)] is not allowed on a unit struct", - ); - return; - } - Data::Struct(_, fields) => fields, - }; - - let mut transparent_field = None; - - for field in fields { - if allow_transparent(field, derive) { - if transparent_field.is_some() { - cx.error_spanned_by( - cont.original, - "#[serde(transparent)] requires struct to have at most one transparent field", - ); - return; - } - transparent_field = Some(field); - } - } - - match transparent_field { - Some(transparent_field) => transparent_field.attrs.mark_transparent(), - None => match derive { - Derive::Serialize => { - cx.error_spanned_by( - cont.original, - "#[serde(transparent)] requires at least one field that is not skipped", - ); - } - Derive::Deserialize => { - cx.error_spanned_by( - cont.original, - "#[serde(transparent)] requires at least one field that is neither skipped nor has a default", - ); - } - }, - } -} - -fn member_message(member: &Member) -> String { - match member { - Member::Named(ident) => format!("`{}`", ident), - Member::Unnamed(i) => format!("#{}", i.index), - } -} - -fn allow_transparent(field: &Field, derive: Derive) -> bool { - if let Type::Path(ty) = ungroup(field.ty) { - if let Some(seg) = ty.path.segments.last() { - if seg.ident == "PhantomData" { - return false; - } - } - } - - match derive { - Derive::Serialize => !field.attrs.skip_serializing(), - Derive::Deserialize => !field.attrs.skip_deserializing() && field.attrs.default().is_none(), - } -} - -fn check_from_and_try_from(cx: &Ctxt, cont: &mut Container) { - if cont.attrs.type_from().is_some() && cont.attrs.type_try_from().is_some() { - cx.error_spanned_by( - cont.original, - "#[serde(from = \"...\")] and #[serde(try_from = \"...\")] conflict with each other", - ); - } -} diff --git a/vendor/serde_derive/src/internals/ctxt.rs b/vendor/serde_derive/src/internals/ctxt.rs deleted file mode 100644 index d692c2a4..00000000 --- a/vendor/serde_derive/src/internals/ctxt.rs +++ /dev/null @@ -1,62 +0,0 @@ -use quote::ToTokens; -use std::cell::RefCell; -use std::fmt::Display; -use std::thread; -use syn; - -/// A type to collect errors together and format them. -/// -/// Dropping this object will cause a panic. It must be consumed using `check`. -/// -/// References can be shared since this type uses run-time exclusive mut checking. 
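Editorial illustration for `check_transparent`/`allow_transparent` above (not part of this patch): `#[serde(transparent)]` needs exactly one field that actually takes part in (de)serialization; skipped fields and `PhantomData` fields are not counted.

```rust
use serde::{Deserialize, Serialize};
use std::marker::PhantomData;

// Only `value` counts as a transparent candidate: `marker` is both skipped
// and PhantomData, so allow_transparent ignores it and the check passes.
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct Tagged<T> {
    value: u64,
    #[serde(skip)]
    marker: PhantomData<T>,
}
```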
-#[derive(Default)] -pub struct Ctxt { - // The contents will be set to `None` during checking. This is so that checking can be - // enforced. - errors: RefCell>>, -} - -impl Ctxt { - /// Create a new context object. - /// - /// This object contains no errors, but will still trigger a panic if it is not `check`ed. - pub fn new() -> Self { - Ctxt { - errors: RefCell::new(Some(Vec::new())), - } - } - - /// Add an error to the context object with a tokenenizable object. - /// - /// The object is used for spanning in error messages. - pub fn error_spanned_by(&self, obj: A, msg: T) { - self.errors - .borrow_mut() - .as_mut() - .unwrap() - // Curb monomorphization from generating too many identical methods. - .push(syn::Error::new_spanned(obj.into_token_stream(), msg)); - } - - /// Add one of Syn's parse errors. - pub fn syn_error(&self, err: syn::Error) { - self.errors.borrow_mut().as_mut().unwrap().push(err); - } - - /// Consume this object, producing a formatted error string if there are errors. - pub fn check(self) -> Result<(), Vec> { - let errors = self.errors.borrow_mut().take().unwrap(); - match errors.len() { - 0 => Ok(()), - _ => Err(errors), - } - } -} - -impl Drop for Ctxt { - fn drop(&mut self) { - if !thread::panicking() && self.errors.borrow().is_some() { - panic!("forgot to check for errors"); - } - } -} diff --git a/vendor/serde_derive/src/internals/mod.rs b/vendor/serde_derive/src/internals/mod.rs deleted file mode 100644 index 5e9f416c..00000000 --- a/vendor/serde_derive/src/internals/mod.rs +++ /dev/null @@ -1,28 +0,0 @@ -pub mod ast; -pub mod attr; - -mod ctxt; -pub use self::ctxt::Ctxt; - -mod receiver; -pub use self::receiver::replace_receiver; - -mod case; -mod check; -mod respan; -mod symbol; - -use syn::Type; - -#[derive(Copy, Clone)] -pub enum Derive { - Serialize, - Deserialize, -} - -pub fn ungroup(mut ty: &Type) -> &Type { - while let Type::Group(group) = ty { - ty = &group.elem; - } - ty -} diff --git a/vendor/serde_derive/src/internals/receiver.rs b/vendor/serde_derive/src/internals/receiver.rs deleted file mode 100644 index 2b722d8f..00000000 --- a/vendor/serde_derive/src/internals/receiver.rs +++ /dev/null @@ -1,287 +0,0 @@ -use internals::respan::respan; -use proc_macro2::Span; -use quote::ToTokens; -use std::mem; -use syn::punctuated::Punctuated; -use syn::{ - parse_quote, Data, DeriveInput, Expr, ExprPath, GenericArgument, GenericParam, Generics, Macro, - Path, PathArguments, QSelf, ReturnType, Type, TypeParamBound, TypePath, WherePredicate, -}; - -pub fn replace_receiver(input: &mut DeriveInput) { - let self_ty = { - let ident = &input.ident; - let ty_generics = input.generics.split_for_impl().1; - parse_quote!(#ident #ty_generics) - }; - let mut visitor = ReplaceReceiver(&self_ty); - visitor.visit_generics_mut(&mut input.generics); - visitor.visit_data_mut(&mut input.data); -} - -struct ReplaceReceiver<'a>(&'a TypePath); - -impl ReplaceReceiver<'_> { - fn self_ty(&self, span: Span) -> TypePath { - let tokens = self.0.to_token_stream(); - let respanned = respan(tokens, span); - syn::parse2(respanned).unwrap() - } - - fn self_to_qself(&self, qself: &mut Option, path: &mut Path) { - if path.leading_colon.is_some() || path.segments[0].ident != "Self" { - return; - } - - if path.segments.len() == 1 { - self.self_to_expr_path(path); - return; - } - - let span = path.segments[0].ident.span(); - *qself = Some(QSelf { - lt_token: Token![<](span), - ty: Box::new(Type::Path(self.self_ty(span))), - position: 0, - as_token: None, - gt_token: Token![>](span), - }); - - 
path.leading_colon = Some(**path.segments.pairs().next().unwrap().punct().unwrap()); - - let segments = mem::replace(&mut path.segments, Punctuated::new()); - path.segments = segments.into_pairs().skip(1).collect(); - } - - fn self_to_expr_path(&self, path: &mut Path) { - let self_ty = self.self_ty(path.segments[0].ident.span()); - let variant = mem::replace(path, self_ty.path); - for segment in &mut path.segments { - if let PathArguments::AngleBracketed(bracketed) = &mut segment.arguments { - if bracketed.colon2_token.is_none() && !bracketed.args.is_empty() { - bracketed.colon2_token = Some(::default()); - } - } - } - if variant.segments.len() > 1 { - path.segments.push_punct(::default()); - path.segments.extend(variant.segments.into_pairs().skip(1)); - } - } -} - -impl ReplaceReceiver<'_> { - // `Self` -> `Receiver` - fn visit_type_mut(&mut self, ty: &mut Type) { - let span = if let Type::Path(node) = ty { - if node.qself.is_none() && node.path.is_ident("Self") { - node.path.segments[0].ident.span() - } else { - self.visit_type_path_mut(node); - return; - } - } else { - self.visit_type_mut_impl(ty); - return; - }; - *ty = self.self_ty(span).into(); - } - - // `Self::Assoc` -> `::Assoc` - fn visit_type_path_mut(&mut self, ty: &mut TypePath) { - if ty.qself.is_none() { - self.self_to_qself(&mut ty.qself, &mut ty.path); - } - self.visit_type_path_mut_impl(ty); - } - - // `Self::method` -> `::method` - fn visit_expr_path_mut(&mut self, expr: &mut ExprPath) { - if expr.qself.is_none() { - self.self_to_qself(&mut expr.qself, &mut expr.path); - } - self.visit_expr_path_mut_impl(expr); - } - - // Everything below is simply traversing the syntax tree. - - fn visit_type_mut_impl(&mut self, ty: &mut Type) { - match ty { - Type::Array(ty) => { - self.visit_type_mut(&mut ty.elem); - self.visit_expr_mut(&mut ty.len); - } - Type::BareFn(ty) => { - for arg in &mut ty.inputs { - self.visit_type_mut(&mut arg.ty); - } - self.visit_return_type_mut(&mut ty.output); - } - Type::Group(ty) => self.visit_type_mut(&mut ty.elem), - Type::ImplTrait(ty) => { - for bound in &mut ty.bounds { - self.visit_type_param_bound_mut(bound); - } - } - Type::Macro(ty) => self.visit_macro_mut(&mut ty.mac), - Type::Paren(ty) => self.visit_type_mut(&mut ty.elem), - Type::Path(ty) => { - if let Some(qself) = &mut ty.qself { - self.visit_type_mut(&mut qself.ty); - } - self.visit_path_mut(&mut ty.path); - } - Type::Ptr(ty) => self.visit_type_mut(&mut ty.elem), - Type::Reference(ty) => self.visit_type_mut(&mut ty.elem), - Type::Slice(ty) => self.visit_type_mut(&mut ty.elem), - Type::TraitObject(ty) => { - for bound in &mut ty.bounds { - self.visit_type_param_bound_mut(bound); - } - } - Type::Tuple(ty) => { - for elem in &mut ty.elems { - self.visit_type_mut(elem); - } - } - - Type::Infer(_) | Type::Never(_) | Type::Verbatim(_) => {} - - #[cfg(test)] - Type::__TestExhaustive(_) => unimplemented!(), - #[cfg(not(test))] - _ => {} - } - } - - fn visit_type_path_mut_impl(&mut self, ty: &mut TypePath) { - if let Some(qself) = &mut ty.qself { - self.visit_type_mut(&mut qself.ty); - } - self.visit_path_mut(&mut ty.path); - } - - fn visit_expr_path_mut_impl(&mut self, expr: &mut ExprPath) { - if let Some(qself) = &mut expr.qself { - self.visit_type_mut(&mut qself.ty); - } - self.visit_path_mut(&mut expr.path); - } - - fn visit_path_mut(&mut self, path: &mut Path) { - for segment in &mut path.segments { - self.visit_path_arguments_mut(&mut segment.arguments); - } - } - - fn visit_path_arguments_mut(&mut self, arguments: &mut PathArguments) { - 
match arguments { - PathArguments::None => {} - PathArguments::AngleBracketed(arguments) => { - for arg in &mut arguments.args { - match arg { - GenericArgument::Type(arg) => self.visit_type_mut(arg), - GenericArgument::Binding(arg) => self.visit_type_mut(&mut arg.ty), - GenericArgument::Lifetime(_) - | GenericArgument::Constraint(_) - | GenericArgument::Const(_) => {} - } - } - } - PathArguments::Parenthesized(arguments) => { - for argument in &mut arguments.inputs { - self.visit_type_mut(argument); - } - self.visit_return_type_mut(&mut arguments.output); - } - } - } - - fn visit_return_type_mut(&mut self, return_type: &mut ReturnType) { - match return_type { - ReturnType::Default => {} - ReturnType::Type(_, output) => self.visit_type_mut(output), - } - } - - fn visit_type_param_bound_mut(&mut self, bound: &mut TypeParamBound) { - match bound { - TypeParamBound::Trait(bound) => self.visit_path_mut(&mut bound.path), - TypeParamBound::Lifetime(_) => {} - } - } - - fn visit_generics_mut(&mut self, generics: &mut Generics) { - for param in &mut generics.params { - match param { - GenericParam::Type(param) => { - for bound in &mut param.bounds { - self.visit_type_param_bound_mut(bound); - } - } - GenericParam::Lifetime(_) | GenericParam::Const(_) => {} - } - } - if let Some(where_clause) = &mut generics.where_clause { - for predicate in &mut where_clause.predicates { - match predicate { - WherePredicate::Type(predicate) => { - self.visit_type_mut(&mut predicate.bounded_ty); - for bound in &mut predicate.bounds { - self.visit_type_param_bound_mut(bound); - } - } - WherePredicate::Lifetime(_) | WherePredicate::Eq(_) => {} - } - } - } - } - - fn visit_data_mut(&mut self, data: &mut Data) { - match data { - Data::Struct(data) => { - for field in &mut data.fields { - self.visit_type_mut(&mut field.ty); - } - } - Data::Enum(data) => { - for variant in &mut data.variants { - for field in &mut variant.fields { - self.visit_type_mut(&mut field.ty); - } - } - } - Data::Union(_) => {} - } - } - - fn visit_expr_mut(&mut self, expr: &mut Expr) { - match expr { - Expr::Binary(expr) => { - self.visit_expr_mut(&mut expr.left); - self.visit_expr_mut(&mut expr.right); - } - Expr::Call(expr) => { - self.visit_expr_mut(&mut expr.func); - for arg in &mut expr.args { - self.visit_expr_mut(arg); - } - } - Expr::Cast(expr) => { - self.visit_expr_mut(&mut expr.expr); - self.visit_type_mut(&mut expr.ty); - } - Expr::Field(expr) => self.visit_expr_mut(&mut expr.base), - Expr::Index(expr) => { - self.visit_expr_mut(&mut expr.expr); - self.visit_expr_mut(&mut expr.index); - } - Expr::Paren(expr) => self.visit_expr_mut(&mut expr.expr), - Expr::Path(expr) => self.visit_expr_path_mut(expr), - Expr::Unary(expr) => self.visit_expr_mut(&mut expr.expr), - _ => {} - } - } - - fn visit_macro_mut(&mut self, _mac: &mut Macro) {} -} diff --git a/vendor/serde_derive/src/internals/respan.rs b/vendor/serde_derive/src/internals/respan.rs deleted file mode 100644 index dcec7017..00000000 --- a/vendor/serde_derive/src/internals/respan.rs +++ /dev/null @@ -1,16 +0,0 @@ -use proc_macro2::{Group, Span, TokenStream, TokenTree}; - -pub(crate) fn respan(stream: TokenStream, span: Span) -> TokenStream { - stream - .into_iter() - .map(|token| respan_token(token, span)) - .collect() -} - -fn respan_token(mut token: TokenTree, span: Span) -> TokenTree { - if let TokenTree::Group(g) = &mut token { - *g = Group::new(g.delimiter(), respan(g.stream(), span)); - } - token.set_span(span); - token -} diff --git a/vendor/serde_derive/src/internals/symbol.rs 
b/vendor/serde_derive/src/internals/symbol.rs deleted file mode 100644 index 1fedd275..00000000 --- a/vendor/serde_derive/src/internals/symbol.rs +++ /dev/null @@ -1,68 +0,0 @@ -use std::fmt::{self, Display}; -use syn::{Ident, Path}; - -#[derive(Copy, Clone)] -pub struct Symbol(&'static str); - -pub const ALIAS: Symbol = Symbol("alias"); -pub const BORROW: Symbol = Symbol("borrow"); -pub const BOUND: Symbol = Symbol("bound"); -pub const CONTENT: Symbol = Symbol("content"); -pub const CRATE: Symbol = Symbol("crate"); -pub const DEFAULT: Symbol = Symbol("default"); -pub const DENY_UNKNOWN_FIELDS: Symbol = Symbol("deny_unknown_fields"); -pub const DESERIALIZE: Symbol = Symbol("deserialize"); -pub const DESERIALIZE_WITH: Symbol = Symbol("deserialize_with"); -pub const FIELD_IDENTIFIER: Symbol = Symbol("field_identifier"); -pub const FLATTEN: Symbol = Symbol("flatten"); -pub const FROM: Symbol = Symbol("from"); -pub const GETTER: Symbol = Symbol("getter"); -pub const INTO: Symbol = Symbol("into"); -pub const OTHER: Symbol = Symbol("other"); -pub const REMOTE: Symbol = Symbol("remote"); -pub const RENAME: Symbol = Symbol("rename"); -pub const RENAME_ALL: Symbol = Symbol("rename_all"); -pub const SERDE: Symbol = Symbol("serde"); -pub const SERIALIZE: Symbol = Symbol("serialize"); -pub const SERIALIZE_WITH: Symbol = Symbol("serialize_with"); -pub const SKIP: Symbol = Symbol("skip"); -pub const SKIP_DESERIALIZING: Symbol = Symbol("skip_deserializing"); -pub const SKIP_SERIALIZING: Symbol = Symbol("skip_serializing"); -pub const SKIP_SERIALIZING_IF: Symbol = Symbol("skip_serializing_if"); -pub const TAG: Symbol = Symbol("tag"); -pub const TRANSPARENT: Symbol = Symbol("transparent"); -pub const TRY_FROM: Symbol = Symbol("try_from"); -pub const UNTAGGED: Symbol = Symbol("untagged"); -pub const VARIANT_IDENTIFIER: Symbol = Symbol("variant_identifier"); -pub const WITH: Symbol = Symbol("with"); -pub const EXPECTING: Symbol = Symbol("expecting"); - -impl PartialEq for Ident { - fn eq(&self, word: &Symbol) -> bool { - self == word.0 - } -} - -impl<'a> PartialEq for &'a Ident { - fn eq(&self, word: &Symbol) -> bool { - *self == word.0 - } -} - -impl PartialEq for Path { - fn eq(&self, word: &Symbol) -> bool { - self.is_ident(word.0) - } -} - -impl<'a> PartialEq for &'a Path { - fn eq(&self, word: &Symbol) -> bool { - self.is_ident(word.0) - } -} - -impl Display for Symbol { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str(self.0) - } -} diff --git a/vendor/serde_derive/src/lib.rs b/vendor/serde_derive/src/lib.rs deleted file mode 100644 index e7885c7e..00000000 --- a/vendor/serde_derive/src/lib.rs +++ /dev/null @@ -1,107 +0,0 @@ -//! This crate provides Serde's two derive macros. -//! -//! ```edition2018 -//! # use serde_derive::{Serialize, Deserialize}; -//! # -//! #[derive(Serialize, Deserialize)] -//! # struct S; -//! # -//! # fn main() {} -//! ``` -//! -//! Please refer to [https://serde.rs/derive.html] for how to set this up. -//! -//! 
[https://serde.rs/derive.html]: https://serde.rs/derive.html - -#![doc(html_root_url = "https://docs.rs/serde_derive/1.0.132")] -#![allow(unknown_lints, bare_trait_objects)] -// Ignored clippy lints -#![allow( - // clippy false positive: https://github.com/rust-lang/rust-clippy/issues/7054 - clippy::branches_sharing_code, - clippy::cognitive_complexity, - // clippy bug: https://github.com/rust-lang/rust-clippy/issues/7575 - clippy::collapsible_match, - clippy::enum_variant_names, - // clippy bug: https://github.com/rust-lang/rust-clippy/issues/6797 - clippy::manual_map, - clippy::match_like_matches_macro, - clippy::needless_pass_by_value, - clippy::too_many_arguments, - clippy::trivially_copy_pass_by_ref, - clippy::used_underscore_binding, - clippy::wildcard_in_or_patterns, - // clippy bug: https://github.com/rust-lang/rust-clippy/issues/5704 - clippy::unnested_or_patterns, -)] -// Ignored clippy_pedantic lints -#![allow( - clippy::cast_possible_truncation, - clippy::checked_conversions, - clippy::doc_markdown, - clippy::enum_glob_use, - clippy::indexing_slicing, - clippy::items_after_statements, - clippy::let_underscore_drop, - clippy::manual_assert, - clippy::map_err_ignore, - clippy::match_same_arms, - // clippy bug: https://github.com/rust-lang/rust-clippy/issues/6984 - clippy::match_wildcard_for_single_variants, - clippy::module_name_repetitions, - clippy::must_use_candidate, - clippy::option_if_let_else, - clippy::similar_names, - clippy::single_match_else, - clippy::struct_excessive_bools, - clippy::too_many_lines, - clippy::unseparated_literal_suffix, - clippy::unused_self, - clippy::use_self, - clippy::wildcard_imports -)] - -#[macro_use] -extern crate quote; -#[macro_use] -extern crate syn; - -extern crate proc_macro; -extern crate proc_macro2; - -mod internals; - -use proc_macro::TokenStream; -use syn::DeriveInput; - -#[macro_use] -mod bound; -#[macro_use] -mod fragment; - -mod de; -mod dummy; -mod pretend; -mod ser; -mod try; - -#[proc_macro_derive(Serialize, attributes(serde))] -pub fn derive_serialize(input: TokenStream) -> TokenStream { - let mut input = parse_macro_input!(input as DeriveInput); - ser::expand_derive_serialize(&mut input) - .unwrap_or_else(to_compile_errors) - .into() -} - -#[proc_macro_derive(Deserialize, attributes(serde))] -pub fn derive_deserialize(input: TokenStream) -> TokenStream { - let mut input = parse_macro_input!(input as DeriveInput); - de::expand_derive_deserialize(&mut input) - .unwrap_or_else(to_compile_errors) - .into() -} - -fn to_compile_errors(errors: Vec) -> proc_macro2::TokenStream { - let compile_errors = errors.iter().map(syn::Error::to_compile_error); - quote!(#(#compile_errors)*) -} diff --git a/vendor/serde_derive/src/pretend.rs b/vendor/serde_derive/src/pretend.rs deleted file mode 100644 index 3af6a66f..00000000 --- a/vendor/serde_derive/src/pretend.rs +++ /dev/null @@ -1,201 +0,0 @@ -use proc_macro2::TokenStream; -use quote::format_ident; - -use internals::ast::{Container, Data, Field, Style, Variant}; - -// Suppress dead_code warnings that would otherwise appear when using a remote -// derive. Other than this pretend code, a struct annotated with remote derive -// never has its fields referenced and an enum annotated with remote derive -// never has its variants constructed. 
-// -// warning: field is never used: `i` -// --> src/main.rs:4:20 -// | -// 4 | struct StructDef { i: i32 } -// | ^^^^^^ -// -// warning: variant is never constructed: `V` -// --> src/main.rs:8:16 -// | -// 8 | enum EnumDef { V } -// | ^ -// -pub fn pretend_used(cont: &Container, is_packed: bool) -> TokenStream { - let pretend_fields = pretend_fields_used(cont, is_packed); - let pretend_variants = pretend_variants_used(cont); - - quote! { - #pretend_fields - #pretend_variants - } -} - -// For structs with named fields, expands to: -// -// match None::<&T> { -// Some(T { a: __v0, b: __v1 }) => {} -// _ => {} -// } -// -// For packed structs on sufficiently new rustc, expands to: -// -// match None::<&T> { -// Some(__v @ T { a: _, b: _ }) => { -// let _ = addr_of!(__v.a); -// let _ = addr_of!(__v.b); -// } -// _ => {} -// } -// -// For packed structs on older rustc, we assume Sized and !Drop, and expand to: -// -// match None:: { -// Some(T { a: __v0, b: __v1 }) => {} -// _ => {} -// } -// -// For enums, expands to the following but only including struct variants: -// -// match None::<&T> { -// Some(T::A { a: __v0 }) => {} -// Some(T::B { b: __v0 }) => {} -// _ => {} -// } -// -fn pretend_fields_used(cont: &Container, is_packed: bool) -> TokenStream { - match &cont.data { - Data::Enum(variants) => pretend_fields_used_enum(cont, variants), - Data::Struct(Style::Struct, fields) => { - if is_packed { - pretend_fields_used_struct_packed(cont, fields) - } else { - pretend_fields_used_struct(cont, fields) - } - } - Data::Struct(_, _) => quote!(), - } -} - -fn pretend_fields_used_struct(cont: &Container, fields: &[Field]) -> TokenStream { - let type_ident = &cont.ident; - let (_, ty_generics, _) = cont.generics.split_for_impl(); - - let members = fields.iter().map(|field| &field.member); - let placeholders = (0usize..).map(|i| format_ident!("__v{}", i)); - - quote! { - match _serde::__private::None::<&#type_ident #ty_generics> { - _serde::__private::Some(#type_ident { #(#members: #placeholders),* }) => {} - _ => {} - } - } -} - -fn pretend_fields_used_struct_packed(cont: &Container, fields: &[Field]) -> TokenStream { - let type_ident = &cont.ident; - let (_, ty_generics, _) = cont.generics.split_for_impl(); - - let members = fields.iter().map(|field| &field.member).collect::>(); - - #[cfg(ptr_addr_of)] - { - quote! { - match _serde::__private::None::<&#type_ident #ty_generics> { - _serde::__private::Some(__v @ #type_ident { #(#members: _),* }) => { - #( - let _ = _serde::__private::ptr::addr_of!(__v.#members); - )* - } - _ => {} - } - } - } - - #[cfg(not(ptr_addr_of))] - { - let placeholders = (0usize..).map(|i| format_ident!("__v{}", i)); - - quote! { - match _serde::__private::None::<#type_ident #ty_generics> { - _serde::__private::Some(#type_ident { #(#members: #placeholders),* }) => {} - _ => {} - } - } - } -} - -fn pretend_fields_used_enum(cont: &Container, variants: &[Variant]) -> TokenStream { - let type_ident = &cont.ident; - let (_, ty_generics, _) = cont.generics.split_for_impl(); - - let patterns = variants - .iter() - .filter_map(|variant| match variant.style { - Style::Struct => { - let variant_ident = &variant.ident; - let members = variant.fields.iter().map(|field| &field.member); - let placeholders = (0usize..).map(|i| format_ident!("__v{}", i)); - Some(quote!(#type_ident::#variant_ident { #(#members: #placeholders),* })) - } - _ => None, - }) - .collect::>(); - - quote! 
{ - match _serde::__private::None::<&#type_ident #ty_generics> { - #( - _serde::__private::Some(#patterns) => {} - )* - _ => {} - } - } -} - -// Expands to one of these per enum variant: -// -// match None { -// Some((__v0, __v1,)) => { -// let _ = E::V { a: __v0, b: __v1 }; -// } -// _ => {} -// } -// -fn pretend_variants_used(cont: &Container) -> TokenStream { - let variants = match &cont.data { - Data::Enum(variants) => variants, - Data::Struct(_, _) => { - return quote!(); - } - }; - - let type_ident = &cont.ident; - let (_, ty_generics, _) = cont.generics.split_for_impl(); - let turbofish = ty_generics.as_turbofish(); - - let cases = variants.iter().map(|variant| { - let variant_ident = &variant.ident; - let placeholders = &(0..variant.fields.len()) - .map(|i| format_ident!("__v{}", i)) - .collect::>(); - - let pat = match variant.style { - Style::Struct => { - let members = variant.fields.iter().map(|field| &field.member); - quote!({ #(#members: #placeholders),* }) - } - Style::Tuple | Style::Newtype => quote!(( #(#placeholders),* )), - Style::Unit => quote!(), - }; - - quote! { - match _serde::__private::None { - _serde::__private::Some((#(#placeholders,)*)) => { - let _ = #type_ident::#variant_ident #turbofish #pat; - } - _ => {} - } - } - }); - - quote!(#(#cases)*) -} diff --git a/vendor/serde_derive/src/ser.rs b/vendor/serde_derive/src/ser.rs deleted file mode 100644 index 529a20d7..00000000 --- a/vendor/serde_derive/src/ser.rs +++ /dev/null @@ -1,1338 +0,0 @@ -use proc_macro2::{Span, TokenStream}; -use syn::spanned::Spanned; -use syn::{self, Ident, Index, Member}; - -use bound; -use dummy; -use fragment::{Fragment, Match, Stmts}; -use internals::ast::{Container, Data, Field, Style, Variant}; -use internals::{attr, replace_receiver, Ctxt, Derive}; -use pretend; - -pub fn expand_derive_serialize( - input: &mut syn::DeriveInput, -) -> Result> { - replace_receiver(input); - - let ctxt = Ctxt::new(); - let cont = match Container::from_ast(&ctxt, input, Derive::Serialize) { - Some(cont) => cont, - None => return Err(ctxt.check().unwrap_err()), - }; - precondition(&ctxt, &cont); - ctxt.check()?; - - let ident = &cont.ident; - let params = Parameters::new(&cont); - let (impl_generics, ty_generics, where_clause) = params.generics.split_for_impl(); - let body = Stmts(serialize_body(&cont, ¶ms)); - let serde = cont.attrs.serde_path(); - - let impl_block = if let Some(remote) = cont.attrs.remote() { - let vis = &input.vis; - let used = pretend::pretend_used(&cont, params.is_packed); - quote! { - impl #impl_generics #ident #ty_generics #where_clause { - #vis fn serialize<__S>(__self: &#remote #ty_generics, __serializer: __S) -> #serde::__private::Result<__S::Ok, __S::Error> - where - __S: #serde::Serializer, - { - #used - #body - } - } - } - } else { - quote! 
{ - #[automatically_derived] - impl #impl_generics #serde::Serialize for #ident #ty_generics #where_clause { - fn serialize<__S>(&self, __serializer: __S) -> #serde::__private::Result<__S::Ok, __S::Error> - where - __S: #serde::Serializer, - { - #body - } - } - } - }; - - Ok(dummy::wrap_in_const( - cont.attrs.custom_serde_path(), - "SERIALIZE", - ident, - impl_block, - )) -} - -fn precondition(cx: &Ctxt, cont: &Container) { - match cont.attrs.identifier() { - attr::Identifier::No => {} - attr::Identifier::Field => { - cx.error_spanned_by(cont.original, "field identifiers cannot be serialized"); - } - attr::Identifier::Variant => { - cx.error_spanned_by(cont.original, "variant identifiers cannot be serialized"); - } - } -} - -struct Parameters { - /// Variable holding the value being serialized. Either `self` for local - /// types or `__self` for remote types. - self_var: Ident, - - /// Path to the type the impl is for. Either a single `Ident` for local - /// types or `some::remote::Ident` for remote types. Does not include - /// generic parameters. - this: syn::Path, - - /// Generics including any explicit and inferred bounds for the impl. - generics: syn::Generics, - - /// Type has a `serde(remote = "...")` attribute. - is_remote: bool, - - /// Type has a repr(packed) attribute. - is_packed: bool, -} - -impl Parameters { - fn new(cont: &Container) -> Self { - let is_remote = cont.attrs.remote().is_some(); - let self_var = if is_remote { - Ident::new("__self", Span::call_site()) - } else { - Ident::new("self", Span::call_site()) - }; - - let this = match cont.attrs.remote() { - Some(remote) => remote.clone(), - None => cont.ident.clone().into(), - }; - - let is_packed = cont.attrs.is_packed(); - - let generics = build_generics(cont); - - Parameters { - self_var, - this, - generics, - is_remote, - is_packed, - } - } - - /// Type name to use in error messages and `&'static str` arguments to - /// various Serializer methods. - fn type_name(&self) -> String { - self.this.segments.last().unwrap().ident.to_string() - } -} - -// All the generics in the input, plus a bound `T: Serialize` for each generic -// field type that will be serialized by us. -fn build_generics(cont: &Container) -> syn::Generics { - let generics = bound::without_defaults(cont.generics); - - let generics = - bound::with_where_predicates_from_fields(cont, &generics, attr::Field::ser_bound); - - let generics = - bound::with_where_predicates_from_variants(cont, &generics, attr::Variant::ser_bound); - - match cont.attrs.ser_bound() { - Some(predicates) => bound::with_where_predicates(&generics, predicates), - None => bound::with_bound( - cont, - &generics, - needs_serialize_bound, - &parse_quote!(_serde::Serialize), - ), - } -} - -// Fields with a `skip_serializing` or `serialize_with` attribute, or which -// belong to a variant with a 'skip_serializing` or `serialize_with` attribute, -// are not serialized by us so we do not generate a bound. Fields with a `bound` -// attribute specify their own bound so we do not generate one. All other fields -// may need a `T: Serialize` bound where T is the type of the field. 
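Editorial illustration of the bound rule described in the comment above and encoded by `needs_serialize_bound` just below (not part of this patch): fields that are skipped do not pull a `Serialize` bound onto their type parameters.

```rust
use serde::Serialize;

// The derived impl gets `A: Serialize` in its where-clause, but no bound on
// `B`, because `right` is never serialized by the generated code.
#[derive(Serialize)]
struct Pair<A, B> {
    left: A,
    #[serde(skip_serializing)]
    right: B,
}
```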
-fn needs_serialize_bound(field: &attr::Field, variant: Option<&attr::Variant>) -> bool { - !field.skip_serializing() - && field.serialize_with().is_none() - && field.ser_bound().is_none() - && variant.map_or(true, |variant| { - !variant.skip_serializing() - && variant.serialize_with().is_none() - && variant.ser_bound().is_none() - }) -} - -fn serialize_body(cont: &Container, params: &Parameters) -> Fragment { - if cont.attrs.transparent() { - serialize_transparent(cont, params) - } else if let Some(type_into) = cont.attrs.type_into() { - serialize_into(params, type_into) - } else { - match &cont.data { - Data::Enum(variants) => serialize_enum(params, variants, &cont.attrs), - Data::Struct(Style::Struct, fields) => serialize_struct(params, fields, &cont.attrs), - Data::Struct(Style::Tuple, fields) => { - serialize_tuple_struct(params, fields, &cont.attrs) - } - Data::Struct(Style::Newtype, fields) => { - serialize_newtype_struct(params, &fields[0], &cont.attrs) - } - Data::Struct(Style::Unit, _) => serialize_unit_struct(&cont.attrs), - } - } -} - -fn serialize_transparent(cont: &Container, params: &Parameters) -> Fragment { - let fields = match &cont.data { - Data::Struct(_, fields) => fields, - Data::Enum(_) => unreachable!(), - }; - - let self_var = ¶ms.self_var; - let transparent_field = fields.iter().find(|f| f.attrs.transparent()).unwrap(); - let member = &transparent_field.member; - - let path = match transparent_field.attrs.serialize_with() { - Some(path) => quote!(#path), - None => { - let span = transparent_field.original.span(); - quote_spanned!(span=> _serde::Serialize::serialize) - } - }; - - quote_block! { - #path(&#self_var.#member, __serializer) - } -} - -fn serialize_into(params: &Parameters, type_into: &syn::Type) -> Fragment { - let self_var = ¶ms.self_var; - quote_block! { - _serde::Serialize::serialize( - &_serde::__private::Into::<#type_into>::into(_serde::__private::Clone::clone(#self_var)), - __serializer) - } -} - -fn serialize_unit_struct(cattrs: &attr::Container) -> Fragment { - let type_name = cattrs.name().serialize_name(); - - quote_expr! { - _serde::Serializer::serialize_unit_struct(__serializer, #type_name) - } -} - -fn serialize_newtype_struct( - params: &Parameters, - field: &Field, - cattrs: &attr::Container, -) -> Fragment { - let type_name = cattrs.name().serialize_name(); - - let mut field_expr = get_member( - params, - field, - &Member::Unnamed(Index { - index: 0, - span: Span::call_site(), - }), - ); - if let Some(path) = field.attrs.serialize_with() { - field_expr = wrap_serialize_field_with(params, field.ty, path, &field_expr); - } - - let span = field.original.span(); - let func = quote_spanned!(span=> _serde::Serializer::serialize_newtype_struct); - quote_expr! 
{ - #func(__serializer, #type_name, #field_expr) - } -} - -fn serialize_tuple_struct( - params: &Parameters, - fields: &[Field], - cattrs: &attr::Container, -) -> Fragment { - let serialize_stmts = - serialize_tuple_struct_visitor(fields, params, false, &TupleTrait::SerializeTupleStruct); - - let type_name = cattrs.name().serialize_name(); - - let mut serialized_fields = fields - .iter() - .enumerate() - .filter(|(_, field)| !field.attrs.skip_serializing()) - .peekable(); - - let let_mut = mut_if(serialized_fields.peek().is_some()); - - let len = serialized_fields - .map(|(i, field)| match field.attrs.skip_serializing_if() { - None => quote!(1), - Some(path) => { - let index = syn::Index { - index: i as u32, - span: Span::call_site(), - }; - let field_expr = get_member(params, field, &Member::Unnamed(index)); - quote!(if #path(#field_expr) { 0 } else { 1 }) - } - }) - .fold(quote!(0), |sum, expr| quote!(#sum + #expr)); - - quote_block! { - let #let_mut __serde_state = try!(_serde::Serializer::serialize_tuple_struct(__serializer, #type_name, #len)); - #(#serialize_stmts)* - _serde::ser::SerializeTupleStruct::end(__serde_state) - } -} - -fn serialize_struct(params: &Parameters, fields: &[Field], cattrs: &attr::Container) -> Fragment { - assert!(fields.len() as u64 <= u64::from(u32::max_value())); - - if cattrs.has_flatten() { - serialize_struct_as_map(params, fields, cattrs) - } else { - serialize_struct_as_struct(params, fields, cattrs) - } -} - -fn serialize_struct_tag_field(cattrs: &attr::Container, struct_trait: &StructTrait) -> TokenStream { - match cattrs.tag() { - attr::TagType::Internal { tag } => { - let type_name = cattrs.name().serialize_name(); - let func = struct_trait.serialize_field(Span::call_site()); - quote! { - try!(#func(&mut __serde_state, #tag, #type_name)); - } - } - _ => quote! {}, - } -} - -fn serialize_struct_as_struct( - params: &Parameters, - fields: &[Field], - cattrs: &attr::Container, -) -> Fragment { - let serialize_fields = - serialize_struct_visitor(fields, params, false, &StructTrait::SerializeStruct); - - let type_name = cattrs.name().serialize_name(); - - let tag_field = serialize_struct_tag_field(cattrs, &StructTrait::SerializeStruct); - let tag_field_exists = !tag_field.is_empty(); - - let mut serialized_fields = fields - .iter() - .filter(|&field| !field.attrs.skip_serializing()) - .peekable(); - - let let_mut = mut_if(serialized_fields.peek().is_some() || tag_field_exists); - - let len = serialized_fields - .map(|field| match field.attrs.skip_serializing_if() { - None => quote!(1), - Some(path) => { - let field_expr = get_member(params, field, &field.member); - quote!(if #path(#field_expr) { 0 } else { 1 }) - } - }) - .fold( - quote!(#tag_field_exists as usize), - |sum, expr| quote!(#sum + #expr), - ); - - quote_block! 
{ - let #let_mut __serde_state = try!(_serde::Serializer::serialize_struct(__serializer, #type_name, #len)); - #tag_field - #(#serialize_fields)* - _serde::ser::SerializeStruct::end(__serde_state) - } -} - -fn serialize_struct_as_map( - params: &Parameters, - fields: &[Field], - cattrs: &attr::Container, -) -> Fragment { - let serialize_fields = - serialize_struct_visitor(fields, params, false, &StructTrait::SerializeMap); - - let tag_field = serialize_struct_tag_field(cattrs, &StructTrait::SerializeMap); - let tag_field_exists = !tag_field.is_empty(); - - let mut serialized_fields = fields - .iter() - .filter(|&field| !field.attrs.skip_serializing()) - .peekable(); - - let let_mut = mut_if(serialized_fields.peek().is_some() || tag_field_exists); - - let len = if cattrs.has_flatten() { - quote!(_serde::__private::None) - } else { - let len = serialized_fields - .map(|field| match field.attrs.skip_serializing_if() { - None => quote!(1), - Some(path) => { - let field_expr = get_member(params, field, &field.member); - quote!(if #path(#field_expr) { 0 } else { 1 }) - } - }) - .fold( - quote!(#tag_field_exists as usize), - |sum, expr| quote!(#sum + #expr), - ); - quote!(_serde::__private::Some(#len)) - }; - - quote_block! { - let #let_mut __serde_state = try!(_serde::Serializer::serialize_map(__serializer, #len)); - #tag_field - #(#serialize_fields)* - _serde::ser::SerializeMap::end(__serde_state) - } -} - -fn serialize_enum(params: &Parameters, variants: &[Variant], cattrs: &attr::Container) -> Fragment { - assert!(variants.len() as u64 <= u64::from(u32::max_value())); - - let self_var = ¶ms.self_var; - - let arms: Vec<_> = variants - .iter() - .enumerate() - .map(|(variant_index, variant)| { - serialize_variant(params, variant, variant_index as u32, cattrs) - }) - .collect(); - - quote_expr! { - match *#self_var { - #(#arms)* - } - } -} - -fn serialize_variant( - params: &Parameters, - variant: &Variant, - variant_index: u32, - cattrs: &attr::Container, -) -> TokenStream { - let this = ¶ms.this; - let variant_ident = &variant.ident; - - if variant.attrs.skip_serializing() { - let skipped_msg = format!( - "the enum variant {}::{} cannot be serialized", - params.type_name(), - variant_ident - ); - let skipped_err = quote! { - _serde::__private::Err(_serde::ser::Error::custom(#skipped_msg)) - }; - let fields_pat = match variant.style { - Style::Unit => quote!(), - Style::Newtype | Style::Tuple => quote!((..)), - Style::Struct => quote!({ .. }), - }; - quote! { - #this::#variant_ident #fields_pat => #skipped_err, - } - } else { - // variant wasn't skipped - let case = match variant.style { - Style::Unit => { - quote! { - #this::#variant_ident - } - } - Style::Newtype => { - quote! { - #this::#variant_ident(ref __field0) - } - } - Style::Tuple => { - let field_names = (0..variant.fields.len()) - .map(|i| Ident::new(&format!("__field{}", i), Span::call_site())); - quote! { - #this::#variant_ident(#(ref #field_names),*) - } - } - Style::Struct => { - let members = variant.fields.iter().map(|f| &f.member); - quote! 
{ - #this::#variant_ident { #(ref #members),* } - } - } - }; - - let body = Match(match cattrs.tag() { - attr::TagType::External => { - serialize_externally_tagged_variant(params, variant, variant_index, cattrs) - } - attr::TagType::Internal { tag } => { - serialize_internally_tagged_variant(params, variant, cattrs, tag) - } - attr::TagType::Adjacent { tag, content } => { - serialize_adjacently_tagged_variant(params, variant, cattrs, tag, content) - } - attr::TagType::None => serialize_untagged_variant(params, variant, cattrs), - }); - - quote! { - #case => #body - } - } -} - -fn serialize_externally_tagged_variant( - params: &Parameters, - variant: &Variant, - variant_index: u32, - cattrs: &attr::Container, -) -> Fragment { - let type_name = cattrs.name().serialize_name(); - let variant_name = variant.attrs.name().serialize_name(); - - if let Some(path) = variant.attrs.serialize_with() { - let ser = wrap_serialize_variant_with(params, path, variant); - return quote_expr! { - _serde::Serializer::serialize_newtype_variant( - __serializer, - #type_name, - #variant_index, - #variant_name, - #ser, - ) - }; - } - - match effective_style(variant) { - Style::Unit => { - quote_expr! { - _serde::Serializer::serialize_unit_variant( - __serializer, - #type_name, - #variant_index, - #variant_name, - ) - } - } - Style::Newtype => { - let field = &variant.fields[0]; - let mut field_expr = quote!(__field0); - if let Some(path) = field.attrs.serialize_with() { - field_expr = wrap_serialize_field_with(params, field.ty, path, &field_expr); - } - - let span = field.original.span(); - let func = quote_spanned!(span=> _serde::Serializer::serialize_newtype_variant); - quote_expr! { - #func( - __serializer, - #type_name, - #variant_index, - #variant_name, - #field_expr, - ) - } - } - Style::Tuple => serialize_tuple_variant( - TupleVariant::ExternallyTagged { - type_name, - variant_index, - variant_name, - }, - params, - &variant.fields, - ), - Style::Struct => serialize_struct_variant( - StructVariant::ExternallyTagged { - variant_index, - variant_name, - }, - params, - &variant.fields, - &type_name, - ), - } -} - -fn serialize_internally_tagged_variant( - params: &Parameters, - variant: &Variant, - cattrs: &attr::Container, - tag: &str, -) -> Fragment { - let type_name = cattrs.name().serialize_name(); - let variant_name = variant.attrs.name().serialize_name(); - - let enum_ident_str = params.type_name(); - let variant_ident_str = variant.ident.to_string(); - - if let Some(path) = variant.attrs.serialize_with() { - let ser = wrap_serialize_variant_with(params, path, variant); - return quote_expr! { - _serde::__private::ser::serialize_tagged_newtype( - __serializer, - #enum_ident_str, - #variant_ident_str, - #tag, - #variant_name, - #ser, - ) - }; - } - - match effective_style(variant) { - Style::Unit => { - quote_block! { - let mut __struct = try!(_serde::Serializer::serialize_struct( - __serializer, #type_name, 1)); - try!(_serde::ser::SerializeStruct::serialize_field( - &mut __struct, #tag, #variant_name)); - _serde::ser::SerializeStruct::end(__struct) - } - } - Style::Newtype => { - let field = &variant.fields[0]; - let mut field_expr = quote!(__field0); - if let Some(path) = field.attrs.serialize_with() { - field_expr = wrap_serialize_field_with(params, field.ty, path, &field_expr); - } - - let span = field.original.span(); - let func = quote_spanned!(span=> _serde::__private::ser::serialize_tagged_newtype); - quote_expr! 
{ - #func( - __serializer, - #enum_ident_str, - #variant_ident_str, - #tag, - #variant_name, - #field_expr, - ) - } - } - Style::Struct => serialize_struct_variant( - StructVariant::InternallyTagged { tag, variant_name }, - params, - &variant.fields, - &type_name, - ), - Style::Tuple => unreachable!("checked in serde_derive_internals"), - } -} - -fn serialize_adjacently_tagged_variant( - params: &Parameters, - variant: &Variant, - cattrs: &attr::Container, - tag: &str, - content: &str, -) -> Fragment { - let this = ¶ms.this; - let type_name = cattrs.name().serialize_name(); - let variant_name = variant.attrs.name().serialize_name(); - - let inner = Stmts(if let Some(path) = variant.attrs.serialize_with() { - let ser = wrap_serialize_variant_with(params, path, variant); - quote_expr! { - _serde::Serialize::serialize(#ser, __serializer) - } - } else { - match effective_style(variant) { - Style::Unit => { - return quote_block! { - let mut __struct = try!(_serde::Serializer::serialize_struct( - __serializer, #type_name, 1)); - try!(_serde::ser::SerializeStruct::serialize_field( - &mut __struct, #tag, #variant_name)); - _serde::ser::SerializeStruct::end(__struct) - }; - } - Style::Newtype => { - let field = &variant.fields[0]; - let mut field_expr = quote!(__field0); - if let Some(path) = field.attrs.serialize_with() { - field_expr = wrap_serialize_field_with(params, field.ty, path, &field_expr); - } - - let span = field.original.span(); - let func = quote_spanned!(span=> _serde::ser::SerializeStruct::serialize_field); - return quote_block! { - let mut __struct = try!(_serde::Serializer::serialize_struct( - __serializer, #type_name, 2)); - try!(_serde::ser::SerializeStruct::serialize_field( - &mut __struct, #tag, #variant_name)); - try!(#func( - &mut __struct, #content, #field_expr)); - _serde::ser::SerializeStruct::end(__struct) - }; - } - Style::Tuple => { - serialize_tuple_variant(TupleVariant::Untagged, params, &variant.fields) - } - Style::Struct => serialize_struct_variant( - StructVariant::Untagged, - params, - &variant.fields, - &variant_name, - ), - } - }); - - let fields_ty = variant.fields.iter().map(|f| &f.ty); - let fields_ident: &Vec<_> = &match variant.style { - Style::Unit => { - if variant.attrs.serialize_with().is_some() { - vec![] - } else { - unreachable!() - } - } - Style::Newtype => vec![Member::Named(Ident::new("__field0", Span::call_site()))], - Style::Tuple => (0..variant.fields.len()) - .map(|i| Member::Named(Ident::new(&format!("__field{}", i), Span::call_site()))) - .collect(), - Style::Struct => variant.fields.iter().map(|f| f.member.clone()).collect(), - }; - - let (_, ty_generics, where_clause) = params.generics.split_for_impl(); - - let wrapper_generics = if fields_ident.is_empty() { - params.generics.clone() - } else { - bound::with_lifetime_bound(¶ms.generics, "'__a") - }; - let (wrapper_impl_generics, wrapper_ty_generics, _) = wrapper_generics.split_for_impl(); - - quote_block! { - struct __AdjacentlyTagged #wrapper_generics #where_clause { - data: (#(&'__a #fields_ty,)*), - phantom: _serde::__private::PhantomData<#this #ty_generics>, - } - - impl #wrapper_impl_generics _serde::Serialize for __AdjacentlyTagged #wrapper_ty_generics #where_clause { - fn serialize<__S>(&self, __serializer: __S) -> _serde::__private::Result<__S::Ok, __S::Error> - where - __S: _serde::Serializer, - { - // Elements that have skip_serializing will be unused. 
- #[allow(unused_variables)] - let (#(#fields_ident,)*) = self.data; - #inner - } - } - - let mut __struct = try!(_serde::Serializer::serialize_struct( - __serializer, #type_name, 2)); - try!(_serde::ser::SerializeStruct::serialize_field( - &mut __struct, #tag, #variant_name)); - try!(_serde::ser::SerializeStruct::serialize_field( - &mut __struct, #content, &__AdjacentlyTagged { - data: (#(#fields_ident,)*), - phantom: _serde::__private::PhantomData::<#this #ty_generics>, - })); - _serde::ser::SerializeStruct::end(__struct) - } -} - -fn serialize_untagged_variant( - params: &Parameters, - variant: &Variant, - cattrs: &attr::Container, -) -> Fragment { - if let Some(path) = variant.attrs.serialize_with() { - let ser = wrap_serialize_variant_with(params, path, variant); - return quote_expr! { - _serde::Serialize::serialize(#ser, __serializer) - }; - } - - match effective_style(variant) { - Style::Unit => { - quote_expr! { - _serde::Serializer::serialize_unit(__serializer) - } - } - Style::Newtype => { - let field = &variant.fields[0]; - let mut field_expr = quote!(__field0); - if let Some(path) = field.attrs.serialize_with() { - field_expr = wrap_serialize_field_with(params, field.ty, path, &field_expr); - } - - let span = field.original.span(); - let func = quote_spanned!(span=> _serde::Serialize::serialize); - quote_expr! { - #func(#field_expr, __serializer) - } - } - Style::Tuple => serialize_tuple_variant(TupleVariant::Untagged, params, &variant.fields), - Style::Struct => { - let type_name = cattrs.name().serialize_name(); - serialize_struct_variant(StructVariant::Untagged, params, &variant.fields, &type_name) - } - } -} - -enum TupleVariant { - ExternallyTagged { - type_name: String, - variant_index: u32, - variant_name: String, - }, - Untagged, -} - -fn serialize_tuple_variant( - context: TupleVariant, - params: &Parameters, - fields: &[Field], -) -> Fragment { - let tuple_trait = match context { - TupleVariant::ExternallyTagged { .. } => TupleTrait::SerializeTupleVariant, - TupleVariant::Untagged => TupleTrait::SerializeTuple, - }; - - let serialize_stmts = serialize_tuple_struct_visitor(fields, params, true, &tuple_trait); - - let mut serialized_fields = fields - .iter() - .enumerate() - .filter(|(_, field)| !field.attrs.skip_serializing()) - .peekable(); - - let let_mut = mut_if(serialized_fields.peek().is_some()); - - let len = serialized_fields - .map(|(i, field)| match field.attrs.skip_serializing_if() { - None => quote!(1), - Some(path) => { - let field_expr = Ident::new(&format!("__field{}", i), Span::call_site()); - quote!(if #path(#field_expr) { 0 } else { 1 }) - } - }) - .fold(quote!(0), |sum, expr| quote!(#sum + #expr)); - - match context { - TupleVariant::ExternallyTagged { - type_name, - variant_index, - variant_name, - } => { - quote_block! { - let #let_mut __serde_state = try!(_serde::Serializer::serialize_tuple_variant( - __serializer, - #type_name, - #variant_index, - #variant_name, - #len)); - #(#serialize_stmts)* - _serde::ser::SerializeTupleVariant::end(__serde_state) - } - } - TupleVariant::Untagged => { - quote_block! 
{ - let #let_mut __serde_state = try!(_serde::Serializer::serialize_tuple( - __serializer, - #len)); - #(#serialize_stmts)* - _serde::ser::SerializeTuple::end(__serde_state) - } - } - } -} - -enum StructVariant<'a> { - ExternallyTagged { - variant_index: u32, - variant_name: String, - }, - InternallyTagged { - tag: &'a str, - variant_name: String, - }, - Untagged, -} - -fn serialize_struct_variant<'a>( - context: StructVariant<'a>, - params: &Parameters, - fields: &[Field], - name: &str, -) -> Fragment { - if fields.iter().any(|field| field.attrs.flatten()) { - return serialize_struct_variant_with_flatten(context, params, fields, name); - } - - let struct_trait = match context { - StructVariant::ExternallyTagged { .. } => StructTrait::SerializeStructVariant, - StructVariant::InternallyTagged { .. } | StructVariant::Untagged => { - StructTrait::SerializeStruct - } - }; - - let serialize_fields = serialize_struct_visitor(fields, params, true, &struct_trait); - - let mut serialized_fields = fields - .iter() - .filter(|&field| !field.attrs.skip_serializing()) - .peekable(); - - let let_mut = mut_if(serialized_fields.peek().is_some()); - - let len = serialized_fields - .map(|field| { - let member = &field.member; - - match field.attrs.skip_serializing_if() { - Some(path) => quote!(if #path(#member) { 0 } else { 1 }), - None => quote!(1), - } - }) - .fold(quote!(0), |sum, expr| quote!(#sum + #expr)); - - match context { - StructVariant::ExternallyTagged { - variant_index, - variant_name, - } => { - quote_block! { - let #let_mut __serde_state = try!(_serde::Serializer::serialize_struct_variant( - __serializer, - #name, - #variant_index, - #variant_name, - #len, - )); - #(#serialize_fields)* - _serde::ser::SerializeStructVariant::end(__serde_state) - } - } - StructVariant::InternallyTagged { tag, variant_name } => { - quote_block! { - let mut __serde_state = try!(_serde::Serializer::serialize_struct( - __serializer, - #name, - #len + 1, - )); - try!(_serde::ser::SerializeStruct::serialize_field( - &mut __serde_state, - #tag, - #variant_name, - )); - #(#serialize_fields)* - _serde::ser::SerializeStruct::end(__serde_state) - } - } - StructVariant::Untagged => { - quote_block! { - let #let_mut __serde_state = try!(_serde::Serializer::serialize_struct( - __serializer, - #name, - #len, - )); - #(#serialize_fields)* - _serde::ser::SerializeStruct::end(__serde_state) - } - } - } -} - -fn serialize_struct_variant_with_flatten<'a>( - context: StructVariant<'a>, - params: &Parameters, - fields: &[Field], - name: &str, -) -> Fragment { - let struct_trait = StructTrait::SerializeMap; - let serialize_fields = serialize_struct_visitor(fields, params, true, &struct_trait); - - let mut serialized_fields = fields - .iter() - .filter(|&field| !field.attrs.skip_serializing()) - .peekable(); - - let let_mut = mut_if(serialized_fields.peek().is_some()); - - match context { - StructVariant::ExternallyTagged { - variant_index, - variant_name, - } => { - let this = ¶ms.this; - let fields_ty = fields.iter().map(|f| &f.ty); - let members = &fields.iter().map(|f| &f.member).collect::>(); - - let (_, ty_generics, where_clause) = params.generics.split_for_impl(); - let wrapper_generics = bound::with_lifetime_bound(¶ms.generics, "'__a"); - let (wrapper_impl_generics, wrapper_ty_generics, _) = wrapper_generics.split_for_impl(); - - quote_block! 
{ - struct __EnumFlatten #wrapper_generics #where_clause { - data: (#(&'__a #fields_ty,)*), - phantom: _serde::__private::PhantomData<#this #ty_generics>, - } - - impl #wrapper_impl_generics _serde::Serialize for __EnumFlatten #wrapper_ty_generics #where_clause { - fn serialize<__S>(&self, __serializer: __S) -> _serde::__private::Result<__S::Ok, __S::Error> - where - __S: _serde::Serializer, - { - let (#(#members,)*) = self.data; - let #let_mut __serde_state = try!(_serde::Serializer::serialize_map( - __serializer, - _serde::__private::None)); - #(#serialize_fields)* - _serde::ser::SerializeMap::end(__serde_state) - } - } - - _serde::Serializer::serialize_newtype_variant( - __serializer, - #name, - #variant_index, - #variant_name, - &__EnumFlatten { - data: (#(#members,)*), - phantom: _serde::__private::PhantomData::<#this #ty_generics>, - }) - } - } - StructVariant::InternallyTagged { tag, variant_name } => { - quote_block! { - let #let_mut __serde_state = try!(_serde::Serializer::serialize_map( - __serializer, - _serde::__private::None)); - try!(_serde::ser::SerializeMap::serialize_entry( - &mut __serde_state, - #tag, - #variant_name, - )); - #(#serialize_fields)* - _serde::ser::SerializeMap::end(__serde_state) - } - } - StructVariant::Untagged => { - quote_block! { - let #let_mut __serde_state = try!(_serde::Serializer::serialize_map( - __serializer, - _serde::__private::None)); - #(#serialize_fields)* - _serde::ser::SerializeMap::end(__serde_state) - } - } - } -} - -fn serialize_tuple_struct_visitor( - fields: &[Field], - params: &Parameters, - is_enum: bool, - tuple_trait: &TupleTrait, -) -> Vec { - fields - .iter() - .enumerate() - .filter(|(_, field)| !field.attrs.skip_serializing()) - .map(|(i, field)| { - let mut field_expr = if is_enum { - let id = Ident::new(&format!("__field{}", i), Span::call_site()); - quote!(#id) - } else { - get_member( - params, - field, - &Member::Unnamed(Index { - index: i as u32, - span: Span::call_site(), - }), - ) - }; - - let skip = field - .attrs - .skip_serializing_if() - .map(|path| quote!(#path(#field_expr))); - - if let Some(path) = field.attrs.serialize_with() { - field_expr = wrap_serialize_field_with(params, field.ty, path, &field_expr); - } - - let span = field.original.span(); - let func = tuple_trait.serialize_element(span); - let ser = quote! { - try!(#func(&mut __serde_state, #field_expr)); - }; - - match skip { - None => ser, - Some(skip) => quote!(if !#skip { #ser }), - } - }) - .collect() -} - -fn serialize_struct_visitor( - fields: &[Field], - params: &Parameters, - is_enum: bool, - struct_trait: &StructTrait, -) -> Vec { - fields - .iter() - .filter(|&field| !field.attrs.skip_serializing()) - .map(|field| { - let member = &field.member; - - let mut field_expr = if is_enum { - quote!(#member) - } else { - get_member(params, field, member) - }; - - let key_expr = field.attrs.name().serialize_name(); - - let skip = field - .attrs - .skip_serializing_if() - .map(|path| quote!(#path(#field_expr))); - - if let Some(path) = field.attrs.serialize_with() { - field_expr = wrap_serialize_field_with(params, field.ty, path, &field_expr); - } - - let span = field.original.span(); - let ser = if field.attrs.flatten() { - let func = quote_spanned!(span=> _serde::Serialize::serialize); - quote! { - try!(#func(&#field_expr, _serde::__private::ser::FlatMapSerializer(&mut __serde_state))); - } - } else { - let func = struct_trait.serialize_field(span); - quote! 
{ - try!(#func(&mut __serde_state, #key_expr, #field_expr)); - } - }; - - match skip { - None => ser, - Some(skip) => { - if let Some(skip_func) = struct_trait.skip_field(span) { - quote! { - if !#skip { - #ser - } else { - try!(#skip_func(&mut __serde_state, #key_expr)); - } - } - } else { - quote! { - if !#skip { - #ser - } - } - } - } - } - }) - .collect() -} - -fn wrap_serialize_field_with( - params: &Parameters, - field_ty: &syn::Type, - serialize_with: &syn::ExprPath, - field_expr: &TokenStream, -) -> TokenStream { - wrap_serialize_with(params, serialize_with, &[field_ty], &[quote!(#field_expr)]) -} - -fn wrap_serialize_variant_with( - params: &Parameters, - serialize_with: &syn::ExprPath, - variant: &Variant, -) -> TokenStream { - let field_tys: Vec<_> = variant.fields.iter().map(|field| field.ty).collect(); - let field_exprs: Vec<_> = variant - .fields - .iter() - .map(|field| { - let id = match &field.member { - Member::Named(ident) => ident.clone(), - Member::Unnamed(member) => { - Ident::new(&format!("__field{}", member.index), Span::call_site()) - } - }; - quote!(#id) - }) - .collect(); - wrap_serialize_with( - params, - serialize_with, - field_tys.as_slice(), - field_exprs.as_slice(), - ) -} - -fn wrap_serialize_with( - params: &Parameters, - serialize_with: &syn::ExprPath, - field_tys: &[&syn::Type], - field_exprs: &[TokenStream], -) -> TokenStream { - let this = ¶ms.this; - let (_, ty_generics, where_clause) = params.generics.split_for_impl(); - - let wrapper_generics = if field_exprs.is_empty() { - params.generics.clone() - } else { - bound::with_lifetime_bound(¶ms.generics, "'__a") - }; - let (wrapper_impl_generics, wrapper_ty_generics, _) = wrapper_generics.split_for_impl(); - - let field_access = (0..field_exprs.len()).map(|n| { - Member::Unnamed(Index { - index: n as u32, - span: Span::call_site(), - }) - }); - - quote!({ - struct __SerializeWith #wrapper_impl_generics #where_clause { - values: (#(&'__a #field_tys, )*), - phantom: _serde::__private::PhantomData<#this #ty_generics>, - } - - impl #wrapper_impl_generics _serde::Serialize for __SerializeWith #wrapper_ty_generics #where_clause { - fn serialize<__S>(&self, __s: __S) -> _serde::__private::Result<__S::Ok, __S::Error> - where - __S: _serde::Serializer, - { - #serialize_with(#(self.values.#field_access, )* __s) - } - } - - &__SerializeWith { - values: (#(#field_exprs, )*), - phantom: _serde::__private::PhantomData::<#this #ty_generics>, - } - }) -} - -// Serialization of an empty struct results in code like: -// -// let mut __serde_state = try!(serializer.serialize_struct("S", 0)); -// _serde::ser::SerializeStruct::end(__serde_state) -// -// where we want to omit the `mut` to avoid a warning. 
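The comment above explains why the generated `let` binding sometimes omits `mut`. As an illustration only (not part of this patch), here is a minimal sketch of how an `Option<TokenStream>` interpolates in `quote!` to conditionally emit the `mut` keyword; it assumes the `quote` and `proc-macro2` crates that the vendored code already depends on.

```rust
// Illustrative sketch, not part of the diff: `None` interpolates as nothing,
// so the generated binding carries `mut` only when at least one field is
// actually serialized.
use proc_macro2::TokenStream;
use quote::quote;

fn mut_if(is_mut: bool) -> Option<TokenStream> {
    if is_mut { Some(quote!(mut)) } else { None }
}

fn main() {
    let let_mut = mut_if(false);
    let tokens = quote! { let #let_mut __serde_state = (); };
    // Prints roughly `let __serde_state = () ;` -- no `mut`, hence no warning.
    println!("{}", tokens);
}
```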
-fn mut_if(is_mut: bool) -> Option { - if is_mut { - Some(quote!(mut)) - } else { - None - } -} - -fn get_member(params: &Parameters, field: &Field, member: &Member) -> TokenStream { - let self_var = ¶ms.self_var; - match (params.is_remote, field.attrs.getter()) { - (false, None) => { - if params.is_packed { - quote!(&{#self_var.#member}) - } else { - quote!(&#self_var.#member) - } - } - (true, None) => { - let inner = if params.is_packed { - quote!(&{#self_var.#member}) - } else { - quote!(&#self_var.#member) - }; - let ty = field.ty; - quote!(_serde::__private::ser::constrain::<#ty>(#inner)) - } - (true, Some(getter)) => { - let ty = field.ty; - quote!(_serde::__private::ser::constrain::<#ty>(&#getter(#self_var))) - } - (false, Some(_)) => { - unreachable!("getter is only allowed for remote impls"); - } - } -} - -fn effective_style(variant: &Variant) -> Style { - match variant.style { - Style::Newtype if variant.fields[0].attrs.skip_serializing() => Style::Unit, - other => other, - } -} - -enum StructTrait { - SerializeMap, - SerializeStruct, - SerializeStructVariant, -} - -impl StructTrait { - fn serialize_field(&self, span: Span) -> TokenStream { - match *self { - StructTrait::SerializeMap => { - quote_spanned!(span=> _serde::ser::SerializeMap::serialize_entry) - } - StructTrait::SerializeStruct => { - quote_spanned!(span=> _serde::ser::SerializeStruct::serialize_field) - } - StructTrait::SerializeStructVariant => { - quote_spanned!(span=> _serde::ser::SerializeStructVariant::serialize_field) - } - } - } - - fn skip_field(&self, span: Span) -> Option { - match *self { - StructTrait::SerializeMap => None, - StructTrait::SerializeStruct => { - Some(quote_spanned!(span=> _serde::ser::SerializeStruct::skip_field)) - } - StructTrait::SerializeStructVariant => { - Some(quote_spanned!(span=> _serde::ser::SerializeStructVariant::skip_field)) - } - } - } -} - -enum TupleTrait { - SerializeTuple, - SerializeTupleStruct, - SerializeTupleVariant, -} - -impl TupleTrait { - fn serialize_element(&self, span: Span) -> TokenStream { - match *self { - TupleTrait::SerializeTuple => { - quote_spanned!(span=> _serde::ser::SerializeTuple::serialize_element) - } - TupleTrait::SerializeTupleStruct => { - quote_spanned!(span=> _serde::ser::SerializeTupleStruct::serialize_field) - } - TupleTrait::SerializeTupleVariant => { - quote_spanned!(span=> _serde::ser::SerializeTupleVariant::serialize_field) - } - } - } -} diff --git a/vendor/serde_derive/src/try.rs b/vendor/serde_derive/src/try.rs deleted file mode 100644 index 48cceeba..00000000 --- a/vendor/serde_derive/src/try.rs +++ /dev/null @@ -1,24 +0,0 @@ -use proc_macro2::{Punct, Spacing, TokenStream}; - -// None of our generated code requires the `From::from` error conversion -// performed by the standard library's `try!` macro. With this simplified macro -// we see a significant improvement in type checking and borrow checking time of -// the generated code and a slight improvement in binary size. -pub fn replacement() -> TokenStream { - // Cannot pass `$expr` to `quote!` prior to Rust 1.17.0 so interpolate it. - let dollar = Punct::new('$', Spacing::Alone); - - quote! { - #[allow(unused_macros)] - macro_rules! 
try { - (#dollar __expr:expr) => { - match #dollar __expr { - _serde::__private::Ok(__val) => __val, - _serde::__private::Err(__err) => { - return _serde::__private::Err(__err); - } - } - } - } - } -} diff --git a/vendor/static_assertions/.cargo-checksum.json b/vendor/static_assertions/.cargo-checksum.json deleted file mode 100644 index eb917cb9..00000000 --- a/vendor/static_assertions/.cargo-checksum.json +++ /dev/null @@ -1 +0,0 @@ -{"files":{"CHANGELOG.md":"750f74f3b520672a81dd2ede5c097cfe972c0da181dd8fd010e7131a3526d4fd","Cargo.toml":"6531dbe3d557e427f9e3510e50cdf3de751a319eece11c9a937b35cfab8744c4","LICENSE-APACHE":"cfc7749b96f63bd31c3c42b5c471bf756814053e847c10f3eb003417bc523d30","LICENSE-MIT":"ea084a2373ebc1f0902c09266e7bf25a05ab3814c1805bb017ffa7308f90c061","README.md":"88cc779ada8c6e1362c2d095c179284ec2755797729946ebccfe8264fcff0f8e","src/assert_cfg.rs":"ce9230bcc055d8df21ceefbed4233df5e73ecb832829ba23ac8d7f54ec457522","src/assert_eq_align.rs":"f09c4ec30e476446ab337a4a9ed950edf21c780a42ece7613f0ffc20225331ae","src/assert_eq_size.rs":"c06d2ff44e1f7af8a57b7c2fe5a9c69aa2a90d12cd41614c9d4f3ae551cdb64c","src/assert_fields.rs":"9c5baeac0215be557213eec9e80fc00de10a721d9b2c369fece743fcc6ccdc8e","src/assert_impl.rs":"fe5a2fffcbb78f60991bbc590481e74fd0d5f7ed8fa75718644599e1fae117ce","src/assert_obj_safe.rs":"88584809f3aa2dfce966b0adbeb6128191229465d653e57b68f968be001eff03","src/assert_trait.rs":"87b2d4dbd4334d9ace9880b81f3a1fbf91725c5e152b34a74c86457ca40ece30","src/assert_type.rs":"7cd942341efa7ed0b89b5e4fe1faa62bafb537ec6cf72ee6866115daee931885","src/const_assert.rs":"e0dcfe9016e34455f5efcb23500f716386d42b9fb09c6d08566d03dce8249e51","src/lib.rs":"4cbaaff38a6ac16631e774ab5a349056cbd2335ac570aeb46c5616eaf9700c3b"},"package":"a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"} \ No newline at end of file diff --git a/vendor/static_assertions/CHANGELOG.md b/vendor/static_assertions/CHANGELOG.md deleted file mode 100644 index 80d55075..00000000 --- a/vendor/static_assertions/CHANGELOG.md +++ /dev/null @@ -1,181 +0,0 @@ -# Changelog [![Crates.io][crate-badge]][crate] -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog] and this project adheres to -[Semantic Versioning]. - -## [Unreleased] - -## [1.1.0] - 2019-11-03 -### Added -- `assert_impl_any!` macro -- `assert_impl_one!` macro -- `assert_trait_sub_all!` macro -- `assert_trait_super_all!` macro -- Frequently asked questions to `README.md` - -### Fixed -- `assert_eq_size_val!`, `const_assert_eq!`, and `const_assert_ne!` to export - their local inner macros. 
Not having this prevented them from working when - `use`d or called directly via `static_assertions::macro!(...)` - -### Removed -- Unused `_assert_obj_safe!` from pre-1.0 - -## [1.0.0] - 2019-10-02 -### Added -- `assert_eq_align!` macro - -### Removed -- **[breaking]** Labels from macros that needed them 🎉 - - Made possible by [`const _`] in Rust 1.37 -- **[breaking]** `assert_impl!` macro - -### Fixed -- `assert_fields!` now works for `enum` types with multiple variants - -### Changed -- **[breaking]** `const_assert!` macro to only take one expression - - Reasoning: when custom error messages are added in the future (via - [`assert!`]), having the macro allow for multiple comma-separated - expressions may lead to ambiguity -- **[breaking]** Trait assertions to use `Type: Trait` syntax -- **[breaking]** Field assertions to use `Type: field1, field2` syntax -- **[breaking]** Renamed `assert_{ne,eq}_type!` to `assert_type_{ne,eq}_all!` - -## [0.3.4] - 2019-06-12 -### Changed -- Aliased `assert_impl!` to `assert_impl_all!` and deprecated `assert_impl!` - -### Added -- `assert_impl_all!` as replacement to `assert_impl!` -- `assert_not_impl_all!` and `assert_not_impl_any!` macro counterparts to - `assert_impl_all!` - -### Fixed -- `assert_eq_type!` now works with types involving lifetimes - -## [0.3.3] - 2019-06-12 -### Added -- `const_assert_ne!` macro counterpart to `const_assert_eq!` - -### Fixed -- `assert_eq_type!` would pass when types can coerce via `Deref`, such as with - `str` and `String` - -## [0.3.2] - 2019-05-15 -### Added -- A `assert_eq_type!` macro that allows for checking whether inputs are the same - concrete type -- A `assert_ne_type!` macro for checking whether inputs all refer to different - types - -### Fixed -- `const_assert!` now only takes `bool` values whereas integer (or other type) - values could previously be passed - -## [0.3.1] - 2018-11-15 -### Fixed -- Macros that refer to other internal macros can now be imported when compiling - for Rust 2018 ([issue - #10](https://github.com/nvzqz/static-assertions-rs/issues/10)) - -## [0.3.0] - 2018-11-14 -### Changed -- Bumped minimum supported (automatically tested) Rust version to 1.24.0 -- Moved message parameter for `assert_cfg!()` to last argument position, making - it consistent with other macros - -### Removed -- No need to use `macro!(label; ...)` syntax when compiling on nightly Rust and - enabling the `nightly` feature flag - -## [0.2.5] - 2017-12-12 -### Changed -- `assert_eq_size_ptr` wraps its code inside of a closure, ensuring that the - unsafe code inside never runs -- Clippy no longer warns about `unneeded_field_pattern` within `assert_fields` - -### Added -- Much better documentation with test examples that are guaranteed to fail at - compile-time - -### Removed -- Removed testing features; compile failure tests are now done via doc tests - -## [0.2.4] - 2017-12-11 -### Removed -- Removed the actual call to `mem::transmute` while still utilizing it for size - verification ([Simon Sapin], [#5]) - -### Added -- `assert_cfg` macro that asserts that the given configuration is set -- `assert_fields` macro to assert that a struct type or enum variant has a given - field - -### Fixed -- Allow more generics flexibility in `assert_impl` - -## [0.2.3] - 2017-08-24 -### Fixed -- Trailing commas are now allowed - -### Removed -- Removed clippy warnings - -## [0.2.2] - 2017-08-13 -### Added -- Added `assert_impl` macro to ensure a type implements a given set of traits - -## [0.2.1] - 2017-08-13 -### Added -- Added 
`assert_obj_safe` macro for ensuring that a trait is object-safe - -## [0.2.0] - 2017-08-12 -### Added -- Added `assert_eq_size_ptr` macro - -### Fixed -- Allow `assert_eq_size`, `const_assert`, and `const_assert_eq` in non-function - contexts via providing a unique label [#1] - -### Removed -- **[Breaking]** Semicolon-separated `assert_eq_size` is no longer allowed - -## [0.1.1] - 2017-08-12 -### Added -- Added `const_assert_eq` macro - -## 0.1.0 - 2017-08-12 - -Initial release - -[Simon Sapin]: https://github.com/SimonSapin - -[`assert!`]: https://doc.rust-lang.org/stable/std/macro.assert.html -[`const _`]: https://github.com/rust-lang/rfcs/blob/master/text/2526-const-wildcard.md - -[#1]: https://github.com/nvzqz/static-assertions-rs/issues/1 -[#5]: https://github.com/nvzqz/static-assertions-rs/pull/5 - -[crate]: https://crates.io/crates/static_assertions -[crate-badge]: https://img.shields.io/crates/v/static_assertions.svg - -[Keep a Changelog]: http://keepachangelog.com/en/1.0.0/ -[Semantic Versioning]: http://semver.org/spec/v2.0.0.html - -[Unreleased]: https://github.com/nvzqz/static-assertions-rs/compare/v1.1.0...HEAD -[1.1.0]: https://github.com/nvzqz/static-assertions-rs/compare/v1.0.0...v1.1.0 -[1.0.0]: https://github.com/nvzqz/static-assertions-rs/compare/v0.3.4...v1.0.0 -[0.3.4]: https://github.com/nvzqz/static-assertions-rs/compare/v0.3.3...v0.3.4 -[0.3.3]: https://github.com/nvzqz/static-assertions-rs/compare/v0.3.2...v0.3.3 -[0.3.2]: https://github.com/nvzqz/static-assertions-rs/compare/v0.3.1...v0.3.2 -[0.3.1]: https://github.com/nvzqz/static-assertions-rs/compare/v0.3.0...v0.3.1 -[0.3.0]: https://github.com/nvzqz/static-assertions-rs/compare/v0.2.5...v0.3.0 -[0.2.5]: https://github.com/nvzqz/static-assertions-rs/compare/v0.2.4...v0.2.5 -[0.2.4]: https://github.com/nvzqz/static-assertions-rs/compare/v0.2.3...v0.2.4 -[0.2.3]: https://github.com/nvzqz/static-assertions-rs/compare/v0.2.2...v0.2.3 -[0.2.2]: https://github.com/nvzqz/static-assertions-rs/compare/v0.2.1...v0.2.2 -[0.2.1]: https://github.com/nvzqz/static-assertions-rs/compare/v0.2.0...v0.2.1 -[0.2.0]: https://github.com/nvzqz/static-assertions-rs/compare/v0.1.1...v0.2.0 -[0.1.1]: https://github.com/nvzqz/static-assertions-rs/compare/v0.1.0...v0.1.1 diff --git a/vendor/static_assertions/Cargo.toml b/vendor/static_assertions/Cargo.toml deleted file mode 100644 index 8651ce1e..00000000 --- a/vendor/static_assertions/Cargo.toml +++ /dev/null @@ -1,39 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies -# -# If you believe there's an error in this file please file an -# issue against the rust-lang/cargo repository. If you're -# editing this file be aware that the upstream Cargo.toml -# will likely look very different (and much more reasonable) - -[package] -name = "static_assertions" -version = "1.1.0" -authors = ["Nikolai Vazquez"] -include = ["Cargo.toml", "src/**/*.rs", "README.md", "CHANGELOG.md", "LICENSE*"] -description = "Compile-time assertions to ensure that invariants are met." 
-homepage = "https://github.com/nvzqz/static-assertions-rs" -documentation = "https://docs.rs/static_assertions/" -readme = "README.md" -keywords = ["assert", "static", "testing"] -categories = ["no-std", "rust-patterns", "development-tools::testing"] -license = "MIT OR Apache-2.0" -repository = "https://github.com/nvzqz/static-assertions-rs" - -[features] -nightly = [] -[badges.is-it-maintained-issue-resolution] -repository = "nvzqz/static-assertions-rs" - -[badges.is-it-maintained-open-issues] -repository = "nvzqz/static-assertions-rs" - -[badges.maintenance] -status = "passively-maintained" - -[badges.travis-ci] -repository = "nvzqz/static-assertions-rs" diff --git a/vendor/static_assertions/LICENSE-APACHE b/vendor/static_assertions/LICENSE-APACHE deleted file mode 100644 index d6456956..00000000 --- a/vendor/static_assertions/LICENSE-APACHE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/vendor/static_assertions/LICENSE-MIT b/vendor/static_assertions/LICENSE-MIT deleted file mode 100644 index ccc94459..00000000 --- a/vendor/static_assertions/LICENSE-MIT +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2017 Nikolai Vazquez - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/static_assertions/README.md b/vendor/static_assertions/README.md deleted file mode 100644 index bca222ed..00000000 --- a/vendor/static_assertions/README.md +++ /dev/null @@ -1,188 +0,0 @@ -[![Banner](https://raw.githubusercontent.com/nvzqz/static-assertions-rs/assets/Banner.png)](https://github.com/nvzqz/static-assertions-rs) - - - -Compile-time assertions for Rust, brought to you by -[Nikolai Vazquez](https://twitter.com/NikolaiVazquez). - -This library lets you ensure correct assumptions about constants, types, and -more. See the [docs] and [FAQ](#faq) for more info! 
- -## Installation - -This crate is available -[on crates.io](https://crates.io/crates/static_assertions) and can be used by -adding the following to your project's -[`Cargo.toml`](https://doc.rust-lang.org/cargo/reference/manifest.html): - -```toml -[dependencies] -static_assertions = "1.1.0" -``` - -and this to your crate root (`main.rs` or `lib.rs`): - -```rust -#[macro_use] -extern crate static_assertions; -``` - -## Usage - -This crate exposes the following macros: -- [`assert_cfg!`] -- [`assert_eq_align!`] -- [`assert_eq_size!`] -- [`assert_eq_size_ptr!`] -- [`assert_eq_size_val!`] -- [`assert_fields!`] -- [`assert_impl_all!`] -- [`assert_impl_any!`] -- [`assert_impl_one!`] -- [`assert_not_impl_all!`] -- [`assert_not_impl_any!`] -- [`assert_obj_safe!`] -- [`assert_trait_sub_all!`] -- [`assert_trait_super_all!`] -- [`assert_type_eq_all!`] -- [`assert_type_ne_all!`] -- [`const_assert!`] -- [`const_assert_eq!`] -- [`const_assert_ne!`] - -## FAQ - -- **Q:** When would I want to use this? - - **A:** This library is useful for when wanting to ensure properties of - constants, types, and traits. - - Basic examples: - - - With the release of 1.39, `str::len` can be called in a `const` - context. Using [`const_assert!`], one can check that a string generated from - elsewhere is of a given size: - - ```rust - const DATA: &str = include_str!("path/to/string.txt"); - - const_assert!(DATA.len() < 512); - ``` - - - Have a type that absolutely must implement certain traits? With - [`assert_impl_all!`], one can ensure this: - - ```rust - struct Foo { - value: // ... - } - - assert_impl_all!(Foo: Send, Sync); - ``` - -- **Q:** How can I contribute? - - **A:** A couple of ways! You can: - - - Attempt coming up with some form of static analysis that you'd like to see - implemented. Create a [new issue] and describe how you'd imagine your - assertion to work, with example code to demonstrate. - - - Implement your own static assertion and create a [pull request]. - - - Give feedback. What are some pain points? Where is it unpleasant? - - - Write docs. If you're familiar with how this library works, sharing your - knowledge with the rest its users would be great! - -- **Q:** Will this affect my compiled binary? - - **A:** Nope! There is zero runtime cost to using this because all checks are - at compile-time, and so no code is emitted to run. - -- **Q:** Will this affect my compile times? - - **A:** Likely not by anything perceivable. If this is a concern, this library - can be put in `dev-dependencies`: - - ```toml - [dev-dependencies] - static_assertions = "1.1.0" - ``` - - and then assertions can be conditionally run behind `#[cfg(test)]`: - - ```rust - #[cfg(test)] - const_assert_eq!(MEANING_OF_LIFE, 42); - ``` - - However, the assertions will only be checked when running `cargo test`. This - somewhat defeats the purpose of catching false static conditions up-front with - a compilation failure. - -- **Q:** What is `const _`? - - **A:** It's a way of creating an unnamed constant. This is used so that macros - can be called from a global scope without requiring a scope-unique label. This - library makes use of the side effects of evaluating the `const` expression. - See the feature's - [tracking issue](https://github.com/rust-lang/rust/issues/54912) - and - [issue #1](https://github.com/nvzqz/static-assertions-rs/issues/1) - for more info. 
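To make the `const _` answer above concrete, here is a small free-standing sketch (mine, not taken from the crate) of the unnamed-constant trick: the macro expansion lives inside a `const _` item so it can appear at module scope without needing a unique label, exactly as the crate's own macros shown in this diff do.

```rust
// Stand-alone illustration of the `const _` trick described in the FAQ.
// The unnamed constant exists only to force the check at compile time; if
// the sizes differ, the `let` binding fails to type-check and the build stops.
const _: fn() = || {
    let _: [(); 4] = [(); core::mem::size_of::<u32>()];
};

fn main() {}
```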
- -## Changes - -See [`CHANGELOG.md`](https://github.com/nvzqz/static-assertions-rs/blob/master/CHANGELOG.md) -for a complete list of what has changed from one version to another. - -## License - -This project is released under either: - -- [MIT License](https://github.com/nvzqz/static-assertions-rs/blob/master/LICENSE-MIT) - -- [Apache License (Version 2.0)](https://github.com/nvzqz/static-assertions-rs/blob/master/LICENSE-APACHE) - -at your choosing. - -[new issue]: https://github.com/nvzqz/static-assertions-rs/issues/new -[pull request]: https://github.com/nvzqz/static-assertions-rs/pulls -[docs]: https://docs.rs/static_assertions - -[`assert_cfg!`]: https://docs.rs/static_assertions/1.1.0/static_assertions/macro.assert_cfg.html -[`assert_eq_align!`]: https://docs.rs/static_assertions/1.1.0/static_assertions/macro.assert_eq_align.html -[`assert_eq_size!`]: https://docs.rs/static_assertions/1.1.0/static_assertions/macro.assert_eq_size.html -[`assert_eq_size_ptr!`]: https://docs.rs/static_assertions/1.1.0/static_assertions/macro.assert_eq_size_ptr.html -[`assert_eq_size_val!`]: https://docs.rs/static_assertions/1.1.0/static_assertions/macro.assert_eq_size_val.html -[`assert_fields!`]: https://docs.rs/static_assertions/1.1.0/static_assertions/macro.assert_fields.html -[`assert_impl_all!`]: https://docs.rs/static_assertions/1.1.0/static_assertions/macro.assert_impl_all.html -[`assert_impl_any!`]: https://docs.rs/static_assertions/1.1.0/static_assertions/macro.assert_impl_any.html -[`assert_impl_one!`]: https://docs.rs/static_assertions/1.1.0/static_assertions/macro.assert_impl_one.html -[`assert_not_impl_all!`]: https://docs.rs/static_assertions/1.1.0/static_assertions/macro.assert_not_impl_all.html -[`assert_not_impl_any!`]: https://docs.rs/static_assertions/1.1.0/static_assertions/macro.assert_not_impl_any.html -[`assert_obj_safe!`]: https://docs.rs/static_assertions/1.1.0/static_assertions/macro.assert_obj_safe.html -[`assert_trait_sub_all!`]: https://docs.rs/static_assertions/1.1.0/static_assertions/macro.assert_trait_sub_all.html -[`assert_trait_super_all!`]: https://docs.rs/static_assertions/1.1.0/static_assertions/macro.assert_trait_super_all.html -[`assert_type_eq_all!`]: https://docs.rs/static_assertions/1.1.0/static_assertions/macro.assert_type_eq_all.html -[`assert_type_ne_all!`]: https://docs.rs/static_assertions/1.1.0/static_assertions/macro.assert_type_ne_all.html -[`const_assert!`]: https://docs.rs/static_assertions/1.1.0/static_assertions/macro.const_assert.html -[`const_assert_eq!`]: https://docs.rs/static_assertions/1.1.0/static_assertions/macro.const_assert_eq.html -[`const_assert_ne!`]: https://docs.rs/static_assertions/1.1.0/static_assertions/macro.const_assert_ne.html diff --git a/vendor/static_assertions/src/assert_cfg.rs b/vendor/static_assertions/src/assert_cfg.rs deleted file mode 100644 index 24282c1f..00000000 --- a/vendor/static_assertions/src/assert_cfg.rs +++ /dev/null @@ -1,49 +0,0 @@ -/// Asserts that a given configuration is set. -/// -/// # Examples -/// -/// A project will simply fail to compile if the given configuration is not set. -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// // We're not masochists -/// # #[cfg(not(target_pointer_width = "16"))] // Just in case -/// assert_cfg!(not(target_pointer_width = "16")); -/// ``` -/// -/// If a project does not support a set of configurations, you may want to -/// report why. 
There is the option of providing a compile error message string: -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// # #[cfg(any(unix, windows))] -/// assert_cfg!(any(unix, windows), "There is only support for Unix or Windows"); -/// -/// // User needs to specify a database back-end -/// # #[cfg(target_pointer_width = "0")] // Impossible -/// assert_cfg!(all(not(all(feature = "mysql", feature = "mongodb")), -/// any( feature = "mysql", feature = "mongodb")), -/// "Must exclusively use MySQL or MongoDB as database back-end"); -/// ``` -/// -/// Some configurations are impossible. For example, we can't be compiling for -/// both macOS _and_ Windows simultaneously: -/// -/// ```compile_fail -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// assert_cfg!(all(target_os = "macos", -/// target_os = "windows"), -/// "No, that's not how it works! ಠ_ಠ"); -/// ``` -#[macro_export] -macro_rules! assert_cfg { - () => {}; - ($($cfg:meta)+, $msg:expr $(,)?) => { - #[cfg(not($($cfg)+))] - compile_error!($msg); - }; - ($($cfg:tt)*) => { - #[cfg(not($($cfg)*))] - compile_error!(concat!("Cfg does not pass: ", stringify!($($cfg)*))); - }; -} diff --git a/vendor/static_assertions/src/assert_eq_align.rs b/vendor/static_assertions/src/assert_eq_align.rs deleted file mode 100644 index 69412dae..00000000 --- a/vendor/static_assertions/src/assert_eq_align.rs +++ /dev/null @@ -1,45 +0,0 @@ -/// Asserts that types are equal in alignment. -/// -/// This is useful when ensuring that pointer arithmetic is done correctly, or -/// when [FFI] requires a type to have the same alignment as some foreign type. -/// -/// # Examples -/// -/// A `usize` has the same alignment as any pointer type: -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// assert_eq_align!(usize, *const u8, *mut u8); -/// ``` -/// -/// The following passes because `[i32; 4]` has the same alignment as `i32`: -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// assert_eq_align!([i32; 4], i32); -/// ``` -/// -/// The following example fails to compile because `i32x4` explicitly has 4 -/// times the alignment as `[i32; 4]`: -/// -/// ```compile_fail -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// # #[allow(non_camel_case_types)] -/// #[repr(align(16))] -/// struct i32x4([i32; 4]); -/// -/// assert_eq_align!(i32x4, [i32; 4]); -/// ``` -/// -/// [FFI]: https://en.wikipedia.org/wiki/Foreign_function_interface -#[macro_export] -macro_rules! assert_eq_align { - ($x:ty, $($xs:ty),+ $(,)?) => { - const _: fn() = || { - // Assigned instance must match the annotated type or else it will - // fail to compile - use $crate::_core::mem::align_of; - $(let _: [(); align_of::<$x>()] = [(); align_of::<$xs>()];)+ - }; - }; -} diff --git a/vendor/static_assertions/src/assert_eq_size.rs b/vendor/static_assertions/src/assert_eq_size.rs deleted file mode 100644 index 9c3c4901..00000000 --- a/vendor/static_assertions/src/assert_eq_size.rs +++ /dev/null @@ -1,123 +0,0 @@ -/// Asserts that types are equal in size. -/// -/// When performing operations such as pointer casts or dealing with [`usize`] -/// versus [`u64`] versus [`u32`], the size of your types matter. That is where -/// this macro comes into play. -/// -/// # Alternatives -/// -/// There also exists [`assert_eq_size_val`](macro.assert_eq_size_val.html) and -/// [`assert_eq_size_ptr`](macro.assert_eq_size_ptr.html). 
Instead of specifying -/// types to compare, values' sizes can be directly compared against each other. -/// -/// # Examples -/// -/// These three types, despite being very different, all have the same size: -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// assert_eq_size!([u8; 4], (u16, u16), u32); -/// ``` -/// -/// The following example fails to compile because `u32` has 4 times the size of -/// `u8`: -/// -/// ```compile_fail -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// assert_eq_size!(u32, u8); -/// ``` -/// -/// [`usize`]: https://doc.rust-lang.org/std/primitive.usize.html -/// [`u64`]: https://doc.rust-lang.org/std/primitive.u64.html -/// [`u32`]: https://doc.rust-lang.org/std/primitive.u32.html -#[macro_export] -macro_rules! assert_eq_size { - ($x:ty, $($xs:ty),+ $(,)?) => { - const _: fn() = || { - $(let _ = $crate::_core::mem::transmute::<$x, $xs>;)+ - }; - }; -} - -/// Asserts that values pointed to are equal in size. -/// -/// # Examples -/// -/// This especially is useful for when coercing pointers between different types -/// and ensuring the underlying values are the same size. -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// fn operation(x: &(u32, u32), y: &[u16; 4]) { -/// assert_eq_size_ptr!(x, y); -/// // ... -/// } -/// ``` -/// -/// The following example fails to compile because byte arrays of different -/// lengths have different sizes: -/// -/// ```compile_fail -/// # #[macro_use] extern crate static_assertions; -/// # fn main() { -/// static BYTES: &[u8; 4] = &[ -/// /* ... */ -/// # 0; 4 -/// ]; -/// -/// static TABLE: &[u8; 16] = &[ -/// /* ... */ -/// # 0; 16 -/// ]; -/// -/// assert_eq_size_ptr!(BYTES, TABLE); -/// ``` -#[macro_export] -macro_rules! assert_eq_size_ptr { - ($x:expr, $($xs:expr),+ $(,)?) => { - #[allow(unknown_lints, unsafe_code, forget_copy, useless_transmute)] - let _ = || unsafe { - use $crate::_core::{mem, ptr}; - let mut copy = ptr::read($x); - $(ptr::write(&mut copy, mem::transmute(ptr::read($xs)));)+ - mem::forget(copy); - }; - } -} - -/// Asserts that values are equal in size. -/// -/// This macro doesn't consume its arguments and thus works for -/// non-[`Clone`]able values. -/// -/// # Examples -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; -/// # fn main() { -/// struct Byte(u8); -/// -/// let x = 10u8; -/// let y = Byte(42); // Works for non-cloneable types -/// -/// assert_eq_size_val!(x, y); -/// assert_eq_size_val!(x, y, 0u8); -/// # } -/// ``` -/// -/// Even though both values are 0, they are of types with different sizes: -/// -/// ```compile_fail -/// # #[macro_use] extern crate static_assertions; -/// # fn main() { -/// assert_eq_size_val!(0u8, 0u32); -/// # } -/// ``` -/// -/// [`Clone`]: https://doc.rust-lang.org/std/clone/trait.Clone.html -#[macro_export(local_inner_macros)] -macro_rules! assert_eq_size_val { - ($x:expr, $($xs:expr),+ $(,)?) => { - assert_eq_size_ptr!(&$x, $(&$xs),+); - } -} diff --git a/vendor/static_assertions/src/assert_fields.rs b/vendor/static_assertions/src/assert_fields.rs deleted file mode 100644 index 00f62427..00000000 --- a/vendor/static_assertions/src/assert_fields.rs +++ /dev/null @@ -1,72 +0,0 @@ -/// Asserts that the type has the given fields. -/// -/// # Examples -/// -/// One common use case is when types have fields defined multiple times as a -/// result of `#[cfg]`. This can be an issue when exposing a public API. 
-/// -/// ``` -/// # #[macro_use] extern crate static_assertions; -/// pub struct Ty { -/// #[cfg(windows)] -/// pub val1: u8, -/// #[cfg(not(windows))] -/// pub val1: usize, -/// -/// #[cfg(unix)] -/// pub val2: u32, -/// #[cfg(not(unix))] -/// pub val2: usize, -/// } -/// -/// // Always have `val2` regardless of OS -/// assert_fields!(Ty: val2); -/// ``` -/// -/// This macro even works with `enum` variants: -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// enum Data { -/// Val { -/// id: i32, -/// name: String, -/// bytes: [u8; 128], -/// }, -/// Ptr(*const u8), -/// } -/// -/// assert_fields!(Data::Val: id, bytes); -/// ``` -/// -/// The following example fails to compile because [`Range`] does not have a field named `middle`: -/// -/// ```compile_fail -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// use std::ops::Range; -/// -/// assert_fields!(Range: middle); -/// ``` -/// -/// [`Range`]: https://doc.rust-lang.org/std/ops/struct.Range.html -#[macro_export] -macro_rules! assert_fields { - ($t:ident::$v:ident: $($f:ident),+) => { - #[allow(unknown_lints, unneeded_field_pattern)] - const _: fn() = || { - #[allow(dead_code, unreachable_patterns)] - fn assert(value: $t) { - match value { - $($t::$v { $f: _, .. } => {},)+ - _ => {} - } - } - }; - }; - ($t:path: $($f:ident),+) => { - #[allow(unknown_lints, unneeded_field_pattern)] - const _: fn() = || { - $(let $t { $f: _, .. };)+ - }; - }; -} diff --git a/vendor/static_assertions/src/assert_impl.rs b/vendor/static_assertions/src/assert_impl.rs deleted file mode 100644 index 480b6b6c..00000000 --- a/vendor/static_assertions/src/assert_impl.rs +++ /dev/null @@ -1,356 +0,0 @@ -/// Asserts that the type implements exactly one in a set of traits. -/// -/// Related: -/// - [`assert_impl_any!`] -/// - [`assert_impl_all!`] -/// - [`assert_not_impl_all!`] -/// - [`assert_not_impl_any!`] -/// -/// # Examples -/// -/// Given some type `Foo`, it is expected to implement either `Snap`, `Crackle`, -/// or `Pop`: -/// -/// ```compile_fail -/// # use static_assertions::assert_impl_one; fn main() {} -/// struct Foo; -/// -/// trait Snap {} -/// trait Crackle {} -/// trait Pop {} -/// -/// assert_impl_one!(Foo: Snap, Crackle, Pop); -/// ``` -/// -/// If _only_ `Crackle` is implemented, the assertion passes: -/// -/// ``` -/// # use static_assertions::assert_impl_one; fn main() {} -/// # struct Foo; -/// # trait Snap {} -/// # trait Crackle {} -/// # trait Pop {} -/// impl Crackle for Foo {} -/// -/// assert_impl_one!(Foo: Snap, Crackle, Pop); -/// ``` -/// -/// If `Snap` or `Pop` is _also_ implemented, the assertion fails: -/// -/// ```compile_fail -/// # use static_assertions::assert_impl_one; fn main() {} -/// # struct Foo; -/// # trait Snap {} -/// # trait Crackle {} -/// # trait Pop {} -/// # impl Crackle for Foo {} -/// impl Pop for Foo {} -/// -/// assert_impl_one!(Foo: Snap, Crackle, Pop); -/// ``` -/// -/// [`assert_impl_any!`]: macro.assert_impl_any.html -/// [`assert_impl_all!`]: macro.assert_impl_all.html -/// [`assert_not_impl_all!`]: macro.assert_not_impl_all.html -/// [`assert_not_impl_any!`]: macro.assert_not_impl_any.html -#[macro_export] -macro_rules! assert_impl_one { - ($x:ty: $($t:path),+ $(,)?) => { - const _: fn() = || { - // Generic trait that must be implemented for `$x` exactly once. - trait AmbiguousIfMoreThanOne { - // Required for actually being able to reference the trait. 
- fn some_item() {} - } - - // Creates multiple scoped `Token` types for each trait `$t`, over - // which a specialized `AmbiguousIfMoreThanOne` is - // implemented for every type that implements `$t`. - $({ - #[allow(dead_code)] - struct Token; - - impl AmbiguousIfMoreThanOne for T {} - })+ - - // If there is only one specialized trait impl, type inference with - // `_` can be resolved and this can compile. Fails to compile if - // `$x` implements more than one `AmbiguousIfMoreThanOne` or - // does not implement any at all. - let _ = <$x as AmbiguousIfMoreThanOne<_>>::some_item; - }; - }; -} - -/// Asserts that the type implements _all_ of the given traits. -/// -/// See [`assert_not_impl_all!`] for achieving the opposite effect. -/// -/// # Examples -/// -/// This can be used to ensure types implement auto traits such as [`Send`] and -/// [`Sync`], as well as traits with [blanket `impl`s][blanket]. -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// assert_impl_all!(u32: Copy, Send); -/// assert_impl_all!(&str: Into); -/// ``` -/// -/// The following example fails to compile because raw pointers do not implement -/// [`Send`] since they cannot be moved between threads safely: -/// -/// ```compile_fail -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// assert_impl_all!(*const u8: Send); -/// ``` -/// -/// [`assert_not_impl_all!`]: macro.assert_not_impl_all.html -/// [`Send`]: https://doc.rust-lang.org/std/marker/trait.Send.html -/// [`Sync`]: https://doc.rust-lang.org/std/marker/trait.Sync.html -/// [blanket]: https://doc.rust-lang.org/book/ch10-02-traits.html#using-trait-bounds-to-conditionally-implement-methods -#[macro_export] -macro_rules! assert_impl_all { - ($type:ty: $($trait:path),+ $(,)?) => { - const _: fn() = || { - // Only callable when `$type` implements all traits in `$($trait)+`. - fn assert_impl_all() {} - assert_impl_all::<$type>(); - }; - }; -} - -/// Asserts that the type implements _any_ of the given traits. -/// -/// See [`assert_not_impl_any!`] for achieving the opposite effect. -/// -/// # Examples -/// -/// `u8` cannot be converted from `u16`, but it can be converted into `u16`: -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// assert_impl_any!(u8: From, Into); -/// ``` -/// -/// The unit type cannot be converted from `u8` or `u16`, but it does implement -/// [`Send`]: -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// assert_impl_any!((): From, From, Send); -/// ``` -/// -/// The following example fails to compile because raw pointers do not implement -/// [`Send`] or [`Sync`] since they cannot be moved or shared between threads -/// safely: -/// -/// ```compile_fail -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// assert_impl_any!(*const u8: Send, Sync); -/// ``` -/// -/// [`assert_not_impl_any!`]: macro.assert_not_impl_any.html -/// [`Send`]: https://doc.rust-lang.org/std/marker/trait.Send.html -/// [`Sync`]: https://doc.rust-lang.org/std/marker/trait.Sync.html -#[macro_export] -macro_rules! assert_impl_any { - ($x:ty: $($t:path),+ $(,)?) => { - const _: fn() = || { - use $crate::_core::marker::PhantomData; - use $crate::_core::ops::Deref; - - // Fallback to use as the first iterative assignment to `previous`. - let previous = AssertImplAnyFallback; - struct AssertImplAnyFallback; - - // Ensures that blanket traits can't impersonate the method. 
This - // prevents a false positive attack where---if a blanket trait is in - // scope that has `_static_assertions_impl_any`---the macro will - // compile when it shouldn't. - // - // See https://github.com/nvzqz/static-assertions-rs/issues/19 for - // more info. - struct ActualAssertImplAnyToken; - trait AssertImplAnyToken {} - impl AssertImplAnyToken for ActualAssertImplAnyToken {} - fn assert_impl_any_token(_: T) {} - - $(let previous = { - struct Wrapper(PhantomData, N); - - // If the method for this wrapper can't be called then the - // compiler will insert a deref and try again. This forwards the - // compiler's next attempt to the previous wrapper. - impl Deref for Wrapper { - type Target = N; - - fn deref(&self) -> &Self::Target { - &self.1 - } - } - - // This impl is bounded on the `$t` trait, so the method can - // only be called if `$x` implements `$t`. This is why a new - // `Wrapper` is defined for each `previous`. - impl Wrapper { - fn _static_assertions_impl_any(&self) -> ActualAssertImplAnyToken { - ActualAssertImplAnyToken - } - } - - Wrapper::<$x, _>(PhantomData, previous) - };)+ - - // Attempt to find the method that can actually be called. The found - // method must return a type that implements the sealed `Token` - // trait, this ensures that blanket trait methods can't cause this - // macro to compile. - assert_impl_any_token(previous._static_assertions_impl_any()); - }; - }; -} - -/// Asserts that the type does **not** implement _all_ of the given traits. -/// -/// This can be used to ensure types do not implement auto traits such as -/// [`Send`] and [`Sync`], as well as traits with [blanket `impl`s][blanket]. -/// -/// Note that the combination of all provided traits is required to not be -/// implemented. If you want to check that none of multiple traits are -/// implemented you should invoke [`assert_not_impl_any!`] instead. -/// -/// # Examples -/// -/// Although `u32` implements `From`, it does not implement `Into`: -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// assert_not_impl_all!(u32: From, Into); -/// ``` -/// -/// The following example fails to compile since `u32` can be converted into -/// `u64`. -/// -/// ```compile_fail -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// assert_not_impl_all!(u32: Into); -/// ``` -/// -/// The following compiles because [`Cell`] is not both [`Sync`] _and_ [`Send`]: -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// use std::cell::Cell; -/// -/// assert_not_impl_all!(Cell: Sync, Send); -/// ``` -/// -/// But it is [`Send`], so this fails to compile: -/// -/// ```compile_fail -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// # std::cell::Cell; -/// assert_not_impl_all!(Cell: Send); -/// ``` -/// -/// [`Send`]: https://doc.rust-lang.org/std/marker/trait.Send.html -/// [`Sync`]: https://doc.rust-lang.org/std/marker/trait.Sync.html -/// [`assert_not_impl_any!`]: macro.assert_not_impl_any.html -/// [`Cell`]: https://doc.rust-lang.org/std/cell/struct.Cell.html -/// [blanket]: https://doc.rust-lang.org/book/ch10-02-traits.html#using-trait-bounds-to-conditionally-implement-methods -#[macro_export] -macro_rules! assert_not_impl_all { - ($x:ty: $($t:path),+ $(,)?) => { - const _: fn() = || { - // Generic trait with a blanket impl over `()` for all types. - trait AmbiguousIfImpl { - // Required for actually being able to reference the trait. 
- fn some_item() {} - } - - impl AmbiguousIfImpl<()> for T {} - - // Used for the specialized impl when *all* traits in - // `$($t)+` are implemented. - #[allow(dead_code)] - struct Invalid; - - impl AmbiguousIfImpl for T {} - - // If there is only one specialized trait impl, type inference with - // `_` can be resolved and this can compile. Fails to compile if - // `$x` implements `AmbiguousIfImpl`. - let _ = <$x as AmbiguousIfImpl<_>>::some_item; - }; - }; -} - -/// Asserts that the type does **not** implement _any_ of the given traits. -/// -/// This can be used to ensure types do not implement auto traits such as -/// [`Send`] and [`Sync`], as well as traits with [blanket `impl`s][blanket]. -/// -/// This macro causes a compilation failure if any of the provided individual -/// traits are implemented for the type. If you want to check that a combination -/// of traits is not implemented you should invoke [`assert_not_impl_all!`] -/// instead. For single traits both macros behave the same. -/// -/// # Examples -/// -/// If `u32` were to implement `Into` conversions for `usize` _and_ for `u8`, -/// the following would fail to compile: -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// assert_not_impl_any!(u32: Into, Into); -/// ``` -/// -/// This is also good for simple one-off cases: -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// assert_not_impl_any!(&'static mut u8: Copy); -/// ``` -/// -/// The following example fails to compile since `u32` can be converted into -/// `u64` even though it can not be converted into a `u16`: -/// -/// ```compile_fail -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// assert_not_impl_any!(u32: Into, Into); -/// ``` -/// -/// [`Send`]: https://doc.rust-lang.org/std/marker/trait.Send.html -/// [`Sync`]: https://doc.rust-lang.org/std/marker/trait.Sync.html -/// [`assert_not_impl_all!`]: macro.assert_not_impl_all.html -/// [blanket]: https://doc.rust-lang.org/book/ch10-02-traits.html#using-trait-bounds-to-conditionally-implement-methods -#[macro_export] -macro_rules! assert_not_impl_any { - ($x:ty: $($t:path),+ $(,)?) => { - const _: fn() = || { - // Generic trait with a blanket impl over `()` for all types. - trait AmbiguousIfImpl { - // Required for actually being able to reference the trait. - fn some_item() {} - } - - impl AmbiguousIfImpl<()> for T {} - - // Creates multiple scoped `Invalid` types for each trait `$t`, over - // which a specialized `AmbiguousIfImpl` is implemented for - // every type that implements `$t`. - $({ - #[allow(dead_code)] - struct Invalid; - - impl AmbiguousIfImpl for T {} - })+ - - // If there is only one specialized trait impl, type inference with - // `_` can be resolved and this can compile. Fails to compile if - // `$x` implements any `AmbiguousIfImpl`. - let _ = <$x as AmbiguousIfImpl<_>>::some_item; - }; - }; -} diff --git a/vendor/static_assertions/src/assert_obj_safe.rs b/vendor/static_assertions/src/assert_obj_safe.rs deleted file mode 100644 index ecbba964..00000000 --- a/vendor/static_assertions/src/assert_obj_safe.rs +++ /dev/null @@ -1,76 +0,0 @@ -// FIXME: Link below is required to render in index -/// Asserts that the traits support dynamic dispatch -/// ([object-safety](https://doc.rust-lang.org/book/ch17-02-trait-objects.html#object-safety-is-required-for-trait-objects)). -/// -/// This is useful for when changes are made to a trait that accidentally -/// prevent it from being used as an [object]. 
Such a case would be adding a -/// generic method and forgetting to add `where Self: Sized` after it. If left -/// unnoticed, that mistake will affect crate users and break both forward and -/// backward compatibility. -/// -/// # Examples -/// -/// When exposing a public API, it's important that traits that could previously -/// use dynamic dispatch can still do so in future compatible crate versions. -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// trait MySafeTrait { -/// fn foo(&self) -> u32; -/// } -/// -/// assert_obj_safe!(std::fmt::Write, MySafeTrait); -/// ``` -/// -/// Works with traits that are not in the calling module: -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// mod inner { -/// pub trait BasicTrait { -/// fn bar(&self); -/// } -/// assert_obj_safe!(BasicTrait); -/// } -/// -/// assert_obj_safe!(inner::BasicTrait); -/// ``` -/// -/// The following example fails to compile because raw pointers cannot be sent -/// between threads safely: -/// -/// ```compile_fail -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// assert_impl!(*const u8, Send); -/// ``` -/// -/// The following example fails to compile because generics without -/// `where Self: Sized` are not allowed in [object-safe][object] trait methods: -/// -/// ```compile_fail -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// trait MyUnsafeTrait { -/// fn baz(&self) -> T; -/// } -/// -/// assert_obj_safe!(MyUnsafeTrait); -/// ``` -/// -/// When we fix that, the previous code will compile: -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// trait MyUnsafeTrait { -/// fn baz(&self) -> T where Self: Sized; -/// } -/// -/// assert_obj_safe!(MyUnsafeTrait); -/// ``` -/// -/// [object]: https://doc.rust-lang.org/book/ch17-02-trait-objects.html#object-safety-is-required-for-trait-objects -#[macro_export] -macro_rules! assert_obj_safe { - ($($xs:path),+ $(,)?) => { - $(const _: Option<&$xs> = None;)+ - }; -} diff --git a/vendor/static_assertions/src/assert_trait.rs b/vendor/static_assertions/src/assert_trait.rs deleted file mode 100644 index c231492f..00000000 --- a/vendor/static_assertions/src/assert_trait.rs +++ /dev/null @@ -1,105 +0,0 @@ -/// Asserts that the trait is a child of all of the other traits. 
-/// -/// Related: -/// - [`assert_trait_super_all!`] -/// -/// # Examples -/// -/// All types that implement [`Copy`] must implement [`Clone`]: -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// assert_trait_sub_all!(Copy: Clone); -/// ``` -/// -/// All types that implement [`Ord`] must implement [`PartialEq`], [`Eq`], and -/// [`PartialOrd`]: -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// assert_trait_sub_all!(Ord: PartialEq, Eq, PartialOrd); -/// ``` -/// -/// The following example fails to compile because [`Eq`] is not required for -/// [`PartialOrd`]: -/// -/// ```compile_fail -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// assert_trait_sub_all!(PartialOrd: Eq); -/// ``` -/// -/// [`assert_trait_super_all!`]: macro.assert_trait_super_all.html -/// -/// [`Copy`]: https://doc.rust-lang.org/std/marker/trait.Copy.html -/// [`Clone`]: https://doc.rust-lang.org/std/clone/trait.Clone.html -/// [`Ord`]: https://doc.rust-lang.org/std/cmp/trait.Ord.html -/// [`PartialOrd`]: https://doc.rust-lang.org/std/cmp/trait.PartialOrd.html -/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html -/// [`PartialEq`]: https://doc.rust-lang.org/std/cmp/trait.PartialEq.html -#[macro_export] -macro_rules! assert_trait_sub_all { - ($sub:path: $($super:path),+ $(,)?) => { - const _: () = { - // One scope per super-trait. - $({ - #[allow(non_camel_case_types)] - trait __Impl_Implication: $super {} - - // Can only be implemented for `$sub` types if `$super` is - // also implemented. - impl __Impl_Implication for T {} - })+ - }; - }; -} - -/// Asserts that the trait is a parent of all of the other traits. -/// -/// Related: -/// - [`assert_trait_sub_all!`] -/// -/// # Examples -/// -/// With this, traits `A` and `B` can both be tested to require [`Copy`] on a -/// single line: -/// -/// ``` -/// # use static_assertions::assert_trait_super_all; -/// trait A: Copy {} -/// trait B: Copy {} -/// -/// assert_trait_super_all!(Copy: A, B); -/// ``` -/// -/// Otherwise, each sub-trait would require its own call to -/// [`assert_trait_sub_all!`]: -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// # trait A: Copy {} -/// # trait B: Copy {} -/// assert_trait_sub_all!(A: Copy); -/// assert_trait_sub_all!(B: Copy); -/// ``` -/// -/// The following example fails to compile because trait `C` does not require -/// [`Copy`]: -/// -/// ```compile_fail -/// # use static_assertions::assert_trait_super_all; -/// # trait A: Copy {} -/// # trait B: Copy {} -/// trait C {} -/// -/// assert_trait_super_all!(Copy: A, B, C); -/// ``` -/// -/// [`assert_trait_sub_all!`]: macro.assert_trait_sub_all.html -/// -/// [`Copy`]: https://doc.rust-lang.org/std/marker/trait.Copy.html -#[macro_export(local_inner_macros)] -macro_rules! assert_trait_super_all { - ($super:path: $($sub:path),+ $(,)?) => { - $(assert_trait_sub_all!($sub: $super);)+ - }; -} diff --git a/vendor/static_assertions/src/assert_type.rs b/vendor/static_assertions/src/assert_type.rs deleted file mode 100644 index dd2dc2a6..00000000 --- a/vendor/static_assertions/src/assert_type.rs +++ /dev/null @@ -1,101 +0,0 @@ -/// Asserts that _all_ types in a list are equal to each other. -/// -/// # Examples -/// -/// Often times, type aliases are used to express usage semantics via naming. In -/// some cases, the underlying type may differ based on platform. However, other -/// types like [`c_float`] will always alias the same type. 
-/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// use std::os::raw::c_float; -/// -/// assert_type_eq_all!(c_float, f32); -/// ``` -/// -/// This macro can also be used to compare types that involve lifetimes! Just -/// use `'static` in that case: -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; -/// # fn main() { -/// type Buf<'a> = &'a [u8]; -/// -/// assert_type_eq_all!(Buf<'static>, &'static [u8]); -/// # } -/// ``` -/// -/// The following example fails to compile because `String` and `str` do not -/// refer to the same type: -/// -/// ```compile_fail -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// assert_type_eq_all!(String, str); -/// ``` -/// -/// This should also work the other way around, regardless of [`Deref`] -/// implementations. -/// -/// ```compile_fail -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// assert_type_eq_all!(str, String); -/// ``` -/// -/// [`c_float`]: https://doc.rust-lang.org/std/os/raw/type.c_float.html -/// [`Deref`]: https://doc.rust-lang.org/std/ops/trait.Deref.html -#[macro_export] -macro_rules! assert_type_eq_all { - ($x:ty, $($xs:ty),+ $(,)*) => { - const _: fn() = || { $({ - trait TypeEq { - type This: ?Sized; - } - - impl TypeEq for T { - type This = Self; - } - - fn assert_type_eq_all() - where - T: ?Sized + TypeEq, - U: ?Sized, - {} - - assert_type_eq_all::<$x, $xs>(); - })+ }; - }; -} - -/// Asserts that _all_ types are **not** equal to each other. -/// -/// # Examples -/// -/// Rust has all sorts of slices, but they represent different types of data: -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// assert_type_ne_all!([u8], [u16], str); -/// ``` -/// -/// The following example fails to compile because [`c_uchar`] is a type alias -/// for [`u8`]: -/// -/// ```compile_fail -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// use std::os::raw::c_uchar; -/// -/// assert_type_ne_all!(c_uchar, u8, u32); -/// ``` -/// -/// [`c_uchar`]: https://doc.rust-lang.org/std/os/raw/type.c_uchar.html -/// [`u8`]: https://doc.rust-lang.org/std/primitive.u8.html -#[macro_export] -macro_rules! assert_type_ne_all { - ($x:ty, $($y:ty),+ $(,)?) => { - const _: fn() = || { - trait MutuallyExclusive {} - impl MutuallyExclusive for $x {} - $(impl MutuallyExclusive for $y {})+ - }; - }; -} diff --git a/vendor/static_assertions/src/const_assert.rs b/vendor/static_assertions/src/const_assert.rs deleted file mode 100644 index 16ae4a6b..00000000 --- a/vendor/static_assertions/src/const_assert.rs +++ /dev/null @@ -1,109 +0,0 @@ -/// Asserts that constant expressions evaluate to `true`. -/// -/// Constant expressions can be ensured to have certain properties via this -/// macro If the expression evaluates to `false`, the file will fail to compile. -/// This is synonymous to [`static_assert` in C++][static_assert]. -/// -/// # Alternatives -/// -/// There also exists [`const_assert_eq`](macro.const_assert_eq.html) for -/// validating whether a sequence of expressions are equal to one another. -/// -/// # Examples -/// -/// A common use case is to guarantee properties about a constant value that's -/// generated via meta-programming. -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// const VALUE: i32 = // ... 
-/// # 3; -/// -/// const_assert!(VALUE >= 2); -/// ``` -/// -/// Inputs are type-checked as booleans: -/// -/// ```compile_fail -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// const_assert!(!0); -/// ``` -/// -/// Despite this being a macro, we see this produces a type error: -/// -/// ```txt -/// | const_assert!(!0); -/// | ^^ expected bool, found integral variable -/// | -/// = note: expected type `bool` -/// found type `{integer}` -/// ``` -/// -/// The following fails to compile because multiplying by 5 does not have an -/// identity property: -/// -/// ```compile_fail -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// const_assert!(5 * 5 == 5); -/// ``` -/// -/// [static_assert]: http://en.cppreference.com/w/cpp/language/static_assert -#[macro_export] -macro_rules! const_assert { - ($x:expr $(,)?) => { - #[allow(unknown_lints, eq_op)] - const _: [(); 0 - !{ const ASSERT: bool = $x; ASSERT } as usize] = []; - }; -} - -/// Asserts that constants are equal in value. -/// -/// # Examples -/// -/// This works as a shorthand for `const_assert!(a == b)`: -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// const TWO: usize = 2; -/// -/// const_assert_eq!(TWO * TWO, TWO + TWO); -/// ``` -/// -/// Just because 2 × 2 = 2 + 2 doesn't mean it holds true for other numbers: -/// -/// ```compile_fail -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// const_assert_eq!(4 + 4, 4 * 4); -/// ``` -#[macro_export(local_inner_macros)] -macro_rules! const_assert_eq { - ($x:expr, $y:expr $(,)?) => { - const_assert!($x == $y); - }; -} - -/// Asserts that constants are **not** equal in value. -/// -/// # Examples -/// -/// This works as a shorthand for `const_assert!(a != b)`: -/// -/// ``` -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// const NUM: usize = 32; -/// -/// const_assert_ne!(NUM * NUM, 64); -/// ``` -/// -/// The following example fails to compile because 2 is magic and 2 × 2 = 2 + 2: -/// -/// ```compile_fail -/// # #[macro_use] extern crate static_assertions; fn main() {} -/// const_assert_ne!(2 + 2, 2 * 2); -/// ``` -#[macro_export(local_inner_macros)] -macro_rules! const_assert_ne { - ($x:expr, $y:expr $(,)?) => { - const_assert!($x != $y); - }; -} diff --git a/vendor/static_assertions/src/lib.rs b/vendor/static_assertions/src/lib.rs deleted file mode 100644 index a43cc1f5..00000000 --- a/vendor/static_assertions/src/lib.rs +++ /dev/null @@ -1,97 +0,0 @@ -//! [![Banner](https://raw.githubusercontent.com/nvzqz/static-assertions-rs/assets/Banner.png)](https://github.com/nvzqz/static-assertions-rs) -//! -//! -//! -//! Assertions to ensure correct assumptions about constants, types, and more. -//! -//! _All_ checks provided by this crate are performed at [compile-time]. This -//! allows for finding errors quickly and early when it comes to ensuring -//! certain features or aspects of a codebase. These macros are especially -//! important when exposing a public API that requires types to be the same size -//! or implement certain traits. -//! -//! # Usage -//! -//! This crate is available [on crates.io][crate] and can be used by adding the -//! following to your project's [`Cargo.toml`]: -//! -//! ```toml -//! [dependencies] -//! static_assertions = "1.1.0" -//! ``` -//! -//! and this to your crate root (`main.rs` or `lib.rs`): -//! -//! ``` -//! #[macro_use] -//! extern crate static_assertions; -//! # fn main() {} -//! ``` -//! -//! 
When using [Rust 2018 edition][2018], the following shorthand can help if -//! having `#[macro_use]` is undesirable. -//! -//! ```edition2018 -//! extern crate static_assertions as sa; -//! -//! sa::const_assert!(true); -//! ``` -//! -//! # Examples -//! -//! Very thorough examples are provided in the docs for -//! [each individual macro](#macros). Failure case examples are also documented. -//! -//! # Changes -//! -//! See [`CHANGELOG.md`](https://github.com/nvzqz/static-assertions-rs/blob/master/CHANGELOG.md) -//! for an exhaustive list of what has changed from one version to another. -//! -//! # Donate -//! -//! This project is made freely available (as in free beer), but unfortunately -//! not all beer is free! So, if you would like to buy me a beer (or coffee or -//! *more*), then consider supporting my work that's benefited your project -//! and thousands of others. -//! -//! -//! Become a Patron! -//! -//! -//! Buy me a coffee -//! -//! -//! [Rust 1.37]: https://blog.rust-lang.org/2019/08/15/Rust-1.37.0.html -//! [2018]: https://blog.rust-lang.org/2018/12/06/Rust-1.31-and-rust-2018.html#rust-2018 -//! [crate]: https://crates.io/crates/static_assertions -//! [compile-time]: https://en.wikipedia.org/wiki/Compile_time -//! [`Cargo.toml`]: https://doc.rust-lang.org/cargo/reference/manifest.html - -#![doc(html_root_url = "https://docs.rs/static_assertions/1.1.0")] -#![doc(html_logo_url = "https://raw.githubusercontent.com/nvzqz/static-assertions-rs/assets/Icon.png")] - -#![no_std] - -#![deny(unused_macros)] - -#[doc(hidden)] -pub extern crate core as _core; - -mod assert_cfg; -mod assert_eq_align; -mod assert_eq_size; -mod assert_fields; -mod assert_impl; -mod assert_obj_safe; -mod assert_trait; -mod assert_type; -mod const_assert; diff --git a/vendor/syn/.cargo-checksum.json b/vendor/syn/.cargo-checksum.json deleted file mode 100644 index dee3bccb..00000000 --- a/vendor/syn/.cargo-checksum.json +++ /dev/null @@ -1 +0,0 @@ 
-{"files":{"Cargo.toml":"7fb282af447a34bee9f5fa216d1d99e93abcdfb63f5f0fa255c7b2b895156109","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"417bb33dc685fb0036f91229dd0d828b104a4c35010d362f2a6e5d8b4cf1e36d","benches/file.rs":"b5f458687896253823b642778ee091e42b0d423282e5c177a66d74d429cc39e6","benches/rust.rs":"9c41ecf8cff9ea278befa2f0336dea392c4e85041909ea9eff3402f15b2312c1","build.rs":"241f9e3af93b32d2d928ef9251d8ed2e67c6c64acc4aacce81f3aca58778e655","src/attr.rs":"bab811535308771df407b36c9c9d442c413cfc40cce557e7505c85a2875bf41d","src/await.rs":"8aa22e3c201cb2bdb6b4817fa00901f308ab06817607aa7b884c58c957705969","src/bigint.rs":"efc7f64959980653d73fe4f8bc2a3a2904dc05f45b02c6dc15cd316fa3d7c338","src/buffer.rs":"a5d6692938c2ec6ec140f70ec89fa93659fa227b52e8d381e9da7dd440f0249e","src/custom_keyword.rs":"a834c617b4a0aa87c21275ded72a4e04d7fac2d2255cfc037c0690d21488fc31","src/custom_punctuation.rs":"8a666298e774b0d326642f0f73284f6677d0d0a7c9e4a712c9c98d010b4d8a2c","src/data.rs":"6774795445345360208e2ddc932acd33bbf96e9ac8407e1e4d7e45d44f174ebf","src/derive.rs":"d4205fb2b699e784c0889bcde7e80b77ab222ee74ec3c349b5b1fa63a5f3b0b0","src/discouraged.rs":"6c6a9298f8d24f578da119557bc588f3bd928f7b79fca27d6bdfe3e786dd005f","src/error.rs":"30ed3ab35ba852ac2cfc1f5f388845bf0d0576aa485f045c7da3af0fb24b9f84","src/export.rs":"d18438464c7ee1de7092c7c5d48dd9d114bdf184c1ea452c4fa709265cd19469","src/expr.rs":"29ab61a3ef4f50b7ae5b6c7efe16d3a5ad4bc45b3ff18f8a67f20c1953cd6901","src/ext.rs":"1f648cff1d705a1cea64b32b77482b97a82d2fe0aaf63b40cade91e5c02dc969","src/file.rs":"f86697655222ae294215114f4eae8e6b0b5e2a935d6c479ff8f8f889c4efd2e2","src/gen/clone.rs":"8db60fc2bf515f9a65f4e98669427ce02ed0ccc9ffdad25d8fb39b38768ecaf1","src/gen/debug.rs":"facf4fb5c7e017dd890c4a9531f337659d7b55475aa44124abf48e088ad56fc5","src/gen/eq.rs":"576a4f1e4030434b0f0170f4d681d2c46292fda64ad5f3913623e1c6e858c24f","src/gen/fold.rs":"3f59e59ed8ad2ab5dd347bfbe41bbc785c2aabd8ae902087a584a6daed597182","src/gen/hash.rs":"458052bb0d64b2a722eaeef4461af80bce109cd97d19bd44dbd608e4c53a6520","src/gen/visit.rs":"94f2c6bed4ef1cd0e83d91dddaa95730619fa48fdd4a0be2fd8740e730ff744d","src/gen/visit_mut.rs":"abc8cb67ad3300d0667761daf1e057b80b4d3f3980d94e8d6f3ed68bdcda3776","src/gen_helper.rs":"ea6c66388365971db6a2fc86cbb208f7eacde77e245bc8623f27a3642a3d7741","src/generics.rs":"c8272a8010b378c09eac33e9904299797aee3a879832e6a37a580b518f2a641a","src/group.rs":"d9cd30ea5bd4d453293e8e8debbe388a0360c3c766f67c8499fec453a9a43cdc","src/ident.rs":"d67975d3dd89becde96579c0454a1357aa7c62da11bdf94f29ccf63514ffbc9d","src/item.rs":"4a9e16aafef74f22d5c399735a55af741ec907f587f20845c9ac8c815027f65a","src/lib.rs":"6fc78c9af85f369210fcf7b9cdee7b75e4baf072cb885c8ff03ded0692337ece","src/lifetime.rs":"b18862ef1e690037a4f308ea897debad7bc5038584e3b26c6d8809752ea0e3c2","src/lit.rs":"1cf2700ff9b992bfbe660654abb2af4c816765f901451a359ce450c3dd5f2651","src/lookahead.rs":"92ee63b48de02d3f6f1b09121f0fbac41d55cebc5771c8320e27df8482906152","src/mac.rs":"004cb89f9697564f6c9ee837e08ead68463ef946fb4c13c6c105adf2ba364b2b","src/macros.rs":"266590fd63a4a9d8637c6f8d36702acaace4ba59ab74c0883859efd1e260bae5","src/op.rs":"9d499022902743a6a0a19223b356449a979b90e60552d0446497d72750e646a4","src/parse.rs":"8b4aa518660dfd6310a5455b5624de01ad7ba42d11527b217f811b687b26e1fc","src/parse_macro_input.rs":"88929a1a7e5e72aa2d0b3459e52d8975afea856d159047ba4ab02ecbc5878a9c","src/parse_quote.rs":"80db94540
3d9731c5f3299a5819a5e2bb726c19fd9f256d50240bc32703c96b1","src/pat.rs":"397b02e0a6f6af8e87c2b3d70cc5b65f5428f9154f09b1006a51b31aaea65038","src/path.rs":"6e890d2d1ceb3d70ede5825d1d74eb98f8c716f444c7eb6ef5026dd610baa202","src/print.rs":"da6529c1d9d21aaf6c835f66b4e67eacb7cf91a10eb5e9a2143b49bf99b3b5e1","src/punctuated.rs":"f389f99adafde96e6d9cc5a00fe95acbaf1ea556191dde2cce98bbd8155aed23","src/reserved.rs":"e70e028bd55cfa43e23cab4ba29e4dc53a3d91eff685ef2b6e57efc2b87a3428","src/sealed.rs":"896a495a5340eec898527f18bd4ddca408ea03ea0ee3af30074ff48deace778d","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"3ca016a943637653ab98e373dfb826a120f3c159867346fa38a844439944eb39","src/stmt.rs":"e68f76530606aab24e3587aa697fcd862c176e1cca0a50ab883c76bb91464f3d","src/thread.rs":"815eca6bd64f4eef7c447f0809e84108f5428ff50225224b373efd8fbb696874","src/token.rs":"5e423a696f80e281c322f37c87577f9fdc28607e9c007e24896a2b12da62d5ad","src/tt.rs":"32402645b6e82ef1e882945721b59b5fb7b0ee337d1972876362ecacef643d0f","src/ty.rs":"e4f5a4bab6ed7bdd4878e3450a9b36e5c4d9c14de342a99c105e3e93095de86a","src/verbatim.rs":"802a97df997432f18cac6e6200ff6ea29fb2474986005e0fcdbc2b65197f87f7","src/whitespace.rs":"e63dd0aa3d34029f17766a8b09c1a6e4479e36c552c8b7023d710a399333aace","tests/.gitignore":"22e782449a3c216db3f7215d5fb8882e316768e40beeec3833aae419ad8941db","tests/common/eq.rs":"202b65faf5681088ba86bd6dcfe97bf66ab3845b3cece2287e43ef3b513dba36","tests/common/mod.rs":"25ef6d7daa09bad3198a0e9e91b2812425f92db7c585c1e34a03a84d7362ccd8","tests/common/parse.rs":"81580f23583723f7a2a337c4d13ebc021057cd825562fb4e474caa7cc641fed9","tests/debug/gen.rs":"e30e2b6c61feb15abe11cee86c4edff9f7e7c9c79080447d44be97869c9a3adb","tests/debug/mod.rs":"868763d0ef1609a3ad5e05e9f1bfa0f813e91e7e9a36653414a188bb2fdaa425","tests/macros/mod.rs":"a93136b172377ffebe8b68fd596a86d6625f64ed6c3d5e7f5d6ad859e25d5623","tests/repo/mod.rs":"fee2c7aa1547ec84913d49432028e8e32001c1d298c57e2b60ecf52ae39e1a82","tests/repo/progress.rs":"c08d0314a7f3ecf760d471f27da3cd2a500aeb9f1c8331bffb2aa648f9fabf3f","tests/test_asyncness.rs":"cff01db49d28ab23b0b258bc6c0a5cc4071be4fe7248eef344a5d79d2fb649b7","tests/test_attribute.rs":"0ffd99384e1a52ae17d9fed5c4053e411e8f9018decef07ffa621d1faa7329d8","tests/test_derive_input.rs":"610444351e3bf99366976bbf1da109c334a70ac9500caef366bcf9b68819829f","tests/test_expr.rs":"e68b5827415471ccfb7efd91bbfba2f025f1cb8f993c417e7b3c0c3bec6a1fa6","tests/test_generics.rs":"9d713f90a79d6145efc89fb6f946029ca03486c632219950889da39940152ba0","tests/test_grouping.rs":"6276c3c73bba649dec5c97904ad2492879f918bc887a2c425d095c654ca0d925","tests/test_ident.rs":"9eb53d1e21edf23e7c9e14dc74dcc2b2538e9221e19dbcc0a44e3acc2e90f3f6","tests/test_item.rs":"2a8ba326963d758b5a3395bfa07375844f8e043885bb752b69be84a2fe6b4e17","tests/test_iterators.rs":"53ed6078d37550bd6765d2411e3660be401aef8a31a407350cc064a7d08c7c33","tests/test_lit.rs":"ef3f39da6ed67ba73b05eab3dda299887a455bac8e97701a90a94b636681588f","tests/test_meta.rs":"bd6910ec0eba05e814dad27dda0ea65e1f8b483e64d943213066ffd114a82b45","tests/test_parse_buffer.rs":"8bbe2d24ca8a3788f72c6908fc96c26d546f11c69687bf8d72727f851d5e2d27","tests/test_parse_stream.rs":"2f449a2c41a3dee6fd14bee24e1666a453cb808eda17332fd91afd127fcdd2a6","tests/test_pat.rs":"d4465f4fc3fd5d6e534ba8efabe1e0ed6da89de4ac7c96effa6bfb880c4287cf","tests/test_path.rs":"13ae78e958f0d7334d11f32519f593968e5503d46e29ec345feede025f16113d","tests/test_precedence.rs":"6040a565757541f73cac22900d70469dfcd960c5f72e43966ce1383fb46f4c62","
tests/test_receiver.rs":"084eca59984b9a18651da52f2c4407355da3de1335916a12477652999e2d01cc","tests/test_round_trip.rs":"b05d8f1fdaa35dd4c1baa76c25ba6e46664643b45ee2ab40b9cf206935e95471","tests/test_shebang.rs":"f5772cadad5b56e3112cb16308b779f92bce1c3a48091fc9933deb2276a69331","tests/test_should_parse.rs":"1d3535698a446e2755bfc360676bdb161841a1f454cdef6e7556c6d06a95c89d","tests/test_size.rs":"5fae772bab66809d6708232f35cfb4a287882486763b0f763feec2ad79fbb68b","tests/test_stmt.rs":"ff6f6f447f46a52fb544353b7dcd2cd9e60781b297dcc86551bee613beaf2280","tests/test_token_trees.rs":"43e56a701817e3c3bfd0cae54a457dd7a38ccb3ca19da41e2b995fdf20e6ed18","tests/test_ty.rs":"c59dfc8952f35befa52cf97af99098b9a042499601c78fa2ea3e169359a3ee5f","tests/test_visibility.rs":"7456fcb3a6634db509748aededff9c2d8b242d511a3e5ee3022e40b232892704","tests/zzz_stable.rs":"2a862e59cb446235ed99aec0e6ada8e16d3ecc30229b29d825b7c0bbc2602989"},"package":"b7f58f7e8eaa0009c5fec437aabf511bd9933e4b2d7407bd05273c01a8906ea7"} \ No newline at end of file diff --git a/vendor/syn/Cargo.toml b/vendor/syn/Cargo.toml deleted file mode 100644 index 134a3439..00000000 --- a/vendor/syn/Cargo.toml +++ /dev/null @@ -1,97 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies. -# -# If you are reading this file be aware that the original Cargo.toml -# will likely look very different (and much more reasonable). -# See Cargo.toml.orig for the original contents. - -[package] -edition = "2018" -name = "syn" -version = "1.0.75" -authors = ["David Tolnay "] -include = ["/benches/**", "/build.rs", "/Cargo.toml", "/LICENSE-APACHE", "/LICENSE-MIT", "/README.md", "/src/**", "/tests/**"] -description = "Parser for Rust source code" -documentation = "https://docs.rs/syn" -readme = "README.md" -categories = ["development-tools::procedural-macro-helpers"] -license = "MIT OR Apache-2.0" -repository = "https://github.com/dtolnay/syn" -[package.metadata.docs.rs] -all-features = true -rustdoc-args = ["--cfg", "doc_cfg"] -targets = ["x86_64-unknown-linux-gnu"] - -[package.metadata.playground] -features = ["full", "visit", "visit-mut", "fold", "extra-traits"] - -[[bench]] -name = "rust" -harness = false -required-features = ["full", "parsing"] - -[[bench]] -name = "file" -required-features = ["full", "parsing"] -[dependencies.proc-macro2] -version = "1.0.26" -default-features = false - -[dependencies.quote] -version = "1.0" -optional = true -default-features = false - -[dependencies.unicode-xid] -version = "0.2" -[dev-dependencies.anyhow] -version = "1.0" - -[dev-dependencies.flate2] -version = "1.0" - -[dev-dependencies.insta] -version = "1.0" - -[dev-dependencies.rayon] -version = "1.0" - -[dev-dependencies.ref-cast] -version = "1.0" - -[dev-dependencies.regex] -version = "1.0" - -[dev-dependencies.reqwest] -version = "0.10" -features = ["blocking"] - -[dev-dependencies.syn-test-suite] -version = "0" - -[dev-dependencies.tar] -version = "0.4.16" - -[dev-dependencies.termcolor] -version = "1.0" - -[dev-dependencies.walkdir] -version = "2.1" - -[features] -clone-impls = [] -default = ["derive", "parsing", "printing", "clone-impls", "proc-macro"] -derive = [] -extra-traits = [] -fold = [] -full = [] -parsing = [] -printing = ["quote"] -proc-macro = ["proc-macro2/proc-macro", "quote/proc-macro"] -test = ["syn-test-suite/all-features"] 
-visit = [] -visit-mut = [] diff --git a/vendor/syn/LICENSE-APACHE b/vendor/syn/LICENSE-APACHE deleted file mode 100644 index 16fe87b0..00000000 --- a/vendor/syn/LICENSE-APACHE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. 
Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/vendor/syn/LICENSE-MIT b/vendor/syn/LICENSE-MIT deleted file mode 100644 index 31aa7938..00000000 --- a/vendor/syn/LICENSE-MIT +++ /dev/null @@ -1,23 +0,0 @@ -Permission is hereby granted, free of charge, to any -person obtaining a copy of this software and associated -documentation files (the "Software"), to deal in the -Software without restriction, including without -limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software -is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice -shall be included in all copies or substantial portions -of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. diff --git a/vendor/syn/README.md b/vendor/syn/README.md deleted file mode 100644 index 38005f5e..00000000 --- a/vendor/syn/README.md +++ /dev/null @@ -1,285 +0,0 @@ -Parser for Rust source code -=========================== - -[github](https://github.com/dtolnay/syn) -[crates.io](https://crates.io/crates/syn) -[docs.rs](https://docs.rs/syn) -[build status](https://github.com/dtolnay/syn/actions?query=branch%3Amaster) - -Syn is a parsing library for parsing a stream of Rust tokens into a syntax tree -of Rust source code. - -Currently this library is geared toward use in Rust procedural macros, but -contains some APIs that may be useful more generally. - -- **Data structures** — Syn provides a complete syntax tree that can represent - any valid Rust source code. The syntax tree is rooted at [`syn::File`] which - represents a full source file, but there are other entry points that may be - useful to procedural macros including [`syn::Item`], [`syn::Expr`] and - [`syn::Type`]. - -- **Derives** — Of particular interest to derive macros is [`syn::DeriveInput`] - which is any of the three legal input items to a derive macro. An example - below shows using this type in a library that can derive implementations of a - user-defined trait. - -- **Parsing** — Parsing in Syn is built around [parser functions] with the - signature `fn(ParseStream) -> Result`. Every syntax tree node defined by - Syn is individually parsable and may be used as a building block for custom - syntaxes, or you may dream up your own brand new syntax without involving any - of our syntax tree types. - -- **Location information** — Every token parsed by Syn is associated with a - `Span` that tracks line and column information back to the source of that - token. These spans allow a procedural macro to display detailed error messages - pointing to all the right places in the user's code. There is an example of - this below. 
-
-- **Feature flags** — Functionality is aggressively feature gated so your
-  procedural macros enable only what they need, and do not pay in compile time
-  for all the rest.
-
-[`syn::File`]: https://docs.rs/syn/1.0/syn/struct.File.html
-[`syn::Item`]: https://docs.rs/syn/1.0/syn/enum.Item.html
-[`syn::Expr`]: https://docs.rs/syn/1.0/syn/enum.Expr.html
-[`syn::Type`]: https://docs.rs/syn/1.0/syn/enum.Type.html
-[`syn::DeriveInput`]: https://docs.rs/syn/1.0/syn/struct.DeriveInput.html
-[parser functions]: https://docs.rs/syn/1.0/syn/parse/index.html
-
-*Version requirement: Syn supports rustc 1.31 and up.*
-
-[*Release notes*](https://github.com/dtolnay/syn/releases)
-
-
-
-## Resources
-
-The best way to learn about procedural macros is by writing some. Consider
-working through [this procedural macro workshop][workshop] to get familiar with
-the different types of procedural macros. The workshop contains relevant links
-into the Syn documentation as you work through each project.
-
-[workshop]: https://github.com/dtolnay/proc-macro-workshop
-
-
-
-## Example of a derive macro
-
-The canonical derive macro using Syn looks like this. We write an ordinary Rust
-function tagged with a `proc_macro_derive` attribute and the name of the trait
-we are deriving. Any time that derive appears in the user's code, the Rust
-compiler passes their data structure as tokens into our macro. We get to execute
-arbitrary Rust code to figure out what to do with those tokens, then hand some
-tokens back to the compiler to compile into the user's crate.
-
-[`TokenStream`]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html
-
-```toml
-[dependencies]
-syn = "1.0"
-quote = "1.0"
-
-[lib]
-proc-macro = true
-```
-
-```rust
-use proc_macro::TokenStream;
-use quote::quote;
-use syn::{parse_macro_input, DeriveInput};
-
-#[proc_macro_derive(MyMacro)]
-pub fn my_macro(input: TokenStream) -> TokenStream {
-    // Parse the input tokens into a syntax tree
-    let input = parse_macro_input!(input as DeriveInput);
-
-    // Build the output, possibly using quasi-quotation
-    let expanded = quote! {
-        // ...
-    };
-
-    // Hand the output tokens back to the compiler
-    TokenStream::from(expanded)
-}
-```
-
-The [`heapsize`] example directory shows a complete working implementation of a
-derive macro. It works on any Rust compiler 1.31+. The example derives a
-`HeapSize` trait which computes an estimate of the amount of heap memory owned
-by a value.
-
-[`heapsize`]: examples/heapsize
-
-```rust
-pub trait HeapSize {
-    /// Total number of bytes of heap memory owned by `self`.
-    fn heap_size_of_children(&self) -> usize;
-}
-```
-
-The derive macro allows users to write `#[derive(HeapSize)]` on data structures
-in their program.
-
-```rust
-#[derive(HeapSize)]
-struct Demo<'a, T: ?Sized> {
-    a: Box<T>,
-    b: u8,
-    c: &'a str,
-    d: String,
-}
-```
-
-
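As a rough illustration of what the `quote!` block in a derive like this might emit for the `Demo` type above, here is a hand-written sketch of the generated impl; the real `heapsize` example builds its output programmatically and may differ in detail:

```rust
// Illustrative sketch only: approximately what a HeapSize derive could expand
// to for `Demo<'a, T>`, assuming each field type itself implements HeapSize.
impl<'a, T: ?Sized + HeapSize> HeapSize for Demo<'a, T> {
    fn heap_size_of_children(&self) -> usize {
        self.a.heap_size_of_children()
            + self.b.heap_size_of_children()
            + self.c.heap_size_of_children()
            + self.d.heap_size_of_children()
    }
}
```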
-
-## Spans and error reporting
-
-The token-based procedural macro API provides great control over where the
-compiler's error messages are displayed in user code. Consider the error the
-user sees if one of their field types does not implement `HeapSize`.
-
-```rust
-#[derive(HeapSize)]
-struct Broken {
-    ok: String,
-    bad: std::thread::Thread,
-}
-```
-
-By tracking span information all the way through the expansion of a procedural
-macro as shown in the `heapsize` example, token-based macros in Syn are able to
-trigger errors that directly pinpoint the source of the problem.
-
-```console
-error[E0277]: the trait bound `std::thread::Thread: HeapSize` is not satisfied
-  --> src/main.rs:7:5
-   |
-7  |     bad: std::thread::Thread,
-   |     ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `HeapSize` is not implemented for `std::thread::Thread`
-```
-
-
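In syn 1.x, the usual way to raise such a targeted error from inside a macro is `syn::Error`, built from the span of the offending tokens. The following is a small hypothetical sketch, not code from the vendored crate:

```rust
use proc_macro2::TokenStream;
use quote::quote;
use syn::spanned::Spanned;

// Hypothetical check: refuse tuple structs, pointing the error at the fields.
fn expand(input: &syn::DeriveInput) -> TokenStream {
    match &input.data {
        syn::Data::Struct(s) if matches!(s.fields, syn::Fields::Unnamed(_)) => {
            // to_compile_error() turns the error into tokens that rustc reports
            // at the span of `s.fields`.
            syn::Error::new(s.fields.span(), "expected a struct with named fields")
                .to_compile_error()
        }
        _ => quote! { /* generated impl would go here */ },
    }
}
```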
-
-## Parsing a custom syntax
-
-The [`lazy-static`] example directory shows the implementation of a
-`functionlike!(...)` procedural macro in which the input tokens are parsed using
-Syn's parsing API.
-
-[`lazy-static`]: examples/lazy-static
-
-The example reimplements the popular `lazy_static` crate from crates.io as a
-procedural macro.
-
-```rust
-lazy_static! {
-    static ref USERNAME: Regex = Regex::new("^[a-z0-9_-]{3,16}$").unwrap();
-}
-```
-
-The implementation shows how to trigger custom warnings and error messages on
-the macro input.
-
-```console
-warning: come on, pick a more creative name
-  --> src/main.rs:10:16
-   |
-10 |     static ref FOO: String = "lazy_static".to_owned();
-   |                ^^^
-```
-
-
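The parsing API referred to above is trait-based: any type can implement `syn::parse::Parse` and be parsed from a `ParseStream`. A minimal sketch for a made-up `NAME = expr` input follows; parsing an `Expr` assumes syn's `full` feature is enabled:

```rust
use syn::parse::{Parse, ParseStream, Result};
use syn::{Expr, Ident, Token};

// Hypothetical input shape: `FOO = 1 + 2`
struct Assignment {
    name: Ident,
    value: Expr,
}

impl Parse for Assignment {
    fn parse(input: ParseStream) -> Result<Self> {
        let name: Ident = input.parse()?;
        input.parse::<Token![=]>()?;
        let value: Expr = input.parse()?;
        Ok(Assignment { name, value })
    }
}
```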
-
-## Testing
-
-When testing macros, we often care not just that the macro can be used
-successfully but also that when the macro is provided with invalid input it
-produces maximally helpful error messages. Consider using the [`trybuild`] crate
-to write tests for errors that are emitted by your macro or errors detected by
-the Rust compiler in the expanded code following misuse of the macro. Such tests
-help avoid regressions from later refactors that mistakenly make an error no
-longer trigger or be less helpful than it used to be.
-
-[`trybuild`]: https://github.com/dtolnay/trybuild
-
-
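A minimal sketch of the `trybuild` pattern described above; the file layout and paths are illustrative, not taken from this repository:

```rust
// tests/compiletest.rs
// Assumes tests/ui/ contains .rs cases, with .stderr snapshots for the
// cases that are expected to fail to compile.
#[test]
fn ui() {
    let t = trybuild::TestCases::new();
    t.pass("tests/ui/pass/*.rs");
    t.compile_fail("tests/ui/fail/*.rs");
}
```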
-
-## Debugging
-
-When developing a procedural macro it can be helpful to look at what the
-generated code looks like. Use `cargo rustc -- -Zunstable-options
---pretty=expanded` or the [`cargo expand`] subcommand.
-
-[`cargo expand`]: https://github.com/dtolnay/cargo-expand
-
-To show the expanded code for some crate that uses your procedural macro, run
-`cargo expand` from that crate. To show the expanded code for one of your own
-test cases, run `cargo expand --test the_test_case` where the last argument is
-the name of the test file without the `.rs` extension.
-
-This write-up by Brandon W Maister discusses debugging in more detail:
-[Debugging Rust's new Custom Derive system][debugging].
-
-[debugging]: https://quodlibetor.github.io/posts/debugging-rusts-new-custom-derive-system/
-
-
-
-## Optional features
-
-Syn puts a lot of functionality behind optional features in order to optimize
-compile time for the most common use cases. The following features are
-available.
-
-- **`derive`** *(enabled by default)* — Data structures for representing the
-  possible input to a derive macro, including structs and enums and types.
-- **`full`** — Data structures for representing the syntax tree of all valid
-  Rust source code, including items and expressions.
-- **`parsing`** *(enabled by default)* — Ability to parse input tokens into a
-  syntax tree node of a chosen type.
-- **`printing`** *(enabled by default)* — Ability to print a syntax tree node as
-  tokens of Rust source code.
-- **`visit`** — Trait for traversing a syntax tree.
-- **`visit-mut`** — Trait for traversing and mutating in place a syntax tree.
-- **`fold`** — Trait for transforming an owned syntax tree.
-- **`clone-impls`** *(enabled by default)* — Clone impls for all syntax tree
-  types.
-- **`extra-traits`** — Debug, Eq, PartialEq, Hash impls for all syntax tree
-  types.
-- **`proc-macro`** *(enabled by default)* — Runtime dependency on the dynamic
-  library libproc_macro from rustc toolchain.
-
-
-
-## Proc macro shim
-
-Syn operates on the token representation provided by the [proc-macro2] crate
-from crates.io rather than using the compiler's built in proc-macro crate
-directly. This enables code using Syn to execute outside of the context of a
-procedural macro, such as in unit tests or build.rs, and we avoid needing
-incompatible ecosystems for proc macros vs non-macro use cases.
-
-In general all of your code should be written against proc-macro2 rather than
-proc-macro. The one exception is in the signatures of procedural macro entry
-points, which are required by the language to use `proc_macro::TokenStream`.
-
-The proc-macro2 crate will automatically detect and use the compiler's data
-structures when a procedural macro is active.
-
-[proc-macro2]: https://docs.rs/proc-macro2/1.0/proc_macro2/
-
-
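Because everything above is built on proc-macro2 types, syn can also be exercised in an ordinary unit test, with no macro expansion involved. A small sketch, assuming default syn features:

```rust
use quote::quote;

#[test]
fn syn_works_outside_macro_expansion() {
    // Parse a type from a plain string; no proc_macro context is needed.
    let ty: syn::Type = syn::parse_str("Vec<u8>").unwrap();

    // Interpolate it back into tokens and render to a string.
    let tokens = quote! { fn make() -> #ty { Vec::new() } };
    assert!(tokens.to_string().contains("Vec"));
}
```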
-
-#### License
-
-
-Licensed under either of Apache License, Version
-2.0 or MIT license at your option.
-
-
-
- - -Unless you explicitly state otherwise, any contribution intentionally submitted -for inclusion in this crate by you, as defined in the Apache-2.0 license, shall -be dual licensed as above, without any additional terms or conditions. - diff --git a/vendor/syn/benches/file.rs b/vendor/syn/benches/file.rs deleted file mode 100644 index 33d201c1..00000000 --- a/vendor/syn/benches/file.rs +++ /dev/null @@ -1,30 +0,0 @@ -// $ cargo bench --features full --bench file - -#![feature(rustc_private, test)] -#![recursion_limit = "1024"] - -extern crate test; - -#[macro_use] -#[path = "../tests/macros/mod.rs"] -mod macros; - -#[path = "../tests/common/mod.rs"] -mod common; -#[path = "../tests/repo/mod.rs"] -pub mod repo; - -use proc_macro2::TokenStream; -use std::fs; -use std::str::FromStr; -use test::Bencher; - -const FILE: &str = "tests/rust/library/core/src/str/mod.rs"; - -#[bench] -fn parse_file(b: &mut Bencher) { - repo::clone_rust(); - let content = fs::read_to_string(FILE).unwrap(); - let tokens = TokenStream::from_str(&content).unwrap(); - b.iter(|| syn::parse2::(tokens.clone())); -} diff --git a/vendor/syn/benches/rust.rs b/vendor/syn/benches/rust.rs deleted file mode 100644 index 28dff947..00000000 --- a/vendor/syn/benches/rust.rs +++ /dev/null @@ -1,158 +0,0 @@ -// $ cargo bench --features full --bench rust -// -// Syn only, useful for profiling: -// $ RUSTFLAGS='--cfg syn_only' cargo build --release --features full --bench rust - -#![cfg_attr(not(syn_only), feature(rustc_private))] -#![recursion_limit = "1024"] - -#[macro_use] -#[path = "../tests/macros/mod.rs"] -mod macros; - -#[path = "../tests/common/mod.rs"] -mod common; -#[path = "../tests/repo/mod.rs"] -mod repo; - -use std::fs; -use std::time::{Duration, Instant}; - -#[cfg(not(syn_only))] -mod tokenstream_parse { - use proc_macro2::TokenStream; - use std::str::FromStr; - - pub fn bench(content: &str) -> Result<(), ()> { - TokenStream::from_str(content).map(drop).map_err(drop) - } -} - -mod syn_parse { - pub fn bench(content: &str) -> Result<(), ()> { - syn::parse_file(content).map(drop).map_err(drop) - } -} - -#[cfg(not(syn_only))] -mod librustc_parse { - extern crate rustc_data_structures; - extern crate rustc_errors; - extern crate rustc_parse; - extern crate rustc_session; - extern crate rustc_span; - - use rustc_data_structures::sync::Lrc; - use rustc_errors::{emitter::Emitter, Diagnostic, Handler}; - use rustc_session::parse::ParseSess; - use rustc_span::source_map::{FilePathMapping, SourceMap}; - use rustc_span::{edition::Edition, FileName}; - - pub fn bench(content: &str) -> Result<(), ()> { - struct SilentEmitter; - - impl Emitter for SilentEmitter { - fn emit_diagnostic(&mut self, _diag: &Diagnostic) {} - fn source_map(&self) -> Option<&Lrc> { - None - } - } - - rustc_span::create_session_if_not_set_then(Edition::Edition2018, |_| { - let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); - let emitter = Box::new(SilentEmitter); - let handler = Handler::with_emitter(false, None, emitter); - let sess = ParseSess::with_span_handler(handler, cm); - if let Err(mut diagnostic) = rustc_parse::parse_crate_from_source_str( - FileName::Custom("bench".to_owned()), - content.to_owned(), - &sess, - ) { - diagnostic.cancel(); - return Err(()); - }; - Ok(()) - }) - } -} - -#[cfg(not(syn_only))] -mod read_from_disk { - pub fn bench(content: &str) -> Result<(), ()> { - let _ = content; - Ok(()) - } -} - -fn exec(mut codepath: impl FnMut(&str) -> Result<(), ()>) -> Duration { - let begin = Instant::now(); - let mut success = 0; - 
let mut total = 0; - - walkdir::WalkDir::new("tests/rust/src") - .into_iter() - .filter_entry(repo::base_dir_filter) - .for_each(|entry| { - let entry = entry.unwrap(); - let path = entry.path(); - if path.is_dir() { - return; - } - let content = fs::read_to_string(path).unwrap(); - let ok = codepath(&content).is_ok(); - success += ok as usize; - total += 1; - if !ok { - eprintln!("FAIL {}", path.display()); - } - }); - - assert_eq!(success, total); - begin.elapsed() -} - -fn main() { - repo::clone_rust(); - - macro_rules! testcases { - ($($(#[$cfg:meta])* $name:ident,)*) => { - vec![ - $( - $(#[$cfg])* - (stringify!($name), $name::bench as fn(&str) -> Result<(), ()>), - )* - ] - }; - } - - #[cfg(not(syn_only))] - { - let mut lines = 0; - let mut files = 0; - exec(|content| { - lines += content.lines().count(); - files += 1; - Ok(()) - }); - eprintln!("\n{} lines in {} files", lines, files); - } - - for (name, f) in testcases!( - #[cfg(not(syn_only))] - read_from_disk, - #[cfg(not(syn_only))] - tokenstream_parse, - syn_parse, - #[cfg(not(syn_only))] - librustc_parse, - ) { - eprint!("{:20}", format!("{}:", name)); - let elapsed = exec(f); - eprintln!( - "elapsed={}.{:03}s", - elapsed.as_secs(), - elapsed.subsec_millis(), - ); - } - eprintln!(); -} diff --git a/vendor/syn/build.rs b/vendor/syn/build.rs deleted file mode 100644 index 25190f4c..00000000 --- a/vendor/syn/build.rs +++ /dev/null @@ -1,43 +0,0 @@ -use std::env; -use std::process::Command; -use std::str; - -// The rustc-cfg strings below are *not* public API. Please let us know by -// opening a GitHub issue if your build environment requires some way to enable -// these cfgs other than by executing our build script. -fn main() { - let compiler = match rustc_version() { - Some(compiler) => compiler, - None => return, - }; - - if compiler.minor < 36 { - println!("cargo:rustc-cfg=syn_omit_await_from_token_macro"); - } - - if compiler.minor < 39 { - println!("cargo:rustc-cfg=syn_no_const_vec_new"); - } - - if !compiler.nightly { - println!("cargo:rustc-cfg=syn_disable_nightly_tests"); - } -} - -struct Compiler { - minor: u32, - nightly: bool, -} - -fn rustc_version() -> Option { - let rustc = env::var_os("RUSTC")?; - let output = Command::new(rustc).arg("--version").output().ok()?; - let version = str::from_utf8(&output.stdout).ok()?; - let mut pieces = version.split('.'); - if pieces.next() != Some("rustc 1") { - return None; - } - let minor = pieces.next()?.parse().ok()?; - let nightly = version.contains("nightly"); - Some(Compiler { minor, nightly }) -} diff --git a/vendor/syn/src/attr.rs b/vendor/syn/src/attr.rs deleted file mode 100644 index 5d6877ae..00000000 --- a/vendor/syn/src/attr.rs +++ /dev/null @@ -1,664 +0,0 @@ -use super::*; -use crate::punctuated::Punctuated; -use proc_macro2::TokenStream; -use std::iter; - -#[cfg(feature = "parsing")] -use crate::parse::{Parse, ParseBuffer, ParseStream, Parser, Result}; -#[cfg(feature = "parsing")] -use crate::punctuated::Pair; - -ast_struct! { - /// An attribute like `#[repr(transparent)]`. - /// - /// *This type is available only if Syn is built with the `"derive"` or `"full"` - /// feature.* - /// - ///
- /// - /// # Syntax - /// - /// Rust has six types of attributes. - /// - /// - Outer attributes like `#[repr(transparent)]`. These appear outside or - /// in front of the item they describe. - /// - Inner attributes like `#![feature(proc_macro)]`. These appear inside - /// of the item they describe, usually a module. - /// - Outer doc comments like `/// # Example`. - /// - Inner doc comments like `//! Please file an issue`. - /// - Outer block comments `/** # Example */`. - /// - Inner block comments `/*! Please file an issue */`. - /// - /// The `style` field of type `AttrStyle` distinguishes whether an attribute - /// is outer or inner. Doc comments and block comments are promoted to - /// attributes, as this is how they are processed by the compiler and by - /// `macro_rules!` macros. - /// - /// The `path` field gives the possibly colon-delimited path against which - /// the attribute is resolved. It is equal to `"doc"` for desugared doc - /// comments. The `tokens` field contains the rest of the attribute body as - /// tokens. - /// - /// ```text - /// #[derive(Copy)] #[crate::precondition x < 5] - /// ^^^^^^~~~~~~ ^^^^^^^^^^^^^^^^^^^ ~~~~~ - /// path tokens path tokens - /// ``` - /// - ///
- /// - /// # Parsing from tokens to Attribute - /// - /// This type does not implement the [`Parse`] trait and thus cannot be - /// parsed directly by [`ParseStream::parse`]. Instead use - /// [`ParseStream::call`] with one of the two parser functions - /// [`Attribute::parse_outer`] or [`Attribute::parse_inner`] depending on - /// which you intend to parse. - /// - /// [`Parse`]: parse::Parse - /// [`ParseStream::parse`]: parse::ParseBuffer::parse - /// [`ParseStream::call`]: parse::ParseBuffer::call - /// - /// ``` - /// use syn::{Attribute, Ident, Result, Token}; - /// use syn::parse::{Parse, ParseStream}; - /// - /// // Parses a unit struct with attributes. - /// // - /// // #[path = "s.tmpl"] - /// // struct S; - /// struct UnitStruct { - /// attrs: Vec, - /// struct_token: Token![struct], - /// name: Ident, - /// semi_token: Token![;], - /// } - /// - /// impl Parse for UnitStruct { - /// fn parse(input: ParseStream) -> Result { - /// Ok(UnitStruct { - /// attrs: input.call(Attribute::parse_outer)?, - /// struct_token: input.parse()?, - /// name: input.parse()?, - /// semi_token: input.parse()?, - /// }) - /// } - /// } - /// ``` - /// - ///


- /// - /// # Parsing from Attribute to structured arguments - /// - /// The grammar of attributes in Rust is very flexible, which makes the - /// syntax tree not that useful on its own. In particular, arguments of the - /// attribute are held in an arbitrary `tokens: TokenStream`. Macros are - /// expected to check the `path` of the attribute, decide whether they - /// recognize it, and then parse the remaining tokens according to whatever - /// grammar they wish to require for that kind of attribute. - /// - /// If the attribute you are parsing is expected to conform to the - /// conventional structured form of attribute, use [`parse_meta()`] to - /// obtain that structured representation. If the attribute follows some - /// other grammar of its own, use [`parse_args()`] to parse that into the - /// expected data structure. - /// - /// [`parse_meta()`]: Attribute::parse_meta - /// [`parse_args()`]: Attribute::parse_args - /// - ///


- /// - /// # Doc comments - /// - /// The compiler transforms doc comments, such as `/// comment` and `/*! - /// comment */`, into attributes before macros are expanded. Each comment is - /// expanded into an attribute of the form `#[doc = r"comment"]`. - /// - /// As an example, the following `mod` items are expanded identically: - /// - /// ``` - /// # use syn::{ItemMod, parse_quote}; - /// let doc: ItemMod = parse_quote! { - /// /// Single line doc comments - /// /// We write so many! - /// /** - /// * Multi-line comments... - /// * May span many lines - /// */ - /// mod example { - /// //! Of course, they can be inner too - /// /*! And fit in a single line */ - /// } - /// }; - /// let attr: ItemMod = parse_quote! { - /// #[doc = r" Single line doc comments"] - /// #[doc = r" We write so many!"] - /// #[doc = r" - /// * Multi-line comments... - /// * May span many lines - /// "] - /// mod example { - /// #![doc = r" Of course, they can be inner too"] - /// #![doc = r" And fit in a single line "] - /// } - /// }; - /// assert_eq!(doc, attr); - /// ``` - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub struct Attribute { - pub pound_token: Token![#], - pub style: AttrStyle, - pub bracket_token: token::Bracket, - pub path: Path, - pub tokens: TokenStream, - } -} - -impl Attribute { - /// Parses the content of the attribute, consisting of the path and tokens, - /// as a [`Meta`] if possible. - /// - /// *This function is available only if Syn is built with the `"parsing"` - /// feature.* - #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - pub fn parse_meta(&self) -> Result { - fn clone_ident_segment(segment: &PathSegment) -> PathSegment { - PathSegment { - ident: segment.ident.clone(), - arguments: PathArguments::None, - } - } - - let path = Path { - leading_colon: self - .path - .leading_colon - .as_ref() - .map(|colon| Token![::](colon.spans)), - segments: self - .path - .segments - .pairs() - .map(|pair| match pair { - Pair::Punctuated(seg, punct) => { - Pair::Punctuated(clone_ident_segment(seg), Token![::](punct.spans)) - } - Pair::End(seg) => Pair::End(clone_ident_segment(seg)), - }) - .collect(), - }; - - let parser = |input: ParseStream| parsing::parse_meta_after_path(path, input); - parse::Parser::parse2(parser, self.tokens.clone()) - } - - /// Parse the arguments to the attribute as a syntax tree. - /// - /// This is similar to `syn::parse2::(attr.tokens)` except that: - /// - /// - the surrounding delimiters are *not* included in the input to the - /// parser; and - /// - the error message has a more useful span when `tokens` is empty. - /// - /// ```text - /// #[my_attr(value < 5)] - /// ^^^^^^^^^ what gets parsed - /// ``` - /// - /// *This function is available only if Syn is built with the `"parsing"` - /// feature.* - #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - pub fn parse_args(&self) -> Result { - self.parse_args_with(T::parse) - } - - /// Parse the arguments to the attribute using the given parser. - /// - /// *This function is available only if Syn is built with the `"parsing"` - /// feature.* - #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - pub fn parse_args_with(&self, parser: F) -> Result { - let parser = |input: ParseStream| { - let args = enter_args(self, input)?; - parse::parse_stream(parser, &args) - }; - parser.parse2(self.tokens.clone()) - } - - /// Parses zero or more outer attributes from the stream. 
- /// - /// *This function is available only if Syn is built with the `"parsing"` - /// feature.* - #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - pub fn parse_outer(input: ParseStream) -> Result> { - let mut attrs = Vec::new(); - while input.peek(Token![#]) { - attrs.push(input.call(parsing::single_parse_outer)?); - } - Ok(attrs) - } - - /// Parses zero or more inner attributes from the stream. - /// - /// *This function is available only if Syn is built with the `"parsing"` - /// feature.* - #[cfg(feature = "parsing")] - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - pub fn parse_inner(input: ParseStream) -> Result> { - let mut attrs = Vec::new(); - parsing::parse_inner(input, &mut attrs)?; - Ok(attrs) - } -} - -#[cfg(feature = "parsing")] -fn expected_parentheses(attr: &Attribute) -> String { - let style = match attr.style { - AttrStyle::Outer => "#", - AttrStyle::Inner(_) => "#!", - }; - - let mut path = String::new(); - for segment in &attr.path.segments { - if !path.is_empty() || attr.path.leading_colon.is_some() { - path += "::"; - } - path += &segment.ident.to_string(); - } - - format!("{}[{}(...)]", style, path) -} - -#[cfg(feature = "parsing")] -fn enter_args<'a>(attr: &Attribute, input: ParseStream<'a>) -> Result> { - if input.is_empty() { - let expected = expected_parentheses(attr); - let msg = format!("expected attribute arguments in parentheses: {}", expected); - return Err(crate::error::new2( - attr.pound_token.span, - attr.bracket_token.span, - msg, - )); - } else if input.peek(Token![=]) { - let expected = expected_parentheses(attr); - let msg = format!("expected parentheses: {}", expected); - return Err(input.error(msg)); - }; - - let content; - if input.peek(token::Paren) { - parenthesized!(content in input); - } else if input.peek(token::Bracket) { - bracketed!(content in input); - } else if input.peek(token::Brace) { - braced!(content in input); - } else { - return Err(input.error("unexpected token in attribute arguments")); - } - - if input.is_empty() { - Ok(content) - } else { - Err(input.error("unexpected token in attribute arguments")) - } -} - -ast_enum! { - /// Distinguishes between attributes that decorate an item and attributes - /// that are contained within an item. - /// - /// *This type is available only if Syn is built with the `"derive"` or `"full"` - /// feature.* - /// - /// # Outer attributes - /// - /// - `#[repr(transparent)]` - /// - `/// # Example` - /// - `/** Please file an issue */` - /// - /// # Inner attributes - /// - /// - `#![feature(proc_macro)]` - /// - `//! # Example` - /// - `/*! Please file an issue */` - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub enum AttrStyle { - Outer, - Inner(Token![!]), - } -} - -ast_enum_of_structs! { - /// Content of a compile-time structured attribute. - /// - /// *This type is available only if Syn is built with the `"derive"` or `"full"` - /// feature.* - /// - /// ## Path - /// - /// A meta path is like the `test` in `#[test]`. - /// - /// ## List - /// - /// A meta list is like the `derive(Copy)` in `#[derive(Copy)]`. - /// - /// ## NameValue - /// - /// A name-value meta is like the `path = "..."` in `#[path = - /// "sys/windows.rs"]`. - /// - /// # Syntax tree enum - /// - /// This type is a [syntax tree enum]. 
- /// - /// [syntax tree enum]: Expr#syntax-tree-enums - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub enum Meta { - Path(Path), - - /// A structured list within an attribute, like `derive(Copy, Clone)`. - List(MetaList), - - /// A name-value pair within an attribute, like `feature = "nightly"`. - NameValue(MetaNameValue), - } -} - -ast_struct! { - /// A structured list within an attribute, like `derive(Copy, Clone)`. - /// - /// *This type is available only if Syn is built with the `"derive"` or - /// `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub struct MetaList { - pub path: Path, - pub paren_token: token::Paren, - pub nested: Punctuated, - } -} - -ast_struct! { - /// A name-value pair within an attribute, like `feature = "nightly"`. - /// - /// *This type is available only if Syn is built with the `"derive"` or - /// `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub struct MetaNameValue { - pub path: Path, - pub eq_token: Token![=], - pub lit: Lit, - } -} - -impl Meta { - /// Returns the identifier that begins this structured meta item. - /// - /// For example this would return the `test` in `#[test]`, the `derive` in - /// `#[derive(Copy)]`, and the `path` in `#[path = "sys/windows.rs"]`. - pub fn path(&self) -> &Path { - match self { - Meta::Path(path) => path, - Meta::List(meta) => &meta.path, - Meta::NameValue(meta) => &meta.path, - } - } -} - -ast_enum_of_structs! { - /// Element of a compile-time attribute list. - /// - /// *This type is available only if Syn is built with the `"derive"` or `"full"` - /// feature.* - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub enum NestedMeta { - /// A structured meta item, like the `Copy` in `#[derive(Copy)]` which - /// would be a nested `Meta::Path`. - Meta(Meta), - - /// A Rust literal, like the `"new_name"` in `#[rename("new_name")]`. - Lit(Lit), - } -} - -/// Conventional argument type associated with an invocation of an attribute -/// macro. -/// -/// For example if we are developing an attribute macro that is intended to be -/// invoked on function items as follows: -/// -/// ``` -/// # const IGNORE: &str = stringify! { -/// #[my_attribute(path = "/v1/refresh")] -/// # }; -/// pub fn refresh() { -/// /* ... */ -/// } -/// ``` -/// -/// The implementation of this macro would want to parse its attribute arguments -/// as type `AttributeArgs`. -/// -/// ``` -/// # extern crate proc_macro; -/// # -/// use proc_macro::TokenStream; -/// use syn::{parse_macro_input, AttributeArgs, ItemFn}; -/// -/// # const IGNORE: &str = stringify! { -/// #[proc_macro_attribute] -/// # }; -/// pub fn my_attribute(args: TokenStream, input: TokenStream) -> TokenStream { -/// let args = parse_macro_input!(args as AttributeArgs); -/// let input = parse_macro_input!(input as ItemFn); -/// -/// /* ... 
*/ -/// # "".parse().unwrap() -/// } -/// ``` -#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] -pub type AttributeArgs = Vec; - -pub trait FilterAttrs<'a> { - type Ret: Iterator; - - fn outer(self) -> Self::Ret; - fn inner(self) -> Self::Ret; -} - -impl<'a, T> FilterAttrs<'a> for T -where - T: IntoIterator, -{ - type Ret = iter::Filter bool>; - - fn outer(self) -> Self::Ret { - fn is_outer(attr: &&Attribute) -> bool { - match attr.style { - AttrStyle::Outer => true, - AttrStyle::Inner(_) => false, - } - } - self.into_iter().filter(is_outer) - } - - fn inner(self) -> Self::Ret { - fn is_inner(attr: &&Attribute) -> bool { - match attr.style { - AttrStyle::Inner(_) => true, - AttrStyle::Outer => false, - } - } - self.into_iter().filter(is_inner) - } -} - -#[cfg(feature = "parsing")] -pub mod parsing { - use super::*; - use crate::ext::IdentExt; - use crate::parse::{Parse, ParseStream, Result}; - - pub fn parse_inner(input: ParseStream, attrs: &mut Vec) -> Result<()> { - while input.peek(Token![#]) && input.peek2(Token![!]) { - attrs.push(input.call(parsing::single_parse_inner)?); - } - Ok(()) - } - - pub fn single_parse_inner(input: ParseStream) -> Result { - let content; - Ok(Attribute { - pound_token: input.parse()?, - style: AttrStyle::Inner(input.parse()?), - bracket_token: bracketed!(content in input), - path: content.call(Path::parse_mod_style)?, - tokens: content.parse()?, - }) - } - - pub fn single_parse_outer(input: ParseStream) -> Result { - let content; - Ok(Attribute { - pound_token: input.parse()?, - style: AttrStyle::Outer, - bracket_token: bracketed!(content in input), - path: content.call(Path::parse_mod_style)?, - tokens: content.parse()?, - }) - } - - // Like Path::parse_mod_style but accepts keywords in the path. 
- fn parse_meta_path(input: ParseStream) -> Result { - Ok(Path { - leading_colon: input.parse()?, - segments: { - let mut segments = Punctuated::new(); - while input.peek(Ident::peek_any) { - let ident = Ident::parse_any(input)?; - segments.push_value(PathSegment::from(ident)); - if !input.peek(Token![::]) { - break; - } - let punct = input.parse()?; - segments.push_punct(punct); - } - if segments.is_empty() { - return Err(input.error("expected path")); - } else if segments.trailing_punct() { - return Err(input.error("expected path segment")); - } - segments - }, - }) - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - impl Parse for Meta { - fn parse(input: ParseStream) -> Result { - let path = input.call(parse_meta_path)?; - parse_meta_after_path(path, input) - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - impl Parse for MetaList { - fn parse(input: ParseStream) -> Result { - let path = input.call(parse_meta_path)?; - parse_meta_list_after_path(path, input) - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - impl Parse for MetaNameValue { - fn parse(input: ParseStream) -> Result { - let path = input.call(parse_meta_path)?; - parse_meta_name_value_after_path(path, input) - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - impl Parse for NestedMeta { - fn parse(input: ParseStream) -> Result { - if input.peek(Lit) && !(input.peek(LitBool) && input.peek2(Token![=])) { - input.parse().map(NestedMeta::Lit) - } else if input.peek(Ident::peek_any) - || input.peek(Token![::]) && input.peek3(Ident::peek_any) - { - input.parse().map(NestedMeta::Meta) - } else { - Err(input.error("expected identifier or literal")) - } - } - } - - pub fn parse_meta_after_path(path: Path, input: ParseStream) -> Result { - if input.peek(token::Paren) { - parse_meta_list_after_path(path, input).map(Meta::List) - } else if input.peek(Token![=]) { - parse_meta_name_value_after_path(path, input).map(Meta::NameValue) - } else { - Ok(Meta::Path(path)) - } - } - - fn parse_meta_list_after_path(path: Path, input: ParseStream) -> Result { - let content; - Ok(MetaList { - path, - paren_token: parenthesized!(content in input), - nested: content.parse_terminated(NestedMeta::parse)?, - }) - } - - fn parse_meta_name_value_after_path(path: Path, input: ParseStream) -> Result { - Ok(MetaNameValue { - path, - eq_token: input.parse()?, - lit: input.parse()?, - }) - } -} - -#[cfg(feature = "printing")] -mod printing { - use super::*; - use proc_macro2::TokenStream; - use quote::ToTokens; - - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] - impl ToTokens for Attribute { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.pound_token.to_tokens(tokens); - if let AttrStyle::Inner(b) = &self.style { - b.to_tokens(tokens); - } - self.bracket_token.surround(tokens, |tokens| { - self.path.to_tokens(tokens); - self.tokens.to_tokens(tokens); - }); - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] - impl ToTokens for MetaList { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.path.to_tokens(tokens); - self.paren_token.surround(tokens, |tokens| { - self.nested.to_tokens(tokens); - }); - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] - impl ToTokens for MetaNameValue { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.path.to_tokens(tokens); - self.eq_token.to_tokens(tokens); - self.lit.to_tokens(tokens); - } - } -} diff --git a/vendor/syn/src/await.rs b/vendor/syn/src/await.rs deleted file mode 100644 index 038c6a5d..00000000 --- 
a/vendor/syn/src/await.rs +++ /dev/null @@ -1,2 +0,0 @@ -// See include!("await.rs") in token.rs. -export_token_macro! {[await]} diff --git a/vendor/syn/src/bigint.rs b/vendor/syn/src/bigint.rs deleted file mode 100644 index 5397d6be..00000000 --- a/vendor/syn/src/bigint.rs +++ /dev/null @@ -1,66 +0,0 @@ -use std::ops::{AddAssign, MulAssign}; - -// For implementing base10_digits() accessor on LitInt. -pub struct BigInt { - digits: Vec, -} - -impl BigInt { - pub fn new() -> Self { - BigInt { digits: Vec::new() } - } - - pub fn to_string(&self) -> String { - let mut repr = String::with_capacity(self.digits.len()); - - let mut has_nonzero = false; - for digit in self.digits.iter().rev() { - has_nonzero |= *digit != 0; - if has_nonzero { - repr.push((*digit + b'0') as char); - } - } - - if repr.is_empty() { - repr.push('0'); - } - - repr - } - - fn reserve_two_digits(&mut self) { - let len = self.digits.len(); - let desired = - len + !self.digits.ends_with(&[0, 0]) as usize + !self.digits.ends_with(&[0]) as usize; - self.digits.resize(desired, 0); - } -} - -impl AddAssign for BigInt { - // Assumes increment <16. - fn add_assign(&mut self, mut increment: u8) { - self.reserve_two_digits(); - - let mut i = 0; - while increment > 0 { - let sum = self.digits[i] + increment; - self.digits[i] = sum % 10; - increment = sum / 10; - i += 1; - } - } -} - -impl MulAssign for BigInt { - // Assumes base <=16. - fn mul_assign(&mut self, base: u8) { - self.reserve_two_digits(); - - let mut carry = 0; - for digit in &mut self.digits { - let prod = *digit * base + carry; - *digit = prod % 10; - carry = prod / 10; - } - } -} diff --git a/vendor/syn/src/buffer.rs b/vendor/syn/src/buffer.rs deleted file mode 100644 index ec172259..00000000 --- a/vendor/syn/src/buffer.rs +++ /dev/null @@ -1,399 +0,0 @@ -//! A stably addressed token buffer supporting efficient traversal based on a -//! cheaply copyable cursor. -//! -//! *This module is available only if Syn is built with the `"parsing"` feature.* - -// This module is heavily commented as it contains most of the unsafe code in -// Syn, and caution should be used when editing it. The public-facing interface -// is 100% safe but the implementation is fragile internally. - -#[cfg(all( - not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))), - feature = "proc-macro" -))] -use crate::proc_macro as pm; -use crate::Lifetime; -use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree}; -use std::marker::PhantomData; -use std::ptr; - -/// Internal type which is used instead of `TokenTree` to represent a token tree -/// within a `TokenBuffer`. -enum Entry { - // Mimicking types from proc-macro. - Group(Group, TokenBuffer), - Ident(Ident), - Punct(Punct), - Literal(Literal), - // End entries contain a raw pointer to the entry from the containing - // token tree, or null if this is the outermost level. - End(*const Entry), -} - -/// A buffer that can be efficiently traversed multiple times, unlike -/// `TokenStream` which requires a deep copy in order to traverse more than -/// once. -/// -/// *This type is available only if Syn is built with the `"parsing"` feature.* -pub struct TokenBuffer { - // NOTE: Do not derive clone on this - there are raw pointers inside which - // will be messed up. Moving the `TokenBuffer` itself is safe as the actual - // backing slices won't be moved. 
- data: Box<[Entry]>, -} - -impl TokenBuffer { - // NOTE: DO NOT MUTATE THE `Vec` RETURNED FROM THIS FUNCTION ONCE IT - // RETURNS, THE ADDRESS OF ITS BACKING MEMORY MUST REMAIN STABLE. - fn inner_new(stream: TokenStream, up: *const Entry) -> TokenBuffer { - // Build up the entries list, recording the locations of any Groups - // in the list to be processed later. - let mut entries = Vec::new(); - let mut seqs = Vec::new(); - for tt in stream { - match tt { - TokenTree::Ident(sym) => { - entries.push(Entry::Ident(sym)); - } - TokenTree::Punct(op) => { - entries.push(Entry::Punct(op)); - } - TokenTree::Literal(l) => { - entries.push(Entry::Literal(l)); - } - TokenTree::Group(g) => { - // Record the index of the interesting entry, and store an - // `End(null)` there temporarially. - seqs.push((entries.len(), g)); - entries.push(Entry::End(ptr::null())); - } - } - } - // Add an `End` entry to the end with a reference to the enclosing token - // stream which was passed in. - entries.push(Entry::End(up)); - - // NOTE: This is done to ensure that we don't accidentally modify the - // length of the backing buffer. The backing buffer must remain at a - // constant address after this point, as we are going to store a raw - // pointer into it. - let mut entries = entries.into_boxed_slice(); - for (idx, group) in seqs { - // We know that this index refers to one of the temporary - // `End(null)` entries, and we know that the last entry is - // `End(up)`, so the next index is also valid. - let seq_up = &entries[idx + 1] as *const Entry; - - // The end entry stored at the end of this Entry::Group should - // point to the Entry which follows the Group in the list. - let inner = Self::inner_new(group.stream(), seq_up); - entries[idx] = Entry::Group(group, inner); - } - - TokenBuffer { data: entries } - } - - /// Creates a `TokenBuffer` containing all the tokens from the input - /// `TokenStream`. - /// - /// *This method is available only if Syn is built with both the `"parsing"` and - /// `"proc-macro"` features.* - #[cfg(all( - not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))), - feature = "proc-macro" - ))] - pub fn new(stream: pm::TokenStream) -> TokenBuffer { - Self::new2(stream.into()) - } - - /// Creates a `TokenBuffer` containing all the tokens from the input - /// `TokenStream`. - pub fn new2(stream: TokenStream) -> TokenBuffer { - Self::inner_new(stream, ptr::null()) - } - - /// Creates a cursor referencing the first token in the buffer and able to - /// traverse until the end of the buffer. - pub fn begin(&self) -> Cursor { - unsafe { Cursor::create(&self.data[0], &self.data[self.data.len() - 1]) } - } -} - -/// A cheaply copyable cursor into a `TokenBuffer`. -/// -/// This cursor holds a shared reference into the immutable data which is used -/// internally to represent a `TokenStream`, and can be efficiently manipulated -/// and copied around. -/// -/// An empty `Cursor` can be created directly, or one may create a `TokenBuffer` -/// object and get a cursor to its first token with `begin()`. -/// -/// Two cursors are equal if they have the same location in the same input -/// stream, and have the same scope. -/// -/// *This type is available only if Syn is built with the `"parsing"` feature.* -pub struct Cursor<'a> { - // The current entry which the `Cursor` is pointing at. - ptr: *const Entry, - // This is the only `Entry::End(..)` object which this cursor is allowed to - // point at. All other `End` objects are skipped over in `Cursor::create`. 
- scope: *const Entry, - // Cursor is covariant in 'a. This field ensures that our pointers are still - // valid. - marker: PhantomData<&'a Entry>, -} - -impl<'a> Cursor<'a> { - /// Creates a cursor referencing a static empty TokenStream. - pub fn empty() -> Self { - // It's safe in this situation for us to put an `Entry` object in global - // storage, despite it not actually being safe to send across threads - // (`Ident` is a reference into a thread-local table). This is because - // this entry never includes a `Ident` object. - // - // This wrapper struct allows us to break the rules and put a `Sync` - // object in global storage. - struct UnsafeSyncEntry(Entry); - unsafe impl Sync for UnsafeSyncEntry {} - static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0 as *const Entry)); - - Cursor { - ptr: &EMPTY_ENTRY.0, - scope: &EMPTY_ENTRY.0, - marker: PhantomData, - } - } - - /// This create method intelligently exits non-explicitly-entered - /// `None`-delimited scopes when the cursor reaches the end of them, - /// allowing for them to be treated transparently. - unsafe fn create(mut ptr: *const Entry, scope: *const Entry) -> Self { - // NOTE: If we're looking at a `End(..)`, we want to advance the cursor - // past it, unless `ptr == scope`, which means that we're at the edge of - // our cursor's scope. We should only have `ptr != scope` at the exit - // from None-delimited groups entered with `ignore_none`. - while let Entry::End(exit) = *ptr { - if ptr == scope { - break; - } - ptr = exit; - } - - Cursor { - ptr, - scope, - marker: PhantomData, - } - } - - /// Get the current entry. - fn entry(self) -> &'a Entry { - unsafe { &*self.ptr } - } - - /// Bump the cursor to point at the next token after the current one. This - /// is undefined behavior if the cursor is currently looking at an - /// `Entry::End`. - unsafe fn bump(self) -> Cursor<'a> { - Cursor::create(self.ptr.offset(1), self.scope) - } - - /// While the cursor is looking at a `None`-delimited group, move it to look - /// at the first token inside instead. If the group is empty, this will move - /// the cursor past the `None`-delimited group. - /// - /// WARNING: This mutates its argument. - fn ignore_none(&mut self) { - while let Entry::Group(group, buf) = self.entry() { - if group.delimiter() == Delimiter::None { - // NOTE: We call `Cursor::create` here to make sure that - // situations where we should immediately exit the span after - // entering it are handled correctly. - unsafe { - *self = Cursor::create(&buf.data[0], self.scope); - } - } else { - break; - } - } - } - - /// Checks whether the cursor is currently pointing at the end of its valid - /// scope. - pub fn eof(self) -> bool { - // We're at eof if we're at the end of our scope. - self.ptr == self.scope - } - - /// If the cursor is pointing at a `Group` with the given delimiter, returns - /// a cursor into that group and one pointing to the next `TokenTree`. - pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, Span, Cursor<'a>)> { - // If we're not trying to enter a none-delimited group, we want to - // ignore them. We have to make sure to _not_ ignore them when we want - // to enter them, of course. For obvious reasons. 
- if delim != Delimiter::None { - self.ignore_none(); - } - - if let Entry::Group(group, buf) = self.entry() { - if group.delimiter() == delim { - return Some((buf.begin(), group.span(), unsafe { self.bump() })); - } - } - - None - } - - /// If the cursor is pointing at a `Ident`, returns it along with a cursor - /// pointing at the next `TokenTree`. - pub fn ident(mut self) -> Option<(Ident, Cursor<'a>)> { - self.ignore_none(); - match self.entry() { - Entry::Ident(ident) => Some((ident.clone(), unsafe { self.bump() })), - _ => None, - } - } - - /// If the cursor is pointing at an `Punct`, returns it along with a cursor - /// pointing at the next `TokenTree`. - pub fn punct(mut self) -> Option<(Punct, Cursor<'a>)> { - self.ignore_none(); - match self.entry() { - Entry::Punct(op) if op.as_char() != '\'' => Some((op.clone(), unsafe { self.bump() })), - _ => None, - } - } - - /// If the cursor is pointing at a `Literal`, return it along with a cursor - /// pointing at the next `TokenTree`. - pub fn literal(mut self) -> Option<(Literal, Cursor<'a>)> { - self.ignore_none(); - match self.entry() { - Entry::Literal(lit) => Some((lit.clone(), unsafe { self.bump() })), - _ => None, - } - } - - /// If the cursor is pointing at a `Lifetime`, returns it along with a - /// cursor pointing at the next `TokenTree`. - pub fn lifetime(mut self) -> Option<(Lifetime, Cursor<'a>)> { - self.ignore_none(); - match self.entry() { - Entry::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => { - let next = unsafe { self.bump() }; - match next.ident() { - Some((ident, rest)) => { - let lifetime = Lifetime { - apostrophe: op.span(), - ident, - }; - Some((lifetime, rest)) - } - None => None, - } - } - _ => None, - } - } - - /// Copies all remaining tokens visible from this cursor into a - /// `TokenStream`. - pub fn token_stream(self) -> TokenStream { - let mut tts = Vec::new(); - let mut cursor = self; - while let Some((tt, rest)) = cursor.token_tree() { - tts.push(tt); - cursor = rest; - } - tts.into_iter().collect() - } - - /// If the cursor is pointing at a `TokenTree`, returns it along with a - /// cursor pointing at the next `TokenTree`. - /// - /// Returns `None` if the cursor has reached the end of its stream. - /// - /// This method does not treat `None`-delimited groups as transparent, and - /// will return a `Group(None, ..)` if the cursor is looking at one. - pub fn token_tree(self) -> Option<(TokenTree, Cursor<'a>)> { - let tree = match self.entry() { - Entry::Group(group, _) => group.clone().into(), - Entry::Literal(lit) => lit.clone().into(), - Entry::Ident(ident) => ident.clone().into(), - Entry::Punct(op) => op.clone().into(), - Entry::End(..) => { - return None; - } - }; - - Some((tree, unsafe { self.bump() })) - } - - /// Returns the `Span` of the current token, or `Span::call_site()` if this - /// cursor points to eof. - pub fn span(self) -> Span { - match self.entry() { - Entry::Group(group, _) => group.span(), - Entry::Literal(l) => l.span(), - Entry::Ident(t) => t.span(), - Entry::Punct(o) => o.span(), - Entry::End(..) => Span::call_site(), - } - } - - /// Skip over the next token without cloning it. Returns `None` if this - /// cursor points to eof. - /// - /// This method treats `'lifetimes` as a single token. - pub(crate) fn skip(self) -> Option> { - match self.entry() { - Entry::End(..) => None, - - // Treat lifetimes as a single tt for the purposes of 'skip'. 
- Entry::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => { - let next = unsafe { self.bump() }; - match next.entry() { - Entry::Ident(_) => Some(unsafe { next.bump() }), - _ => Some(next), - } - } - _ => Some(unsafe { self.bump() }), - } - } -} - -impl<'a> Copy for Cursor<'a> {} - -impl<'a> Clone for Cursor<'a> { - fn clone(&self) -> Self { - *self - } -} - -impl<'a> Eq for Cursor<'a> {} - -impl<'a> PartialEq for Cursor<'a> { - fn eq(&self, other: &Self) -> bool { - let Cursor { ptr, scope, marker } = self; - let _ = marker; - *ptr == other.ptr && *scope == other.scope - } -} - -pub(crate) fn same_scope(a: Cursor, b: Cursor) -> bool { - a.scope == b.scope -} - -pub(crate) fn open_span_of_group(cursor: Cursor) -> Span { - match cursor.entry() { - Entry::Group(group, _) => group.span_open(), - _ => cursor.span(), - } -} - -pub(crate) fn close_span_of_group(cursor: Cursor) -> Span { - match cursor.entry() { - Entry::Group(group, _) => group.span_close(), - _ => cursor.span(), - } -} diff --git a/vendor/syn/src/custom_keyword.rs b/vendor/syn/src/custom_keyword.rs deleted file mode 100644 index 69d787e5..00000000 --- a/vendor/syn/src/custom_keyword.rs +++ /dev/null @@ -1,253 +0,0 @@ -/// Define a type that supports parsing and printing a given identifier as if it -/// were a keyword. -/// -/// # Usage -/// -/// As a convention, it is recommended that this macro be invoked within a -/// module called `kw` or `keyword` and that the resulting parser be invoked -/// with a `kw::` or `keyword::` prefix. -/// -/// ``` -/// mod kw { -/// syn::custom_keyword!(whatever); -/// } -/// ``` -/// -/// The generated syntax tree node supports the following operations just like -/// any built-in keyword token. -/// -/// - [Peeking] — `input.peek(kw::whatever)` -/// -/// - [Parsing] — `input.parse::()?` -/// -/// - [Printing] — `quote!( ... #whatever_token ... )` -/// -/// - Construction from a [`Span`] — `let whatever_token = kw::whatever(sp)` -/// -/// - Field access to its span — `let sp = whatever_token.span` -/// -/// [Peeking]: crate::parse::ParseBuffer::peek -/// [Parsing]: crate::parse::ParseBuffer::parse -/// [Printing]: quote::ToTokens -/// [`Span`]: proc_macro2::Span -/// -/// # Example -/// -/// This example parses input that looks like `bool = true` or `str = "value"`. -/// The key must be either the identifier `bool` or the identifier `str`. If -/// `bool`, the value may be either `true` or `false`. If `str`, the value may -/// be any string literal. -/// -/// The symbols `bool` and `str` are not reserved keywords in Rust so these are -/// not considered keywords in the `syn::token` module. Like any other -/// identifier that is not a keyword, these can be declared as custom keywords -/// by crates that need to use them as such. 
-/// -/// ``` -/// use syn::{LitBool, LitStr, Result, Token}; -/// use syn::parse::{Parse, ParseStream}; -/// -/// mod kw { -/// syn::custom_keyword!(bool); -/// syn::custom_keyword!(str); -/// } -/// -/// enum Argument { -/// Bool { -/// bool_token: kw::bool, -/// eq_token: Token![=], -/// value: LitBool, -/// }, -/// Str { -/// str_token: kw::str, -/// eq_token: Token![=], -/// value: LitStr, -/// }, -/// } -/// -/// impl Parse for Argument { -/// fn parse(input: ParseStream) -> Result { -/// let lookahead = input.lookahead1(); -/// if lookahead.peek(kw::bool) { -/// Ok(Argument::Bool { -/// bool_token: input.parse::()?, -/// eq_token: input.parse()?, -/// value: input.parse()?, -/// }) -/// } else if lookahead.peek(kw::str) { -/// Ok(Argument::Str { -/// str_token: input.parse::()?, -/// eq_token: input.parse()?, -/// value: input.parse()?, -/// }) -/// } else { -/// Err(lookahead.error()) -/// } -/// } -/// } -/// ``` -#[macro_export] -macro_rules! custom_keyword { - ($ident:ident) => { - #[allow(non_camel_case_types)] - pub struct $ident { - pub span: $crate::__private::Span, - } - - #[doc(hidden)] - #[allow(dead_code, non_snake_case)] - pub fn $ident<__S: $crate::__private::IntoSpans<[$crate::__private::Span; 1]>>( - span: __S, - ) -> $ident { - $ident { - span: $crate::__private::IntoSpans::into_spans(span)[0], - } - } - - impl $crate::__private::Default for $ident { - fn default() -> Self { - $ident { - span: $crate::__private::Span::call_site(), - } - } - } - - $crate::impl_parse_for_custom_keyword!($ident); - $crate::impl_to_tokens_for_custom_keyword!($ident); - $crate::impl_clone_for_custom_keyword!($ident); - $crate::impl_extra_traits_for_custom_keyword!($ident); - }; -} - -// Not public API. -#[cfg(feature = "parsing")] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_parse_for_custom_keyword { - ($ident:ident) => { - // For peek. - impl $crate::token::CustomToken for $ident { - fn peek(cursor: $crate::buffer::Cursor) -> $crate::__private::bool { - if let Some((ident, _rest)) = cursor.ident() { - ident == stringify!($ident) - } else { - false - } - } - - fn display() -> &'static $crate::__private::str { - concat!("`", stringify!($ident), "`") - } - } - - impl $crate::parse::Parse for $ident { - fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> { - input.step(|cursor| { - if let $crate::__private::Some((ident, rest)) = cursor.ident() { - if ident == stringify!($ident) { - return $crate::__private::Ok(($ident { span: ident.span() }, rest)); - } - } - $crate::__private::Err(cursor.error(concat!( - "expected `", - stringify!($ident), - "`" - ))) - }) - } - } - }; -} - -// Not public API. -#[cfg(not(feature = "parsing"))] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_parse_for_custom_keyword { - ($ident:ident) => {}; -} - -// Not public API. -#[cfg(feature = "printing")] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_to_tokens_for_custom_keyword { - ($ident:ident) => { - impl $crate::__private::ToTokens for $ident { - fn to_tokens(&self, tokens: &mut $crate::__private::TokenStream2) { - let ident = $crate::Ident::new(stringify!($ident), self.span); - $crate::__private::TokenStreamExt::append(tokens, ident); - } - } - }; -} - -// Not public API. -#[cfg(not(feature = "printing"))] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_to_tokens_for_custom_keyword { - ($ident:ident) => {}; -} - -// Not public API. -#[cfg(feature = "clone-impls")] -#[doc(hidden)] -#[macro_export] -macro_rules! 
impl_clone_for_custom_keyword { - ($ident:ident) => { - impl $crate::__private::Copy for $ident {} - - #[allow(clippy::expl_impl_clone_on_copy)] - impl $crate::__private::Clone for $ident { - fn clone(&self) -> Self { - *self - } - } - }; -} - -// Not public API. -#[cfg(not(feature = "clone-impls"))] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_clone_for_custom_keyword { - ($ident:ident) => {}; -} - -// Not public API. -#[cfg(feature = "extra-traits")] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_extra_traits_for_custom_keyword { - ($ident:ident) => { - impl $crate::__private::Debug for $ident { - fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::fmt::Result { - $crate::__private::Formatter::write_str( - f, - concat!("Keyword [", stringify!($ident), "]"), - ) - } - } - - impl $crate::__private::Eq for $ident {} - - impl $crate::__private::PartialEq for $ident { - fn eq(&self, _other: &Self) -> $crate::__private::bool { - true - } - } - - impl $crate::__private::Hash for $ident { - fn hash<__H: $crate::__private::Hasher>(&self, _state: &mut __H) {} - } - }; -} - -// Not public API. -#[cfg(not(feature = "extra-traits"))] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_extra_traits_for_custom_keyword { - ($ident:ident) => {}; -} diff --git a/vendor/syn/src/custom_punctuation.rs b/vendor/syn/src/custom_punctuation.rs deleted file mode 100644 index 118a8453..00000000 --- a/vendor/syn/src/custom_punctuation.rs +++ /dev/null @@ -1,300 +0,0 @@ -/// Define a type that supports parsing and printing a multi-character symbol -/// as if it were a punctuation token. -/// -/// # Usage -/// -/// ``` -/// syn::custom_punctuation!(LeftRightArrow, <=>); -/// ``` -/// -/// The generated syntax tree node supports the following operations just like -/// any built-in punctuation token. -/// -/// - [Peeking] — `input.peek(LeftRightArrow)` -/// -/// - [Parsing] — `input.parse::()?` -/// -/// - [Printing] — `quote!( ... #lrarrow ... )` -/// -/// - Construction from a [`Span`] — `let lrarrow = LeftRightArrow(sp)` -/// -/// - Construction from multiple [`Span`] — `let lrarrow = LeftRightArrow([sp, sp, sp])` -/// -/// - Field access to its spans — `let spans = lrarrow.spans` -/// -/// [Peeking]: crate::parse::ParseBuffer::peek -/// [Parsing]: crate::parse::ParseBuffer::parse -/// [Printing]: quote::ToTokens -/// [`Span`]: proc_macro2::Span -/// -/// # Example -/// -/// ``` -/// use proc_macro2::{TokenStream, TokenTree}; -/// use syn::parse::{Parse, ParseStream, Peek, Result}; -/// use syn::punctuated::Punctuated; -/// use syn::Expr; -/// -/// syn::custom_punctuation!(PathSeparator, ); -/// -/// // expr expr expr ... 
-/// struct PathSegments { -/// segments: Punctuated, -/// } -/// -/// impl Parse for PathSegments { -/// fn parse(input: ParseStream) -> Result { -/// let mut segments = Punctuated::new(); -/// -/// let first = parse_until(input, PathSeparator)?; -/// segments.push_value(syn::parse2(first)?); -/// -/// while input.peek(PathSeparator) { -/// segments.push_punct(input.parse()?); -/// -/// let next = parse_until(input, PathSeparator)?; -/// segments.push_value(syn::parse2(next)?); -/// } -/// -/// Ok(PathSegments { segments }) -/// } -/// } -/// -/// fn parse_until(input: ParseStream, end: E) -> Result { -/// let mut tokens = TokenStream::new(); -/// while !input.is_empty() && !input.peek(end) { -/// let next: TokenTree = input.parse()?; -/// tokens.extend(Some(next)); -/// } -/// Ok(tokens) -/// } -/// -/// fn main() { -/// let input = r#" a::b c::d::e "#; -/// let _: PathSegments = syn::parse_str(input).unwrap(); -/// } -/// ``` -#[macro_export] -macro_rules! custom_punctuation { - ($ident:ident, $($tt:tt)+) => { - pub struct $ident { - pub spans: $crate::custom_punctuation_repr!($($tt)+), - } - - #[doc(hidden)] - #[allow(dead_code, non_snake_case)] - pub fn $ident<__S: $crate::__private::IntoSpans<$crate::custom_punctuation_repr!($($tt)+)>>( - spans: __S, - ) -> $ident { - let _validate_len = 0 $(+ $crate::custom_punctuation_len!(strict, $tt))*; - $ident { - spans: $crate::__private::IntoSpans::into_spans(spans) - } - } - - impl $crate::__private::Default for $ident { - fn default() -> Self { - $ident($crate::__private::Span::call_site()) - } - } - - $crate::impl_parse_for_custom_punctuation!($ident, $($tt)+); - $crate::impl_to_tokens_for_custom_punctuation!($ident, $($tt)+); - $crate::impl_clone_for_custom_punctuation!($ident, $($tt)+); - $crate::impl_extra_traits_for_custom_punctuation!($ident, $($tt)+); - }; -} - -// Not public API. -#[cfg(feature = "parsing")] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_parse_for_custom_punctuation { - ($ident:ident, $($tt:tt)+) => { - impl $crate::token::CustomToken for $ident { - fn peek(cursor: $crate::buffer::Cursor) -> bool { - $crate::token::parsing::peek_punct(cursor, $crate::stringify_punct!($($tt)+)) - } - - fn display() -> &'static $crate::__private::str { - concat!("`", $crate::stringify_punct!($($tt)+), "`") - } - } - - impl $crate::parse::Parse for $ident { - fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> { - let spans: $crate::custom_punctuation_repr!($($tt)+) = - $crate::token::parsing::punct(input, $crate::stringify_punct!($($tt)+))?; - Ok($ident(spans)) - } - } - }; -} - -// Not public API. -#[cfg(not(feature = "parsing"))] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_parse_for_custom_punctuation { - ($ident:ident, $($tt:tt)+) => {}; -} - -// Not public API. -#[cfg(feature = "printing")] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_to_tokens_for_custom_punctuation { - ($ident:ident, $($tt:tt)+) => { - impl $crate::__private::ToTokens for $ident { - fn to_tokens(&self, tokens: &mut $crate::__private::TokenStream2) { - $crate::token::printing::punct($crate::stringify_punct!($($tt)+), &self.spans, tokens) - } - } - }; -} - -// Not public API. -#[cfg(not(feature = "printing"))] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_to_tokens_for_custom_punctuation { - ($ident:ident, $($tt:tt)+) => {}; -} - -// Not public API. -#[cfg(feature = "clone-impls")] -#[doc(hidden)] -#[macro_export] -macro_rules! 
impl_clone_for_custom_punctuation { - ($ident:ident, $($tt:tt)+) => { - impl $crate::__private::Copy for $ident {} - - #[allow(clippy::expl_impl_clone_on_copy)] - impl $crate::__private::Clone for $ident { - fn clone(&self) -> Self { - *self - } - } - }; -} - -// Not public API. -#[cfg(not(feature = "clone-impls"))] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_clone_for_custom_punctuation { - ($ident:ident, $($tt:tt)+) => {}; -} - -// Not public API. -#[cfg(feature = "extra-traits")] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_extra_traits_for_custom_punctuation { - ($ident:ident, $($tt:tt)+) => { - impl $crate::__private::Debug for $ident { - fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::fmt::Result { - $crate::__private::Formatter::write_str(f, stringify!($ident)) - } - } - - impl $crate::__private::Eq for $ident {} - - impl $crate::__private::PartialEq for $ident { - fn eq(&self, _other: &Self) -> $crate::__private::bool { - true - } - } - - impl $crate::__private::Hash for $ident { - fn hash<__H: $crate::__private::Hasher>(&self, _state: &mut __H) {} - } - }; -} - -// Not public API. -#[cfg(not(feature = "extra-traits"))] -#[doc(hidden)] -#[macro_export] -macro_rules! impl_extra_traits_for_custom_punctuation { - ($ident:ident, $($tt:tt)+) => {}; -} - -// Not public API. -#[doc(hidden)] -#[macro_export] -macro_rules! custom_punctuation_repr { - ($($tt:tt)+) => { - [$crate::__private::Span; 0 $(+ $crate::custom_punctuation_len!(lenient, $tt))+] - }; -} - -// Not public API. -#[doc(hidden)] -#[macro_export] -#[rustfmt::skip] -macro_rules! custom_punctuation_len { - ($mode:ident, +) => { 1 }; - ($mode:ident, +=) => { 2 }; - ($mode:ident, &) => { 1 }; - ($mode:ident, &&) => { 2 }; - ($mode:ident, &=) => { 2 }; - ($mode:ident, @) => { 1 }; - ($mode:ident, !) => { 1 }; - ($mode:ident, ^) => { 1 }; - ($mode:ident, ^=) => { 2 }; - ($mode:ident, :) => { 1 }; - ($mode:ident, ::) => { 2 }; - ($mode:ident, ,) => { 1 }; - ($mode:ident, /) => { 1 }; - ($mode:ident, /=) => { 2 }; - ($mode:ident, .) => { 1 }; - ($mode:ident, ..) => { 2 }; - ($mode:ident, ...) => { 3 }; - ($mode:ident, ..=) => { 3 }; - ($mode:ident, =) => { 1 }; - ($mode:ident, ==) => { 2 }; - ($mode:ident, >=) => { 2 }; - ($mode:ident, >) => { 1 }; - ($mode:ident, <=) => { 2 }; - ($mode:ident, <) => { 1 }; - ($mode:ident, *=) => { 2 }; - ($mode:ident, !=) => { 2 }; - ($mode:ident, |) => { 1 }; - ($mode:ident, |=) => { 2 }; - ($mode:ident, ||) => { 2 }; - ($mode:ident, #) => { 1 }; - ($mode:ident, ?) => { 1 }; - ($mode:ident, ->) => { 2 }; - ($mode:ident, <-) => { 2 }; - ($mode:ident, %) => { 1 }; - ($mode:ident, %=) => { 2 }; - ($mode:ident, =>) => { 2 }; - ($mode:ident, ;) => { 1 }; - ($mode:ident, <<) => { 2 }; - ($mode:ident, <<=) => { 3 }; - ($mode:ident, >>) => { 2 }; - ($mode:ident, >>=) => { 3 }; - ($mode:ident, *) => { 1 }; - ($mode:ident, -) => { 1 }; - ($mode:ident, -=) => { 2 }; - ($mode:ident, ~) => { 1 }; - (lenient, $tt:tt) => { 0 }; - (strict, $tt:tt) => {{ $crate::custom_punctuation_unexpected!($tt); 0 }}; -} - -// Not public API. -#[doc(hidden)] -#[macro_export] -macro_rules! custom_punctuation_unexpected { - () => {}; -} - -// Not public API. -#[doc(hidden)] -#[macro_export] -macro_rules! 
stringify_punct { - ($($tt:tt)+) => { - concat!($(stringify!($tt)),+) - }; -} diff --git a/vendor/syn/src/data.rs b/vendor/syn/src/data.rs deleted file mode 100644 index 837224e1..00000000 --- a/vendor/syn/src/data.rs +++ /dev/null @@ -1,501 +0,0 @@ -use super::*; -use crate::punctuated::Punctuated; - -ast_struct! { - /// An enum variant. - /// - /// *This type is available only if Syn is built with the `"derive"` or `"full"` - /// feature.* - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub struct Variant { - /// Attributes tagged on the variant. - pub attrs: Vec, - - /// Name of the variant. - pub ident: Ident, - - /// Content stored in the variant. - pub fields: Fields, - - /// Explicit discriminant: `Variant = 1` - pub discriminant: Option<(Token![=], Expr)>, - } -} - -ast_enum_of_structs! { - /// Data stored within an enum variant or struct. - /// - /// *This type is available only if Syn is built with the `"derive"` or `"full"` - /// feature.* - /// - /// # Syntax tree enum - /// - /// This type is a [syntax tree enum]. - /// - /// [syntax tree enum]: Expr#syntax-tree-enums - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub enum Fields { - /// Named fields of a struct or struct variant such as `Point { x: f64, - /// y: f64 }`. - Named(FieldsNamed), - - /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`. - Unnamed(FieldsUnnamed), - - /// Unit struct or unit variant such as `None`. - Unit, - } -} - -ast_struct! { - /// Named fields of a struct or struct variant such as `Point { x: f64, - /// y: f64 }`. - /// - /// *This type is available only if Syn is built with the `"derive"` or - /// `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub struct FieldsNamed { - pub brace_token: token::Brace, - pub named: Punctuated, - } -} - -ast_struct! { - /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`. - /// - /// *This type is available only if Syn is built with the `"derive"` or - /// `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub struct FieldsUnnamed { - pub paren_token: token::Paren, - pub unnamed: Punctuated, - } -} - -impl Fields { - /// Get an iterator over the borrowed [`Field`] items in this object. This - /// iterator can be used to iterate over a named or unnamed struct or - /// variant's fields uniformly. - pub fn iter(&self) -> punctuated::Iter { - match self { - Fields::Unit => crate::punctuated::empty_punctuated_iter(), - Fields::Named(f) => f.named.iter(), - Fields::Unnamed(f) => f.unnamed.iter(), - } - } - - /// Get an iterator over the mutably borrowed [`Field`] items in this - /// object. This iterator can be used to iterate over a named or unnamed - /// struct or variant's fields uniformly. - pub fn iter_mut(&mut self) -> punctuated::IterMut { - match self { - Fields::Unit => crate::punctuated::empty_punctuated_iter_mut(), - Fields::Named(f) => f.named.iter_mut(), - Fields::Unnamed(f) => f.unnamed.iter_mut(), - } - } - - /// Returns the number of fields. - pub fn len(&self) -> usize { - match self { - Fields::Unit => 0, - Fields::Named(f) => f.named.len(), - Fields::Unnamed(f) => f.unnamed.len(), - } - } - - /// Returns `true` if there are zero fields. 
- pub fn is_empty(&self) -> bool { - match self { - Fields::Unit => true, - Fields::Named(f) => f.named.is_empty(), - Fields::Unnamed(f) => f.unnamed.is_empty(), - } - } -} - -impl IntoIterator for Fields { - type Item = Field; - type IntoIter = punctuated::IntoIter; - - fn into_iter(self) -> Self::IntoIter { - match self { - Fields::Unit => Punctuated::::new().into_iter(), - Fields::Named(f) => f.named.into_iter(), - Fields::Unnamed(f) => f.unnamed.into_iter(), - } - } -} - -impl<'a> IntoIterator for &'a Fields { - type Item = &'a Field; - type IntoIter = punctuated::Iter<'a, Field>; - - fn into_iter(self) -> Self::IntoIter { - self.iter() - } -} - -impl<'a> IntoIterator for &'a mut Fields { - type Item = &'a mut Field; - type IntoIter = punctuated::IterMut<'a, Field>; - - fn into_iter(self) -> Self::IntoIter { - self.iter_mut() - } -} - -ast_struct! { - /// A field of a struct or enum variant. - /// - /// *This type is available only if Syn is built with the `"derive"` or `"full"` - /// feature.* - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub struct Field { - /// Attributes tagged on the field. - pub attrs: Vec, - - /// Visibility of the field. - pub vis: Visibility, - - /// Name of the field, if any. - /// - /// Fields of tuple structs have no names. - pub ident: Option, - - pub colon_token: Option, - - /// Type of the field. - pub ty: Type, - } -} - -ast_enum_of_structs! { - /// The visibility level of an item: inherited or `pub` or - /// `pub(restricted)`. - /// - /// *This type is available only if Syn is built with the `"derive"` or `"full"` - /// feature.* - /// - /// # Syntax tree enum - /// - /// This type is a [syntax tree enum]. - /// - /// [syntax tree enum]: Expr#syntax-tree-enums - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub enum Visibility { - /// A public visibility level: `pub`. - Public(VisPublic), - - /// A crate-level visibility: `crate`. - Crate(VisCrate), - - /// A visibility level restricted to some path: `pub(self)` or - /// `pub(super)` or `pub(crate)` or `pub(in some::module)`. - Restricted(VisRestricted), - - /// An inherited visibility, which usually means private. - Inherited, - } -} - -ast_struct! { - /// A public visibility level: `pub`. - /// - /// *This type is available only if Syn is built with the `"derive"` or - /// `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub struct VisPublic { - pub pub_token: Token![pub], - } -} - -ast_struct! { - /// A crate-level visibility: `crate`. - /// - /// *This type is available only if Syn is built with the `"derive"` or - /// `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub struct VisCrate { - pub crate_token: Token![crate], - } -} - -ast_struct! { - /// A visibility level restricted to some path: `pub(self)` or - /// `pub(super)` or `pub(crate)` or `pub(in some::module)`. 
- /// - /// *This type is available only if Syn is built with the `"derive"` or - /// `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub struct VisRestricted { - pub pub_token: Token![pub], - pub paren_token: token::Paren, - pub in_token: Option, - pub path: Box, - } -} - -#[cfg(feature = "parsing")] -pub mod parsing { - use super::*; - use crate::ext::IdentExt; - use crate::parse::discouraged::Speculative; - use crate::parse::{Parse, ParseStream, Result}; - - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - impl Parse for Variant { - fn parse(input: ParseStream) -> Result { - let mut attrs = input.call(Attribute::parse_outer)?; - let _visibility: Visibility = input.parse()?; - let ident: Ident = input.parse()?; - let fields = if input.peek(token::Brace) { - let fields = parse_braced(input, &mut attrs)?; - Fields::Named(fields) - } else if input.peek(token::Paren) { - Fields::Unnamed(input.parse()?) - } else { - Fields::Unit - }; - let discriminant = if input.peek(Token![=]) { - let eq_token: Token![=] = input.parse()?; - let discriminant: Expr = input.parse()?; - Some((eq_token, discriminant)) - } else { - None - }; - Ok(Variant { - attrs, - ident, - fields, - discriminant, - }) - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - impl Parse for FieldsNamed { - fn parse(input: ParseStream) -> Result { - let content; - Ok(FieldsNamed { - brace_token: braced!(content in input), - named: content.parse_terminated(Field::parse_named)?, - }) - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - impl Parse for FieldsUnnamed { - fn parse(input: ParseStream) -> Result { - let content; - Ok(FieldsUnnamed { - paren_token: parenthesized!(content in input), - unnamed: content.parse_terminated(Field::parse_unnamed)?, - }) - } - } - - pub(crate) fn parse_braced( - input: ParseStream, - attrs: &mut Vec, - ) -> Result { - let content; - let brace_token = braced!(content in input); - attr::parsing::parse_inner(&content, attrs)?; - let named = content.parse_terminated(Field::parse_named)?; - Ok(FieldsNamed { brace_token, named }) - } - - impl Field { - /// Parses a named (braced struct) field. - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - pub fn parse_named(input: ParseStream) -> Result { - Ok(Field { - attrs: input.call(Attribute::parse_outer)?, - vis: input.parse()?, - ident: Some(input.parse()?), - colon_token: Some(input.parse()?), - ty: input.parse()?, - }) - } - - /// Parses an unnamed (tuple struct) field. - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - pub fn parse_unnamed(input: ParseStream) -> Result { - Ok(Field { - attrs: input.call(Attribute::parse_outer)?, - vis: input.parse()?, - ident: None, - colon_token: None, - ty: input.parse()?, - }) - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - impl Parse for Visibility { - fn parse(input: ParseStream) -> Result { - // Recognize an empty None-delimited group, as produced by a $:vis - // matcher that matched no tokens. 
- if input.peek(token::Group) { - let ahead = input.fork(); - let group = crate::group::parse_group(&ahead)?; - if group.content.is_empty() { - input.advance_to(&ahead); - return Ok(Visibility::Inherited); - } - } - - if input.peek(Token![pub]) { - Self::parse_pub(input) - } else if input.peek(Token![crate]) { - Self::parse_crate(input) - } else { - Ok(Visibility::Inherited) - } - } - } - - impl Visibility { - fn parse_pub(input: ParseStream) -> Result { - let pub_token = input.parse::()?; - - if input.peek(token::Paren) { - let ahead = input.fork(); - - let content; - let paren_token = parenthesized!(content in ahead); - if content.peek(Token![crate]) - || content.peek(Token![self]) - || content.peek(Token![super]) - { - let path = content.call(Ident::parse_any)?; - - // Ensure there are no additional tokens within `content`. - // Without explicitly checking, we may misinterpret a tuple - // field as a restricted visibility, causing a parse error. - // e.g. `pub (crate::A, crate::B)` (Issue #720). - if content.is_empty() { - input.advance_to(&ahead); - return Ok(Visibility::Restricted(VisRestricted { - pub_token, - paren_token, - in_token: None, - path: Box::new(Path::from(path)), - })); - } - } else if content.peek(Token![in]) { - let in_token: Token![in] = content.parse()?; - let path = content.call(Path::parse_mod_style)?; - - input.advance_to(&ahead); - return Ok(Visibility::Restricted(VisRestricted { - pub_token, - paren_token, - in_token: Some(in_token), - path: Box::new(path), - })); - } - } - - Ok(Visibility::Public(VisPublic { pub_token })) - } - - fn parse_crate(input: ParseStream) -> Result { - if input.peek2(Token![::]) { - Ok(Visibility::Inherited) - } else { - Ok(Visibility::Crate(VisCrate { - crate_token: input.parse()?, - })) - } - } - - #[cfg(feature = "full")] - pub(crate) fn is_some(&self) -> bool { - match self { - Visibility::Inherited => false, - _ => true, - } - } - } -} - -#[cfg(feature = "printing")] -mod printing { - use super::*; - use crate::print::TokensOrDefault; - use proc_macro2::TokenStream; - use quote::{ToTokens, TokenStreamExt}; - - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] - impl ToTokens for Variant { - fn to_tokens(&self, tokens: &mut TokenStream) { - tokens.append_all(&self.attrs); - self.ident.to_tokens(tokens); - self.fields.to_tokens(tokens); - if let Some((eq_token, disc)) = &self.discriminant { - eq_token.to_tokens(tokens); - disc.to_tokens(tokens); - } - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] - impl ToTokens for FieldsNamed { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.brace_token.surround(tokens, |tokens| { - self.named.to_tokens(tokens); - }); - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] - impl ToTokens for FieldsUnnamed { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.paren_token.surround(tokens, |tokens| { - self.unnamed.to_tokens(tokens); - }); - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] - impl ToTokens for Field { - fn to_tokens(&self, tokens: &mut TokenStream) { - tokens.append_all(&self.attrs); - self.vis.to_tokens(tokens); - if let Some(ident) = &self.ident { - ident.to_tokens(tokens); - TokensOrDefault(&self.colon_token).to_tokens(tokens); - } - self.ty.to_tokens(tokens); - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] - impl ToTokens for VisPublic { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.pub_token.to_tokens(tokens); - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] - impl ToTokens for 
VisCrate { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.crate_token.to_tokens(tokens); - } - } - - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] - impl ToTokens for VisRestricted { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.pub_token.to_tokens(tokens); - self.paren_token.surround(tokens, |tokens| { - // TODO: If we have a path which is not "self" or "super" or - // "crate", automatically add the "in" token. - self.in_token.to_tokens(tokens); - self.path.to_tokens(tokens); - }); - } - } -} diff --git a/vendor/syn/src/derive.rs b/vendor/syn/src/derive.rs deleted file mode 100644 index 17387e45..00000000 --- a/vendor/syn/src/derive.rs +++ /dev/null @@ -1,280 +0,0 @@ -use super::*; -use crate::punctuated::Punctuated; - -ast_struct! { - /// Data structure sent to a `proc_macro_derive` macro. - /// - /// *This type is available only if Syn is built with the `"derive"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] - pub struct DeriveInput { - /// Attributes tagged on the whole struct or enum. - pub attrs: Vec, - - /// Visibility of the struct or enum. - pub vis: Visibility, - - /// Name of the struct or enum. - pub ident: Ident, - - /// Generics required to complete the definition. - pub generics: Generics, - - /// Data within the struct or enum. - pub data: Data, - } -} - -ast_enum_of_structs! { - /// The storage of a struct, enum or union data structure. - /// - /// *This type is available only if Syn is built with the `"derive"` feature.* - /// - /// # Syntax tree enum - /// - /// This type is a [syntax tree enum]. - /// - /// [syntax tree enum]: Expr#syntax-tree-enums - #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] - pub enum Data { - /// A struct input to a `proc_macro_derive` macro. - Struct(DataStruct), - - /// An enum input to a `proc_macro_derive` macro. - Enum(DataEnum), - - /// An untagged union input to a `proc_macro_derive` macro. - Union(DataUnion), - } - - do_not_generate_to_tokens -} - -ast_struct! { - /// A struct input to a `proc_macro_derive` macro. - /// - /// *This type is available only if Syn is built with the `"derive"` - /// feature.* - #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] - pub struct DataStruct { - pub struct_token: Token![struct], - pub fields: Fields, - pub semi_token: Option, - } -} - -ast_struct! { - /// An enum input to a `proc_macro_derive` macro. - /// - /// *This type is available only if Syn is built with the `"derive"` - /// feature.* - #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] - pub struct DataEnum { - pub enum_token: Token![enum], - pub brace_token: token::Brace, - pub variants: Punctuated, - } -} - -ast_struct! { - /// An untagged union input to a `proc_macro_derive` macro. 
- /// - /// *This type is available only if Syn is built with the `"derive"` - /// feature.* - #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] - pub struct DataUnion { - pub union_token: Token![union], - pub fields: FieldsNamed, - } -} - -#[cfg(feature = "parsing")] -pub mod parsing { - use super::*; - use crate::parse::{Parse, ParseStream, Result}; - - #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] - impl Parse for DeriveInput { - fn parse(input: ParseStream) -> Result { - let mut attrs = input.call(Attribute::parse_outer)?; - let vis = input.parse::()?; - - let lookahead = input.lookahead1(); - if lookahead.peek(Token![struct]) { - let struct_token = input.parse::()?; - let ident = input.parse::()?; - let generics = input.parse::()?; - let (where_clause, fields, semi) = data_struct(input, &mut attrs)?; - Ok(DeriveInput { - attrs, - vis, - ident, - generics: Generics { - where_clause, - ..generics - }, - data: Data::Struct(DataStruct { - struct_token, - fields, - semi_token: semi, - }), - }) - } else if lookahead.peek(Token![enum]) { - let enum_token = input.parse::()?; - let ident = input.parse::()?; - let generics = input.parse::()?; - let (where_clause, brace, variants) = data_enum(input, &mut attrs)?; - Ok(DeriveInput { - attrs, - vis, - ident, - generics: Generics { - where_clause, - ..generics - }, - data: Data::Enum(DataEnum { - enum_token, - brace_token: brace, - variants, - }), - }) - } else if lookahead.peek(Token![union]) { - let union_token = input.parse::()?; - let ident = input.parse::()?; - let generics = input.parse::()?; - let (where_clause, fields) = data_union(input, &mut attrs)?; - Ok(DeriveInput { - attrs, - vis, - ident, - generics: Generics { - where_clause, - ..generics - }, - data: Data::Union(DataUnion { - union_token, - fields, - }), - }) - } else { - Err(lookahead.error()) - } - } - } - - pub fn data_struct( - input: ParseStream, - attrs: &mut Vec, - ) -> Result<(Option, Fields, Option)> { - let mut lookahead = input.lookahead1(); - let mut where_clause = None; - if lookahead.peek(Token![where]) { - where_clause = Some(input.parse()?); - lookahead = input.lookahead1(); - } - - if where_clause.is_none() && lookahead.peek(token::Paren) { - let fields = input.parse()?; - - lookahead = input.lookahead1(); - if lookahead.peek(Token![where]) { - where_clause = Some(input.parse()?); - lookahead = input.lookahead1(); - } - - if lookahead.peek(Token![;]) { - let semi = input.parse()?; - Ok((where_clause, Fields::Unnamed(fields), Some(semi))) - } else { - Err(lookahead.error()) - } - } else if lookahead.peek(token::Brace) { - let fields = data::parsing::parse_braced(input, attrs)?; - Ok((where_clause, Fields::Named(fields), None)) - } else if lookahead.peek(Token![;]) { - let semi = input.parse()?; - Ok((where_clause, Fields::Unit, Some(semi))) - } else { - Err(lookahead.error()) - } - } - - pub fn data_enum( - input: ParseStream, - attrs: &mut Vec, - ) -> Result<( - Option, - token::Brace, - Punctuated, - )> { - let where_clause = input.parse()?; - - let content; - let brace = braced!(content in input); - attr::parsing::parse_inner(&content, attrs)?; - let variants = content.parse_terminated(Variant::parse)?; - - Ok((where_clause, brace, variants)) - } - - pub fn data_union( - input: ParseStream, - attrs: &mut Vec, - ) -> Result<(Option, FieldsNamed)> { - let where_clause = input.parse()?; - let fields = data::parsing::parse_braced(input, attrs)?; - Ok((where_clause, fields)) - } -} - -#[cfg(feature = "printing")] -mod printing { - use super::*; - use 
crate::attr::FilterAttrs; - use crate::print::TokensOrDefault; - use proc_macro2::TokenStream; - use quote::ToTokens; - - #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] - impl ToTokens for DeriveInput { - fn to_tokens(&self, tokens: &mut TokenStream) { - for attr in self.attrs.outer() { - attr.to_tokens(tokens); - } - self.vis.to_tokens(tokens); - match &self.data { - Data::Struct(d) => d.struct_token.to_tokens(tokens), - Data::Enum(d) => d.enum_token.to_tokens(tokens), - Data::Union(d) => d.union_token.to_tokens(tokens), - } - self.ident.to_tokens(tokens); - self.generics.to_tokens(tokens); - match &self.data { - Data::Struct(data) => match &data.fields { - Fields::Named(fields) => { - self.generics.where_clause.to_tokens(tokens); - fields.to_tokens(tokens); - } - Fields::Unnamed(fields) => { - fields.to_tokens(tokens); - self.generics.where_clause.to_tokens(tokens); - TokensOrDefault(&data.semi_token).to_tokens(tokens); - } - Fields::Unit => { - self.generics.where_clause.to_tokens(tokens); - TokensOrDefault(&data.semi_token).to_tokens(tokens); - } - }, - Data::Enum(data) => { - self.generics.where_clause.to_tokens(tokens); - data.brace_token.surround(tokens, |tokens| { - data.variants.to_tokens(tokens); - }); - } - Data::Union(data) => { - self.generics.where_clause.to_tokens(tokens); - data.fields.to_tokens(tokens); - } - } - } - } -} diff --git a/vendor/syn/src/discouraged.rs b/vendor/syn/src/discouraged.rs deleted file mode 100644 index a46129b6..00000000 --- a/vendor/syn/src/discouraged.rs +++ /dev/null @@ -1,194 +0,0 @@ -//! Extensions to the parsing API with niche applicability. - -use super::*; - -/// Extensions to the `ParseStream` API to support speculative parsing. -pub trait Speculative { - /// Advance this parse stream to the position of a forked parse stream. - /// - /// This is the opposite operation to [`ParseStream::fork`]. You can fork a - /// parse stream, perform some speculative parsing, then join the original - /// stream to the fork to "commit" the parsing from the fork to the main - /// stream. - /// - /// If you can avoid doing this, you should, as it limits the ability to - /// generate useful errors. That said, it is often the only way to parse - /// syntax of the form `A* B*` for arbitrary syntax `A` and `B`. The problem - /// is that when the fork fails to parse an `A`, it's impossible to tell - /// whether that was because of a syntax error and the user meant to provide - /// an `A`, or that the `A`s are finished and it's time to start parsing - /// `B`s. Use with care. - /// - /// Also note that if `A` is a subset of `B`, `A* B*` can be parsed by - /// parsing `B*` and removing the leading members of `A` from the - /// repetition, bypassing the need to involve the downsides associated with - /// speculative parsing. - /// - /// [`ParseStream::fork`]: ParseBuffer::fork - /// - /// # Example - /// - /// There has been chatter about the possibility of making the colons in the - /// turbofish syntax like `path::to::` no longer required by accepting - /// `path::to` in expression position. Specifically, according to [RFC - /// 2544], [`PathSegment`] parsing should always try to consume a following - /// `<` token as the start of generic arguments, and reset to the `<` if - /// that fails (e.g. the token is acting as a less-than operator). - /// - /// This is the exact kind of parsing behavior which requires the "fork, - /// try, commit" behavior that [`ParseStream::fork`] discourages. 
With - /// `advance_to`, we can avoid having to parse the speculatively parsed - /// content a second time. - /// - /// This change in behavior can be implemented in syn by replacing just the - /// `Parse` implementation for `PathSegment`: - /// - /// ``` - /// # use syn::ext::IdentExt; - /// use syn::parse::discouraged::Speculative; - /// # use syn::parse::{Parse, ParseStream}; - /// # use syn::{Ident, PathArguments, Result, Token}; - /// - /// pub struct PathSegment { - /// pub ident: Ident, - /// pub arguments: PathArguments, - /// } - /// # - /// # impl From for PathSegment - /// # where - /// # T: Into, - /// # { - /// # fn from(ident: T) -> Self { - /// # PathSegment { - /// # ident: ident.into(), - /// # arguments: PathArguments::None, - /// # } - /// # } - /// # } - /// - /// impl Parse for PathSegment { - /// fn parse(input: ParseStream) -> Result { - /// if input.peek(Token![super]) - /// || input.peek(Token![self]) - /// || input.peek(Token![Self]) - /// || input.peek(Token![crate]) - /// { - /// let ident = input.call(Ident::parse_any)?; - /// return Ok(PathSegment::from(ident)); - /// } - /// - /// let ident = input.parse()?; - /// if input.peek(Token![::]) && input.peek3(Token![<]) { - /// return Ok(PathSegment { - /// ident, - /// arguments: PathArguments::AngleBracketed(input.parse()?), - /// }); - /// } - /// if input.peek(Token![<]) && !input.peek(Token![<=]) { - /// let fork = input.fork(); - /// if let Ok(arguments) = fork.parse() { - /// input.advance_to(&fork); - /// return Ok(PathSegment { - /// ident, - /// arguments: PathArguments::AngleBracketed(arguments), - /// }); - /// } - /// } - /// Ok(PathSegment::from(ident)) - /// } - /// } - /// - /// # syn::parse_str::("a").unwrap(); - /// ``` - /// - /// # Drawbacks - /// - /// The main drawback of this style of speculative parsing is in error - /// presentation. Even if the lookahead is the "correct" parse, the error - /// that is shown is that of the "fallback" parse. To use the same example - /// as the turbofish above, take the following unfinished "turbofish": - /// - /// ```text - /// let _ = f<&'a fn(), for<'a> serde::>(); - /// ``` - /// - /// If this is parsed as generic arguments, we can provide the error message - /// - /// ```text - /// error: expected identifier - /// --> src.rs:L:C - /// | - /// L | let _ = f<&'a fn(), for<'a> serde::>(); - /// | ^ - /// ``` - /// - /// but if parsed using the above speculative parsing, it falls back to - /// assuming that the `<` is a less-than when it fails to parse the generic - /// arguments, and tries to interpret the `&'a` as the start of a labelled - /// loop, resulting in the much less helpful error - /// - /// ```text - /// error: expected `:` - /// --> src.rs:L:C - /// | - /// L | let _ = f<&'a fn(), for<'a> serde::>(); - /// | ^^ - /// ``` - /// - /// This can be mitigated with various heuristics (two examples: show both - /// forks' parse errors, or show the one that consumed more tokens), but - /// when you can control the grammar, sticking to something that can be - /// parsed LL(3) and without the LL(*) speculative parsing this makes - /// possible, displaying reasonable errors becomes much more simple. - /// - /// [RFC 2544]: https://github.com/rust-lang/rfcs/pull/2544 - /// [`PathSegment`]: crate::PathSegment - /// - /// # Performance - /// - /// This method performs a cheap fixed amount of work that does not depend - /// on how far apart the two streams are positioned. 
- /// - /// # Panics - /// - /// The forked stream in the argument of `advance_to` must have been - /// obtained by forking `self`. Attempting to advance to any other stream - /// will cause a panic. - fn advance_to(&self, fork: &Self); -} - -impl<'a> Speculative for ParseBuffer<'a> { - fn advance_to(&self, fork: &Self) { - if !crate::buffer::same_scope(self.cursor(), fork.cursor()) { - panic!("Fork was not derived from the advancing parse stream"); - } - - let (self_unexp, self_sp) = inner_unexpected(self); - let (fork_unexp, fork_sp) = inner_unexpected(fork); - if !Rc::ptr_eq(&self_unexp, &fork_unexp) { - match (fork_sp, self_sp) { - // Unexpected set on the fork, but not on `self`, copy it over. - (Some(span), None) => { - self_unexp.set(Unexpected::Some(span)); - } - // Unexpected unset. Use chain to propagate errors from fork. - (None, None) => { - fork_unexp.set(Unexpected::Chain(self_unexp)); - - // Ensure toplevel 'unexpected' tokens from the fork don't - // bubble up the chain by replacing the root `unexpected` - // pointer, only 'unexpected' tokens from existing group - // parsers should bubble. - fork.unexpected - .set(Some(Rc::new(Cell::new(Unexpected::None)))); - } - // Unexpected has been set on `self`. No changes needed. - (_, Some(_)) => {} - } - } - - // See comment on `cell` in the struct definition. - self.cell - .set(unsafe { mem::transmute::>(fork.cursor()) }); - } -} diff --git a/vendor/syn/src/error.rs b/vendor/syn/src/error.rs deleted file mode 100644 index e2f23821..00000000 --- a/vendor/syn/src/error.rs +++ /dev/null @@ -1,412 +0,0 @@ -#[cfg(feature = "parsing")] -use crate::buffer::Cursor; -use crate::thread::ThreadBound; -use proc_macro2::{ - Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree, -}; -#[cfg(feature = "printing")] -use quote::ToTokens; -use std::fmt::{self, Debug, Display}; -use std::iter::FromIterator; -use std::slice; -use std::vec; - -/// The result of a Syn parser. -pub type Result = std::result::Result; - -/// Error returned when a Syn parser cannot parse the input tokens. -/// -/// # Error reporting in proc macros -/// -/// The correct way to report errors back to the compiler from a procedural -/// macro is by emitting an appropriately spanned invocation of -/// [`compile_error!`] in the generated code. This produces a better diagnostic -/// message than simply panicking the macro. -/// -/// [`compile_error!`]: std::compile_error! -/// -/// When parsing macro input, the [`parse_macro_input!`] macro handles the -/// conversion to `compile_error!` automatically. -/// -/// ``` -/// # extern crate proc_macro; -/// # -/// use proc_macro::TokenStream; -/// use syn::{parse_macro_input, AttributeArgs, ItemFn}; -/// -/// # const IGNORE: &str = stringify! { -/// #[proc_macro_attribute] -/// # }; -/// pub fn my_attr(args: TokenStream, input: TokenStream) -> TokenStream { -/// let args = parse_macro_input!(args as AttributeArgs); -/// let input = parse_macro_input!(input as ItemFn); -/// -/// /* ... */ -/// # TokenStream::new() -/// } -/// ``` -/// -/// For errors that arise later than the initial parsing stage, the -/// [`.to_compile_error()`] method can be used to perform an explicit conversion -/// to `compile_error!`. -/// -/// [`.to_compile_error()`]: Error::to_compile_error -/// -/// ``` -/// # extern crate proc_macro; -/// # -/// # use proc_macro::TokenStream; -/// # use syn::{parse_macro_input, DeriveInput}; -/// # -/// # const IGNORE: &str = stringify! 
{ -/// #[proc_macro_derive(MyDerive)] -/// # }; -/// pub fn my_derive(input: TokenStream) -> TokenStream { -/// let input = parse_macro_input!(input as DeriveInput); -/// -/// // fn(DeriveInput) -> syn::Result -/// expand::my_derive(input) -/// .unwrap_or_else(|err| err.to_compile_error()) -/// .into() -/// } -/// # -/// # mod expand { -/// # use proc_macro2::TokenStream; -/// # use syn::{DeriveInput, Result}; -/// # -/// # pub fn my_derive(input: DeriveInput) -> Result { -/// # unimplemented!() -/// # } -/// # } -/// ``` -pub struct Error { - messages: Vec, -} - -struct ErrorMessage { - // Span is implemented as an index into a thread-local interner to keep the - // size small. It is not safe to access from a different thread. We want - // errors to be Send and Sync to play nicely with the Failure crate, so pin - // the span we're given to its original thread and assume it is - // Span::call_site if accessed from any other thread. - start_span: ThreadBound, - end_span: ThreadBound, - message: String, -} - -#[cfg(test)] -struct _Test -where - Error: Send + Sync; - -impl Error { - /// Usually the [`ParseStream::error`] method will be used instead, which - /// automatically uses the correct span from the current position of the - /// parse stream. - /// - /// Use `Error::new` when the error needs to be triggered on some span other - /// than where the parse stream is currently positioned. - /// - /// [`ParseStream::error`]: crate::parse::ParseBuffer::error - /// - /// # Example - /// - /// ``` - /// use syn::{Error, Ident, LitStr, Result, Token}; - /// use syn::parse::ParseStream; - /// - /// // Parses input that looks like `name = "string"` where the key must be - /// // the identifier `name` and the value may be any string literal. - /// // Returns the string literal. - /// fn parse_name(input: ParseStream) -> Result { - /// let name_token: Ident = input.parse()?; - /// if name_token != "name" { - /// // Trigger an error not on the current position of the stream, - /// // but on the position of the unexpected identifier. - /// return Err(Error::new(name_token.span(), "expected `name`")); - /// } - /// input.parse::()?; - /// let s: LitStr = input.parse()?; - /// Ok(s) - /// } - /// ``` - pub fn new(span: Span, message: T) -> Self { - Error { - messages: vec![ErrorMessage { - start_span: ThreadBound::new(span), - end_span: ThreadBound::new(span), - message: message.to_string(), - }], - } - } - - /// Creates an error with the specified message spanning the given syntax - /// tree node. - /// - /// Unlike the `Error::new` constructor, this constructor takes an argument - /// `tokens` which is a syntax tree node. This allows the resulting `Error` - /// to attempt to span all tokens inside of `tokens`. While you would - /// typically be able to use the `Spanned` trait with the above `Error::new` - /// constructor, implementation limitations today mean that - /// `Error::new_spanned` may provide a higher-quality error message on - /// stable Rust. - /// - /// When in doubt it's recommended to stick to `Error::new` (or - /// `ParseStream::error`)! 
- #[cfg(feature = "printing")] - pub fn new_spanned(tokens: T, message: U) -> Self { - let mut iter = tokens.into_token_stream().into_iter(); - let start = iter.next().map_or_else(Span::call_site, |t| t.span()); - let end = iter.last().map_or(start, |t| t.span()); - Error { - messages: vec![ErrorMessage { - start_span: ThreadBound::new(start), - end_span: ThreadBound::new(end), - message: message.to_string(), - }], - } - } - - /// The source location of the error. - /// - /// Spans are not thread-safe so this function returns `Span::call_site()` - /// if called from a different thread than the one on which the `Error` was - /// originally created. - pub fn span(&self) -> Span { - let start = match self.messages[0].start_span.get() { - Some(span) => *span, - None => return Span::call_site(), - }; - let end = match self.messages[0].end_span.get() { - Some(span) => *span, - None => return Span::call_site(), - }; - start.join(end).unwrap_or(start) - } - - /// Render the error as an invocation of [`compile_error!`]. - /// - /// The [`parse_macro_input!`] macro provides a convenient way to invoke - /// this method correctly in a procedural macro. - /// - /// [`compile_error!`]: std::compile_error! - pub fn to_compile_error(&self) -> TokenStream { - self.messages - .iter() - .map(ErrorMessage::to_compile_error) - .collect() - } - - /// Render the error as an invocation of [`compile_error!`]. - /// - /// [`compile_error!`]: std::compile_error! - /// - /// # Example - /// - /// ``` - /// # extern crate proc_macro; - /// # - /// use proc_macro::TokenStream; - /// use syn::{parse_macro_input, DeriveInput, Error}; - /// - /// # const _: &str = stringify! { - /// #[proc_macro_derive(MyTrait)] - /// # }; - /// pub fn derive_my_trait(input: TokenStream) -> TokenStream { - /// let input = parse_macro_input!(input as DeriveInput); - /// my_trait::expand(input) - /// .unwrap_or_else(Error::into_compile_error) - /// .into() - /// } - /// - /// mod my_trait { - /// use proc_macro2::TokenStream; - /// use syn::{DeriveInput, Result}; - /// - /// pub(crate) fn expand(input: DeriveInput) -> Result { - /// /* ... */ - /// # unimplemented!() - /// } - /// } - /// ``` - pub fn into_compile_error(self) -> TokenStream { - self.to_compile_error() - } - - /// Add another error message to self such that when `to_compile_error()` is - /// called, both errors will be emitted together. 
- pub fn combine(&mut self, another: Error) { - self.messages.extend(another.messages); - } -} - -impl ErrorMessage { - fn to_compile_error(&self) -> TokenStream { - let start = self - .start_span - .get() - .cloned() - .unwrap_or_else(Span::call_site); - let end = self.end_span.get().cloned().unwrap_or_else(Span::call_site); - - // compile_error!($message) - TokenStream::from_iter(vec![ - TokenTree::Ident(Ident::new("compile_error", start)), - TokenTree::Punct({ - let mut punct = Punct::new('!', Spacing::Alone); - punct.set_span(start); - punct - }), - TokenTree::Group({ - let mut group = Group::new(Delimiter::Brace, { - TokenStream::from_iter(vec![TokenTree::Literal({ - let mut string = Literal::string(&self.message); - string.set_span(end); - string - })]) - }); - group.set_span(end); - group - }), - ]) - } -} - -#[cfg(feature = "parsing")] -pub fn new_at(scope: Span, cursor: Cursor, message: T) -> Error { - if cursor.eof() { - Error::new(scope, format!("unexpected end of input, {}", message)) - } else { - let span = crate::buffer::open_span_of_group(cursor); - Error::new(span, message) - } -} - -#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))] -pub fn new2(start: Span, end: Span, message: T) -> Error { - Error { - messages: vec![ErrorMessage { - start_span: ThreadBound::new(start), - end_span: ThreadBound::new(end), - message: message.to_string(), - }], - } -} - -impl Debug for Error { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - if self.messages.len() == 1 { - formatter - .debug_tuple("Error") - .field(&self.messages[0]) - .finish() - } else { - formatter - .debug_tuple("Error") - .field(&self.messages) - .finish() - } - } -} - -impl Debug for ErrorMessage { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - Debug::fmt(&self.message, formatter) - } -} - -impl Display for Error { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str(&self.messages[0].message) - } -} - -impl Clone for Error { - fn clone(&self) -> Self { - Error { - messages: self.messages.clone(), - } - } -} - -impl Clone for ErrorMessage { - fn clone(&self) -> Self { - let start = self - .start_span - .get() - .cloned() - .unwrap_or_else(Span::call_site); - let end = self.end_span.get().cloned().unwrap_or_else(Span::call_site); - ErrorMessage { - start_span: ThreadBound::new(start), - end_span: ThreadBound::new(end), - message: self.message.clone(), - } - } -} - -impl std::error::Error for Error {} - -impl From for Error { - fn from(err: LexError) -> Self { - Error::new(err.span(), "lex error") - } -} - -impl IntoIterator for Error { - type Item = Error; - type IntoIter = IntoIter; - - fn into_iter(self) -> Self::IntoIter { - IntoIter { - messages: self.messages.into_iter(), - } - } -} - -pub struct IntoIter { - messages: vec::IntoIter, -} - -impl Iterator for IntoIter { - type Item = Error; - - fn next(&mut self) -> Option { - Some(Error { - messages: vec![self.messages.next()?], - }) - } -} - -impl<'a> IntoIterator for &'a Error { - type Item = Error; - type IntoIter = Iter<'a>; - - fn into_iter(self) -> Self::IntoIter { - Iter { - messages: self.messages.iter(), - } - } -} - -pub struct Iter<'a> { - messages: slice::Iter<'a, ErrorMessage>, -} - -impl<'a> Iterator for Iter<'a> { - type Item = Error; - - fn next(&mut self) -> Option { - Some(Error { - messages: vec![self.messages.next()?.clone()], - }) - } -} - -impl Extend for Error { - fn extend>(&mut self, iter: T) { - for err in iter { - self.combine(err); - } - } -} 
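The `Error::combine` and `Extend` plumbing removed above exists so a derive macro can collect every diagnostic it finds and emit them together, instead of bailing on the first failure. Below is a minimal sketch of that accumulate-then-emit pattern; it is not taken from this patch, and the `check_fields` helper with its underscore rule is purely illustrative.

```rust
use proc_macro2::{Span, TokenStream};
use syn::Error;

// Accumulate one error per offending name and report them all together.
fn check_fields(names: &[&str]) -> Result<(), Error> {
    let mut combined: Option<Error> = None;
    for name in names {
        if name.starts_with('_') {
            let err = Error::new(
                Span::call_site(),
                format!("field `{}` may not start with `_`", name),
            );
            match combined.as_mut() {
                Some(existing) => existing.combine(err),
                None => combined = Some(err),
            }
        }
    }
    combined.map_or(Ok(()), Err)
}

// Inside a proc macro, the combined error renders as one `compile_error!`
// invocation per accumulated message.
fn emit(err: Error) -> TokenStream {
    err.to_compile_error()
}
```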
diff --git a/vendor/syn/src/export.rs b/vendor/syn/src/export.rs deleted file mode 100644 index 601a214b..00000000 --- a/vendor/syn/src/export.rs +++ /dev/null @@ -1,37 +0,0 @@ -pub use std::clone::Clone; -pub use std::cmp::{Eq, PartialEq}; -pub use std::convert::From; -pub use std::default::Default; -pub use std::fmt::{self, Debug, Formatter}; -pub use std::hash::{Hash, Hasher}; -pub use std::marker::Copy; -pub use std::option::Option::{None, Some}; -pub use std::result::Result::{Err, Ok}; - -#[cfg(feature = "printing")] -pub extern crate quote; - -pub use proc_macro2::{Span, TokenStream as TokenStream2}; - -pub use crate::span::IntoSpans; - -#[cfg(all( - not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))), - feature = "proc-macro" -))] -pub use proc_macro::TokenStream; - -#[cfg(feature = "printing")] -pub use quote::{ToTokens, TokenStreamExt}; - -#[allow(non_camel_case_types)] -pub type bool = help::Bool; -#[allow(non_camel_case_types)] -pub type str = help::Str; - -mod help { - pub type Bool = bool; - pub type Str = str; -} - -pub struct private(pub(crate) ()); diff --git a/vendor/syn/src/expr.rs b/vendor/syn/src/expr.rs deleted file mode 100644 index 24f79117..00000000 --- a/vendor/syn/src/expr.rs +++ /dev/null @@ -1,3497 +0,0 @@ -use super::*; -use crate::punctuated::Punctuated; -#[cfg(feature = "full")] -use crate::reserved::Reserved; -use proc_macro2::{Span, TokenStream}; -#[cfg(feature = "printing")] -use quote::IdentFragment; -#[cfg(feature = "printing")] -use std::fmt::{self, Display}; -use std::hash::{Hash, Hasher}; -#[cfg(feature = "parsing")] -use std::mem; - -ast_enum_of_structs! { - /// A Rust expression. - /// - /// *This type is available only if Syn is built with the `"derive"` or `"full"` - /// feature, but most of the variants are not available unless "full" is enabled.* - /// - /// # Syntax tree enums - /// - /// This type is a syntax tree enum. In Syn this and other syntax tree enums - /// are designed to be traversed using the following rebinding idiom. - /// - /// ``` - /// # use syn::Expr; - /// # - /// # fn example(expr: Expr) { - /// # const IGNORE: &str = stringify! { - /// let expr: Expr = /* ... */; - /// # }; - /// match expr { - /// Expr::MethodCall(expr) => { - /// /* ... */ - /// } - /// Expr::Cast(expr) => { - /// /* ... */ - /// } - /// Expr::If(expr) => { - /// /* ... */ - /// } - /// - /// /* ... */ - /// # _ => {} - /// # } - /// # } - /// ``` - /// - /// We begin with a variable `expr` of type `Expr` that has no fields - /// (because it is an enum), and by matching on it and rebinding a variable - /// with the same name `expr` we effectively imbue our variable with all of - /// the data fields provided by the variant that it turned out to be. So for - /// example above if we ended up in the `MethodCall` case then we get to use - /// `expr.receiver`, `expr.args` etc; if we ended up in the `If` case we get - /// to use `expr.cond`, `expr.then_branch`, `expr.else_branch`. - /// - /// This approach avoids repeating the variant names twice on every line. - /// - /// ``` - /// # use syn::{Expr, ExprMethodCall}; - /// # - /// # fn example(expr: Expr) { - /// // Repetitive; recommend not doing this. - /// match expr { - /// Expr::MethodCall(ExprMethodCall { method, args, .. }) => { - /// # } - /// # _ => {} - /// # } - /// # } - /// ``` - /// - /// In general, the name to which a syntax tree enum variant is bound should - /// be a suitable name for the complete syntax tree enum type. 
- /// - /// ``` - /// # use syn::{Expr, ExprField}; - /// # - /// # fn example(discriminant: ExprField) { - /// // Binding is called `base` which is the name I would use if I were - /// // assigning `*discriminant.base` without an `if let`. - /// if let Expr::Tuple(base) = *discriminant.base { - /// # } - /// # } - /// ``` - /// - /// A sign that you may not be choosing the right variable names is if you - /// see names getting repeated in your code, like accessing - /// `receiver.receiver` or `pat.pat` or `cond.cond`. - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub enum Expr { - /// A slice literal expression: `[a, b, c, d]`. - Array(ExprArray), - - /// An assignment expression: `a = compute()`. - Assign(ExprAssign), - - /// A compound assignment expression: `counter += 1`. - AssignOp(ExprAssignOp), - - /// An async block: `async { ... }`. - Async(ExprAsync), - - /// An await expression: `fut.await`. - Await(ExprAwait), - - /// A binary operation: `a + b`, `a * b`. - Binary(ExprBinary), - - /// A blocked scope: `{ ... }`. - Block(ExprBlock), - - /// A box expression: `box f`. - Box(ExprBox), - - /// A `break`, with an optional label to break and an optional - /// expression. - Break(ExprBreak), - - /// A function call expression: `invoke(a, b)`. - Call(ExprCall), - - /// A cast expression: `foo as f64`. - Cast(ExprCast), - - /// A closure expression: `|a, b| a + b`. - Closure(ExprClosure), - - /// A `continue`, with an optional label. - Continue(ExprContinue), - - /// Access of a named struct field (`obj.k`) or unnamed tuple struct - /// field (`obj.0`). - Field(ExprField), - - /// A for loop: `for pat in expr { ... }`. - ForLoop(ExprForLoop), - - /// An expression contained within invisible delimiters. - /// - /// This variant is important for faithfully representing the precedence - /// of expressions and is related to `None`-delimited spans in a - /// `TokenStream`. - Group(ExprGroup), - - /// An `if` expression with an optional `else` block: `if expr { ... } - /// else { ... }`. - /// - /// The `else` branch expression may only be an `If` or `Block` - /// expression, not any of the other types of expression. - If(ExprIf), - - /// A square bracketed indexing expression: `vector[2]`. - Index(ExprIndex), - - /// A `let` guard: `let Some(x) = opt`. - Let(ExprLet), - - /// A literal in place of an expression: `1`, `"foo"`. - Lit(ExprLit), - - /// Conditionless loop: `loop { ... }`. - Loop(ExprLoop), - - /// A macro invocation expression: `format!("{}", q)`. - Macro(ExprMacro), - - /// A `match` expression: `match n { Some(n) => {}, None => {} }`. - Match(ExprMatch), - - /// A method call expression: `x.foo::(a, b)`. - MethodCall(ExprMethodCall), - - /// A parenthesized expression: `(a + b)`. - Paren(ExprParen), - - /// A path like `std::mem::replace` possibly containing generic - /// parameters and a qualified self-type. - /// - /// A plain identifier like `x` is a path of length 1. - Path(ExprPath), - - /// A range expression: `1..2`, `1..`, `..2`, `1..=2`, `..=2`. - Range(ExprRange), - - /// A referencing operation: `&a` or `&mut a`. - Reference(ExprReference), - - /// An array literal constructed from one repeated element: `[0u8; N]`. - Repeat(ExprRepeat), - - /// A `return`, with an optional value to be returned. - Return(ExprReturn), - - /// A struct literal expression: `Point { x: 1, y: 1 }`. - /// - /// The `rest` provides the value of the remaining fields as in `S { a: - /// 1, b: 1, ..rest }`. 
- Struct(ExprStruct), - - /// A try-expression: `expr?`. - Try(ExprTry), - - /// A try block: `try { ... }`. - TryBlock(ExprTryBlock), - - /// A tuple expression: `(a, b, c, d)`. - Tuple(ExprTuple), - - /// A type ascription expression: `foo: f64`. - Type(ExprType), - - /// A unary operation: `!x`, `*x`. - Unary(ExprUnary), - - /// An unsafe block: `unsafe { ... }`. - Unsafe(ExprUnsafe), - - /// Tokens in expression position not interpreted by Syn. - Verbatim(TokenStream), - - /// A while loop: `while expr { ... }`. - While(ExprWhile), - - /// A yield expression: `yield expr`. - Yield(ExprYield), - - // The following is the only supported idiom for exhaustive matching of - // this enum. - // - // match expr { - // Expr::Array(e) => {...} - // Expr::Assign(e) => {...} - // ... - // Expr::Yield(e) => {...} - // - // #[cfg(test)] - // Expr::__TestExhaustive(_) => unimplemented!(), - // #[cfg(not(test))] - // _ => { /* some sane fallback */ } - // } - // - // This way we fail your tests but don't break your library when adding - // a variant. You will be notified by a test failure when a variant is - // added, so that you can add code to handle it, but your library will - // continue to compile and work for downstream users in the interim. - // - // Once `deny(reachable)` is available in rustc, Expr will be - // reimplemented as a non_exhaustive enum. - // https://github.com/rust-lang/rust/issues/44109#issuecomment-521781237 - #[doc(hidden)] - __TestExhaustive(crate::private), - } -} - -ast_struct! { - /// A slice literal expression: `[a, b, c, d]`. - /// - /// *This type is available only if Syn is built with the `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] - pub struct ExprArray #full { - pub attrs: Vec, - pub bracket_token: token::Bracket, - pub elems: Punctuated, - } -} - -ast_struct! { - /// An assignment expression: `a = compute()`. - /// - /// *This type is available only if Syn is built with the `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] - pub struct ExprAssign #full { - pub attrs: Vec, - pub left: Box, - pub eq_token: Token![=], - pub right: Box, - } -} - -ast_struct! { - /// A compound assignment expression: `counter += 1`. - /// - /// *This type is available only if Syn is built with the `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] - pub struct ExprAssignOp #full { - pub attrs: Vec, - pub left: Box, - pub op: BinOp, - pub right: Box, - } -} - -ast_struct! { - /// An async block: `async { ... }`. - /// - /// *This type is available only if Syn is built with the `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] - pub struct ExprAsync #full { - pub attrs: Vec, - pub async_token: Token![async], - pub capture: Option, - pub block: Block, - } -} - -ast_struct! { - /// An await expression: `fut.await`. - /// - /// *This type is available only if Syn is built with the `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] - pub struct ExprAwait #full { - pub attrs: Vec, - pub base: Box, - pub dot_token: Token![.], - pub await_token: token::Await, - } -} - -ast_struct! { - /// A binary operation: `a + b`, `a * b`. - /// - /// *This type is available only if Syn is built with the `"derive"` or - /// `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] - pub struct ExprBinary { - pub attrs: Vec, - pub left: Box, - pub op: BinOp, - pub right: Box, - } -} - -ast_struct! { - /// A blocked scope: `{ ... }`. 
- /// - /// *This type is available only if Syn is built with the `"full"` feature.* - #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] - pub struct ExprBlock #full { - pub attrs: Vec, - pub label: Option
-Click to show Cargo.toml.
-Run this code in the playground.
-
-```toml
-[dependencies]
-
-# The core APIs, including the Serialize and Deserialize traits. Always
-# required when using Serde. The "derive" feature is only required when
-# using #[derive(Serialize, Deserialize)] to make Serde work with structs
-# and enums defined in your crate.
-serde = { version = "1.0", features = ["derive"] }
-
-# Each data format lives in its own crate; the sample code below uses JSON
-# but you may be using a different one.
-serde_json = "1.0"
-```
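The comments above describe the split between the core `serde` crate (with its `derive` feature) and a per-format crate such as `serde_json`. A minimal sketch of the round trip that setup enables, assuming exactly the two dependencies listed; the `Point` type and values are illustrative only.

```rust
use serde::{Deserialize, Serialize};

// The `derive` feature of serde provides these derive macros.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct Point {
    x: i32,
    y: i32,
}

fn main() -> Result<(), serde_json::Error> {
    let point = Point { x: 1, y: 2 };

    // The format crate (serde_json here) performs the actual encoding.
    let json = serde_json::to_string(&point)?;
    assert_eq!(json, r#"{"x":1,"y":2}"#);

    // ...and decodes back into the strongly typed struct.
    let back: Point = serde_json::from_str(&json)?;
    assert_eq!(back, point);
    Ok(())
}
```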