//! `AstIdMap` allows creating stable IDs for "large" syntax nodes like items
//! and macro calls.
//!
//! Specifically, it enumerates all items in a file and uses the position of
//! an item as its ID. That way, IDs don't change unless the set of items
//! itself changes.
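//!
//! A minimal usage sketch (illustrative only: `SourceFile::parse` comes from
//! `ra_syntax`, and `from_source` is crate-private, so this is not a doctest):
//!
//! ```ignore
//! use ra_syntax::{ast, AstNode, SourceFile};
//!
//! let file = SourceFile::parse("fn foo() {}").tree();
//! let map = AstIdMap::from_source(file.syntax());
//!
//! let func = file.syntax().descendants().find_map(ast::FnDef::cast).unwrap();
//! let id = map.ast_id(&func); // survives edits that keep the set of items intact
//! let ptr = map.get(id); // round-trips back to an `AstPtr<ast::FnDef>`
//! ```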
use std::{
    any::type_name,
    fmt,
    hash::{Hash, Hasher},
    marker::PhantomData,
};

use ra_arena::{Arena, Idx};
use ra_syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};

/// `FileAstId` points to an AST node within a specific file.
pub struct FileAstId<N: AstNode> {
    raw: ErasedFileAstId,
    _ty: PhantomData<fn() -> N>,
}

impl<N: AstNode> Clone for FileAstId<N> {
    fn clone(&self) -> FileAstId<N> {
        *self
    }
}
impl<N: AstNode> Copy for FileAstId<N> {}

impl<N: AstNode> PartialEq for FileAstId<N> {
    fn eq(&self, other: &Self) -> bool {
        self.raw == other.raw
    }
}
impl<N: AstNode> Eq for FileAstId<N> {}
impl<N: AstNode> Hash for FileAstId<N> {
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        self.raw.hash(hasher);
    }
}

impl<N: AstNode> fmt::Debug for FileAstId<N> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "FileAstId::<{}>({})", type_name::<N>(), self.raw.into_raw())
    }
}

impl<N: AstNode> FileAstId<N> {
    // Can't make this a From implementation because of coherence
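    //
    // An illustrative use, assuming `ast::FnDef: Into<ast::ModuleItem>` (an
    // impl generated by `ra_syntax`) and some hypothetical
    // `fn_id: FileAstId<ast::FnDef>`:
    //
    //     let item_id: FileAstId<ast::ModuleItem> = fn_id.upcast();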
    pub fn upcast<M: AstNode>(self) -> FileAstId<M>
    where
        N: Into<M>,
    {
        FileAstId { raw: self.raw, _ty: PhantomData }
    }
}

type ErasedFileAstId = Idx<SyntaxNodePtr>;

/// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back.
#[derive(Debug, PartialEq, Eq, Default)]
pub struct AstIdMap {
    arena: Arena<SyntaxNodePtr>,
}

impl AstIdMap {
    pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
        assert!(node.parent().is_none());
        let mut res = AstIdMap { arena: Arena::default() };
        // By walking the tree in breadth-first order we make sure that parents
        // get lower ids than children. That is, adding a new child does not
        // change the parent's id. This means that, say, adding a new function to
        // a trait does not change the ids of top-level items, which helps caching.
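        //
        // For example (illustrative), given `fn a() {} mod m { fn b() {} }`, the
        // breadth-first walk allocates ids for `a` and `m` before `m::b`, so
        // adding another item inside `m` leaves the ids of `a` and `m` untouched.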
        bfs(node, |it| {
            if let Some(module_item) = ast::ModuleItem::cast(it) {
                res.alloc(module_item.syntax());
            }
        });
        res
    }

    pub fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> {
        let raw = self.erased_ast_id(item.syntax());
        FileAstId { raw, _ty: PhantomData }
    }

    fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId {
        // Linear scan over the arena; it holds one entry per item in the file.
        let ptr = SyntaxNodePtr::new(item);
        match self.arena.iter().find(|(_id, i)| **i == ptr) {
            Some((it, _)) => it,
            None => panic!(
                "Can't find {:?} in AstIdMap:\n{:?}",
                item,
                self.arena.iter().map(|(_id, i)| i).collect::<Vec<_>>(),
            ),
        }
    }

    pub fn get<N: AstNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
        self.arena[id.raw].clone().cast::<N>().unwrap()
    }

    fn alloc(&mut self, item: &SyntaxNode) -> ErasedFileAstId {
        self.arena.alloc(SyntaxNodePtr::new(item))
    }
}

/// Walks the subtree in bfs order, calling `f` for each node.
fn bfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode)) {
    let mut curr_layer = vec![node.clone()];
    let mut next_layer = vec![];
    while !curr_layer.is_empty() {
        curr_layer.drain(..).for_each(|node| {
            next_layer.extend(node.children());
            f(node);
        });
        std::mem::swap(&mut curr_layer, &mut next_layer);
    }
}
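
// An illustrative test sketch for the id-stability property described in the
// module docs. It assumes `ra_syntax` exposes `SourceFile::parse` and
// `ast::TraitDef`; the names below are chosen for the example only.
#[cfg(test)]
mod tests {
    use super::*;
    use ra_syntax::SourceFile;

    #[test]
    fn adding_a_trait_item_keeps_top_level_ids() {
        // The trait is a top-level item; the added `fn f();` only creates a
        // node at a deeper layer of the breadth-first walk.
        let id_of_trait = |text: &str| {
            let file = SourceFile::parse(text).tree();
            let map = AstIdMap::from_source(file.syntax());
            let trait_def =
                file.syntax().descendants().find_map(ast::TraitDef::cast).unwrap();
            map.ast_id(&trait_def).raw
        };

        assert_eq!(id_of_trait("trait T {}"), id_of_trait("trait T { fn f(); }"));
    }
}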