25 changes: 19 additions & 6 deletions deepmd/dpmodel/descriptor/dpa3.py
@@ -271,6 +271,9 @@ class DescrptDPA3(NativeOP, BaseDescriptor):
Whether to use electronic configuration type embedding.
use_tebd_bias : bool, Optional
Whether to use bias in the type embedding layer.
use_loc_mapping : bool, Optional
Whether to use local atom index mapping in training or non-parallel inference.
When True, local indexing and mapping are applied to neighbor lists and embeddings during descriptor computation.
type_map : list[str], Optional
A list of strings. Give the name to each type of atoms.
"""
@@ -290,6 +293,7 @@ def __init__(
seed: Optional[Union[int, list[int]]] = None,
use_econf_tebd: bool = False,
use_tebd_bias: bool = False,
use_loc_mapping: bool = True,
type_map: Optional[list[str]] = None,
) -> None:
super().__init__()
@@ -335,6 +339,7 @@ def init_subclass_params(sub_data, sub_class):
use_exp_switch=self.repflow_args.use_exp_switch,
use_dynamic_sel=self.repflow_args.use_dynamic_sel,
sel_reduce_factor=self.repflow_args.sel_reduce_factor,
use_loc_mapping=use_loc_mapping,
exclude_types=exclude_types,
env_protection=env_protection,
precision=precision,
@@ -343,6 +348,7 @@ def init_subclass_params(sub_data, sub_class):

self.use_econf_tebd = use_econf_tebd
self.use_tebd_bias = use_tebd_bias
self.use_loc_mapping = use_loc_mapping
self.type_map = type_map
self.tebd_dim = self.repflow_args.n_dim
self.type_embedding = TypeEmbedNet(
@@ -541,10 +547,16 @@ def call(
nall = xp.reshape(coord_ext, (nframes, -1)).shape[1] // 3

type_embedding = self.type_embedding.call()
node_ebd_ext = xp.reshape(
xp.take(type_embedding, xp.reshape(atype_ext, [-1]), axis=0),
(nframes, nall, self.tebd_dim),
)
if self.use_loc_mapping:
node_ebd_ext = xp.reshape(
xp.take(type_embedding, xp.reshape(atype_ext[:, :nloc], [-1]), axis=0),
(nframes, nloc, self.tebd_dim),
)
else:
node_ebd_ext = xp.reshape(
xp.take(type_embedding, xp.reshape(atype_ext, [-1]), axis=0),
(nframes, nall, self.tebd_dim),
)
node_ebd_inp = node_ebd_ext[:, :nloc, :]
# repflows
node_ebd, edge_ebd, h2, rot_mat, sw = self.repflows(
@@ -563,7 +575,7 @@ def serialize(self) -> dict:
data = {
"@class": "Descriptor",
"type": "dpa3",
"@version": 1,
"@version": 2,
"ntypes": self.ntypes,
"repflow_args": self.repflow_args.serialize(),
"concat_output_tebd": self.concat_output_tebd,
@@ -574,6 +586,7 @@ def serialize(self) -> dict:
"trainable": self.trainable,
"use_econf_tebd": self.use_econf_tebd,
"use_tebd_bias": self.use_tebd_bias,
"use_loc_mapping": self.use_loc_mapping,
"type_map": self.type_map,
"type_embedding": self.type_embedding.serialize(),
}
@@ -598,7 +611,7 @@ def serialize(self) -> dict:
def deserialize(cls, data: dict) -> "DescrptDPA3":
data = data.copy()
version = data.pop("@version")
check_version_compatibility(version, 1, 1)
check_version_compatibility(version, 2, 1)
data.pop("@class")
data.pop("type")
repflow_variable = data.pop("repflow_variable").copy()
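Aside (not part of the PR): a minimal NumPy sketch of the `use_loc_mapping` branch added to `call()` above. It checks that gathering the type embedding for only the first `nloc` entries of `atype_ext` gives the same rows as gathering over all `nall` extended atoms and then slicing, which is why the local-only gather is safe in the non-parallel case. All sizes below are made up.

```python
import numpy as np

nframes, nloc, nall, ntypes, tebd_dim = 2, 3, 5, 4, 8
rng = np.random.default_rng(0)
type_embedding = rng.normal(size=(ntypes, tebd_dim))
atype_ext = rng.integers(0, ntypes, size=(nframes, nall))

# use_loc_mapping=True: gather only the local atoms
loc = np.take(type_embedding, atype_ext[:, :nloc].reshape(-1), axis=0)
loc = loc.reshape(nframes, nloc, tebd_dim)

# use_loc_mapping=False: gather all extended atoms, then slice
ext = np.take(type_embedding, atype_ext.reshape(-1), axis=0)
ext = ext.reshape(nframes, nall, tebd_dim)

assert np.allclose(loc, ext[:, :nloc, :])  # identical local embeddings
```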
26 changes: 24 additions & 2 deletions deepmd/dpmodel/descriptor/repflows.py
@@ -145,6 +145,9 @@ class DescrptBlockRepflows(NativeOP, DescriptorBlock):
In the dynamic selection case, neighbor-scale normalization will use `e_sel / sel_reduce_factor`
or `a_sel / sel_reduce_factor` instead of the raw `e_sel` or `a_sel` values,
accommodating larger selection numbers.
use_loc_mapping : bool, optional
Whether to use local atom index mapping in training or non-parallel inference.
When True, local indexing and mapping are applied to neighbor lists and embeddings during descriptor computation.
ntypes : int
Number of element types
activation_function : str, optional
@@ -196,6 +199,7 @@ def __init__(
use_exp_switch: bool = False,
use_dynamic_sel: bool = False,
sel_reduce_factor: float = 10.0,
use_loc_mapping: bool = True,
seed: Optional[Union[int, list[int]]] = None,
) -> None:
super().__init__()
@@ -229,6 +233,7 @@ def __init__(
self.smooth_edge_update = smooth_edge_update
self.use_exp_switch = use_exp_switch
self.use_dynamic_sel = use_dynamic_sel
self.use_loc_mapping = use_loc_mapping
self.sel_reduce_factor = sel_reduce_factor
if self.use_dynamic_sel and not self.smooth_edge_update:
raise NotImplementedError(
@@ -527,10 +532,22 @@ def call(
cosine_ij, (nframes, nloc, self.a_sel, self.a_sel, 1)
) / (xp.pi**0.5)

if self.use_loc_mapping:
assert mapping is not None
flat_map = xp.reshape(mapping, (nframes, -1))
nlist = xp.reshape(
xp_take_along_axis(flat_map, xp.reshape(nlist, (nframes, -1)), axis=1),
nlist.shape,
)

if self.use_dynamic_sel:
# get graph index
edge_index, angle_index = get_graph_index(
nlist, nlist_mask, a_nlist_mask, nall
nlist,
nlist_mask,
a_nlist_mask,
nall,
use_loc_mapping=self.use_loc_mapping,
)
# flat all the tensors
# n_edge x 1
@@ -561,7 +578,11 @@ def call(
for idx, ll in enumerate(self.layers):
# node_ebd: nb x nloc x n_dim
# node_ebd_ext: nb x nall x n_dim
node_ebd_ext = xp_take_along_axis(node_ebd, mapping, axis=1)
node_ebd_ext = (
node_ebd
if self.use_loc_mapping
else xp_take_along_axis(node_ebd, mapping, axis=1)
)
node_ebd, edge_ebd, angle_ebd = ll.call(
node_ebd_ext,
edge_ebd,
@@ -667,6 +688,7 @@ def serialize(self):
"smooth_edge_update": self.smooth_edge_update,
"use_dynamic_sel": self.use_dynamic_sel,
"sel_reduce_factor": self.sel_reduce_factor,
"use_loc_mapping": self.use_loc_mapping,
# variables
"edge_embd": self.edge_embd.serialize(),
"angle_embd": self.angle_embd.serialize(),
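Aside (not part of the PR): a minimal NumPy sketch of the neighbor-list remapping added above. `mapping` sends every extended-atom index back to its real local atom, so indexing the flattened mapping with `nlist` converts extended indices into local ones. Padding and masking (`nlist_mask`) are omitted, and the toy sizes are made up.

```python
import numpy as np

nframes, nloc, nnei = 1, 3, 4
nall = 6  # 3 local atoms + 3 periodic images
# extended index -> local index (images 3, 4, 5 are copies of atoms 0, 1, 2)
mapping = np.array([[0, 1, 2, 0, 1, 2]])
# neighbor list in extended indexing: nframes x nloc x nnei
nlist = np.array([[[1, 3, 4, 5],
                   [0, 2, 3, 5],
                   [0, 1, 4, 5]]])

flat_map = mapping.reshape(nframes, -1)
nlist_loc = np.take_along_axis(
    flat_map, nlist.reshape(nframes, -1), axis=1
).reshape(nlist.shape)

print(nlist_loc)
# [[[1 0 1 2]
#   [0 2 0 2]
#   [0 1 1 2]]]
```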
8 changes: 7 additions & 1 deletion deepmd/dpmodel/utils/network.py
@@ -1006,6 +1006,7 @@ def get_graph_index(
nlist_mask: np.ndarray,
a_nlist_mask: np.ndarray,
nall: int,
use_loc_mapping: bool = True,
):
"""
Get the index mapping for edge graph and angle graph, ready in `aggregate` or `index_select`.
@@ -1020,6 +1021,9 @@
Masks of the neighbor list for angle. real nei 1 otherwise 0
nall
The number of extended atoms.
use_loc_mapping
Whether to use local atom index mapping in training or non-parallel inference.
When True, local indexing and mapping are applied to neighbor lists and embeddings during descriptor computation.

Returns
-------
@@ -1060,7 +1064,9 @@
n2e_index = n2e_index[xp.astype(nlist_mask, xp.bool)]

# node_ext(j) to edge(ij) index_select
frame_shift = xp.arange(nf, dtype=nlist.dtype) * nall
frame_shift = xp.arange(nf, dtype=nlist.dtype) * (
nall if not use_loc_mapping else nloc
)
shifted_nlist = nlist + frame_shift[:, xp.newaxis, xp.newaxis]
# n_edge
n_ext2e_index = shifted_nlist[xp.astype(nlist_mask, xp.bool)]
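Aside (not part of the PR): a minimal NumPy sketch of the frame-offset change in `get_graph_index`. When node embeddings stay in local indexing, each frame contributes `nloc` rows to the flattened node tensor, so the per-frame shift must be `nloc` rather than `nall`; otherwise edge indices of later frames would point past their frame's block. Numbers are made up.

```python
import numpy as np

nf, nloc, nall = 2, 3, 5
# neighbor list already in local indexing: nf x nloc x nnei
nlist_loc = np.array([[[1, 2], [0, 2], [0, 1]],
                      [[2, 1], [2, 0], [1, 0]]])

use_loc_mapping = True
stride = nloc if use_loc_mapping else nall
frame_shift = np.arange(nf, dtype=nlist_loc.dtype) * stride
shifted_nlist = nlist_loc + frame_shift[:, None, None]

# frame 0 indexes rows 0..nloc-1, frame 1 indexes rows nloc..2*nloc-1
print(shifted_nlist[1])
# [[5 4]
#  [5 3]
#  [4 3]]
```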
11 changes: 9 additions & 2 deletions deepmd/pd/model/descriptor/dpa3.py
@@ -89,6 +89,9 @@ class DescrptDPA3(BaseDescriptor, paddle.nn.Layer):
Whether to use electronic configuration type embedding.
use_tebd_bias : bool, Optional
Whether to use bias in the type embedding layer.
use_loc_mapping : bool, Optional
Whether to use local atom index mapping in training or non-parallel inference.
Not supported yet in Paddle.
type_map : list[str], Optional
A list of strings. Give the name to each type of atoms.
"""
@@ -108,6 +111,7 @@ def __init__(
seed: Optional[Union[int, list[int]]] = None,
use_econf_tebd: bool = False,
use_tebd_bias: bool = False,
use_loc_mapping: bool = False,
type_map: Optional[list[str]] = None,
) -> None:
super().__init__()
@@ -152,6 +156,7 @@ def init_subclass_params(sub_data, sub_class):
smooth_edge_update=self.repflow_args.smooth_edge_update,
use_dynamic_sel=self.repflow_args.use_dynamic_sel,
sel_reduce_factor=self.repflow_args.sel_reduce_factor,
use_loc_mapping=use_loc_mapping,
exclude_types=exclude_types,
env_protection=env_protection,
precision=precision,
@@ -160,6 +165,7 @@ def init_subclass_params(sub_data, sub_class):

self.use_econf_tebd = use_econf_tebd
self.use_tebd_bias = use_tebd_bias
self.use_loc_mapping = use_loc_mapping
self.type_map = type_map
self.tebd_dim = self.repflow_args.n_dim
self.type_embedding = TypeEmbedNet(
@@ -370,7 +376,7 @@ def serialize(self) -> dict:
data = {
"@class": "Descriptor",
"type": "dpa3",
"@version": 1,
"@version": 2,
"ntypes": self.ntypes,
"repflow_args": self.repflow_args.serialize(),
"concat_output_tebd": self.concat_output_tebd,
@@ -381,6 +387,7 @@ def serialize(self) -> dict:
"trainable": self.trainable,
"use_econf_tebd": self.use_econf_tebd,
"use_tebd_bias": self.use_tebd_bias,
"use_loc_mapping": self.use_loc_mapping,
"type_map": self.type_map,
"type_embedding": self.type_embedding.embedding.serialize(),
}
@@ -405,7 +412,7 @@ def serialize(self) -> dict:
def deserialize(cls, data: dict) -> "DescrptDPA3":
data = data.copy()
version = data.pop("@version")
check_version_compatibility(version, 1, 1)
check_version_compatibility(version, 2, 1)
data.pop("@class")
data.pop("type")
repflow_variable = data.pop("repflow_variable").copy()
6 changes: 6 additions & 0 deletions deepmd/pd/model/descriptor/repflows.py
@@ -112,6 +112,9 @@ class DescrptBlockRepflows(DescriptorBlock):
optim_update : bool, optional
Whether to enable the optimized update method.
Uses a more efficient process when enabled. Defaults to True
use_loc_mapping : bool, Optional
Whether to use local atom index mapping in training or non-parallel inference.
Not supported yet in Paddle.
ntypes : int
Number of element types
activation_function : str, optional
@@ -161,6 +164,7 @@ def __init__(
smooth_edge_update: bool = False,
use_dynamic_sel: bool = False,
sel_reduce_factor: float = 10.0,
use_loc_mapping: bool = False,
optim_update: bool = True,
seed: Optional[Union[int, list[int]]] = None,
) -> None:
@@ -196,6 +200,8 @@ def __init__(
self.use_dynamic_sel = use_dynamic_sel # not supported yet
self.sel_reduce_factor = sel_reduce_factor
assert not self.use_dynamic_sel, "Dynamic selection is not supported yet."
self.use_loc_mapping = use_loc_mapping
assert not self.use_loc_mapping, "Local mapping is not supported yet."

self.n_dim = n_dim
self.e_dim = e_dim
17 changes: 14 additions & 3 deletions deepmd/pt/model/descriptor/dpa3.py
@@ -89,6 +89,9 @@ class DescrptDPA3(BaseDescriptor, torch.nn.Module):
Whether to use electronic configuration type embedding.
use_tebd_bias : bool, Optional
Whether to use bias in the type embedding layer.
use_loc_mapping : bool, Optional
Whether to use local atom index mapping in training or non-parallel inference.
When True, local indexing and mapping are applied to neighbor lists and embeddings during descriptor computation.
type_map : list[str], Optional
A list of strings. Give the name to each type of atoms.
"""
@@ -108,6 +111,7 @@ def __init__(
seed: Optional[Union[int, list[int]]] = None,
use_econf_tebd: bool = False,
use_tebd_bias: bool = False,
use_loc_mapping: bool = True,
type_map: Optional[list[str]] = None,
) -> None:
super().__init__()
@@ -153,13 +157,15 @@ def init_subclass_params(sub_data, sub_class):
use_exp_switch=self.repflow_args.use_exp_switch,
use_dynamic_sel=self.repflow_args.use_dynamic_sel,
sel_reduce_factor=self.repflow_args.sel_reduce_factor,
use_loc_mapping=use_loc_mapping,
exclude_types=exclude_types,
env_protection=env_protection,
precision=precision,
seed=child_seed(seed, 1),
)

self.use_econf_tebd = use_econf_tebd
self.use_loc_mapping = use_loc_mapping
self.use_tebd_bias = use_tebd_bias
self.type_map = type_map
self.tebd_dim = self.repflow_args.n_dim
@@ -365,7 +371,7 @@ def serialize(self) -> dict:
data = {
"@class": "Descriptor",
"type": "dpa3",
"@version": 1,
"@version": 2,
"ntypes": self.ntypes,
"repflow_args": self.repflow_args.serialize(),
"concat_output_tebd": self.concat_output_tebd,
@@ -376,6 +382,7 @@ def serialize(self) -> dict:
"trainable": self.trainable,
"use_econf_tebd": self.use_econf_tebd,
"use_tebd_bias": self.use_tebd_bias,
"use_loc_mapping": self.use_loc_mapping,
"type_map": self.type_map,
"type_embedding": self.type_embedding.embedding.serialize(),
}
@@ -400,7 +407,7 @@ def serialize(self) -> dict:
def deserialize(cls, data: dict) -> "DescrptDPA3":
data = data.copy()
version = data.pop("@version")
check_version_compatibility(version, 1, 1)
check_version_compatibility(version, 2, 1)
data.pop("@class")
data.pop("type")
repflow_variable = data.pop("repflow_variable").copy()
@@ -469,12 +476,16 @@ def forward(
The smooth switch function. shape: nf x nloc x nnei

"""
parallel_mode = comm_dict is not None
# cast the input to internal precision
extended_coord = extended_coord.to(dtype=self.prec)
nframes, nloc, nnei = nlist.shape
nall = extended_coord.view(nframes, -1).shape[1] // 3

node_ebd_ext = self.type_embedding(extended_atype)
if not parallel_mode and self.use_loc_mapping:
node_ebd_ext = self.type_embedding(extended_atype[:, :nloc])
else:
node_ebd_ext = self.type_embedding(extended_atype)
node_ebd_inp = node_ebd_ext[:, :nloc, :]
# repflows
node_ebd, edge_ebd, h2, rot_mat, sw = self.repflows(
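Aside (not part of the PR): a hypothetical PyTorch sketch of the `parallel_mode` branch added to `forward()` above. Local mapping is applied only when `comm_dict` is absent (training or single-process inference); parallel inference keeps the full extended indexing. The helper function and sizes here are invented for illustration.

```python
import torch

ntypes, tebd_dim, nframes, nloc, nall = 4, 8, 2, 3, 5
type_embedding = torch.nn.Embedding(ntypes, tebd_dim)
extended_atype = torch.randint(0, ntypes, (nframes, nall))

def node_embedding(extended_atype, nloc, use_loc_mapping, comm_dict=None):
    parallel_mode = comm_dict is not None
    if not parallel_mode and use_loc_mapping:
        # training / single-process inference: embed only the nloc real atoms
        return type_embedding(extended_atype[:, :nloc])
    # parallel inference keeps the full extended (nall) indexing
    return type_embedding(extended_atype)

assert node_embedding(extended_atype, nloc, True).shape == (nframes, nloc, tebd_dim)
assert node_embedding(extended_atype, nloc, True, comm_dict={}).shape == (nframes, nall, tebd_dim)
```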
2 changes: 1 addition & 1 deletion deepmd/pt/model/descriptor/repflow_layer.py
@@ -684,7 +684,7 @@ def optim_edge_update_dynamic(

def forward(
self,
node_ebd_ext: torch.Tensor, # nf x nall x n_dim
node_ebd_ext: torch.Tensor, # nf x nall x n_dim [OR] nf x nloc x n_dim when not parallel_mode
edge_ebd: torch.Tensor, # nf x nloc x nnei x e_dim
h2: torch.Tensor, # nf x nloc x nnei x 3
angle_ebd: torch.Tensor, # nf x nloc x a_nnei x a_nnei x a_dim