refine code (PaddlePaddle#806)
HydrogenSulfate authored Mar 15, 2024
1 parent 3b1d05d commit bee3a10
Showing 3 changed files with 41 additions and 4 deletions.
26 changes: 26 additions & 0 deletions ppsci/utils/misc.py
@@ -210,6 +210,11 @@ class Timer(ContextDecorator):
... w = sum(range(0, 10))
>>> func() # doctest: +SKIP
>>> timer = misc.Timer("cost_of_func", auto_print=False)
>>> timer.start()
>>> func()
>>> timer.end()
>>> print(f"time cost of 'cost_of_func' is {timer.interval:.2f}")
"""

interval: float # Time cost for code within Timer context
@@ -220,10 +225,31 @@ def __init__(self, name: str = "Timer", auto_print: bool = True):
self.auto_print = auto_print

def __enter__(self):
paddle.device.synchronize()
self.start_time = time.perf_counter()
return self

def __exit__(self, type, value, traceback):
paddle.device.synchronize()
self.end_time = time.perf_counter()
self.interval = self.end_time - self.start_time
if self.auto_print:
logger.message(f"{self.name}.time_cost = {self.interval:.2f} s")

def start(self, name: str = "Timer"):
"""Push a new timer context.
Args:
name (str, optional): Name of code block to be clocked. Defaults to "Timer".
"""
paddle.device.synchronize()
self.start_time = time.perf_counter()

def end(self):
"""
End current timer context and print time cost.
"""
paddle.device.synchronize()
self.end_time = time.perf_counter()
self.interval = self.end_time - self.start_time
if self.auto_print:
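The paddle.device.synchronize() calls added above make the device finish any queued kernels before each timestamp is taken, so the measured interval reflects actual execution time rather than asynchronous kernel launches. Below is a minimal usage sketch of the refined Timer (an illustration, not part of the commit), assuming ppsci and paddle are importable; it mirrors the docstring example in this diff:

from ppsci.utils import misc

# 1) As a context manager: auto_print defaults to True, so the elapsed
#    time is reported through ppsci's logger when the block exits.
with misc.Timer("sum_block"):
    w = sum(range(0, 10))

# 2) Manual clocking with the new start()/end() methods; read the elapsed
#    time from `interval` because auto_print is disabled here.
timer = misc.Timer("cost_of_func", auto_print=False)
timer.start()
w = sum(range(0, 10))
timer.end()
print(f"time cost of 'cost_of_func' is {timer.interval:.2f}")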
13 changes: 12 additions & 1 deletion ppsci/utils/save_load.py
@@ -146,7 +146,18 @@ def load_checkpoint(
equation_dict = paddle.load(f"{path}.pdeqn")

# set state dict
- model.set_state_dict(param_dict)
+ missing_keys, unexpected_keys = model.set_state_dict(param_dict)
if missing_keys:
logger.warning(
f"There are missing keys when loading checkpoint: {missing_keys}, "
"and corresponding parameters will be initialized by default."
)
if unexpected_keys:
logger.warning(
f"There are redundant keys: {unexpected_keys}, "
"and corresponding weights will be ignored."
)

optimizer.set_state_dict(optim_dict)
if grad_scaler is not None:
grad_scaler.load_state_dict(scaler_dict)
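load_checkpoint now keeps the (missing_keys, unexpected_keys) pair returned by set_state_dict instead of discarding it, and warns whenever the checkpoint and the model disagree on parameter names. The standalone sketch below shows the same pattern on a bare paddle.nn.Layer; the file name is made up for the example, and set_state_dict is assumed to return the two key lists exactly as the diff above relies on:

import paddle

model = paddle.nn.Linear(4, 4)

# Save and reload a checkpoint so the example is self-contained; in
# PaddleScience this dict would come from paddle.load(f"{path}.pdparams").
paddle.save(model.state_dict(), "demo_checkpoint.pdparams")
param_dict = paddle.load("demo_checkpoint.pdparams")

# missing_keys: parameters the model expects but the checkpoint lacks.
# unexpected_keys: entries in the checkpoint that no model parameter uses.
missing_keys, unexpected_keys = model.set_state_dict(param_dict)
if missing_keys:
    print(f"missing keys, parameters keep their default initialization: {missing_keys}")
if unexpected_keys:
    print(f"unexpected keys, corresponding weights are ignored: {unexpected_keys}")

Both lists are empty when the checkpoint matches the model, so the warnings in load_checkpoint only fire on a genuine mismatch, for example after renaming a layer or loading a checkpoint from a different architecture.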
6 changes: 3 additions & 3 deletions ppsci/utils/symbolic.py
@@ -101,7 +101,7 @@
sp.sign: paddle.sign,
sp.ceiling: paddle.ceil,
sp.floor: paddle.floor,
- # NOTE: sp.Add and sp.Mul is not included here for un-alignment with sympy
+ # NOTE: sp.Add and sp.Mul is not included here for un-alignment with paddle
# and are implemented manually in 'OperatorNode._add_operator_func' and
# 'OperatorNode._mul_operator_func'
}
@@ -711,15 +711,15 @@ def lambdify(
such as 'momentum_x'. Defaults to None.
create_graph (bool, optional): Whether to create the gradient graphs of
the computing process. When it is True, higher order derivatives are
- supported to compute; when it is False, the gradient graphs of the
+ supported to compute. When it is False, the gradient graphs of the
computing process would be discarded. Defaults to True.
retain_graph (Optional[bool]): Whether to retain the forward graph which
is used to calculate the gradient. When it is True, the graph would
be retained, in which way users can calculate backward twice for the
same graph. When it is False, the graph would be freed. Defaults to None,
which means it is equal to `create_graph`.
fuse_derivative (bool, optional): Whether to fuse the derivative nodes.
- for example, if `expr` is 'Derivative(u, x) + Derivative(u, y)'
+ For example, if `expr` is 'Derivative(u, x) + Derivative(u, y)'
It will compute grad(u, x) + grad(u, y) if fuse_derivative=False,
else will compute sum(grad(u, [x, y])) if fuse_derivative=True as is more
efficient in backward-graph. Defaults to False, as it is experimental so not
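The docstring edits above only touch wording, but the fuse_derivative flag they describe changes how lambdify assembles the graph: with fusion disabled every Derivative term becomes its own gradient call, while with fusion enabled derivatives of the same function with respect to several inputs are batched into one call, which keeps the backward graph smaller. A hedged sketch follows; it assumes lambdify is exported as ppsci.lambdify and that ppsci.arch.MLP accepts the arguments shown, both of which are assumptions made for illustration:

import sympy as sp
import ppsci

x, y = sp.symbols("x y")
u = sp.Function("u")(x, y)
# The expression from the docstring: Derivative(u, x) + Derivative(u, y)
expr = sp.Derivative(u, x) + sp.Derivative(u, y)

# A small network mapping (x, y) to u; layer sizes are arbitrary here.
model = ppsci.arch.MLP(("x", "y"), ("u",), 3, 16)

# Separate derivative nodes: evaluates grad(u, x) + grad(u, y).
func_plain = ppsci.lambdify(expr, model, fuse_derivative=False)

# Fused derivative node: evaluates sum(grad(u, [x, y])) in one pass.
func_fused = ppsci.lambdify(expr, model, fuse_derivative=True)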
