Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[WIP] separation parameter #3279

Open
wants to merge 1 commit into
base: develop
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
42 changes: 26 additions & 16 deletions ppcls/engine/engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
from __future__ import print_function

import os
import gc
import shutil
import copy
import platform
Expand Down Expand Up @@ -316,6 +317,8 @@ def train(self):
self.global_step = 0
uniform_output_enabled = self.config['Global'].get(
"uniform_output_enabled", False)
export_during_train = self.config['Global'].get("export_during_train",
False)

if self.config.Global.checkpoints is not None:
metric_info = init_model(self.config.Global, self.model,
Expand Down Expand Up @@ -403,14 +406,17 @@ def train(self):
prefix=prefix,
loss=self.train_loss_func,
save_student_model=True)
if uniform_output_enabled:
if export_during_train:
save_path = os.path.join(self.output_dir, prefix,
"inference")
self.export(save_path, uniform_output_enabled)
self.export(save_path, export_during_train)
gc.collect()
if self.ema:
ema_save_path = os.path.join(
self.output_dir, prefix, "inference_ema")
self.export(ema_save_path, uniform_output_enabled)
self.export(ema_save_path, export_during_train)
gc.collect()
if uniform_output_enabled:
update_train_results(
self.config, prefix, metric_info, ema=self.ema)
save_load.save_model_info(metric_info, self.output_dir,
Expand All @@ -432,14 +438,17 @@ def train(self):
model_name=self.config["Arch"]["name"],
prefix=prefix,
loss=self.train_loss_func)
if uniform_output_enabled:
if export_during_train:
save_path = os.path.join(self.output_dir, prefix,
"inference")
self.export(save_path, uniform_output_enabled)
self.export(save_path, export_during_train)
gc.collect()
if self.ema:
ema_save_path = os.path.join(self.output_dir, prefix,
"inference_ema")
self.export(ema_save_path, uniform_output_enabled)
self.export(ema_save_path, export_during_train)
gc.collect()
if uniform_output_enabled:
update_train_results(
self.config,
prefix,
Expand All @@ -461,15 +470,18 @@ def train(self):
model_name=self.config["Arch"]["name"],
prefix=prefix,
loss=self.train_loss_func)
if uniform_output_enabled:
if export_during_train:
save_path = os.path.join(self.output_dir, prefix, "inference")
self.export(save_path, uniform_output_enabled)
self.export(save_path, export_during_train)
gc.collect()
if self.ema:
ema_save_path = os.path.join(self.output_dir, prefix,
"inference_ema")
self.export(ema_save_path, uniform_output_enabled)
save_load.save_model_info(metric_info, self.output_dir, prefix)
self.export(ema_save_path, export_during_train)
gc.collect()
self.model.train()
if uniform_output_enabled:
save_load.save_model_info(metric_info, self.output_dir, prefix)

if self.vdl_writer is not None:
self.vdl_writer.close()
Expand Down Expand Up @@ -537,11 +549,9 @@ def infer(self):
save_predict_result(save_path, results)
return results

def export(self,
save_path=None,
uniform_output_enabled=False,
def export(self, save_path=None, export_during_train=False,
ema_module=None):
assert self.mode == "export" or uniform_output_enabled
assert self.mode == "export" or export_during_train
if paddle.distributed.get_rank() != 0:
return
use_multilabel = self.config["Global"].get(
Expand All @@ -555,7 +565,7 @@ def export(self,
model = ExportModel(self.config["Arch"], model
if not ema_module else ema_module, use_multilabel)
if self.config["Global"][
"pretrained_model"] is not None and not uniform_output_enabled:
"pretrained_model"] is not None and not export_during_train:
load_dygraph_pretrain(model.base_model,
self.config["Global"]["pretrained_model"])
model.eval()
Expand Down Expand Up @@ -584,7 +594,7 @@ def export(self,
else:
paddle.jit.save(model, save_path)
if self.config["Global"].get("export_for_fd",
False) or uniform_output_enabled:
False) or export_during_train:
dst_path = os.path.join(os.path.dirname(save_path), 'inference.yml')
dump_infer_config(self.config, dst_path)
logger.info(
Expand Down