Browse Source

Merge branch 'hardware-aware' of github.com:THUMNLab/AutoGL into hardware-aware

tags/v0.3.1
generall 4 years ago
parent
commit
d162f0f00e
3 changed files with 30 additions and 4 deletions
  1. +18
    -4
      autogl/module/nas/algorithm/rl.py
  2. +7
    -0
      autogl/module/nas/space/base.py
  3. +5
    -0
      autogl/module/nas/space/graph_nas_macro.py

+ 18
- 4
autogl/module/nas/algorithm/rl.py View File

@@ -523,6 +523,8 @@ class GraphNasRL(BaseNAS):
model_wd=5e-4,
topk=5,
disable_progress=False,
param_size_weight=None,
param_size_limit=None,
):
super().__init__(device)
self.device = device
@@ -541,6 +543,9 @@ class GraphNasRL(BaseNAS):
self.hist = []
self.topk = topk
self.disable_progress = disable_progress
# TODO: new a class to describe the hardware-aware method
self.param_size_weight = param_size_weight
self.param_size_limit = param_size_limit

def search(self, space: BaseSpace, dset, estimator):
self.model = space
@@ -628,10 +633,19 @@ class GraphNasRL(BaseNAS):
LOGGER.debug(f"{self.arch}\n{self.selection}\n{metric},{loss}")
# diff: not do reward shaping as in graphnas code
reward = metric
self.hist.append([-metric, self.selection])
if len(self.hist) > self.topk:
self.hist.sort(key=lambda x: x[0])
self.hist.pop()
# TODO: change
model_info = self.arch.model.get_model_info()
print(f"model_info: {model_info}")
if self.param_size_weight is not None:
reward -= self.param_size_weight * model_info["param"]
if (
self.param_size_limit is None
or model_info["param"] < self.param_size_limit
):
self.hist.append([-metric, self.selection])
if len(self.hist) > self.topk:
self.hist.sort(key=lambda x: x[0])
self.hist.pop()
rewards.append(reward)

if self.entropy_weight:


+ 7
- 0
autogl/module/nas/space/base.py View File

@@ -145,6 +145,9 @@ class BoxModel(BaseModel):
ret_self.to(self.device)
return ret_self

def __repr__(self) -> str:
return str(self.model.get_model_info())

@property
def model(self):
    """The wrapped model object backing this box (read-only)."""
    return getattr(self, "_model")
@@ -200,6 +203,10 @@ class BaseSpace(nn.Module):
"""
raise NotImplementedError()

def get_model_info(self):
    """Return a dict of metrics describing the instantiated model.

    Used by hardware-aware NAS algorithms to penalize or filter
    architectures (e.g. by parameter count). The base space reports
    nothing, so the default is an empty dict; concrete spaces such as
    ``GraphNet`` override this to return entries like
    ``{"param": <total number of parameters>}``.

    Returns
    -------
    dict
        Metric name to value; empty in the base implementation.
    """
    return {}

def instantiate(self):
"""
Instantiate the space, reset default key for the mutables here/


+ 5
- 0
autogl/module/nas/space/graph_nas_macro.py View File

@@ -979,3 +979,8 @@ class GraphNet(BaseSpace):
key = f"layer_{i}_fc_{bn.weight.size(0)}"
if key in param:
self.bns[i] = param[key]

def get_model_info(self):
    """Summarize this instantiated network for hardware-aware search.

    Returns
    -------
    dict
        Currently a single entry ``"param"``: the total element count
        across all tensors yielded by ``self.parameters()``.
    """
    total = 0
    for tensor in self.parameters():
        total += tensor.numel()
    return {"param": total}

Loading…
Cancel
Save