
Fix some parameter-related bugs

tags/v0.3.1
yanghaiqie committed 4 years ago
parent commit 266f40e41e
7 changed files with 71 additions and 11 deletions

  1. autogl/module/nas/estimator/__init__.py (+2 -1)
  2. autogl/module/nas/estimator/one_shot.py (+3 -4)
  3. autogl/module/nas/estimator/one_shot_hardware_aware.py (+58 -0)
  4. autogl/module/nas/estimator/train_scratch_hardware_aware.py (+5 -3)
  5. autogl/module/nas/space/graph_nas.py (+1 -1)
  6. autogl/module/nas/space/graph_nas_macro.py (+1 -1)
  7. autogl/module/nas/space/single_path.py (+1 -1)

autogl/module/nas/estimator/__init__.py (+2 -1)

@@ -23,6 +23,7 @@ def register_nas_estimator(name):

 from .one_shot import OneShotEstimator
 from .train_scratch import TrainEstimator
+from .one_shot_hardware_aware import OneShotEstimator_HardwareAware
 from .train_scratch_hardware_aware import TrainEstimator_HardwareAware

 def build_nas_estimator_from_name(name: str) -> BaseEstimator:
@@ -46,4 +47,4 @@ def build_nas_estimator_from_name(name: str) -> BaseEstimator:
     return NAS_ESTIMATOR_DICT[name]()


-__all__ = ["BaseEstimator", "OneShotEstimator", "TrainEstimator", "TrainEstimator_HardwareAware"]
+__all__ = ["BaseEstimator", "OneShotEstimator", "TrainEstimator", "OneShotEstimator_HardwareAware", "TrainEstimator_HardwareAware"]

autogl/module/nas/estimator/one_shot.py (+3 -4)

@@ -46,12 +46,11 @@ class OneShotEstimator(BaseEstimator):
         pred = model(dset)[mask]
         label=bk_label(dset)
         y = label[mask]
         loss = getattr(F, self.loss_f)(pred, y)
         # acc=sum(pred.max(1)[1]==y).item()/y.size(0)
         probs = F.softmax(pred, dim=1).detach().cpu().numpy()
         y = y.cpu()
-        model_info = model.get_model_info()
-        metrics = [model_info[eva] if isinstance(eva, str) else eva.evaluate(probs, y) for eva in self.evaluation]
-        #metrics = [eva.evaluate(probs, y) for eva in self.evaluation]
-        return metrics, loss
+        metrics = [eva.evaluate(probs, y) for eva in self.evaluation]
+        return metrics, loss

autogl/module/nas/estimator/one_shot_hardware_aware.py (+58 -0)

@@ -0,0 +1,58 @@
import torch.nn as nn
import torch.nn.functional as F

from . import register_nas_estimator
from ..space import BaseSpace
from .base import BaseEstimator
from ..backend import *
from ...train.evaluation import Evaluation, Acc

# @register_nas_estimator("oneshot")
# class OneShotEstimator(BaseEstimator):
#     """
#     One shot estimator.

#     Use model directly to get estimations.
#     """

#     def infer(self, model: BaseSpace, dataset, mask="train"):
#         device = next(model.parameters()).device
#         dset = dataset[0].to(device)
#         pred = model(dset)[getattr(dset, f"{mask}_mask")]
#         y = dset.y[getattr(dset, f"{mask}_mask")]
#         loss = getattr(F, self.loss_f)(pred, y)
#         # acc=sum(pred.max(1)[1]==y).item()/y.size(0)
#         probs = F.softmax(pred, dim=1).detach().cpu().numpy()
#         y = y.cpu()
#         metrics = [eva.evaluate(probs, y) for eva in self.evaluation]
#         return metrics, loss

@register_nas_estimator("oneshot_hardware")
class OneShotEstimator_HardwareAware(BaseEstimator):
    """
    One shot estimator.

    Use model directly to get estimations.
    """
    def __init__(self, loss_f="nll_loss", evaluation=[Acc()], hardware_evaluation="parameter"):
        super().__init__(loss_f, evaluation)
        self.evaluation = evaluation
        self.hardware_evaluation = hardware_evaluation

    def infer(self, model: BaseSpace, dataset, mask="train"):
        device = next(model.parameters()).device
        dset = dataset[0].to(device)
        mask = bk_mask(dset, mask)

        pred = model(dset)[mask]
        label = bk_label(dset)
        y = label[mask]

        loss = getattr(F, self.loss_f)(pred, y)
        # acc=sum(pred.max(1)[1]==y).item()/y.size(0)
        probs = F.softmax(pred, dim=1).detach().cpu().numpy()
        y = y.cpu()
        model_info = model.get_model_info()
        metrics = [eva.evaluate(probs, y) for eva in self.evaluation]
        metrics.append(model_info[self.hardware_evaluation])
        return metrics, loss
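Compared with the plain OneShotEstimator, the behavioural change is in the last three lines of infer: the value reported by the search space's get_model_info() under the chosen hardware_evaluation key (here the parameter count) is appended after the regular evaluation scores. A rough usage sketch, where space and data are placeholders for a concrete BaseSpace model and dataset rather than objects defined in this commit:

est = OneShotEstimator_HardwareAware(loss_f="nll_loss",
                                     evaluation=[Acc()],
                                     hardware_evaluation="parameter")
metrics, loss = est.infer(space, data, mask="val")
# metrics[:-1] -> scores from `evaluation` (here just accuracy)
# metrics[-1]  -> space.get_model_info()["parameter"], i.e. the parameter count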

autogl/module/nas/estimator/train_scratch_hardware_aware.py (+5 -3)

@@ -4,7 +4,7 @@ import torch.nn.functional as F
 from . import register_nas_estimator
 from ..space import BaseSpace
 from .base import BaseEstimator
-from .one_shot import OneShotEstimator
+from .one_shot_hardware_aware import OneShotEstimator_HardwareAware
 import torch

 from autogl.module.train import NodeClassificationFullTrainer, Acc
@@ -16,10 +16,12 @@ class TrainEstimator_HardwareAware(BaseEstimator):
     An estimator which trains from scratch.
     """

-    def __init__(self, loss_f="nll_loss", evaluation=[Acc(), "parameter"]):
+    def __init__(self, loss_f="nll_loss", evaluation=[Acc()], hardware_evaluation="parameter"):
         super().__init__(loss_f, evaluation)
         self.evaluation = evaluation
-        self.estimator = OneShotEstimator(self.loss_f, self.evaluation)
+        self.hardware_evaluation = hardware_evaluation
+        self.estimator = OneShotEstimator_HardwareAware(self.loss_f, self.evaluation, self.hardware_evaluation)
+
     def infer(self, model: BaseSpace, dataset, mask="train"):
         # self.trainer.model=model


autogl/module/nas/space/graph_nas.py (+1 -1)

@@ -214,5 +214,5 @@ class GraphNasNodeClassificationSpace(BaseSpace):
         # Find total parameters and trainable parameters
         total_params = count_parameters(self)
         total_trainable_params = count_parameters(self, only_trainable=True)
-        print(f'{total_params:,} total parameters.')
+        # print(f'{total_params:,} total parameters.')
         return {"parameter": total_params, "trainable_parameter": total_trainable_params}

autogl/module/nas/space/graph_nas_macro.py (+1 -1)

@@ -985,5 +985,5 @@ class GraphNet(BaseSpace):
         # Find total parameters and trainable parameters
         total_params = count_parameters(self)
         total_trainable_params = count_parameters(self, only_trainable=True)
-        print(f'{total_params:,} total parameters.')
+        # print(f'{total_params:,} total parameters.')
         return {"parameter": total_params, "trainable_parameter": total_trainable_params}

autogl/module/nas/space/single_path.py (+1 -1)

@@ -95,5 +95,5 @@ class SinglePathNodeClassificationSpace(BaseSpace):
     def get_model_info(self):
         total_params = count_parameters(self)
         total_trainable_params = count_parameters(self, only_trainable=True)
-        print(f'{total_params:,} total parameters.')
+        # print(f'{total_params:,} total parameters.')
         return {"parameter": total_params, "trainable_parameter": total_trainable_params}
