Patches to Recorder and ParamScheduler Callbacks
Modifications to the existing callbacks Recorder and ParamScheduler are needed in order to store extra attributes to a temporary file after running multi-core TPU training as spawned processes.
from fastcore.test import *

# maybe_item should unwrap a scalar tensor into a plain Python number,
# and pass an already-plain scalar through unchanged.
scalar_tensor = torch.tensor(5.)
test_eq(maybe_item(scalar_tensor), 5.)
test_eq(maybe_item(float(5)), 5.)
Given a tensor, maybe_item converts it to a scalar. If the given value is not a tensor (e.g. it is already a scalar), it is returned unchanged.
from fastcore.test import *
from fastai.torch_core import tensor

# A list of tensors should become a list of plain scalars.
tensor_list = [tensor(2.)] * 5
test_eq(maybe_item(tensor_list), [2.] * 5)

# A dict may mix a tensor value with a list of tensors; both get converted.
nested_tensors = {'d1': tensor(3.), 'd2': [tensor(1.)] * 3}
nested_plain = {'d1': 3., 'd2': [1.] * 3}
test_eq(maybe_item(nested_tensors), nested_plain)
maybe_item should also work for lists of tensors and for dicts whose values are tensors and/or lists of tensors.
from fastai.test_utils import *

# Train a tiny synthetic learner so the recorder has state to export.
learner = synth_learner()
learner.fit(5)

# get_extra_attrs should mirror each recorder state attribute into the dict.
extra_attrs = learner.recorder.get_extra_attrs()
for attr in ('lrs', 'losses', 'iters', 'values'):
    test_eq(extra_attrs[attr], getattr(learner.recorder, attr))
Recorder.get_extra_attrs should copy the state attributes (lrs, losses, iters, and values) into a dict.
# Round-trip test: dump_attrs pickles the recorder state attributes to a
# file, and reload_attrs restores them after they have been removed.
test_fn = 'test_rec_attrs.pkl'
!rm -f {test_fn}
learner.recorder.dump_attrs(fn=test_fn)
f = Path(test_fn)
# dump_attrs must have created the pickle file
assert f.is_file()
# Drop all four state attrs to prove reload_attrs actually restores them
delattr(learner.recorder,'lrs')
delattr(learner.recorder,'losses')
delattr(learner.recorder,'iters')
delattr(learner.recorder,'values')
assert not hasattr(learner.recorder,'lrs')
assert not hasattr(learner.recorder,'losses')
assert not hasattr(learner.recorder,'iters')
assert not hasattr(learner.recorder,'values')
# Reloading from the file should bring every attribute back
learner.recorder.reload_attrs(fn=test_fn)
assert hasattr(learner.recorder,'lrs')
assert hasattr(learner.recorder,'losses')
assert hasattr(learner.recorder,'iters')
assert hasattr(learner.recorder,'values')
# Clean up the temporary pickle file
!rm -f {test_fn}
#colab
# Simulate a spawned-process (inner XLA) learner: in that mode,
# ParamScheduler is expected to dump its hyperparameters (hps) to a
# pickle file so the parent process can reload them after training.
param_fn = '_paramsched_hps.pkl'
!rm -f {param_fn}
learner.inner_xla = True # simulate spawned process learner
learner.xla_rank = 0
learner.fit_one_cycle(3)
param_f = Path(param_fn)
# fitting with ParamScheduler should have created the hps pickle
assert param_f.is_file()
# Remove hps, then check reload_hps restores it from the file
delattr(learner.recorder,'hps')
assert not hasattr(learner.recorder,'hps')
learner.recorder.reload_hps()
assert hasattr(learner.recorder,'hps')
# Clean up the temporary pickle files
!rm -f {param_fn}
!rm -f _rec_attr.pkl
Test ParamScheduler (fit_one_cycle uses ParamScheduler), which means it should create a pickle file.
from fastcore.foundation import L

# Make sure a progress-bar callback is attached before fitting.
if 'progress' not in L(learner.cbs).attrgot('name'):
    learner.add_cbs(ProgressCallback)

# fit_one_cycle uses ParamScheduler, so both pickle files should appear.
learner.fit_one_cycle(5)
assert param_f.is_file()
rec_attr_f = Path('_rec_attr.pkl')
assert rec_attr_f.is_file()