I am trying to run the Chapter 5 clean notebook on Google Colab. I hit the error below when I run the following:
learn = vision_learner(dls, resnet34, metrics=error_rate)
learn.fine_tune(2)
Here is the full code:
#hide
! [ -e /content ] && pip install -Uqq fastbook
import fastbook
fastbook.setup_book()
from fastbook import *
from fastai.vision.all import *
path = untar_data(URLs.PETS)
#hide
Path.BASE_PATH = path
pets = DataBlock(blocks = (ImageBlock, CategoryBlock),
get_items=get_image_files,
splitter=RandomSplitter(seed=42),
get_y=using_attr(RegexLabeller(r'(.+)_\d+.jpg$'), 'name'),
item_tfms=Resize(460),
batch_tfms=aug_transforms(size=224, min_scale=0.75))
dls = pets.dataloaders(path/"images")
learn = vision_learner(dls, resnet34, metrics=error_rate)
learn.fine_tune(2)
The error stack trace is as follows:
Downloading: "https://download.pytorch.org/models/resnet34-b627a593.pth" to /root/.cache/torch/hub/checkpoints/resnet34-b627a593.pth
100%|██████████| 83.3M/83.3M [00:00<00:00, 208MB/s]
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
/tmp/ipython-input-160981725.py in <cell line: 0>()
1 learn = vision_learner(dls, resnet34, metrics=error_rate)
----> 2 learn.fine_tune(2)
15 frames/usr/local/lib/python3.12/dist-packages/fastai/callback/schedule.py in fine_tune(self, epochs, base_lr, freeze_epochs, lr_mult, pct_start, div, **kwargs)
165 "Fine tune with `Learner.freeze` for `freeze_epochs`, then with `Learner.unfreeze` for `epochs`, using discriminative LR."
166 self.freeze()
--> 167 self.fit_one_cycle(freeze_epochs, slice(base_lr), pct_start=0.99, **kwargs)
168 base_lr /= 2
169 self.unfreeze()
/usr/local/lib/python3.12/dist-packages/fastai/callback/schedule.py in fit_one_cycle(self, n_epoch, lr_max, div, div_final, pct_start, wd, moms, cbs, reset_opt, start_epoch)
119 scheds = {'lr': combined_cos(pct_start, lr_max/div, lr_max, lr_max/div_final),
120 'mom': combined_cos(pct_start, *(self.moms if moms is None else moms))}
--> 121 self.fit(n_epoch, cbs=ParamScheduler(scheds)+L(cbs), reset_opt=reset_opt, wd=wd, start_epoch=start_epoch)
122
123 # %% ../../nbs/14_callback.schedule.ipynb 50
/usr/local/lib/python3.12/dist-packages/fastai/learner.py in fit(self, n_epoch, lr, wd, cbs, reset_opt, start_epoch)
270 self.opt.set_hypers(lr=self.lr if lr is None else lr)
271 self.n_epoch = n_epoch
--> 272 self._with_events(self._do_fit, 'fit', CancelFitException, self._end_cleanup)
273
274 def _end_cleanup(self): self.dl,self.xb,self.yb,self.pred,self.loss = None,(None,),(None,),None,None
/usr/local/lib/python3.12/dist-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
205
206 def _with_events(self, f, event_type, ex, final=noop):
--> 207 try: self(f'before_{event_type}'); f()
208 except ex: self(f'after_cancel_{event_type}')
209 self(f'after_{event_type}'); final()
/usr/local/lib/python3.12/dist-packages/fastai/learner.py in __call__(self, event_name)
178
179 def ordered_cbs(self, event): return [cb for cb in self.cbs.sorted('order') if hasattr(cb, event)]
--> 180 def __call__(self, event_name): L(event_name).map(self._call_one)
181
182 def _call_one(self, event_name):
/usr/local/lib/python3.12/dist-packages/fastcore/foundation.py in wrapper(self, *args, **kwargs)
223 def wrapper(self, *args, **kwargs):
224 if not isinstance(self, L): return lambda items: f(L(items), self, *args, **kwargs)
--> 225 return f(L(self), *args, **kwargs)
226 return wrapper
227
/usr/local/lib/python3.12/dist-packages/fastcore/foundation.py in map(self, f, *args, **kwargs)
231 def map(self:L, f, *args, **kwargs):
232 "Create new `L` with `f` applied to all `items`, passing `args` and `kwargs` to `f`"
--> 233 return self._new(map_ex(self, f, *args, gen=False, **kwargs))
234
235 # %% ../nbs/02_foundation.ipynb #9c5a4633
/usr/local/lib/python3.12/dist-packages/fastcore/basics.py in map_ex(iterable, f, gen, *args, **kwargs)
940 res = map(g, iterable)
941 if gen: return res
--> 942 return list(res)
943
944 # %% ../nbs/01_basics.ipynb #02d08a1b
/usr/local/lib/python3.12/dist-packages/fastcore/basics.py in __call__(self, *args, **kwargs)
925 if isinstance(v,_Arg): kwargs[k] = args.pop(v.i)
926 fargs = [args[x.i] if isinstance(x, _Arg) else x for x in self.pargs] + args[self.maxi+1:]
--> 927 return self.func(*fargs, **kwargs)
928
929 # %% ../nbs/01_basics.ipynb #84779a02
/usr/local/lib/python3.12/dist-packages/fastai/learner.py in _call_one(self, event_name)
182 def _call_one(self, event_name):
183 if not hasattr(event, event_name): raise Exception(f'missing {event_name}')
--> 184 for cb in self.cbs.sorted('order'): cb(event_name)
185
186 def _bn_bias_state(self, with_bias): return norm_bias_params(self.model, with_bias).map(self.opt.state)
/usr/local/lib/python3.12/dist-packages/fastai/callback/core.py in __call__(self, event_name)
62 try: res = getcallable(self, event_name)()
63 except (CancelBatchException, CancelBackwardException, CancelEpochException, CancelFitException, CancelStepException, CancelTrainException, CancelValidException): raise
---> 64 except Exception as e: raise modify_exception(e, f'Exception occured in `{self.__class__.__name__}` when calling event `{event_name}`:\n\t{e.args[0]}', replace=True)
65 if event_name=='after_fit': self.run=True #Reset self.run to True at each end of fit
66 return res
/usr/local/lib/python3.12/dist-packages/fastai/callback/core.py in __call__(self, event_name)
60 res = None
61 if self.run and _run:
---> 62 try: res = getcallable(self, event_name)()
63 except (CancelBatchException, CancelBackwardException, CancelEpochException, CancelFitException, CancelStepException, CancelTrainException, CancelValidException): raise
64 except Exception as e: raise modify_exception(e, f'Exception occured in `{self.__class__.__name__}` when calling event `{event_name}`:\n\t{e.args[0]}', replace=True)
/usr/local/lib/python3.12/dist-packages/fastai/callback/progress.py in before_fit(self)
21 if self.learn.logger != noop:
22 self.old_logger,self.learn.logger = self.logger,self._write_stats
---> 23 self._write_stats(self.recorder.metric_names)
24 else: self.old_logger = noop
25
/usr/local/lib/python3.12/dist-packages/fastai/callback/progress.py in _write_stats(self, log)
46
47 def _write_stats(self, log):
---> 48 if getattr(self, 'mbar', False): self.mbar.write([f'{l:.6f}' if isinstance(l, float) else str(l) for l in log], table=True)
49
50 _docs = dict(before_fit="Setup the master bar over the epochs",
/usr/local/lib/python3.12/dist-packages/fastprogress/fastprogress.py in write(self, line, table)
237 if table: self.lines.append(line); self.text_parts = [text2html_table(self.lines)]
238 else: self.text_parts.append(P(line))
--> 239 self.show()
240
241 # %% ../nbs/01_fastprogress.ipynb
/usr/local/lib/python3.12/dist-packages/fastprogress/fastprogress.py in show(self)
232 children = [getattr(item, 'progress', None) or item for n in self.order
233 if (item := self.inner_dict.get(n))]
--> 234 self.out.update(Div(*children))
235
236 def write(self, line, table=False):
AttributeError: Exception occured in `ProgressCallback` when calling event `before_fit`:
'NBMasterBar' object has no attribute 'out'
What is the reason for this error, and what is the solution?