fix config
HIT-cwh committed Aug 31, 2023
1 parent 9a4d696 commit 089f8a1
Showing 1 changed file with 2 additions and 3 deletions.
@@ -1,12 +1,12 @@
 # Copyright (c) OpenMMLab. All rights reserved.
 import torch
+from bitsandbytes.optim import PagedAdamW32bit
 from datasets import load_dataset
 from mmengine.dataset import DefaultSampler
 from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
                             LoggerHook, ParamSchedulerHook)
 from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR
 from peft import LoraConfig
-from torch.optim import AdamW
 from transformers import (AutoModelForCausalLM, AutoTokenizer,
                           BitsAndBytesConfig)

@@ -35,7 +35,7 @@
 accumulative_counts = 16  # 1bs * 16acc * 1gpu = 16 batchsize
 dataloader_num_workers = 0
 max_epochs = 1
-optim_type = AdamW
+optim_type = PagedAdamW32bit
 lr = 1e-4
 betas = (0.9, 0.999)
 weight_decay = 0.05
@@ -62,7 +62,6 @@
         type=AutoModelForCausalLM.from_pretrained,
         pretrained_model_name_or_path=pretrained_model_name_or_path,
         trust_remote_code=True,
-        load_in_8bit=True,
         torch_dtype=torch.float16,
         quantization_config=dict(
             type=BitsAndBytesConfig,

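Taken together, the two edits move the config onto bitsandbytes' paged optimizer and let the BitsAndBytesConfig passed via quantization_config own the quantization settings, so the standalone load_in_8bit=True flag (which would conflict with that config) is dropped. Below is a minimal, self-contained sketch of the same pattern in plain transformers/bitsandbytes code; the model id and the 4-bit settings are illustrative assumptions, not values taken from this commit.

# Minimal sketch only: the model id and 4-bit settings are assumptions,
# not values from the changed config file.
import torch
from bitsandbytes.optim import PagedAdamW32bit
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

quant_cfg = BitsAndBytesConfig(
    load_in_4bit=True,                  # assumed QLoRA-style 4-bit setup
    bnb_4bit_quant_type='nf4',
    bnb_4bit_compute_dtype=torch.float16,
)

model = AutoModelForCausalLM.from_pretrained(
    'internlm/internlm-7b',             # placeholder model id
    trust_remote_code=True,
    torch_dtype=torch.float16,
    quantization_config=quant_cfg,      # replaces the removed load_in_8bit=True
)

# Paged 32-bit AdamW from bitsandbytes, mirroring optim_type, lr, betas and
# weight_decay in the config above.
optimizer = PagedAdamW32bit(model.parameters(), lr=1e-4,
                            betas=(0.9, 0.999), weight_decay=0.05)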