Revert "add llama model"
ftgreat authored Mar 7, 2023
1 parent ff75c3d commit 06af33f
Showing 12 changed files with 8 additions and 589 deletions.
26 changes: 0 additions & 26 deletions examples/llama/llama_13b_generate.py

This file was deleted.

26 changes: 0 additions & 26 deletions examples/llama/llama_33b_generate.py

This file was deleted.

26 changes: 0 additions & 26 deletions examples/llama/llama_65b_generate.py

This file was deleted.

26 changes: 0 additions & 26 deletions examples/llama/llama_7b_generate.py

This file was deleted.
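The four deleted scripts were the LLaMA text-generation examples; their contents are not reproduced in this diff. For rough orientation only, a FlagAI generation example of this vintage typically follows the AutoLoader/Predictor pattern sketched below. The model name, checkpoint directory, and sampling arguments are illustrative assumptions (galactica-6.7b-en is used because it stays registered after this revert), not the deleted files' code.

from flagai.auto_model.auto_loader import AutoLoader
from flagai.model.predictor.predictor import Predictor

# Illustrative sketch of the usual FlagAI generation flow, not the deleted
# llama_*_generate.py files. Assumes the model files can be fetched into
# ./checkpoints by AutoLoader.
loader = AutoLoader(task_name="lm",
                    model_name="galactica-6.7b-en",
                    model_dir="./checkpoints")
model = loader.get_model()
tokenizer = loader.get_tokenizer()

predictor = Predictor(model, tokenizer)
text = "The Transformer architecture is"
# Sampling arguments are illustrative, not values taken from the repository.
print(predictor.predict_generate_randomsample(text, out_max_length=100, top_k=30))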

10 changes: 1 addition & 9 deletions flagai/auto_model/auto_loader.py
@@ -57,7 +57,6 @@ def __getattr__(self, name):
     "opt_seq2seq": ("flagai.model.opt_model", "OPTModel"),
     "opt_lm": ("flagai.model.opt_model", "OPTModel"),
     "galactica_lm": ("flagai.model.galactica_model", "GalacticaModel"),
-    "llama_lm": ("flagai.model.llama", "LLAMA",),
     "vit_classification": ("flagai.model.vision.vit", "VisionTransformer"),
     "clip_txt_img_matching": ("flagai.model.mm.clip_model", "CLIP"),
     "swinv1_classification": ("flagai.model.vision.swinv1", "SwinTransformer"),
@@ -97,10 +96,6 @@ def __getattr__(self, name):
     "galactica-6.7b-en": ["flagai.model.galactica_model", "GalacticaModel", "galactica", "nlp", "flagai.data.tokenizer.galactica.galactica_tokenizer", "GalacticaTokenizer"],
     "galactica-30b-en": ["flagai.model.galactica_model", "GalacticaModel", "galactica", "nlp", "flagai.data.tokenizer.galactica.galactica_tokenizer", "GalacticaTokenizer"],
     "galactica-120b-en": ["flagai.model.galactica_model", "GalacticaModel", "galactica", "nlp", "flagai.data.tokenizer.galactica.galactica_tokenizer", "GalacticaTokenizer"],
-    "llama-7b-en": ["flagai.model.llama", "LLAMA", "llama", "nlp", "flagai.data.tokenizer.llama.tokenizer", "Tokenizer"],
-    "llama-13b-en": ["flagai.model.llama", "LLAMA", "llama", "nlp", "flagai.data.tokenizer.llama.tokenizer", "Tokenizer"],
-    "llama-30b-en": ["flagai.model.llama", "LLAMA", "llama", "nlp", "flagai.data.tokenizer.llama.tokenizer", "Tokenizer"],
-    "llama-65b-en": ["flagai.model.llama", "LLAMA", "llama", "nlp", "flagai.data.tokenizer.llama.tokenizer", "Tokenizer"],
     "vit-base-p16-224":
         ["flagai.model.vision.vit", "VisionTransformer", "vit", "vision"],
     "vit-base-p16-384":
@@ -217,12 +212,9 @@ def __init__(self,
             self.model.half()
 
         if model_type == "nlp":
-            if brief_model_name in ["galactica",]:
+            if brief_model_name in ["galactica", ]:
                 self.tokenizer = getattr(LazyImport(MODEL_DICT[model_name][4]),
                                          MODEL_DICT[model_name][5])(download_path)
-            elif brief_model_name in ["llama",]:
-                self.tokenizer = getattr(LazyImport(MODEL_DICT[model_name][4]),
-                                         MODEL_DICT[model_name][5])(os.path.join(download_path, "tokenizer.model"))
             else :
                 tokenizer_class = getattr(LazyImport("flagai.data.tokenizer"),
                                           "Tokenizer")
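Reading the two MODEL_DICT hunks together with the __init__ hunk, each entry appears to pack six fields: model module, model class, brief model name, modality, tokenizer module, and tokenizer class; __init__ then reaches the tokenizer through indices 4 and 5. A minimal sketch of that lookup under those assumptions, with plain importlib standing in for the LazyImport helper used in auto_loader.py:

from importlib import import_module

# Field order inferred from the hunks above; this is not a verbatim excerpt
# of auto_loader.py.
entry = ["flagai.model.galactica_model", "GalacticaModel", "galactica", "nlp",
         "flagai.data.tokenizer.galactica.galactica_tokenizer", "GalacticaTokenizer"]

tok_module, tok_class = entry[4], entry[5]
tokenizer_cls = getattr(import_module(tok_module), tok_class)
# The constructor receives the download directory, mirroring the galactica
# branch above; the path here is an illustrative assumption.
tokenizer = tokenizer_cls("./checkpoints/galactica-6.7b-en")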
40 changes: 0 additions & 40 deletions flagai/data/tokenizer/llama/tokenizer.py

This file was deleted.
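The deleted 40-line tokenizer wrapper is not reproduced in this diff. For orientation only: the __init__ hunk above instantiated it with os.path.join(download_path, "tokenizer.model"), so it wrapped a SentencePiece model. A wrapper of that size commonly looks like the hedged sketch below; the method names and BOS/EOS handling are assumptions patterned on the publicly documented LLaMA reference tokenizer, not the deleted file's contents.

from sentencepiece import SentencePieceProcessor

class Tokenizer:
    """Hedged sketch of a SentencePiece-backed LLaMA tokenizer wrapper."""

    def __init__(self, model_path: str):
        # Loads the "tokenizer.model" file that the auto loader pointed at.
        self.sp_model = SentencePieceProcessor(model_file=model_path)
        self.n_words = self.sp_model.vocab_size()
        self.bos_id = self.sp_model.bos_id()
        self.eos_id = self.sp_model.eos_id()

    def encode(self, s, bos=True, eos=False):
        ids = self.sp_model.encode(s)  # list of token ids
        if bos:
            ids = [self.bos_id] + ids
        if eos:
            ids = ids + [self.eos_id]
        return ids

    def decode(self, ids):
        return self.sp_model.decode(ids)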

2 changes: 1 addition & 1 deletion flagai/model/base_model.py
@@ -222,7 +222,7 @@ def download(cls,
         model_files = eval(_get_model_files(model_name))
         print("model files:" + str(model_files))
         for file_name in model_files:
-            if not file_name.endswith("bin") and not file_name.endswith("pth"):
+            if not file_name.endswith("bin"):
                 _get_vocab_path(os.path.join(download_path, model_name), file_name, model_id)
             else :
                 _get_checkpoint_path(os.path.join(download_path, model_name), file_name, model_id)
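The one-line change above reverts the download routing in BaseModel.download to its pre-LLaMA form: only files ending in "bin" go through _get_checkpoint_path, so ".pth" checkpoints (the format the original LLaMA weights shipped in, e.g. consolidated.00.pth) would again be fetched through the vocab/config path. A small self-contained illustration of that split, with hypothetical file names:

import os

# Hypothetical file list; the names are examples, not taken from the repository.
model_files = ["config.json", "vocab.txt", "pytorch_model.bin", "consolidated.00.pth"]
download_path, model_name = "./checkpoints", "some-model"

for file_name in model_files:
    if not file_name.endswith("bin"):
        # After the revert, ".pth" files land here alongside configs and vocabs.
        print("vocab path:", os.path.join(download_path, model_name, file_name))
    else:
        # Only "bin"-suffixed files are still routed as checkpoint downloads.
        print("checkpoint:", os.path.join(download_path, model_name, file_name))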
