forked from guillaume-be/rust-bert
Commit e6938e1 (1 parent: 2dad825)
Showing 4 changed files with 151 additions and 0 deletions.
@@ -0,0 +1,57 @@
// Copyright 2019-present, the HuggingFace Inc. team, The Google AI Language Team and Facebook, Inc.
// Copyright 2019 Guillaume Becquin
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

extern crate failure;

use rust_bert::bert::{BertConfigResources, BertModelResources, BertVocabResources};
use rust_bert::pipelines::common::ModelType;
use rust_bert::pipelines::question_answering::{
    QaInput, QuestionAnsweringConfig, QuestionAnsweringModel,
};
use rust_bert::resources::{RemoteResource, Resource};

fn main() -> failure::Fallible<()> {
    // Set up the Question Answering model. Config and vocabulary must match the
    // SQuAD-fine-tuned weights, so all three resources point at the BERT_QA entries
    let config = QuestionAnsweringConfig::new(
        ModelType::Bert,
        Resource::Remote(RemoteResource::from_pretrained(BertModelResources::BERT_QA)),
        Resource::Remote(RemoteResource::from_pretrained(
            BertConfigResources::BERT_QA,
        )),
        Resource::Remote(RemoteResource::from_pretrained(
            BertVocabResources::BERT_QA,
        )),
        None,  // merges resource only relevant with ModelType::Roberta
        false, // lowercase: false, the checkpoint is cased
    );

    let qa_model = QuestionAnsweringModel::new(config)?;

    // Define inputs: each QaInput pairs a question with the context to search
    let question_1 = String::from("Where does Amy live ?");
    let context_1 = String::from("Amy lives in Amsterdam");
    let question_2 = String::from("Where does Eric live");
    let context_2 = String::from("While Amy lives in Amsterdam, Eric is in The Hague.");
    let qa_input_1 = QaInput {
        question: question_1,
        context: context_1,
    };
    let qa_input_2 = QaInput {
        question: question_2,
        context: context_2,
    };

    // Get answers (top 1 answer per input, batch size of 32)
    let answers = qa_model.predict(&[qa_input_1, qa_input_2], 1, 32);
    println!("{:?}", answers);
    Ok(())
}
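The example above simply debug-prints the result. As a hedged sketch (assuming predict returns one Vec of answers per input, with each answer exposing `answer` and `score` fields as in the rust-bert question answering pipeline), the final println! inside main could instead iterate over the results explicitly:

    // Sketch only: assumes the pipeline's Answer struct exposes `answer` and `score`
    for (i, input_answers) in answers.iter().enumerate() {
        for answer in input_answers {
            println!("Question {}: {} (score: {:.3})", i + 1, answer.answer, answer.score);
        }
    }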
@@ -0,0 +1,48 @@
from transformers import BERT_PRETRAINED_CONFIG_ARCHIVE_MAP, BERT_PRETRAINED_MODEL_ARCHIVE_MAP
from transformers.tokenization_bert import PRETRAINED_VOCAB_FILES_MAP
from transformers.file_utils import get_from_cache
from pathlib import Path
import shutil
import os
import numpy as np
import torch
import subprocess

config_path = BERT_PRETRAINED_CONFIG_ARCHIVE_MAP["bert-large-cased-whole-word-masking-finetuned-squad"]
vocab_path = PRETRAINED_VOCAB_FILES_MAP["vocab_file"]["bert-large-cased-whole-word-masking-finetuned-squad"]
weights_path = BERT_PRETRAINED_MODEL_ARCHIVE_MAP["bert-large-cased-whole-word-masking-finetuned-squad"]

target_path = Path.home() / 'rustbert' / 'bert-qa'

# Download (or reuse from the local cache) the config, vocabulary and weights
temp_config = get_from_cache(config_path)
temp_vocab = get_from_cache(vocab_path)
temp_weights = get_from_cache(weights_path)

os.makedirs(str(target_path), exist_ok=True)

config_path = str(target_path / 'config.json')
vocab_path = str(target_path / 'vocab.txt')
model_path = str(target_path / 'model.bin')

shutil.copy(temp_config, config_path)
shutil.copy(temp_vocab, vocab_path)
shutil.copy(temp_weights, model_path)

# Re-export the PyTorch weights as contiguous numpy arrays, renaming the legacy
# LayerNorm parameter names (gamma/beta) to the weight/bias names expected by rust-bert
weights = torch.load(temp_weights, map_location='cpu')
nps = {}
for k, v in weights.items():
    k = k.replace("gamma", "weight").replace("beta", "bias")
    nps[k] = np.ascontiguousarray(v.cpu().numpy())

np.savez(target_path / 'model.npz', **nps)

source = str(target_path / 'model.npz')
target = str(target_path / 'model.ot')

# Locate the crate's Cargo.toml two directory levels up from this script
toml_location = (Path(__file__).resolve() / '..' / '..' / 'Cargo.toml').resolve()

# Convert the .npz archive into the .ot weights file loaded by rust-bert
subprocess.call(
    ['cargo', 'run', '--bin=convert-tensor', '--manifest-path=%s' % toml_location, '--', source, target])

# Clean up intermediate artifacts, keeping only config.json, vocab.txt and model.ot
os.remove(str(target_path / 'model.bin'))
os.remove(str(target_path / 'model.npz'))
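Once this script has run, ~/rustbert/bert-qa contains config.json, vocab.txt and model.ot. As a hedged sketch (assuming rust-bert's LocalResource with a local_path field, and a HOME environment variable pointing at the home directory), the remote resources in the Rust example above could be swapped for these local files:

    use std::path::PathBuf;

    use rust_bert::pipelines::common::ModelType;
    use rust_bert::pipelines::question_answering::QuestionAnsweringConfig;
    use rust_bert::resources::{LocalResource, Resource};

    fn local_qa_config() -> QuestionAnsweringConfig {
        // Sketch only: mirrors the output directory used by the download script above
        let target = PathBuf::from(std::env::var("HOME").expect("HOME not set"))
            .join("rustbert")
            .join("bert-qa");
        QuestionAnsweringConfig::new(
            ModelType::Bert,
            Resource::Local(LocalResource { local_path: target.join("model.ot") }),
            Resource::Local(LocalResource { local_path: target.join("config.json") }),
            Resource::Local(LocalResource { local_path: target.join("vocab.txt") }),
            None,  // merges resource only relevant with ModelType::Roberta
            false, // lowercase: false, the checkpoint is cased
        )
    }

The constructor arguments mirror the remote setup in the example file; only the resource variants change, so no network access is needed at run time.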