Avoid tokenizer download from torch hub
parent 406133f0fb
commit fcc7458d15
extras/BLIP/models/bert_tokenizer/config.json (new file, 23 lines)
@@ -0,0 +1,23 @@
+{
+  "architectures": [
+    "BertForMaskedLM"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "gradient_checkpointing": false,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-12,
+  "max_position_embeddings": 512,
+  "model_type": "bert",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "pad_token_id": 0,
+  "position_embedding_type": "absolute",
+  "transformers_version": "4.6.0.dev0",
+  "type_vocab_size": 2,
+  "use_cache": true,
+  "vocab_size": 30522
+}
|
extras/BLIP/models/bert_tokenizer/tokenizer.json (new file, 1 line)
File diff suppressed because one or more lines are too long
extras/BLIP/models/bert_tokenizer/tokenizer_config.json (new file, 3 lines)
@@ -0,0 +1,3 @@
+{
+  "do_lower_case": true
+}
extras/BLIP/models/bert_tokenizer/vocab.txt (new file, 30522 lines)
File diff suppressed because it is too large
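Together, the four committed files (config.json, tokenizer.json, tokenizer_config.json, vocab.txt) form a standard local Hugging Face tokenizer directory, so the BERT tokenizer can be built without contacting any hub. A minimal sketch of that offline load, assuming a recent transformers release and the repository root as the working directory (the sample sentence is illustrative):

from transformers import BertTokenizer

# Load the vocabulary committed above; local_files_only=True makes the call fail
# loudly if anything would still need to be downloaded.
tokenizer = BertTokenizer.from_pretrained(
    "extras/BLIP/models/bert_tokenizer",
    local_files_only=True,
)

assert tokenizer.vocab_size == 30522   # matches vocab.txt and config.json
assert tokenizer.do_lower_case         # from tokenizer_config.json
print(tokenizer.tokenize("a photo of a cat"))  # ['a', 'photo', 'of', 'a', 'cat']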
@@ -184,7 +184,8 @@ def blip_feature_extractor(pretrained='',**kwargs):
     return model

 def init_tokenizer():
-    tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
+    tokenizer_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "bert_tokenizer")
+    tokenizer = BertTokenizer.from_pretrained(tokenizer_path)
     tokenizer.add_special_tokens({'bos_token':'[DEC]'})
     tokenizer.add_special_tokens({'additional_special_tokens':['[ENC]']})
     tokenizer.enc_token_id = tokenizer.additional_special_tokens_ids[0]
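The special-token handling after the changed lines is untouched: '[DEC]' and '[ENC]' are added on top of the 30522-entry base vocabulary exactly as before, so downstream BLIP code sees the same token ids whether the tokenizer files come from the hub or from the bundled folder. A rough reproduction of what the patched init_tokenizer() returns, with the directory spelled out explicitly instead of being resolved from __file__ (the relative path is an assumption about the working directory):

from transformers import BertTokenizer

# Equivalent of the patched init_tokenizer(), using an explicit relative path.
tokenizer = BertTokenizer.from_pretrained("extras/BLIP/models/bert_tokenizer")
tokenizer.add_special_tokens({'bos_token': '[DEC]'})
tokenizer.add_special_tokens({'additional_special_tokens': ['[ENC]']})
tokenizer.enc_token_id = tokenizer.additional_special_tokens_ids[0]

# The two extra tokens are appended after the base vocabulary.
print(len(tokenizer))          # 30524 (30522 + '[DEC]' + '[ENC]')
print(tokenizer.bos_token_id)  # id assigned to '[DEC]'
print(tokenizer.enc_token_id)  # id assigned to '[ENC]'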
@@ -1 +1 @@
-version = '2.1.835'
+version = '2.1.836'
@@ -1,3 +1,7 @@
+# 2.1.836
+
+* Avoid blip tokenizer download from torch hub
+
 # 2.1.831

 * Input Image -> Describe (Midjourney Describe)