Running on Google Colab takes about 5 seconds.

from transformers import AutoTokenizer, MarianMTModel
from huggingface_hub.hf_api import HfFolder

# Optional: save your Hugging Face access token (use your own token; this public model does not require one)
HfFolder.save_token('hf_xxxxxxxxxxxxxxxxxxxx')

text = "Memphis is a series of models which advance human-data models, offering good performance without relying on proprietary model outputs"
mname = 'Helsinki-NLP/opus-mt-en-zh'
tokenizer = AutoTokenizer.from_pretrained(mname)
model = MarianMTModel.from_pretrained(mname)
input_ids = tokenizer.encode(text, return_tensors="pt")
outputs = model.generate(input_ids)
decoded = tokenizer.decode(outputs[0], skip_special_tokens=True)
print(decoded)  # prints the Chinese translation of the input sentence
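
The same tokenizer and model also handle batches, and generate() accepts the usual generation parameters. The following is a minimal sketch (the sentence list and the num_beams/max_length settings are illustrative choices, not from the original run):

from transformers import AutoTokenizer, MarianMTModel

mname = 'Helsinki-NLP/opus-mt-en-zh'
tokenizer = AutoTokenizer.from_pretrained(mname)
model = MarianMTModel.from_pretrained(mname)

sentences = [
    "Memphis is a series of models which advance human-data models.",
    "They offer good performance without relying on proprietary model outputs.",
]
# pad the batch so both sequences share the same length
batch = tokenizer(sentences, return_tensors="pt", padding=True)
# beam search usually gives slightly better translations than greedy decoding
outputs = model.generate(**batch, num_beams=4, max_length=128)
print(tokenizer.batch_decode(outputs, skip_special_tokens=True))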

You can also use a pipeline:

from transformers import pipeline

text = "Memphis is a series of models which advance human-data models, offering good performance without relying on proprietary model outputs"
mname = 'Helsinki-NLP/opus-mt-en-zh'
translator = pipeline(model=mname)  # the task ("translation") is inferred from the model
print(translator(text)[0].get("translation_text"))
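
You can also name the task explicitly instead of letting pipeline infer it from the model. The sketch below assumes the standard "translation_XX_to_YY" task string and reads the "translation_text" field of the result:

from transformers import pipeline

mname = 'Helsinki-NLP/opus-mt-en-zh'
translator = pipeline("translation_en_to_zh", model=mname)
result = translator("Memphis is a series of models which advance human-data models.")
print(result[0]["translation_text"])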
