# automatic pipeline
pipe = Pipeline('summarization', model="bart")
pipe(input)
# internally runs:
# preprocess(input)
# model(input)
# postprocess(input)
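# (sketch, not part of the original gist) one way the automatic pipeline could
# resolve a task name into its three steps; TASK_REGISTRY and the constructor
# arguments are assumptions, and the step classes are the pseudocode classes
# used in the rest of this file.
TASK_REGISTRY = {
    'summarization': (PreProcessSummaryPipeline, PytorchModelPipeline, PostProcessSummaryPipeline),
}

def resolve_task(task, model):
    # look up the task and instantiate preprocess -> model -> postprocess steps
    pre_cls, model_cls, post_cls = TASK_REGISTRY[task]
    return [pre_cls(model), model_cls(model), post_cls(model)]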
#-------------------------------------------------------------------------#
# manual pipeline
model_id = 'bart-large-cnn'
tokenizer = 'bart-large-cnn'
preprocess_summary = PreProcessSummaryPipeline(tokenizer)
model_summary = PytorchModelPipeline(model_id)
postprocess_summary = PostProcessSummaryPipeline(tokenizer)

pre = preprocess_summary(input)
pred = model_summary(pre)
post = postprocess_summary(pred)
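# (sketch, not part of the original gist) each step is just a callable object;
# a preprocess step could wrap a Hugging Face tokenizer like this. The hub id
# "facebook/bart-large-cnn" and the tensor format are assumptions.
from transformers import AutoTokenizer

class PreProcessSummaryPipeline:
    def __init__(self, tokenizer_id="facebook/bart-large-cnn"):
        self.tokenizer = AutoTokenizer.from_pretrained(tokenizer_id)

    def __call__(self, text):
        # tokenize the raw text into the tensors the model step consumes
        return self.tokenizer(text, truncation=True, return_tensors="pt")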
#-------------------------------------------------------------------------#
# chained pipeline
# summary steps
model_id = 'bart-large-cnn'
tokenizer = 'bart-large-cnn'
preprocess_summary = PreProcessSummaryPipeline(tokenizer)
model_summary = PytorchModelPipeline(model_id)
postprocess_summary = PostProcessSummaryPipeline(tokenizer)

# token classification steps
model_id = 'my-onnx-model'
tokenizer = 'bert-base-cased'
preprocess_token = PreProcessTokenPipeline(tokenizer)
model_token = ONNXModelPipeline(model_id)
postprocess_token = PostProcessTokenPipeline(tokenizer)

pipeline = Pipeline([
    preprocess_summary,
    model_summary,
    postprocess_summary,
    preprocess_token,
    model_token,
    postprocess_token,
])
pipeline(input)
# internally runs each step in order:
# preprocess_summary(input)
# model_summary(input)
# postprocess_summary(input)
# preprocess_token(input)
# model_token(input)
# return postprocess_token(input)
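# (sketch, not part of the original gist) a minimal Pipeline for the chained
# case: it only accepts a list of callables and feeds the output of one step
# into the next; the task-name constructor from the automatic pipeline above
# is left out here.
class Pipeline:
    def __init__(self, steps):
        self.steps = steps

    def __call__(self, x):
        for step in self.steps:
            x = step(x)  # pre -> model -> post -> pre -> model -> post
        return x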
#-------------------------------------------------------------------------#
# remote pipeline
preprocess_summary = PreProcessSummaryPipeline()
remote_model_summary = TritonClientPipeline()  # wrapper around e.g. the Triton client, exposing __call__
postprocess_summary = PostProcessSummaryPipeline()

pipeline = Pipeline([preprocess_summary, remote_model_summary, postprocess_summary])
pipeline(input)
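# (sketch, not part of the original gist) one way to wrap the Triton client as
# a pipeline step with __call__; the tensor names "input_ids" and "logits", the
# INT64 dtype, and the model name are assumptions that depend on the deployed
# model's configuration.
import numpy as np
import tritonclient.http as httpclient

class TritonClientPipeline:
    def __init__(self, url="localhost:8000", model_name="bart-large-cnn"):
        self.client = httpclient.InferenceServerClient(url=url)
        self.model_name = model_name

    def __call__(self, input_ids: np.ndarray):
        # send the preprocessed tensors to the remote model and return its output
        infer_input = httpclient.InferInput("input_ids", list(input_ids.shape), "INT64")
        infer_input.set_data_from_numpy(input_ids.astype(np.int64))
        result = self.client.infer(model_name=self.model_name, inputs=[infer_input])
        return result.as_numpy("logits")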
#-------------------------------------------------------------------------#
# parallel pipeline
preprocess = PreProcessPipeline()
model_sentiment = PytorchModelPipeline()
model_news_class = PytorchModelPipeline()
postprocess = PostProcessPipeline()

pipeline = Pipeline([preprocess, [model_sentiment, model_news_class], postprocess])
pipeline(input)
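# (sketch, not part of the original gist) extending the chained Pipeline sketch
# above so a nested list is treated as parallel branches: every branch gets the
# same input and the branch outputs are collected into a list for the next step.
from concurrent.futures import ThreadPoolExecutor

class ParallelPipeline(Pipeline):
    def __call__(self, x):
        for step in self.steps:
            if isinstance(step, list):
                # fan out: run all branches on the same input, then fan back in
                with ThreadPoolExecutor() as pool:
                    x = list(pool.map(lambda branch: branch(x), step))
            else:
                x = step(x)
        return x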