schemaVersion: 2.2.2
metadata:
  name: ollama
projects:
  - name: cde-ollama-continue
    git:
      remotes:
        origin: 'https://github.com/redhat-developer-demos/cde-ollama-continue'
      checkoutFrom:
        revision: main
components:
  - name: udi
    container:
      image: quay.io/devfile/universal-developer-image:ubi9-latest
      memoryLimit: 2Gi
      memoryRequest: 2Gi
      cpuLimit: 2000m
      cpuRequest: 1000m
      mountSources: true
      sourceMapping: /projects
  - name: ollama
    attributes:
      container-overrides:
        resources:
          limits:
            cpu: 2000m
            memory: 4Gi
            # nvidia.com/gpu: 1 # Uncomment this if the pod shall be scheduled only on a GPU node
          requests:
            cpu: 1000m
            memory: 4Gi
            # nvidia.com/gpu: 1 # Uncomment this if the pod shall be scheduled only on a GPU node
    container:
      image: docker.io/ollama/ollama:0.5.4
      mountSources: true
      sourceMapping: /.ollama
commands:
  - id: pullmodel
    exec:
      component: ollama
      commandLine: "ollama pull llama3:8b"
  - id: pullautocompletemodel
    exec:
      component: ollama
      commandLine: "ollama pull starcoder2:3b"
  - id: copyconfig
    exec:
      component: udi
      commandLine: "mkdir /home/user/.continue && cp /projects/cde-ollama-continue/continue-config.json /home/user/.continue/config.json"
events:
  postStart:
    - pullmodel
    - pullautocompletemodel
    - copyconfig
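
Once the postStart events have pulled the models, the Ollama sidecar can be checked from the udi container. Below is a minimal sketch, assuming Ollama listens on its default port 11434 and that both containers share the workspace pod's network (so localhost is reachable); the prompt text is illustrative only, not part of the devfile.

# verify_ollama.py -- quick check that the pulled model answers requests
import json
import urllib.request

OLLAMA_URL = "http://localhost:11434/api/generate"  # default Ollama REST endpoint

payload = {
    "model": "llama3:8b",  # model pulled by the 'pullmodel' postStart command
    "prompt": "Say hello in one short sentence.",
    "stream": False,       # request a single JSON response instead of a stream
}

req = urllib.request.Request(
    OLLAMA_URL,
    data=json.dumps(payload).encode("utf-8"),
    headers={"Content-Type": "application/json"},
)

with urllib.request.urlopen(req) as resp:
    body = json.load(resp)

print(body["response"])    # the generated completion text

If this prints a short greeting, the model is loaded and the Continue extension (configured by the copyconfig command) should be able to use the same endpoint.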