---
# Devfile for an Ollama-backed cloud development environment (CDE).
# Runs a universal developer image alongside an Ollama server, pulls the
# chat (llama3:8b) and autocomplete (starcoder2:3b) models on startup,
# and installs a Continue config pointing at the local Ollama instance.
schemaVersion: 2.2.2
metadata:
  name: ollama
  displayName: Ollama
  description: Get up and running with large language models with Ollama, Continue, Llama3, and StarCoder2
  icon: https://ollama.com/public/ollama.png
  tags:
    - Ollama
    - Continue
    - Llama3
    - Starcoder2
  projectType: universal
  language: Polyglot
  version: 1.0.0
projects:
  - name: cde-ollama-continue
    git:
      remotes:
        origin: https://github.com/redhat-developer-demos/cde-ollama-continue
      checkoutFrom:
        revision: main
components:
  # Developer workspace container (tools, editor tooling, project sources).
  - name: udi
    container:
      image: quay.io/devfile/universal-developer-image:ubi9-latest
      memoryLimit: 4Gi
      memoryRequest: 2Gi
      cpuLimit: 4000m
      cpuRequest: 1000m
      mountSources: true
      sourceMapping: /projects
  # Ollama model server; sources are mounted at /.ollama so pulled models
  # persist in the workspace volume.
  - name: ollama
    container:
      image: docker.io/ollama/ollama:0.5.4
      command: ["/bin/sh", "-c"]
      args: ["ollama serve"]
      mountSources: true
      sourceMapping: /.ollama
      # The resource limits belong in this container block:
      memoryLimit: 20Gi
      cpuLimit: 8000m
      memoryRequest: 8Gi
      cpuRequest: 1000m
commands:
  - id: pullmodel
    exec:
      component: ollama
      # Waits briefly until the server responds on port 11434
      commandLine: "sleep 15 && ollama pull llama3:8b"
  - id: pullautocompletemodel
    exec:
      component: ollama
      commandLine: "ollama pull starcoder2:3b"
  - id: copyconfig
    exec:
      component: udi
      commandLine: "mkdir -p /home/user/.continue && cp /projects/cde-ollama-continue/continue-config.json /home/user/.continue/config.json"
events:
  postStart:
    - pullmodel
    - pullautocompletemodel
    - copyconfig