diff --git a/README.md b/README.md
index 65d144b..0a87b33 100644
--- a/README.md
+++ b/README.md
@@ -29,6 +29,99 @@ podman-compose -f docker-compose.yaml up
nodemon --ext '*' --exec "podman stop rag-chat-backend; podman rm rag-chat-backend; podman-compose -f docker-compose.yaml up --build"
```
+### Ollama (CLI)
+
+
+Show Models from Ollama
+
+ curl http://localhost:11434/api/tags | jq
+
+
+
+
+
+Run Chat Completion
+
+ curl http://localhost:11434/v1/chat/completions \
+ -H "Content-Type: application/json" \
+ -d '{
+ "model": "phi3:latest",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant."
+ },
+ {
+ "role": "user",
+ "content": "Hello!"
+ }
+ ]
+ }'
+
+
+
+
+### VectorDB (CLI)
+
+
+Query vectorized content
+
+ curl -X 'POST' \
+ 'http://localhost:8000/api/search-engine?query=HTML%20of%20your%20question' \
+ -H 'accept: application/json' | jq
+
+
+
+
+### VectorDB (Opensearch Dashboard)
+
+Run these at http://localhost:5601/app/dev_tools#/console
+
+
+View Chunked Vectors
+
+ GET /my-project-name/_search
+ {
+ "query": {
+ "match_all": {}
+ }
+ }
+
+
+
+
+
+
+Get the three best documents with their embeddings
+
+ GET /my-project-name/_search
+ {
+ "size": 0,
+ "query": {
+ "bool": {"must": [{"match": {"content": "Enter your Question here..."}}]}
+ },
+ "aggs": {
+ "group_by_source": {
+ "terms": {
+ "field": "metadata.source.keyword",
+ "size": 100000
+
+ },
+ "aggs": {
+ "top_entries": {
+ "top_hits": {
+ "size": 3,
+ "sort": [{"_score": {"order": "desc"}}],
+ "_source": {"excludes": ["embedding_vector"]}
+ }
+ }
+ }
+ }
+ }
+ }
+
+
+
## TODO