Get context response
Receives the next response from the model based on the message history
Function GetContextResponse(Val URL, Val Model, Val Messages, Val AdditionalParameters = "", Val AdditionalHeaders = "") Export
| Parameter | CLI option | Type | Required | Description |
|---|---|---|---|---|
| URL | --url | String | ✔ | Ollama server URL |
| Model | --model | String | ✔ | Model name |
| Messages | --msgs | Array of Structure | ✔ | Message history. See GetContextMessageStructure |
| AdditionalParameters | --options | Structure Of KeyAndValue | ✖ | Additional parameters. See GetRequestParametersStructure |
| AdditionalHeaders | --headers | Map Of KeyAndValue | ✖ | Additional request headers, if necessary |
Returns: Map Of KeyAndValue - Processing result
tip
Method in the API documentation: Generate a chat completion
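Each element of Messages is a structure with role and content fields, which GetContextMessageStructure builds for you. A minimal sketch of a presumably equivalent hand-built element (same shape as the --msgs JSON in the CLI examples below):
// Sketch: hand-built message element, same shape as {'role':'user','content':'Hello!'}
Message = New Structure;
Message.Insert("role"   , "user");
Message.Insert("content", "Hello!");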
1C:Enterprise/OneScript code example
URL = "https://hut.openintegrations.dev/ollama";
Token = "12We34..."; // Authorization is not part of the Ollama API
AdditionalHeaders = New Map;
AdditionalHeaders.Insert("Authorization", StrTemplate("Bearer %1", Token));
Model = "tinyllama";
MessagesArray = New Array;
Question1 = OPI_Ollama.GetContextMessageStructure("user", "What is 1C:Enterprise?");
Question2 = OPI_Ollama.GetContextMessageStructure("user", "When the first version was released?"); // A question with no specifics; it relies on the context
// Adding the first question to the context
MessagesArray.Add(Question1);
Response1 = OPI_Ollama.GetContextResponse(URL, Model, MessagesArray, , AdditionalHeaders);
MessagesArray.Add(Response1["message"]); // Add the response to the first question to the context
MessagesArray.Add(Question2); // Add the second question to the context
Response2 = OPI_Ollama.GetContextResponse(URL, Model, MessagesArray, , AdditionalHeaders);
MessagesArray.Add(Response2["message"]);
// ...
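The CLI calls below pass generation options via --options; in 1C:Enterprise/OneScript the same data would go into AdditionalParameters. A sketch, assuming the nested "options" shape shown in the CLI example:
// Sketch: passing sampling options via AdditionalParameters
// (shape mirrors the --options JSON in the CLI examples below)
Options = New Structure;
Options.Insert("seed"       , 555);
Options.Insert("temperature", 1);

AdditionalParameters = New Structure("options", Options);

Response = OPI_Ollama.GetContextResponse(URL, Model, MessagesArray, AdditionalParameters, AdditionalHeaders);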
- Bash
- CMD/Bat
# JSON data can also be passed as a path to a .json file
oint ollama GetContextResponse \
--url "https://hut.openintegrations.dev/ollama" \
--model "tinyllama" \
--msgs "[{'role':'user','content':'Hello!'}]" \
--options "{'options':{'seed':'555','temperature':'10'}}" \
--headers "{'Authorization':'***'}"
:: JSON data can also be passed as a path to a .json file
oint ollama GetContextResponse ^
--url "https://hut.openintegrations.dev/ollama" ^
--model "tinyllama" ^
--msgs "[{'role':'user','content':'Hello!'}]" ^
--options "{'options':{'seed':'555','temperature':'10'}}" ^
--headers "{'Authorization':'***'}"
Result
{
"model": "tinyllama",
"created_at": "2025-10-31T12:26:13.839204322Z",
"message": {
"role": "assistant",
"content": "The first version of 1C:Enterprise was released in November 2004. It has since been updated with new features and improvements, such as enhancements to inventory management, accounting, and project management. In recent years, 1C:Enterprise has undergone significant upgrades and improvements, including new modules for business intelligence, mobile app support, and more. The latest version of 1C:Enterprise is 3.5, which was released in 2019."
},
"done_reason": "stop",
"done": true,
"total_duration": 15730040496,
"load_duration": 18357020,
"prompt_eval_count": 181,
"prompt_eval_duration": 4272287371,
"eval_count": 110,
"eval_duration": 11434948545
}
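Since the result is returned as Map Of KeyAndValue, the assistant's reply can be read from the nested message field, as the example above does when extending the context. A minimal sketch based on the result shown:
// Sketch: extracting fields from the processing result
ReplyText = Response2["message"]["content"];
Finished  = Response2["done"]; // True when generation stopped normally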