zbhAI / Ollama and Langchain / Commits
Commit 3e2f4135, authored 1 year ago by Embruch, Gerd

switched over to prompting multiple models at once

Parent: c40c0cab
No related branches, tags, or merge requests found.
Showing 1 changed file: main.py with 38 additions and 29 deletions (+38, −29)
 import os
 import subprocess
 from pathlib import Path
+# enables counting list items
+from operator import length_hint
 # .env parser
 from dotenv import load_dotenv
 # colored print
...
@@ -9,6 +10,8 @@ from colorist import Color, BrightColor, bright_yellow, magenta, red, green
 # langchain stuff
 from langchain_community.llms import Ollama
 from langchain.prompts import PromptTemplate
+from langchain.model_laboratory import ModelLaboratory
 ##################################################
 ### PREPARING & STARTING
...
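[Note] The new operator.length_hint import is only ever applied to a plain Python list here, where the built-in len() returns the same value; length_hint is mainly useful as an estimate for iterators that do not support len(). A quick standalone illustration (the model names are made up, not from the commit):

    from operator import length_hint

    llmNames = "mistral,llama2".split(",")
    print(len(llmNames))          # -> 2 (exact for lists)
    print(length_hint(llmNames))  # -> 2 (falls back to len() for lists)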
@@ -24,34 +27,31 @@ if not os.path.isfile('./.env'):
 load_dotenv()
 ##########
-# INSTALLING LLM
+# INSTALLING LLMS
 ##########
-print(f"installing llm <{os.environ['llmName']}>. This can take a while, depending on size and if already installed.")
-process = subprocess.run(["ollama", "pull", f"{os.environ['llmName']}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-# on error
-if process.returncode != 0:
-    magenta(process.stderr.decode('utf8'))
-    red(f"ABORTING: Unable to install the requested LLM")
-    os._exit(1)
-print(process.stdout.decode('UTF8'))
-llm = Ollama(
-    model=os.environ['llmName'],
-    temperature=0,
-    # callback_manager=CallbackManager([StreamingStdOutCallbackHandler()])
-)
-# write down the question to ask
-question = "What tall is the tallest pyramid in egypt and what's it called?"
+# split llm names into array
+llmNames = os.environ['LLM_NAMES'].split(',')
+green(f'Installing {length_hint(llmNames)} LLMs. This can take a while, depending on installation status, size and bandwidth.')
+# create download folder
+Path(os.environ['OLLAMA_MODELS']).mkdir(parents=True, exist_ok=True)
+# loop through llm names
+for idx, llmName in enumerate(llmNames):
+    print(f"{idx+1}/{length_hint(llmNames)}: {llmName}")
+    # install
+    process = subprocess.run(["ollama", "pull", f"{llmName}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    # on error
+    if process.returncode != 0:
+        red(process.stderr.decode('utf8'))
+        # remove name from array
+        llmNames.pop(idx)
+    print(process.stdout.decode('UTF8'))
+# exit if no LLM could be loaded
+if length_hint(llmNames) == 0:
+    red(f"ABORTING: Unable to install even one of the requested LLMs")
+    os._exit(1)
 ##################################################
-### ASK THE LLM
+### DEFINING THE PROMPT
 ##################################################
-green('prompting')
+green('preparing the prompt specifications')
 # define template
 prompt_template = PromptTemplate(
     input_variables=['question'],
...
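[Review note] In the install loop above, llmNames.pop(idx) mutates the list while enumerate(llmNames) is iterating over it, so the entry after a failed pull is skipped and the idx-based progress counter drifts. A minimal sketch of an alternative that collects the successful names instead of popping mid-iteration (the variable names and .env keys mirror the commit; the filtering approach itself is a suggestion, not what the commit does):

    import os
    import subprocess
    from colorist import red

    llmNames = os.environ['LLM_NAMES'].split(',')

    installedNames = []
    for idx, llmName in enumerate(llmNames):
        print(f"{idx+1}/{len(llmNames)}: {llmName}")
        process = subprocess.run(["ollama", "pull", llmName],
                                 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        if process.returncode != 0:
            # report the failure, but leave the list being iterated untouched
            red(process.stderr.decode('utf8'))
            continue
        print(process.stdout.decode('utf8'))
        installedNames.append(llmName)

    # exit if not a single LLM could be installed
    if not installedNames:
        red("ABORTING: Unable to install even one of the requested LLMs")
        os._exit(1)
    llmNames = installedNames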
@@ -63,9 +63,18 @@ prompt_template = PromptTemplate(
     ---------------------
     """
 )
-# create prompt by injecting the question
-prompt = prompt_template.format(question=question)
-# run the query
-response = llm.invoke(prompt)
-# return the response
-print(f"Question: {Color.MAGENTA}{question}{Color.OFF}\nAnswer: {BrightColor.MAGENTA}{response}{Color.OFF}\n")
+
+##################################################
+### RUNNING THE TEST
+##################################################
+# define LLMs to run with their specs
+localLLMs = []
+for idx, llmName in enumerate(llmNames):
+    localLLMs.append(Ollama(
+        model=llmName,
+        temperature=os.environ['OVERAL_TEMPERATURE']
+    ))
+# setup 'laboratory'
+lab = ModelLaboratory.from_llms(localLLMs, prompt=prompt_template)
+# run prompt through all llms
+lab.compare(os.environ['QUESTION'])
\ No newline at end of file
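[Review note] Two details worth flagging in the new RUNNING THE TEST block. First, os.environ values are always strings, so temperature=os.environ['OVERAL_TEMPERATURE'] passes e.g. "0" rather than 0.0; casting with float() avoids type surprises downstream. Second, ModelLaboratory.from_llms runs one prompt through every model in the list and prints each response, which is what delivers the "multiple models at once" behavior this commit is about. A self-contained sketch under those assumptions (the .env keys mirror the commit; the template text is illustrative, since the real one sits in the collapsed hunk above):

    import os
    from dotenv import load_dotenv
    from langchain_community.llms import Ollama
    from langchain.prompts import PromptTemplate
    from langchain.model_laboratory import ModelLaboratory

    load_dotenv()

    # illustrative template; the committed one is hidden in the collapsed diff
    prompt_template = PromptTemplate(
        input_variables=['question'],
        template="Answer briefly and factually: {question}"
    )

    # one Ollama instance per configured model name
    localLLMs = [
        Ollama(model=name, temperature=float(os.environ['OVERAL_TEMPERATURE']))
        for name in os.environ['LLM_NAMES'].split(',')
    ]

    # run the same question through all models and print each answer
    lab = ModelLaboratory.from_llms(localLLMs, prompt=prompt_template)
    lab.compare(os.environ['QUESTION'])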