Rename llama to llama_cpp
Vadser committed Aug 14, 2024
1 parent 5bb3eb1 commit 690a441
Showing 8 changed files with 11 additions and 11 deletions.
2 changes: 1 addition & 1 deletion app/helpers/model_versions_helper.rb
@@ -4,7 +4,7 @@ module ModelVersionsHelper
   DEFAULT_CONFIGURATION = {
     'openai' => '{"model":"gpt-3.5-turbo","temperature":0.5}',
     'ollama' => '{"model":"llama3.1"}',
-    'llama' => '{"n_predict":500,"temperature":0.5,"stop":["<|end|>","<|user|>","<|assistant|>","<|endoftext|>","<|system|>"]}'
+    'llama_cpp' => '{"n_predict":500,"temperature":0.5,"stop":["<|end|>","<|user|>","<|assistant|>","<|endoftext|>","<|system|>"]}'
   }.freeze

   def default_configuration(executor_type)
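
For orientation, the default_configuration helper visible at the end of this hunk presumably just indexes the hash by executor type. A minimal sketch under that assumption (the empty-JSON fallback is illustrative, not taken from the repository):

  # Sketch only: assumes the helper is a plain hash lookup keyed by executor type.
  def default_configuration(executor_type)
    DEFAULT_CONFIGURATION.fetch(executor_type, '{}')
  end
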
2 changes: 1 addition & 1 deletion app/javascript/controllers/model_controller.js
@@ -2,7 +2,7 @@ import { Controller } from "@hotwired/stimulus"
 const DEFAULT_CONFIGURATION = {
   openai: '{"model":"gpt-3.5-turbo","temperature":0.5}',
   ollama: '{"model":"llama3.1"}',
-  llama: '{"n_predict":500,"temperature":0.5,"stop":["<|end|>","<|user|>","<|assistant|>","<|endoftext|>","<|system|>"]}'
+  llama_cpp: '{"n_predict":500,"temperature":0.5,"stop":["<|end|>","<|user|>","<|assistant|>","<|endoftext|>","<|system|>"]}'
 }

 export default class extends Controller {
2 changes: 1 addition & 1 deletion app/models/model.rb
@@ -7,7 +7,7 @@ class Model < ApplicationRecord
   has_many :model_versions, dependent: :destroy
   accepts_nested_attributes_for :model_versions, allow_destroy: true, reject_if: :all_blank

-  enum :executor_type, { llama: 0, openai: 1, ollama: 2 }, scopes: false, default: :llama
+  enum :executor_type, { llama_cpp: 0, openai: 1, ollama: 2 }, scopes: false, default: :llama_cpp

   default_scope { order(id: :desc) }
 end
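
Only the enum label changes here; the mapped integer stays 0, so rows already stored with executor_type 0 simply start reading back as "llama_cpp" and no data migration is needed. An illustrative console check under that assumption:

  Model.executor_types    # => { "llama_cpp" => 0, "openai" => 1, "ollama" => 2 }
  model = Model.new       # picks up default: :llama_cpp
  model.executor_type     # => "llama_cpp"
  model.llama_cpp?        # => true (predicate generated by the enum)
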
@@ -1,6 +1,6 @@
 # frozen_string_literal: true

 module Executors
-  class Llama < Base
+  class LlamaCpp < Base
   end
 end
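
The file path for this class is not shown on the page, but the ModelExecutor spec further down stubs Executors::LlamaCpp, which suggests the executor class is derived from the model's executor_type. A hypothetical sketch of that resolution; the real ModelExecutor service is not part of this diff, and its constructor arguments are assumed:

  # Hypothetical: "llama_cpp".camelize gives "LlamaCpp", so the enum key and the
  # class name have to be renamed together for constantize to keep resolving.
  class ModelExecutor
    def initialize(model_version)
      @model_version = model_version
    end

    def call(prompt)
      klass = "Executors::#{@model_version.model.executor_type.camelize}".constantize
      klass.new(@model_version).call(prompt)
    end
  end
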
2 changes: 1 addition & 1 deletion config/locales/en.yml
@@ -33,6 +33,6 @@ en:
     attributes:
       model:
         executor_types:
-          llama: 'Llama.cpp'
+          llama_cpp: 'Llama.cpp'
           openai: 'OpenAI'
           ollama: 'Ollama'
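
Assuming these keys sit under the conventional activerecord namespace (the parent key above attributes: is cut off in this hunk), the label would be looked up roughly like this:

  # Illustrative lookup; the exact key path above "attributes:" is not visible here.
  I18n.t("activerecord.attributes.model.executor_types.#{model.executor_type}")
  # => "Llama.cpp"
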
2 changes: 1 addition & 1 deletion spec/factories/models.rb
@@ -4,7 +4,7 @@
   factory :model do
     name { 'Test Model' }
     url { 'http://example.com' }
-    executor_type { 'llama' }
+    executor_type { 'llama_cpp' }
     api_key { 'apikey' }
     association :user
   end
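
With the factory default switched over, specs that build a model without overriding executor_type pick up the new key automatically, for example:

  model = create(:model)
  model.executor_type  # => "llama_cpp"
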
@@ -3,7 +3,7 @@
 require 'rails_helper'
 require 'webmock/rspec'

-RSpec.describe Executors::Llama do
+RSpec.describe Executors::LlamaCpp do
   describe '#call' do
     let(:model) { create(:model, url: 'http://example.com/model_endpoint') }
     let(:model_version) { create(:model_version, model:, configuration: { param: 'value' }) }
8 changes: 4 additions & 4 deletions spec/services/model_executor_spec.rb
@@ -21,14 +21,14 @@
     end
   end

-  context "when executor type is 'base'" do
-    let(:model) { create(:model, executor_type: 'llama') }
+  context "when executor type is 'llama_cpp'" do
+    let(:model) { create(:model, executor_type: 'llama_cpp') }
     let(:model_version) { create(:model_version, model:) }
     let(:prompt) { 'Test prompt' }
-    let(:executor_instance) { instance_double(Executors::Base) }
+    let(:executor_instance) { instance_double(Executors::LlamaCpp) }

     before do
-      allow(Executors::Base).to receive(:new).and_return(executor_instance)
+      allow(Executors::LlamaCpp).to receive(:new).and_return(executor_instance)
       allow(executor_instance).to receive(:call).with(prompt).and_return({ result: 'Test result' })
     end
