Clean up warnings and unused parameters. #28

Merged · 3 commits · Nov 9, 2024

28 changes: 17 additions & 11 deletions Makefile
@@ -1,19 +1,25 @@
# Check if a default C++ compiler exists, otherwise use g++
CXX ?= g++
CXXFLAGS = -Wall -Wextra -Wpedantic

CREATE_BUILD_DIR = mkdir -p build; cp -n llama.jpg build;

all: examples test-cpp11 test-cpp14 test-cpp20
examples: examples/main.cpp
build:
mkdir -p build
$(CXX) examples/main.cpp -Iinclude -o build/examples -std=c++11 -pthread -latomic
ifeq ($(OS),Windows_NT)
if not exist "build/llama.jpg" copy "llama.jpg" "build"
else
cp -n llama.jpg build
endif
examples: build examples/main.cpp
$(CXX) $(CXXFLAGS) examples/main.cpp -Iinclude -o build/examples -std=c++11 -pthread -latomic
test: test-cpp11
test-cpp11: test/test.cpp
mkdir -p build
$(CXX) test/test.cpp -Iinclude -Itest -o build/test -std=c++11 -pthread -latomic
test-cpp14: test/test.cpp
mkdir -p build
$(CXX) test/test.cpp -Iinclude -Itest -o build/test-cpp14 -std=c++14 -pthread -latomic
test-cpp20: test/test.cpp
mkdir -p build
$(CXX) test/test.cpp -Iinclude -Itest -o build/test-cpp20 -std=c++2a -pthread -latomic
test-cpp11: build test/test.cpp
$(CXX) $(CXXFLAGS) test/test.cpp -Iinclude -Itest -o build/test -std=c++11 -pthread -latomic
test-cpp14: build test/test.cpp
$(CXX) $(CXXFLAGS) test/test.cpp -Iinclude -Itest -o build/test-cpp14 -std=c++14 -pthread -latomic
test-cpp20: build test/test.cpp
$(CXX) $(CXXFLAGS) test/test.cpp -Iinclude -Itest -o build/test-cpp20 -std=c++2a -pthread -latomic
clean:
rm -rf build
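
The new CXXFLAGS = -Wall -Wextra -Wpedantic line is what surfaces the issues fixed in the sources below. As a rough illustration (the file and function names here are made up, not taken from the library), a translation unit like the following produces the two relevant diagnostics under GCC or Clang:

// warnings_demo.cpp -- hypothetical example, not part of this PR.
// Build: g++ -Wall -Wextra -Wpedantic -std=c++11 -c warnings_demo.cpp
#include <string>

// -Wunused-parameter (enabled by -Wextra): 'format' is accepted but never read.
void set_format(const std::string& format) { }

// -Wignored-qualifiers (enabled by -Wextra): top-level const on a by-value
// return type has no effect and is dropped by the compiler.
const bool has_error() { return false; }

Both patterns are addressed in include/ollama.hpp further down: the unused parameter is cast to void and the redundant const is removed from has_error().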
5 changes: 3 additions & 2 deletions examples/main.cpp
@@ -62,7 +62,7 @@ int main()

// Create a blob on the ollama server using the following digest
try { ollama::create_blob("sha256:29fdb92e57cf0827ded04ae6461b5931d01fa595843f55d36f5b275a52087dd2"); std::cout << "Blob was created on Ollama server." << std::endl; }
catch( ollama::exception e) { std::cout << "Error when creating blob: " << e.what() << std::endl;}
catch( ollama::exception& e) { std::cout << "Error when creating blob: " << e.what() << std::endl;}

// Check if a blob with the following digest exists.
if ( ollama::blob_exists("sha256:29fdb92e57cf0827ded04ae6461b5931d01fa595843f55d36f5b275a52087dd2") ) std::cout << "Blob exists on Ollama server." << std::endl;
@@ -95,7 +95,7 @@ int main()
try {
ollama::generate("Non-existent-model", "Requesting this model will throw an error");
}
catch(ollama::exception e) { std::cout << e.what() << std::endl; }
catch(ollama::exception& e) { std::cout << e.what() << std::endl; }

//Alternatively, throwing exceptions can be disabled. In this case, either empty values or false will be returned in the event of an error.
//ollama::allow_exceptions(false);
@@ -117,6 +117,7 @@ int main()
// Optionally send a request to ollama to load a model into memory.
// This will occur automatically during generation but this allows you to preload a model before using it.
bool model_loaded = ollama::load_model("llama3:8b");
if (model_loaded) std::cout << "Model has been loaded";

// Perform a simple generation to a string by specifying a model and a prompt. The response will be returned as one string without streaming the reply.
std::cout << ollama::generate("llama3:8b", "Why is the sky blue?") << std::endl;
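
The two catch-clause changes above switch from catching ollama::exception by value to catching it by reference. Catching by value copies the exception object, can slice a derived exception down to its base, and, for polymorphic types such as anything derived from std::exception, triggers GCC's -Wcatch-value warning under -Wall. A generic sketch of the preferred pattern, using std::runtime_error as a stand-in for the library's exception type:

// catch_by_reference_demo.cpp -- illustration only, not ollama-hpp code.
#include <iostream>
#include <stdexcept>

int main() {
    try {
        throw std::runtime_error("Non-existent-model");
    }
    // Catching by (const) reference avoids the copy, cannot slice, and
    // keeps -Wcatch-value quiet.
    catch (const std::exception& e) {
        std::cout << e.what() << std::endl;
    }
    return 0;
}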
12 changes: 6 additions & 6 deletions include/ollama.hpp
@@ -101,7 +101,7 @@ namespace ollama
public:
image(const std::string base64_sequence, bool valid = true)
{
this->base64_sequence = base64_sequence;
this->base64_sequence = base64_sequence; this->valid = valid;
}
~image(){};

@@ -254,7 +254,7 @@ namespace ollama
(*this)["stream"] = stream;

if (options!=nullptr) (*this)["options"] = options["options"];
//(*this)["format"] = format; // Commented out as providing the format causes issues with some models.
(void)format; //(*this)["format"] = format; // Commented out as providing the format causes issues with some models.
(*this)["keep_alive"] = keep_alive_duration;
type = message_type::chat;

@@ -329,7 +329,7 @@ namespace ollama
return simple_string;
}

const bool has_error() const
bool has_error() const
{
if ( json_data.contains("error") ) return true;
return false;
@@ -835,7 +835,6 @@ class Ollama
std::string get_version()
{
std::string version;
httplib::Client cli("http://localhost:11434");

auto res = this->cli->Get("/api/version");

@@ -872,12 +871,13 @@

private:

/*
bool send_request(const ollama::request& request, std::function<void(const ollama::response&)> on_receive_response=nullptr)
{

return true;
}

*/

std::string server_url;
httplib::Client *cli;
@@ -1040,7 +1040,7 @@ namespace ollama
ollama.setWriteTimeout(seconds);
}

};
}


#endif
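
The (void)format; statement above is the portable C++11 way of marking a parameter as intentionally unused while keeping it in the signature, since the "format" field itself is only commented out rather than removed. From C++17 onward the same intent can be expressed with the [[maybe_unused]] attribute; a small sketch with made-up names:

// unused_param_demo.cpp -- hypothetical example, not library code.
// Build: g++ -Wall -Wextra -Wpedantic -std=c++17 -c unused_param_demo.cpp
#include <string>

// C++11 idiom used in this PR: cast the unused parameter to void.
void make_request(const std::string& format) {
    (void)format; // silences -Wunused-parameter without changing the signature
}

// C++17 alternative, shown only for comparison (the examples in this
// repository build with -std=c++11).
void make_request_17([[maybe_unused]] const std::string& format) {
}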
12 changes: 6 additions & 6 deletions singleheader/ollama.hpp
@@ -34891,7 +34891,7 @@ namespace ollama
public:
image(const std::string base64_sequence, bool valid = true)
{
this->base64_sequence = base64_sequence;
this->base64_sequence = base64_sequence; this->valid = valid;
}
~image(){};

@@ -35044,7 +35044,7 @@ namespace ollama
(*this)["stream"] = stream;

if (options!=nullptr) (*this)["options"] = options["options"];
//(*this)["format"] = format; // Commented out as providing the format causes issues with some models.
(void)format; //(*this)["format"] = format; // Commented out as providing the format causes issues with some models.
(*this)["keep_alive"] = keep_alive_duration;
type = message_type::chat;

@@ -35119,7 +35119,7 @@ namespace ollama
return simple_string;
}

const bool has_error() const
bool has_error() const
{
if ( json_data.contains("error") ) return true;
return false;
@@ -35625,7 +35625,6 @@ class Ollama
std::string get_version()
{
std::string version;
httplib::Client cli("http://localhost:11434");

auto res = this->cli->Get("/api/version");

@@ -35662,12 +35661,13 @@

private:

/*
bool send_request(const ollama::request& request, std::function<void(const ollama::response&)> on_receive_response=nullptr)
{

return true;
}

*/

std::string server_url;
httplib::Client *cli;
@@ -35830,7 +35830,7 @@ namespace ollama
ollama.setWriteTimeout(seconds);
}

};
}


#endif
2 changes: 1 addition & 1 deletion test/test.cpp
@@ -106,7 +106,7 @@ TEST_SUITE("Ollama Tests") {
try {
ollama::generate("Non-existent-model", "Requesting this model will throw an error");
}
catch(ollama::exception e) { exception_handled = true; }
catch(ollama::exception& e) { exception_handled = true; }

CHECK( exception_handled );
}
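
The same catch-by-reference fix is applied to the test above. For completeness, a minimal standalone test in the same style; this assumes the doctest framework, which the TEST_SUITE and CHECK macros in test/test.cpp suggest, and again uses std::runtime_error in place of ollama::exception:

// exception_test_demo.cpp -- hypothetical sketch, not part of the test suite.
#define DOCTEST_CONFIG_IMPLEMENT_WITH_MAIN
#include "doctest.h"

#include <stdexcept>

TEST_CASE("Exceptions are caught by reference") {
    bool exception_handled = false;
    try {
        throw std::runtime_error("Requesting this model will throw an error");
    }
    catch (const std::exception&) { exception_handled = true; }

    CHECK(exception_handled);
}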