/home/lalyid/miniconda3/bin/x86_64-conda-linux-gnu-c++ \
  -fPIC -fvisibility-inlines-hidden -std=c++17 -fmessage-length=0 \
  -march=nocona -mtune=haswell -ftree-vectorize -fPIC -fstack-protector-strong -fno-plt \
  -O2 -ffunction-sections -pipe -isystem /home/lalyid/miniconda3/include -O3 -DNDEBUG \
  -Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro -Wl,-z,now \
  -Wl,--disable-new-dtags -Wl,--gc-sections \
  -Wl,-rpath,/home/lalyid/miniconda3/lib -Wl,-rpath-link,/home/lalyid/miniconda3/lib \
  -L/home/lalyid/miniconda3/lib \
  -shared -Wl,-soname,libllama.so -o ../bin/libllama.so \
  CMakeFiles/llama.dir/llama.cpp.o \
  "CMakeFiles/llama.dir/llama-adapter.cpp.o" "CMakeFiles/llama.dir/llama-arch.cpp.o" \
  "CMakeFiles/llama.dir/llama-batch.cpp.o" "CMakeFiles/llama.dir/llama-chat.cpp.o" \
  "CMakeFiles/llama.dir/llama-context.cpp.o" "CMakeFiles/llama.dir/llama-cparams.cpp.o" \
  "CMakeFiles/llama.dir/llama-grammar.cpp.o" "CMakeFiles/llama.dir/llama-graph.cpp.o" \
  "CMakeFiles/llama.dir/llama-hparams.cpp.o" "CMakeFiles/llama.dir/llama-impl.cpp.o" \
  "CMakeFiles/llama.dir/llama-io.cpp.o" "CMakeFiles/llama.dir/llama-kv-cache-unified.cpp.o" \
  "CMakeFiles/llama.dir/llama-kv-cache-unified-iswa.cpp.o" "CMakeFiles/llama.dir/llama-memory.cpp.o" \
  "CMakeFiles/llama.dir/llama-memory-hybrid.cpp.o" "CMakeFiles/llama.dir/llama-memory-recurrent.cpp.o" \
  "CMakeFiles/llama.dir/llama-mmap.cpp.o" "CMakeFiles/llama.dir/llama-model-loader.cpp.o" \
  "CMakeFiles/llama.dir/llama-model-saver.cpp.o" "CMakeFiles/llama.dir/llama-model.cpp.o" \
  "CMakeFiles/llama.dir/llama-quant.cpp.o" "CMakeFiles/llama.dir/llama-sampling.cpp.o" \
  "CMakeFiles/llama.dir/llama-vocab.cpp.o" "CMakeFiles/llama.dir/unicode-data.cpp.o" \
  CMakeFiles/llama.dir/unicode.cpp.o \
  -Wl,-rpath,/home/lalyid/llama.cpp/build/bin: \
  ../bin/libggml.so ../bin/libggml-cpu.so ../bin/libggml-base.so
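
This is the final link step of the build: the conda-provided GCC toolchain links the compiled object files into ../bin/libllama.so against libggml.so, libggml-cpu.so, and libggml-base.so. Once that shared library exists, a small program can link against it directly. The sketch below is a minimal, illustrative example only, assuming the public header include/llama.h from the llama.cpp checkout and the build/bin directory produced above; the file name demo.cpp and the compile line are assumptions, not part of the build output.

// demo.cpp -- minimal sketch linking against the freshly built libllama.so.
// Assumes the public llama.h header from the llama.cpp source tree; the
// paths in the compile line below mirror the build tree shown above and
// may need adjusting for a different checkout location.
#include <cstdio>
#include "llama.h"

int main() {
    llama_backend_init();                            // initialize the ggml backends
    std::printf("%s\n", llama_print_system_info());  // print detected CPU features
    llama_backend_free();                            // release backend state
    return 0;
}

// Hypothetical compile/link line, mirroring the paths from the build above:
//   g++ -std=c++17 demo.cpp -I/home/lalyid/llama.cpp/include \
//       -L/home/lalyid/llama.cpp/build/bin -lllama \
//       -Wl,-rpath,/home/lalyid/llama.cpp/build/bin -o demo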