#!/bin/bash CONTINUE="${CONTINUE:-false}" KONG_KONNECT_MODE="${KONG_KONNECT_MODE:-}" DEBUG="${DEBUG:-false}" PROMPT="${PROMPT:-true}" APP_NAME="ai-gateway" OUTPUT_DIR="/tmp/kong/${APP_NAME}" mkdir -p "${OUTPUT_DIR}" GET_KONGHQ_COM_URL="https://get.konghq.com" LOG_FILE="${OUTPUT_DIR}/ai-gateway.log" QUICKSTART_SCRIPT_PATH="${OUTPUT_DIR}/quickstart" KONNECT_CP_NAME="${KONNECT_CP_NAME:-ai-gateway}" # This is the total list of LLM providers LLM_PROVIDERS="${LLM_PROVIDERS:-OpenAI Cohere Azure Anthropic Mistral Llama2}" # These are LLM providers from the total list in LLM_PROVIDERS # that are expected to be hosted on the local machine LLM_LOCAL_PROVIDERS="${LLM_LOCAL_PROVIDERS:-Mistral Llama2}" ai_deck_file_openapi2kong() { echo ">ai_deck_file_openapi2kong" >> $LOG_FILE local oas_file="${1}" local deck_file_path="${2}" local response=$(cat "${oas_file}" | docker run -i --rm ${DECK_IMAGE} file openapi2kong) local rv=$? if [[ $rv -ne 0 ]]; then echo "Failed to convert OpenAPI specification file to decK format" >> $LOG_FILE return $rv else echo "${response}" > "${deck_file_path}" fi echo "> $LOG_FILE } ai_patch_oas_file() { local provider="${1}" local input_file="${2}" # specific OAS file for LLM gateway local output_file="${3}" local provider_slug=$(slugify "${provider}") local title="${provider} API" local description="Access the ${provider} API through the Kong LLM Gateway" local chat_path="/${provider_slug}/chat" local completion_path="/${provider_slug}/completions" local response=$(cat "${input_file}" | docker run -i --rm mikefarah/yq:4.40.5 \ ' .info.title = "'"${title}"'" | .info.description = "'"${description}"'" | .paths.//completions.post.summary = "'"${title} Completions"'" | .paths.//chat.post.summary = "'"${title} Chat"'" | .paths.//completions.post.operationId= "'"${provider_slug}-completions"'" | .paths.//chat.post.operationId = "'"${provider_slug}-chat"'" | (.paths.//completions | key) = "'"${completion_path}"'" | (.paths.//chat | key) = "'"${chat_path}"'" ' \ ) local rv=$? if [[ $rv -ne 0 ]]; then echo "Failed to patch OpenAPI specification file: ${response}" >> $LOG_FILE return $rv else echo "${response}" > "${output_file}" return $? fi } ai_download_ai_patch_file() { echo ">ai_download_ai_patch_file" >> $LOG_FILE local provider="${1}" local provider_slug=$(slugify "${provider}") local rv=0 if [ "${DEBUG}" = "false" ]; then echo "Downloading ${provider} patch file" >> "${LOG_FILE}" curl -s --fail-with-body -o "${OUTPUT_DIR}/${provider_slug}-kong-patch.yaml" \ "${GET_KONGHQ_COM_URL}/konnect-up-files/llm-gateway/${provider_slug}-kong-patch.yaml" >> $LOG_FILE 2>&1 rv=$? else echo "Copying local ${provider} patch file" >> "${LOG_FILE}" cp ./konnect-up-files/llm-gateway/${provider_slug}-kong-patch.yaml "${OUTPUT_DIR}/${provider_slug}-kong-patch.yaml" >> $LOG_FILE 2>&1 rv=$? fi if [[ $rv -ne 0 ]]; then echo "Failed to download ${provider} patch file" >> $LOG_FILE return $rv fi echo "> $LOG_FILE return $rv } ai_prepare_provider_files() { echo ">ai_prepare_provider_files" >> $LOG_FILE local provider="${1}" local provider_slug=$(slugify "${provider}") echo_wait "Preparing ${provider} API specification and decK configuration... " noline ai_patch_oas_file "${provider}" "${OUTPUT_DIR}/oas.yaml" "${OUTPUT_DIR}/${provider_slug}-oas.yaml" && { ai_deck_file_openapi2kong \ "${OUTPUT_DIR}/${provider_slug}-oas.yaml" \ "${OUTPUT_DIR}/${provider_slug}-kong.yaml" && { deck_file_patch "${OUTPUT_DIR}/${provider_slug}-kong.yaml" "${OUTPUT_DIR}/${provider_slug}-kong-patch.yaml" "${OUTPUT_DIR}/${provider_slug}-kong.yaml" && { echo_pass "" } || { echo_fail "" echo_fail "Failed to patch ${provider} API specification file" return 1 } } || { echo_fail "" echo_fail "Failed to convert ${provider} specification file to decK format" return 1 } } || { echo_fail "" echo_fail "Failed to patch ${provider} specification file" return 1 } echo "> $LOG_FILE } ai_download_ai_gateway_files() { echo ">ai_download_ai_gateway_oas_file" >> $LOG_FILE local rv=0 local oas_file_path="${OUTPUT_DIR}/oas.yaml" if [ "${DEBUG}" = "false" ]; then echo "Downloading OpenAPI specification file ${OAS_FILE_PATH}" >> "${LOG_FILE}" curl -s --fail-with-body -o "${oas_file_path}" \ "${GET_KONGHQ_COM_URL}/konnect-up-files/llm-gateway/oas.yaml" >> $LOG_FILE 2>&1 rv=$? else echo "Copying local OpenAPI specification file to ${oas_file_path}" >> "${LOG_FILE}" cp ./konnect-up-files/llm-gateway/oas.yaml "${oas_file_path}" rv=$? fi if [[ $rv -ne 0 ]]; then echo "Failed to download OpenAPI specification file" >> $LOG_FILE return $rv fi echo "> $LOG_FILE } ai_configure_konnect_cp() { echo ">ai_configure_konnect_cp" >> $LOG_FILE echo_wait "Configuring Control Plane ${KONNECT_CP_NAME}... " noline fetch_konnect_cp "${KONNECT_CP_NAME}" || { create_konnect_cp "${KONNECT_CP_NAME}" \ "Created by the ${APP_NAME} quickstart" \ "CLUSTER_TYPE_HYBRID" if [[ $? -ne 0 ]]; then echo_fail "" echo "Failed to create Konnect Control Plane" return 1 fi } parse_konnect_cp || { echo_fail "" echo_fail "Failed to parse Konnect Control Plane info, check $LOG_FILE" return 1 } echo_pass "" echo "> $LOG_FILE } ai_configure_konnect_certs() { echo ">ai_configure_konnect_certs" >> $LOG_FILE echo_wait "Generating Konnect Certificates... " noline if [ "${USE_EXISTING_CERTS}" = "true" ]; then if [[ -f "${OUTPUT_DIR}/tls.crt" ]] && [[ -f "${OUTPUT_DIR}/key.crt" ]]; then echo "Existing certs found, skipping cert generation" >> $LOG_FILE else echo_fail "Existing TLS certificate files not found, check $LOG_FILE" return 1 fi else generate_konnect_certs || { echo_fail "" echo_fail "Failed to generate TLS certificate, check $LOG_FILE" return 1 } deploy_konnect_certs ${CONTROL_PLANE_ID} || { echo_fail "" echo_fail "Failed to deploy TLS certificate to Konnect Control Plane, check $LOG_FILE" return 1 } fi echo_pass "" echo "> $LOG_FILE } ai_deploy_docker_gw_konnect() { echo ">ai_deploy_docker_gw_konnect" >> $LOG_FILE DISPLAY_SUMMARY=false DISPLAY_LOGGING_INFO=false KONG_ROLE="data_plane" KONG_DATABASE="off" KONG_KONNECT_MODE="on" local cp_endpoint="${CONTROL_PLANE_ENDPOINT#https://}" echo "Control Plane Endpoint: ${cp_endpoint}" >> $LOG_FILE local tel_endpoint="${CONTROL_PLANE_TELEMETRY_ENDPOINT#https://}" echo "Telemetry Endpoint: ${tel_endpoint}" >> $LOG_FILE # TODO: These AI provider API keys should be configurable / scenario specific main "$@" \ -e OPENAI_HEADER="Bearer ${OPENAI_API_KEY}" \ -e COHERE_HEADER="Bearer ${COHERE_API_KEY}" \ -e AZURE_HEADER="Bearer ${AZURE_API_KEY}" \ -e ANTHROPIC_HEADER="Bearer ${ANTHROPIC_API_KEY}" \ -e KONG_ROLE \ -e KONG_DATABASE \ -e KONG_KONNECT_MODE \ -e KONG_VITALS=off \ -e KONG_CLUSTER_MTLS=pki \ -e KONG_CLUSTER_CONTROL_PLANE="${cp_endpoint}:443" \ -e KONG_CLUSTER_SERVER_NAME="${cp_endpoint}" \ -e KONG_CLUSTER_TELEMETRY_ENDPOINT="${tel_endpoint}:443" \ -e KONG_CLUSTER_TELEMETRY_SERVER_NAME="${tel_endpoint}" \ -e KONG_LUA_SSL_TRUSTED_CERTIFICATE=system \ -p 8000:8000 \ -p 8443:8443 \ -e KONG_CLUSTER_CERT=/etc/kong/cluster.crt \ -e KONG_CLUSTER_CERT_KEY=/etc/kong/cluster_cert.key \ -v "${OUTPUT_DIR}/tls.crt:/etc/kong/cluster.crt" \ -v "${OUTPUT_DIR}/key.crt:/etc/kong/cluster_cert.key" \ -D # db-less mode echo "> $LOG_FILE } ai_deploy_docker_gw_local() { echo ">ai_deploy_docker_gw_local" >> $LOG_FILE DISPLAY_SUMMARY=false DISPLAY_LOGGING_INFO=false main "$@" \ -e OPENAI_HEADER="Bearer ${OPENAI_API_KEY}" \ -e COHERE_HEADER="Bearer ${COHERE_API_KEY}" \ -e AZURE_HEADER="Bearer ${AZURE_API_KEY}" \ -e ANTHROPIC_HEADER="Bearer ${ANTHROPIC_API_KEY}" \ -e MISTRAL_HEADER="Bearer ${MISTRAL_API_KEY}" \ -e LLAMA_2_HEADER="Bearer ${LLAMA_2_API_KEY}" echo "> $LOG_FILE } ai_collect_konnect_pat() { echo echo_point "Enter your Konnect PAT : " noline if [ -z "${KONNECT_PAT}" ]; then if [ "${PROMPT}" = "true" ]; then read -p "" KONNECT_PAT < /dev/tty || echo else echo fi else echo "" fi } ai_collect_ai_provider_api_keys() { local providers="${1}" local local_providers="${2}" local non_local_providers="" # Splitting non-local providers for provider in $providers; do if [[ ! $local_providers =~ (^|[[:space:]])"$provider"($|[[:space:]]) ]]; then non_local_providers="$non_local_providers $provider" fi done echo echo_info "First we collect API Keys for hosted LLM providers, for example OpenAI." echo " Provide a key for each provider you want to enable and press Enter." echo " Not providing a key and pressing Enter will skip configuring that provider." echo ## Loop over non-local providers for their API keys for provider in ${non_local_providers}; do local provider_upper="$(echo ${provider} | tr '[:lower:]' '[:upper:]')" API_KEY_NAME="${provider_upper}_API_KEY" API_KEY_VALUE="${!API_KEY_NAME}" ENABLED_FLAG_NAME="${provider_upper}_ENABLED" local provider_padded=$(printf '%-9s' "${provider}") echo_point "Enter API Key for ${provider_padded} : " noline if [ -z "${API_KEY_VALUE}" ]; then if [ "${PROMPT}" = "true" ]; then read -p "" API_KEY_VALUE < /dev/tty || echo eval $API_KEY_NAME="${API_KEY_VALUE}" else echo fi else echo "" fi # Check if key is empty if [ -z "${API_KEY_VALUE}" ]; then # API Key is empty eval $ENABLED_FLAG_NAME="false" else # API Key is not empty eval $ENABLED_FLAG_NAME="true" fi done echo echo_info "Next, we configure local providers which are LLM models running on" echo " this host machine. These providers will be configured to access AI API " echo " endpoints on this host machine (http://host.docker.internal:11434)." echo echo " An example tool that can run models locally is Ollama." echo " https://github.com/ollama/ollama" echo echo " If these local models are enabled on your host," echo " enter 'y' to enable a provider, or 'n' to disable it." echo ## Loop over local providers to enable/disable them for provider in ${local_providers}; do local provider_upper="$(echo ${provider} | tr '[:lower:]' '[:upper:]')" ENABLED_FLAG_NAME="${provider_upper}_ENABLED" local provider_padded=$(printf '%-9s' "${provider}") echo_point "Do you want to enable the local provider ${provider_padded}? (y/n): " noline if [ "${PROMPT}" = "true" ]; then read -p "" RESPONSE < /dev/tty || echo if [[ "$RESPONSE" =~ ^[Yy]$ ]]; then eval $ENABLED_FLAG_NAME="true" else eval $ENABLED_FLAG_NAME="false" fi else echo fi done } # harder to share this function because it pulls down the quickstart script, chicken and egg download_quickstart_script() { if [ "${DEBUG}" = "false" ]; then echo "Downloading quickstart script to ${QUICKSTART_SCRIPT_PATH}" >> "${LOG_FILE}" curl -s -f -o "${QUICKSTART_SCRIPT_PATH}" "${GET_KONGHQ_COM_URL}/quickstart" || \ return 1 else echo "Copying local quickstart script to ${QUICKSTART_SCRIPT_PATH}" >> "${LOG_FILE}" cp ./quickstart "${QUICKSTART_SCRIPT_PATH}" fi } ai_main() { echo ">ai_main" >> "${LOG_FILE}" echo echo "This quickstart will help you deploy the Kong AI Gateway." echo echo "The gateway supports multi-LLM consumption, no-code AI plugins," echo "advanced prompt engineering, AI analytics and more via new AI-focused" echo "plugins that can be used on top of every Kong Gateway installation." echo echo "In order for the AI Gateway to proxy requests, this tool must collect" echo "API keys for each LLM provider that you wish to enable. These API keys" echo "will be configured as environment variables for the locally running" echo "Kong Gateway data plane only and will not be sent outside this host machine." if [ "${CONTINUE}" = "false" ]; then echo echo -n "Do you want to continue? (y/n) " if [ "$PROMPT" = "true" ]; then read -p "" response < /dev/tty if [[ "$response" =~ ^([nN][oO]|[nN])$ ]] then echo "Exiting..." exit 0 fi else echo fi fi download_quickstart_script || { echo "Failed to download quickstart script" >> "${LOG_FILE}" return 1 } source "${QUICKSTART_SCRIPT_PATH}" --source echo "Sourced ${QUICKSTART_SCRIPT_PATH}" >> "${LOG_FILE}" ensure_docker || { echo_fail "Docker is not available, check $LOG_FILE"; return 1 } echo echo "This CLI deploys the Kong AI Gateway data plane on a local Docker instance." echo "By default, Kong Konnect (https://konghq.com/kong-konnect) provides " echo "a serverless control plane and many other advanced API management capabilities." echo echo "Optionally, you may choose to deploy the AI Gateway locally using Docker only," echo "but you will not have access to the additional Kong Konnect capabilities." prepare_for_new_run if [ -z "${KONG_KONNECT_MODE}" ]; then echo echo -n "Do you want to deploy on Kong Konnect (y/n) " read -p "" response < /dev/tty echo if [[ "$response" =~ ^([nN][oO]|[nN])$ ]] then KONG_KONNECT_MODE="false" echo "Deploying locally..." else KONG_KONNECT_MODE="on" echo "Deploying on Kong Konnect..." fi fi if [ "${KONG_KONNECT_MODE}" = "on" ]; then echo echo_bullet "A Kong Konnect account is required to proceed. If you need an account, visit " echo " https://konghq.com/products/kong-konnect/register to sign up." echo echo_bullet "Once you have an account, a Konnect Personal Access Token (PAT)" echo " or System Account Token is required to configure the AI Gateway." echo echo_bullet "Login to your account and create a PAT (https://cloud.konghq.com/global/account/tokens)" echo " or use the Organization feature to create a System Account Token." echo echo_bullet "This quickstart will prompt your for you token and use it to create resources " echo " in Kong Konnect to support the AI Gateway." ai_collect_konnect_pat || { echo_fail "Failed to collect Konnect PAT" return 1 } fi echo display_logging_info echo echo_bullet "Configuring LLM Providers..." ai_collect_ai_provider_api_keys "${LLM_PROVIDERS}" "${LLM_LOCAL_PROVIDERS}" || { echo_fail "Failed to collect LLM Provider API keys" return 1 } echo_pass "LLM Providers Configured" echo if [ "${KONG_KONNECT_MODE}" != "on" ]; then ai_deploy_docker_gw_local else echo_bullet "Configuring Konnect Control Plane... " noline ai_configure_konnect_cp ai_configure_konnect_certs echo_pass "Control Plane Configured" echo ai_deploy_docker_gw_konnect fi echo_pass "Kong Gateway successfully deployed" echo echo_bullet "Configuring AI Gateway..." # cleanup any existing files from previous runs rm "${OUTPUT_DIR}"/*-kong-patch.yaml 2>/dev/null rm "${OUTPUT_DIR}"/*-kong.yaml 2>/dev/null rm "${OUTPUT_DIR}"/*-oas.yaml 2>/dev/null rm "${OUTPUT_DIR}"/kong.yaml 2>/dev/null ai_download_ai_gateway_files || { echo_fail "Failed to download AI Gateway files" return 1 } for PROVIDER in ${LLM_PROVIDERS}; do local provider_upper=$(echo "${PROVIDER}" | tr '[:lower:]' '[:upper:]') local enabled_flag_name="${provider_upper}_ENABLED" echo "Checking if ${PROVIDER} is enabled: ${!enabled_flag_name}" >> "${LOG_FILE}" if [ "${!enabled_flag_name}" = "true" ]; then ai_download_ai_patch_file "${PROVIDER}" ai_prepare_provider_files "${PROVIDER}" fi done echo_wait "Merging decK files... " noline deck_file_merge "${OUTPUT_DIR}" '*-kong.yaml' 'kong.yaml' && echo_pass "" || { echo_fail "" echo_fail "Failed to merge OpenAPI specification files into a single decK file" return 1 } echo_wait "Syncing Kong Gateway configuration... " noline if [ "${KONG_KONNECT_MODE}" = "on" ]; then deck_gateway_sync_konnect "${OUTPUT_DIR}/kong.yaml" "${KONNECT_CP_NAME}" "${KONNECT_PAT}" && echo_pass "" || { echo_fail "" echo_fail "Failed to apply Kong configuration file, check ${LOG_FILE}" return 1 } else deck_gateway_sync "${OUTPUT_DIR}/kong.yaml" && echo_pass "" || { echo_fail "" echo_fail "Failed to apply Kong configuration file, check ${LOG_FILE}" return 1 } fi echo_pass "AI Gateway Configured" display_summary if [ "${KONG_KONNECT_MODE}" = "on" ]; then echo echo_info "You can view your AI Gateway's Control Plane at:" echo " https://cloud.konghq.com/us/gateway-manager/${CONTROL_PLANE_ID}/overview" echo echo_info "Be sure to destroy the Konnect resources created " echo " during this quickstart demo when you are done " echo " evaluating the AI Gateway." fi echo echo "=======================================================" echo " ⚒️ Routing LLM Requests" echo "=======================================================" echo echo "Your AI Gateway is ready. You can now route AI requests" echo "to the configured AI providers. For example to route a " echo "chat request to OpenAI you can use the following " echo "curl command:" echo echo "curl -s -X POST localhost:8000/openai/chat \ " echo " -H \"Content-Type: application/json\" -d '{" echo " \"messages\": [{" echo " \"role\": \"user\"," echo " \"content\": \"What is Kong Gateway?\"" echo " }] }'" echo echo "=======================================================" echo " ⚒️ What is next with the Kong AI Gateway" echo "=======================================================" echo echo "This script demonstrated the installation and usage" echo "of only one of the many AI plugins that Kong Gateway " echo "provides (the 'ai-proxy' plugin)." echo echo "See the output directory to reference the files" echo "used during the installation process and modify for" echo "your production deployment." echo_info "${OUTPUT_DIR}" echo echo "To use more AI plugins for advanced metrics, " echo "orchestration and security visit the Kong Gateway " echo "Plugin Hub." echo_info "https://docs.konghq.com/hub/?category=ai" echo "> "${LOG_FILE}" } # If a user wants to source this script they need to provide this argument # otherwise it's a challenge to detect execution vs sourcing in all contexts # (like piping from a curl or cat command) if [ "${1}" != "--source" ]; then echo "-----------------------------------------" >> $LOG_FILE echo "Starting $APP_NAME @ "$(date +"%Y-%m-%d %T") >> $LOG_FILE echo "Args: $@" >> $LOG_FILE ai_main "$@" echo "Exiting $APP_NAME @ "$(date +"%Y-%m-%d %T") >> $LOG_FILE echo "-----------------------------------------" >> $LOG_FILE fi