| func (s *Server) GenerateRoutes(rc *ollama.Registry) (http.Handler, error) { corsConfig := cors.DefaultConfig() corsConfig.AllowCredentials = true corsConfig.AllowWildcard = true corsConfig.AllowBrowserExtensions = true corsConfig.AllowHeaders = []string{ "Authorization", "Content-Type", "User-Agent", "Accept", "X-Requested-With",
"x-stainless-lang", "x-stainless-package-version", "x-stainless-os", "x-stainless-arch", "x-stainless-retry-count", "x-stainless-runtime", "x-stainless-runtime-version", "x-stainless-async", "x-stainless-helper-method", "x-stainless-poll-helper", "x-stainless-custom-poll-interval", "x-stainless-timeout", } corsConfig.AllowOrigins = envconfig.AllowedOrigins()
r := gin.Default() r.Use( cors.New(corsConfig), allowedHostsMiddleware(s.addr), )
r.HEAD("/", func(c *gin.Context) { c.String(http.StatusOK, "Ollama is running") }) r.GET("/", func(c *gin.Context) { c.String(http.StatusOK, "Ollama is running") }) r.HEAD("/api/version", func(c *gin.Context) { c.JSON(http.StatusOK, gin.H{"version": version.Version}) }) r.GET("/api/version", func(c *gin.Context) { c.JSON(http.StatusOK, gin.H{"version": version.Version}) })
r.POST("/api/pull", s.PullHandler) r.POST("/api/push", s.PushHandler) r.HEAD("/api/tags", s.ListHandler) r.GET("/api/tags", s.ListHandler) r.POST("/api/show", s.ShowHandler) r.DELETE("/api/delete", s.DeleteHandler)
r.POST("/api/create", s.CreateHandler) r.POST("/api/blobs/:digest", s.CreateBlobHandler) r.HEAD("/api/blobs/:digest", s.HeadBlobHandler) r.POST("/api/copy", s.CopyHandler)
r.GET("/api/ps", s.PsHandler) r.POST("/api/generate", s.GenerateHandler) r.POST("/api/chat", s.ChatHandler) r.POST("/api/embed", s.EmbedHandler) r.POST("/api/embeddings", s.EmbeddingsHandler)
r.POST("/v1/chat/completions", openai.ChatMiddleware(), s.ChatHandler) r.POST("/v1/completions", openai.CompletionsMiddleware(), s.GenerateHandler) r.POST("/v1/embeddings", openai.EmbeddingsMiddleware(), s.EmbedHandler) r.GET("/v1/models", openai.ListMiddleware(), s.ListHandler) r.GET("/v1/models/:model", openai.RetrieveMiddleware(), s.ShowHandler)
if rc != nil { rs := ®istry.Local{ Client: rc, Logger: slog.Default(), Fallback: r,
Prune: PruneLayers, } return rs, nil }
return r, nil }