Move ai to pkg
tygern committed Apr 30, 2024
1 parent a68cc94 commit 3c9587a
Showing 12 changed files with 16 additions and 16 deletions.
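For context on what the move changes: the Go toolchain only allows packages under internal/ to be imported from within their own module, while pkg/ is a common (unenforced) convention for publicly importable code. After this commit, an external module can import the ai package. A minimal sketch, assuming a hypothetical consuming module; the functions and the 6000-token chunk size are the ones visible in this diff:

// main.go in a separate, hypothetical module. This import now compiles;
// the toolchain would have rejected it while the package lived under
// internal/, since internal packages are module-private.
package main

import (
	"fmt"

	"github.com/initialcapacity/ai-starter/pkg/ai"
	"github.com/tiktoken-go/tokenizer"
)

func main() {
	// NewTokenizer, NewChunker, and Split all appear in this diff; 6000
	// mirrors the chunk size used in cmd/collector/main.go.
	t := ai.NewTokenizer(tokenizer.Cl100kBase)
	chunker := ai.NewChunker(t, 6000)
	chunks := chunker.Split("a long document to be split into token-bounded chunks")
	fmt.Println(chunks)
}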
2 changes: 1 addition & 1 deletion cmd/analyzer/main.go
@@ -2,9 +2,9 @@ package main

 import (
 	"context"
-	"github.com/initialcapacity/ai-starter/internal/ai"
 	"github.com/initialcapacity/ai-starter/internal/analyzer"
 	"github.com/initialcapacity/ai-starter/internal/collector"
+	"github.com/initialcapacity/ai-starter/pkg/ai"
 	"github.com/initialcapacity/ai-starter/pkg/dbsupport"
 	"github.com/initialcapacity/ai-starter/pkg/websupport"
 	"log/slog"
6 changes: 3 additions & 3 deletions cmd/collector/main.go
@@ -1,8 +1,8 @@
 package main

 import (
-	"github.com/initialcapacity/ai-starter/internal/ai"
 	"github.com/initialcapacity/ai-starter/internal/collector"
+	ai2 "github.com/initialcapacity/ai-starter/pkg/ai"
 	"github.com/initialcapacity/ai-starter/pkg/dbsupport"
 	"github.com/initialcapacity/ai-starter/pkg/feedsupport"
 	"github.com/initialcapacity/ai-starter/pkg/websupport"
@@ -23,9 +23,9 @@ func main() {
 	parser := feedsupport.NewParser(client)
 	extractor := feedsupport.NewExtractor(client)
 	dataGateway := collector.NewDataGateway(db)
-	t := ai.NewTokenizer(tokenizer.Cl100kBase)
+	t := ai2.NewTokenizer(tokenizer.Cl100kBase)
 	chunksGateway := collector.NewChunksGateway(db)
-	chunker := ai.NewChunker(t, 6000)
+	chunker := ai2.NewChunker(t, 6000)
 	chunksService := collector.NewChunksService(chunker, chunksGateway)

 	c := collector.New(parser, extractor, dataGateway, chunksService)
8 changes: 4 additions & 4 deletions functions.go
@@ -4,9 +4,9 @@ import (
 	"context"
 	"github.com/GoogleCloudPlatform/functions-framework-go/functions"
 	"github.com/cloudevents/sdk-go/v2/event"
-	"github.com/initialcapacity/ai-starter/internal/ai"
 	"github.com/initialcapacity/ai-starter/internal/analyzer"
 	"github.com/initialcapacity/ai-starter/internal/collector"
+	ai2 "github.com/initialcapacity/ai-starter/pkg/ai"
 	"github.com/initialcapacity/ai-starter/pkg/dbsupport"
 	"github.com/initialcapacity/ai-starter/pkg/feedsupport"
 	"github.com/initialcapacity/ai-starter/pkg/websupport"
@@ -31,9 +31,9 @@ func triggerCollect(ctx context.Context, e event.Event) error {
 	parser := feedsupport.NewParser(client)
 	extractor := feedsupport.NewExtractor(client)
 	dataGateway := collector.NewDataGateway(db)
-	t := ai.NewTokenizer(tokenizer.Cl100kBase)
+	t := ai2.NewTokenizer(tokenizer.Cl100kBase)
 	chunksGateway := collector.NewChunksGateway(db)
-	chunker := ai.NewChunker(t, 6000)
+	chunker := ai2.NewChunker(t, 6000)
 	chunksService := collector.NewChunksService(chunker, chunksGateway)

 	c := collector.New(parser, extractor, dataGateway, chunksService)
@@ -48,7 +48,7 @@ func triggerAnalyze(ctx context.Context, e event.Event) error {
 	db := dbsupport.CreateConnection(databaseUrl)
 	chunksGateway := collector.NewChunksGateway(db)
 	embeddingsGateway := analyzer.NewEmbeddingsGateway(db)
-	aiClient := ai.NewClient(openAiKey)
+	aiClient := ai2.NewClient(openAiKey)

 	a := analyzer.NewAnalyzer(chunksGateway, embeddingsGateway, aiClient)

2 changes: 1 addition & 1 deletion internal/analyzer/analyze.go
@@ -4,8 +4,8 @@ import (
 	"context"
 	"errors"
 	"fmt"
-	"github.com/initialcapacity/ai-starter/internal/ai"
 	"github.com/initialcapacity/ai-starter/internal/collector"
+	"github.com/initialcapacity/ai-starter/pkg/ai"
 	"log/slog"
 )

2 changes: 1 addition & 1 deletion internal/app/handlers.go
@@ -1,8 +1,8 @@
 package app

 import (
-	"github.com/initialcapacity/ai-starter/internal/ai"
 	"github.com/initialcapacity/ai-starter/internal/analyzer"
+	"github.com/initialcapacity/ai-starter/pkg/ai"
 	"github.com/initialcapacity/ai-starter/pkg/dbsupport"
 	"io/fs"
 	"net/http"
2 changes: 1 addition & 1 deletion internal/app/index.go
@@ -2,8 +2,8 @@ package app

 import (
 	"fmt"
-	"github.com/initialcapacity/ai-starter/internal/ai"
 	"github.com/initialcapacity/ai-starter/internal/analyzer"
+	"github.com/initialcapacity/ai-starter/pkg/ai"
 	"github.com/initialcapacity/ai-starter/pkg/deferrable"
 	"github.com/initialcapacity/ai-starter/pkg/websupport"
 	"log/slog"
2 changes: 1 addition & 1 deletion internal/collector/chunks_service.go
@@ -2,7 +2,7 @@ package collector

 import (
 	"errors"
-	"github.com/initialcapacity/ai-starter/internal/ai"
+	"github.com/initialcapacity/ai-starter/pkg/ai"
 )

 type ChunksService struct {
internal/ai/chunker.go → pkg/ai/chunker.go (file renamed without changes)
6 changes: 3 additions & 3 deletions internal/ai/chunker_test.go → pkg/ai/chunker_test.go
@@ -1,15 +1,15 @@
 package ai_test

 import (
-	"github.com/initialcapacity/ai-starter/internal/ai"
+	ai2 "github.com/initialcapacity/ai-starter/pkg/ai"
 	"github.com/stretchr/testify/assert"
 	"github.com/tiktoken-go/tokenizer"
 	"testing"
 )

 func TestChunker_Split(t *testing.T) {
-	token := ai.NewTokenizer(tokenizer.Cl100kBase)
-	chunker := ai.NewChunker(token, 30)
+	token := ai2.NewTokenizer(tokenizer.Cl100kBase)
+	chunker := ai2.NewChunker(token, 30)

 	result := chunker.Split("I think that this string should have 31 tokens, but it's hard to say for sure. We'll have to count them manually, I guess.")

internal/ai/client.go → pkg/ai/client.go (file renamed without changes)
internal/ai/tokenizer.go → pkg/ai/tokenizer.go (file renamed without changes)
2 changes: 1 addition & 1 deletion internal/ai/tokenizer_test.go → pkg/ai/tokenizer_test.go
@@ -1,7 +1,7 @@
 package ai_test

 import (
-	"github.com/initialcapacity/ai-starter/internal/ai"
+	"github.com/initialcapacity/ai-starter/pkg/ai"
 	"github.com/stretchr/testify/assert"
 	tokenizer2 "github.com/tiktoken-go/tokenizer"
 	"testing"
