fix(ai): fix rebase conflicts
This commit fixes a few small conflicts that were introduced during the last rebase.
rickstaa committed Jul 29, 2024
1 parent bc0a0c4 commit e2b0ac4
Showing 3 changed files with 18 additions and 161 deletions.
171 changes: 12 additions & 159 deletions cmd/livepeer/starter/starter.go
@@ -515,165 +515,6 @@ func StartLivepeer(ctx context.Context, cfg LivepeerConfig) {
}
}

	var aiCaps []core.Capability
	constraints := make(map[core.Capability]*core.Constraints)

	if *cfg.AIWorker {
		gpus := []string{}
		if *cfg.Nvidia != "" {
			var err error
			gpus, err = common.ParseAccelDevices(*cfg.Nvidia, ffmpeg.Nvidia)
			if err != nil {
				glog.Errorf("Error parsing -nvidia for devices: %v", err)
				return
			}
		}

		modelsDir := *cfg.AIModelsDir
		if modelsDir == "" {
			var err error
			modelsDir, err = filepath.Abs(path.Join(*cfg.Datadir, "models"))
			if err != nil {
				glog.Error("Error creating absolute path for models dir: %v", modelsDir)
				return
			}
		}

		if err := os.MkdirAll(modelsDir, 0755); err != nil {
			glog.Error("Error creating models dir %v", modelsDir)
			return
		}

		n.AIWorker, err = worker.NewWorker(*cfg.AIRunnerImage, gpus, modelsDir)
		if err != nil {
			glog.Errorf("Error starting AI worker: %v", err)
			return
		}

		if *cfg.AIModels != "" {
			configs, err := core.ParseAIModelConfigs(*cfg.AIModels)
			if err != nil {
				glog.Errorf("Error parsing -aiModels: %v", err)
				return
			}

			for _, config := range configs {
				modelConstraint := &core.ModelConstraint{Warm: config.Warm}

				// If the config contains a URL we call Warm() anyway because AIWorker will just register
				// the endpoint for an external container
				if config.Warm || config.URL != "" {
					endpoint := worker.RunnerEndpoint{URL: config.URL, Token: config.Token}
					if err := n.AIWorker.Warm(ctx, config.Pipeline, config.ModelID, endpoint, config.OptimizationFlags); err != nil {
						glog.Errorf("Error AI worker warming %v container: %v", config.Pipeline, err)
						return
					}
				}

				// Show warning if people set OptimizationFlags but not Warm.
				if len(config.OptimizationFlags) > 0 && !config.Warm {
					glog.Warningf("Model %v has 'optimization_flags' set without 'warm'. Optimization flags are currently only used for warm containers.", config.ModelID)
				}

				switch config.Pipeline {
				case "text-to-image":
					_, ok := constraints[core.Capability_TextToImage]
					if !ok {
						aiCaps = append(aiCaps, core.Capability_TextToImage)
						constraints[core.Capability_TextToImage] = &core.Constraints{
							Models: make(map[string]*core.ModelConstraint),
						}
					}

					constraints[core.Capability_TextToImage].Models[config.ModelID] = modelConstraint

					n.SetBasePriceForCap("default", core.Capability_TextToImage, config.ModelID, big.NewRat(config.PricePerUnit, config.PixelsPerUnit))
				case "image-to-image":
					_, ok := constraints[core.Capability_ImageToImage]
					if !ok {
						aiCaps = append(aiCaps, core.Capability_ImageToImage)
						constraints[core.Capability_ImageToImage] = &core.Constraints{
							Models: make(map[string]*core.ModelConstraint),
						}
					}

					constraints[core.Capability_ImageToImage].Models[config.ModelID] = modelConstraint

					n.SetBasePriceForCap("default", core.Capability_ImageToImage, config.ModelID, big.NewRat(config.PricePerUnit, config.PixelsPerUnit))
				case "image-to-video":
					_, ok := constraints[core.Capability_ImageToVideo]
					if !ok {
						aiCaps = append(aiCaps, core.Capability_ImageToVideo)
						constraints[core.Capability_ImageToVideo] = &core.Constraints{
							Models: make(map[string]*core.ModelConstraint),
						}
					}

					constraints[core.Capability_ImageToVideo].Models[config.ModelID] = modelConstraint

					n.SetBasePriceForCap("default", core.Capability_ImageToVideo, config.ModelID, big.NewRat(config.PricePerUnit, config.PixelsPerUnit))
				case "upscale":
					_, ok := constraints[core.Capability_Upscale]
					if !ok {
						aiCaps = append(aiCaps, core.Capability_Upscale)
						constraints[core.Capability_Upscale] = &core.Constraints{
							Models: make(map[string]*core.ModelConstraint),
						}
					}

					constraints[core.Capability_Upscale].Models[config.ModelID] = modelConstraint

					n.SetBasePriceForCap("default", core.Capability_Upscale, config.ModelID, big.NewRat(config.PricePerUnit, config.PixelsPerUnit))
				case "audio-to-text":
					_, ok := constraints[core.Capability_AudioToText]
					if !ok {
						aiCaps = append(aiCaps, core.Capability_AudioToText)
						constraints[core.Capability_AudioToText] = &core.Constraints{
							Models: make(map[string]*core.ModelConstraint),
						}
					}

					constraints[core.Capability_AudioToText].Models[config.ModelID] = modelConstraint

					n.SetBasePriceForCap("default", core.Capability_AudioToText, config.ModelID, big.NewRat(config.PricePerUnit, config.PixelsPerUnit))

				case "text-to-speech":
					_, ok := constraints[core.Capability_AudioToText]
					if !ok {
						aiCaps = append(aiCaps, core.Capability_TextToSpeech)
						constraints[core.Capability_TextToSpeech] = &core.Constraints{
							Models: make(map[string]*core.ModelConstraint),
						}
					}

					constraints[core.Capability_TextToSpeech].Models[config.ModelID] = modelConstraint

					n.SetBasePriceForCap("default", core.Capability_TextToSpeech, config.ModelID, big.NewRat(config.PricePerUnit, config.PixelsPerUnit))
				}

				if len(aiCaps) > 0 {
					capability := aiCaps[len(aiCaps)-1]
					price := n.GetBasePriceForCap("default", capability, config.ModelID)
					glog.V(6).Infof("Capability %s (ID: %v) advertised with model constraint %s at price %d per %d unit", config.Pipeline, capability, config.ModelID, price.Num(), price.Denom())
				}
			}
		} else {
			glog.Error("The '-aiModels' flag was set, but no model configuration was provided. Please specify the model configuration using the '-aiModels' flag.")
			return
		}

		defer func() {
			ctx, cancel := context.WithTimeout(context.Background(), aiWorkerContainerStopTimeout)
			defer cancel()
			if err := n.AIWorker.Stop(ctx); err != nil {
				glog.Errorf("Error stopping AI worker containers: %v", err)
				return
			}

			glog.Infof("Stopped AI worker containers")
		}()
	}

	if *cfg.Redeemer {
		n.NodeType = core.RedeemerNode
	} else if *cfg.Orchestrator {
@@ -1359,6 +1200,18 @@ func StartLivepeer(ctx context.Context, cfg LivepeerConfig) {
					capabilityConstraints[core.Capability_AudioToText].Models[config.ModelID] = modelConstraint

					n.SetBasePriceForCap("default", core.Capability_AudioToText, config.ModelID, autoPrice)
				case "text-to-speech":
					_, ok := capabilityConstraints[core.Capability_AudioToText]
					if !ok {
						aiCaps = append(aiCaps, core.Capability_TextToSpeech)
						capabilityConstraints[core.Capability_TextToSpeech] = &core.PerCapabilityConstraints{
							Models: make(map[string]*core.ModelConstraint),
						}
					}

					capabilityConstraints[core.Capability_TextToSpeech].Models[config.ModelID] = modelConstraint

					n.SetBasePriceForCap("default", core.Capability_TextToSpeech, config.ModelID, autoPrice)
				}

				if len(aiCaps) > 0 {
4 changes: 2 additions & 2 deletions go.mod
@@ -37,8 +37,6 @@ require (
	pgregory.net/rapid v1.1.0
)

replace github.com/livepeer/ai-worker => /home/user/ai-worker

require (
	cloud.google.com/go v0.110.2 // indirect
	cloud.google.com/go/compute v1.20.0 // indirect
@@ -244,3 +242,5 @@ require (
)

replace github.com/livepeer/lpms => /home/ricks/development/livepeer/ai/lpms

replace github.com/livepeer/ai-worker => /home/ricks/development/livepeer/ai/ai-worker
4 changes: 4 additions & 0 deletions go.sum
@@ -624,12 +624,16 @@ github.com/libp2p/go-netroute v0.2.0 h1:0FpsbsvuSnAhXFnCY0VLFbJOzaK0VnP0r1QT/o4n
github.com/libp2p/go-netroute v0.2.0/go.mod h1:Vio7LTzZ+6hoT4CMZi5/6CpY3Snzh2vgZhWgxMNwlQI=
github.com/libp2p/go-openssl v0.1.0 h1:LBkKEcUv6vtZIQLVTegAil8jbNpJErQ9AnT+bWV+Ooo=
github.com/libp2p/go-openssl v0.1.0/go.mod h1:OiOxwPpL3n4xlenjx2h7AwSGaFSC/KZvf6gNdOBQMtc=
github.com/livepeer/ai-worker v0.1.0 h1:SJBZuxeK0vEzJPBzf5osdgVCxHYZt7ZKR2CvZ7Q7iog=
github.com/livepeer/ai-worker v0.1.0/go.mod h1:Xlnb0nFG2VsGeMG9hZmReVQXeFt0Dv28ODiUT2ooyLE=
github.com/livepeer/go-tools v0.3.6-0.20240130205227-92479de8531b h1:VQcnrqtCA2UROp7q8ljkh2XA/u0KRgVv0S1xoUvOweE=
github.com/livepeer/go-tools v0.3.6-0.20240130205227-92479de8531b/go.mod h1:hwJ5DKhl+pTanFWl+EUpw1H7ukPO/H+MFpgA7jjshzw=
github.com/livepeer/joy4 v0.1.2-0.20191121080656-b2fea45cbded h1:ZQlvR5RB4nfT+cOQee+WqmaDOgGtP2oDMhcVvR4L0yA=
github.com/livepeer/joy4 v0.1.2-0.20191121080656-b2fea45cbded/go.mod h1:xkDdm+akniYxVT9KW1Y2Y7Hso6aW+rZObz3nrA9yTHw=
github.com/livepeer/livepeer-data v0.7.5-0.20231004073737-06f1f383fb18 h1:4oH3NqV0NvcdS44Ld3zK2tO8IUiNozIggm74yobQeZg=
github.com/livepeer/livepeer-data v0.7.5-0.20231004073737-06f1f383fb18/go.mod h1:Jpf4jHK+fbWioBHRDRM1WadNT1qmY27g2YicTdO0Rtc=
github.com/livepeer/lpms v0.0.0-20240711175220-227325841434 h1:E7PKN6q/jMLapEV+eEwlwv87Xe5zacaVhvZ8T6AJR3c=
github.com/livepeer/lpms v0.0.0-20240711175220-227325841434/go.mod h1:Hr/JhxxPDipOVd4ZrGYWrdJfpVF8/SEI0nNr2ctAlkM=
github.com/livepeer/m3u8 v0.11.1 h1:VkUJzfNTyjy9mqsgp5JPvouwna8wGZMvd/gAfT5FinU=
github.com/livepeer/m3u8 v0.11.1/go.mod h1:IUqAtwWPAG2CblfQa4SVzTQoDcEMPyfNOaBSxqHMS04=
github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
