[INTG-2143] consume issues feed (#216)
* INTG-2143 add unit tests

* INTG-2143 enable issues export

* INTG-2143 wip tests 1

* INTG-2143 integration tests are passing

* INTG-2143 go doc

* INTG-2143 go doc
MickStanciu authored Feb 23, 2022
1 parent 7fc0751 commit d99e696
Showing 13 changed files with 1,156 additions and 5 deletions.
14 changes: 14 additions & 0 deletions internal/app/feed/export_feeds.go
@@ -15,6 +15,7 @@ func GetFeeds(v *viper.Viper) []Feed {
inspectionIncludeInactiveItems := v.GetBool("export.inspection.included_inactive_items")
templateIDs := getTemplateIDs(v)
actionLimit := GetActionLimit(v)
issueLimit := GetIssueLimit(v)
inspectionConfig := config.GetInspectionConfig(v)
exportMedia := viper.GetBool("export.media")
sitesIncludeDeleted := viper.GetBool("export.site.include_deleted")
@@ -62,6 +63,10 @@ func GetFeeds(v *viper.Viper) []Feed {
ModifiedAfter: inspectionConfig.ModifiedAfter,
Incremental: inspectionConfig.Incremental,
},
&IssueFeed{
Incremental: false, // Disabled on request: the Issues API doesn't support a modified-after filter
Limit: issueLimit,
},
}
}

@@ -74,6 +79,15 @@ func GetActionLimit(v *viper.Viper) int {
return 100
}

// GetIssueLimit returns the issue limit from the config, capped at 100 (the upper bound)
func GetIssueLimit(v *viper.Viper) int {
issueLimit := v.GetInt("export.issue.limit")
if issueLimit <= 100 {
return issueLimit
}
return 100
}

func getInspectionFeed(v *viper.Viper, inspectionConfig *config.InspectionConfig, templateIDs []string) *InspectionFeed {
return &InspectionFeed{
SkipIDs: inspectionConfig.SkipIDs,
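As a quick illustration of the clamp in GetIssueLimit above, a hypothetical Go example test (not part of this commit; it assumes the same feed, viper and fmt imports already used in export_feeds_test.go):

func ExampleGetIssueLimit() {
	v := viper.New()
	v.Set("export.issue.limit", 250) // any value above 100 is capped to the feed's upper bound
	fmt.Println(feed.GetIssueLimit(v))
	// Output: 100
}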
7 changes: 7 additions & 0 deletions internal/app/feed/export_feeds_intg_test.go
@@ -1,3 +1,4 @@
//go:build sql
// +build sql

package feed_test
@@ -57,6 +58,8 @@ func TestIntegrationDbCreateSchema_should_create_all_schemas(t *testing.T) {
filesEqualish(t, "mocks/set_1/schemas/schedules.csv", filepath.Join(exporter.ExportPath, "schedules.csv"))
filesEqualish(t, "mocks/set_1/schemas/schedule_assignees.csv", filepath.Join(exporter.ExportPath, "schedule_assignees.csv"))
filesEqualish(t, "mocks/set_1/schemas/schedule_occurrences.csv", filepath.Join(exporter.ExportPath, "schedule_occurrences.csv"))

filesEqualish(t, "mocks/set_1/schemas/issues.csv", filepath.Join(exporter.ExportPath, "issues.csv"))
}

func TestIntegrationDbExportFeeds_should_export_all_feeds_to_file(t *testing.T) {
@@ -101,6 +104,8 @@ func TestIntegrationDbExportFeeds_should_export_all_feeds_to_file(t *testing.T)
filesEqualish(t, "mocks/set_1/outputs/schedule_assignees.csv", filepath.Join(exporter.ExportPath, "schedule_assignees.csv"))
filesEqualish(t, "mocks/set_1/outputs/schedule_occurrences.csv", filepath.Join(exporter.ExportPath, "schedule_occurrences.csv"))

filesEqualish(t, "mocks/set_1/outputs/issues.csv", filepath.Join(exporter.ExportPath, "issues.csv"))

}

// Expectation of this test is that group_users and schedule_assignees are truncated and refreshed
@@ -139,6 +144,8 @@ func TestIntegrationDbExportFeeds_should_perform_incremental_update_on_second_ru
filesEqualish(t, "mocks/set_2/outputs/schedules.csv", filepath.Join(exporter.ExportPath, "schedules.csv"))
filesEqualish(t, "mocks/set_2/outputs/schedule_assignees.csv", filepath.Join(exporter.ExportPath, "schedule_assignees.csv"))
filesEqualish(t, "mocks/set_2/outputs/schedule_occurrences.csv", filepath.Join(exporter.ExportPath, "schedule_occurrences.csv"))

filesEqualish(t, "mocks/set_2/outputs/issues.csv", filepath.Join(exporter.ExportPath, "issues.csv"))
}

func TestIntegrationDbExportFeeds_should_handle_lots_of_rows_ok(t *testing.T) {
13 changes: 13 additions & 0 deletions internal/app/feed/export_feeds_test.go
@@ -152,6 +152,19 @@ func TestGetActionLimit(t *testing.T) {
assert.Equal(t, 100, feed.GetActionLimit(viperConfig))
}

func TestGetIssueLimit(t *testing.T) {
viperConfig := viper.New()

viperConfig.Set("export.issue.limit", 200)
assert.Equal(t, 100, feed.GetIssueLimit(viperConfig))

viperConfig.Set("export.issue.limit", 20)
assert.Equal(t, 20, feed.GetIssueLimit(viperConfig))

viperConfig.Set("export.issue.limit", 100)
assert.Equal(t, 100, feed.GetIssueLimit(viperConfig))
}

func TestExportFeeds_should_handle_lots_of_rows_ok(t *testing.T) {
defer gock.Off()

15 changes: 10 additions & 5 deletions internal/app/feed/exporter_schema_test.go
@@ -20,17 +20,22 @@ func TestSchemaWriter_should_write_schema(t *testing.T) {
viperConfig := viper.New()

testSchema := func(f feed.Feed) {
exporter.CreateSchema(f, f.RowsModel())
exporter.WriteSchema(f)
err := exporter.CreateSchema(f, f.RowsModel())
assert.Nil(t, err, fmt.Sprintf("something is wrong when creating schema: %s, %v", f.Name(), err))

actual, _ := ioutil.ReadFile(fmt.Sprintf("mocks/set_1/schemas/formatted/%s.txt", f.Name()))
err = exporter.WriteSchema(f)
assert.Nil(t, err, fmt.Sprintf("something is wrong when writing schema %s, %v", f.Name(), err))

actual, err := ioutil.ReadFile(fmt.Sprintf("mocks/set_1/schemas/formatted/%s.txt", f.Name()))
assert.Nil(t, err, fmt.Sprintf("something is wrong when reading file %s.txt, %v", f.Name(), err))
assert.Equal(t, strings.TrimSpace(buf.String()), strings.TrimSpace(string(actual)))

buf.Reset()
}

for _, feed := range feed.GetFeeds(viperConfig) {
testSchema(feed)
for _, f := range feed.GetFeeds(viperConfig) {
fmt.Printf("TESTING FEED: %s\n", f.Name())
testSchema(f)
}
}

127 changes: 127 additions & 0 deletions internal/app/feed/feed_issue.go
@@ -0,0 +1,127 @@
package feed

import (
"context"
"encoding/json"
"github.com/SafetyCulture/iauditor-exporter/internal/app/api"
"github.com/SafetyCulture/iauditor-exporter/internal/app/util"
"time"
)

const (
feedName = "issues"
feedURL = "/feed/issues"
)

// Issue represents a row from the issues feed
type Issue struct {
ID string `json:"id" csv:"id" gorm:"primarykey;column:id;size:36"`
Title string `json:"title" csv:"title"`
Description string `json:"description" csv:"description"`
CreatorID string `json:"creator_id" csv:"creator_id"`
CreatorUserName string `json:"creator_user_name"`
CreatedAt time.Time `json:"created_at" csv:"created_at"`
DueAt *time.Time `json:"due_at,omitempty" csv:"due_at"`
Priority string `json:"priority" csv:"priority"`
Status string `json:"status" csv:"status"`
TemplateID string `json:"template_id" csv:"template_id"`
InspectionID string `json:"inspection_id" csv:"inspection_id"`
InspectionName string `json:"inspection_name" csv:"inspection_name"`
SiteID string `json:"site_id" csv:"site_id"`
SiteName string `json:"site_name" csv:"site_name"`
LocationName string `json:"location_name" csv:"location_name"`
CategoryID string `json:"category_id" csv:"category_id"`
CategoryLabel string `json:"category_label" csv:"category_label"`
}

// IssueFeed is a representation of the issues feed
type IssueFeed struct {
Limit int
Incremental bool
}

// Name returns the name of the feed
func (f *IssueFeed) Name() string {
return feedName
}

// Model returns the model of the feed row
func (f *IssueFeed) Model() interface{} {
return Issue{}
}

// RowsModel returns the model of the feed rows
func (f *IssueFeed) RowsModel() interface{} {
return &[]*Issue{}
}

// PrimaryKey return the primary key
func (f *IssueFeed) PrimaryKey() []string {
return []string{"id"}
}

// Columns returns the columns of the row
func (f *IssueFeed) Columns() []string {
return []string{
"id", "title", "description", "creator_id", "creator_user_name",
"created_at", "due_at", "priority", "status", "template_id",
"inspection_id", "inspection_name", "site_id", "site_name",
"location_name", "category_id", "category_label",
}
}

// Order returns the ordering when retrieving an export
func (f *IssueFeed) Order() string {
return "id"
}

// CreateSchema creates the schema of the feed for the supplied exporter
func (f *IssueFeed) CreateSchema(exporter Exporter) error {
return exporter.CreateSchema(f, &[]*Issue{})
}

// Export exports the feed to the supplied exporter
func (f *IssueFeed) Export(ctx context.Context, apiClient *api.Client, exporter Exporter, orgID string) error {
logger := util.GetLogger()

_ = exporter.InitFeed(f, &InitFeedOptions{
// Delete data if incremental refresh is disabled so there are no duplicates
Truncate: !f.Incremental,
})

var request = &api.GetFeedRequest{
InitialURL: feedURL,
Params: api.GetFeedParams{
Limit: f.Limit,
},
}

var feedFn = func(resp *api.GetFeedResponse) error {
var rows []*Issue
err := json.Unmarshal(resp.Data, &rows)
util.Check(err, "Failed to unmarshal issues data to struct")

if len(rows) != 0 {
// Calculate the size of the batch we can insert into the DB at once.
// Column count + buffer to account for primary keys
batchSize := exporter.ParameterLimit() / (len(f.Columns()) + 4)

for i := 0; i < len(rows); i += batchSize {
j := i + batchSize
if j > len(rows) {
j = len(rows)
}

err = exporter.WriteRows(f, rows[i:j])
util.Check(err, "Failed to write data to exporter")
}
}

logger.Infof("%s: %d remaining", f.Name(), resp.Metadata.RemainingRecords)
return nil
}

err := apiClient.DrainFeed(ctx, request, feedFn)
util.Check(err, "Failed to export feed")
return exporter.FinaliseExport(f, &[]*Issue{})
}
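To get a feel for the batching arithmetic in Export above, a small standalone sketch with assumed numbers (999 stands in for exporter.ParameterLimit() — SQLite's historical default bound-parameter limit — and other exporters will report different values):

package main

import "fmt"

func main() {
	parameterLimit := 999                       // assumed exporter.ParameterLimit() value
	columns := 17                               // len(f.Columns()) for the issues feed
	batchSize := parameterLimit / (columns + 4) // +4 buffer to account for primary keys
	fmt.Println(batchSize)                      // 47, so a 39-row page is written in a single WriteRows call
}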
82 changes: 82 additions & 0 deletions internal/app/feed/feed_issue_test.go
@@ -0,0 +1,82 @@
package feed_test

import (
"context"
"github.com/SafetyCulture/iauditor-exporter/internal/app/api"
"github.com/SafetyCulture/iauditor-exporter/internal/app/feed"
"github.com/stretchr/testify/assert"
"testing"
"time"
)

func TestIssueFeed_Export_ShouldExportRows(t *testing.T) {
exporter, err := getInmemorySQLExporter("")
assert.Nil(t, err)

apiClient := api.GetTestClient()
defer resetMocks(apiClient.HTTPClient())
initMockIssuesFeed(apiClient.HTTPClient())

issuesFeed := feed.IssueFeed{
Limit: 100,
}

err = issuesFeed.Export(context.Background(), apiClient, exporter, "")
assert.Nil(t, err)

var rows []feed.Issue
resp := exporter.DB.Table("issues").Scan(&rows)

assert.Nil(t, resp.Error)
assert.Equal(t, 39, len(rows))
testAllValues(t, &rows[0])
testAllNulls(t, &rows[1])

}

func testAllNulls(t *testing.T, issue *feed.Issue) {
assert.Equal(t, "52a88aeb-5ec6-4876-8c6c-85a642e4bddc", issue.ID)
assert.Equal(t, "", issue.Title)
assert.Equal(t, "", issue.Description)
assert.Equal(t, "user_0590e8a0dfbc64798a2426c2fa76a7415", issue.CreatorID)
assert.Equal(t, "", issue.CreatorUserName)

// uses .Now() if missing
assert.NotEqual(t, time.Date(1, time.January, 1, 0, 0, 0, 0, time.UTC), issue.CreatedAt)

// not sure how correct this approach is
//assert.Equal(t, time.Date(1, time.January, 1, 0, 0, 0, 0, time.UTC), issue.DueAt)
assert.Nil(t, issue.DueAt)

assert.Equal(t, "", issue.Priority)
assert.Equal(t, "", issue.Status)
assert.Equal(t, "", issue.TemplateID)
assert.Equal(t, "", issue.InspectionID)
assert.Equal(t, "", issue.InspectionName)
assert.Equal(t, "", issue.SiteID)
assert.Equal(t, "", issue.SiteName)
assert.Equal(t, "", issue.LocationName)
assert.Equal(t, "", issue.CategoryID)
assert.Equal(t, "", issue.CategoryLabel)
}

func testAllValues(t *testing.T, issue *feed.Issue) {
assert.Equal(t, "56bc5efa-2420-483d-bad1-27b35922c403", issue.ID)
assert.Equal(t, "Injury - 14 Apr 2020, 10:36 am", issue.Title)
assert.Equal(t, "some description", issue.Description)
assert.Equal(t, "user_51d3dbc686eb4790980f6414513d1c05", issue.CreatorID)
assert.Equal(t, "🦄", issue.CreatorUserName)
assert.Equal(t, time.Date(2020, time.April, 14, 0, 36, 53, 304000000, time.UTC), issue.CreatedAt)
expected := time.Date(2020, time.April, 14, 0, 36, 53, 304000000, time.UTC)
assert.Equal(t, &expected, issue.DueAt)
assert.Equal(t, "NONE", issue.Priority)
assert.Equal(t, "OPEN", issue.Status)
assert.Equal(t, "55bc5efa-2420-483d-bad1-27b35922c455", issue.TemplateID)
assert.Equal(t, "66bc5efa-2420-483d-bad1-27b35922c466", issue.InspectionID)
assert.Equal(t, "some name", issue.InspectionName)
assert.Equal(t, "77bc5efa-2420-483d-bad1-27b35922c477", issue.SiteID)
assert.Equal(t, "site name", issue.SiteName)
assert.Equal(t, "88bc5efa-2420-483d-bad1-27b35922c488", issue.LocationName)
assert.Equal(t, "592ec130-90e0-4c0e-a1c0-1f37f12f5fb5", issue.CategoryID)
assert.Equal(t, "Tow Trucks", issue.CategoryLabel)
}
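The nil DueAt assertion in testAllNulls works because Issue declares the field as *time.Time rather than time.Time. A standalone sketch of that behaviour (the row struct below is a stand-in, not the real feed.Issue):

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

type row struct {
	DueAt *time.Time `json:"due_at,omitempty"`
}

func main() {
	var r row
	_ = json.Unmarshal([]byte("{}"), &r)
	fmt.Println(r.DueAt == nil) // true: a missing due_at stays nil (and NULL in the DB),
	// while a non-pointer time.Time would silently decode to the zero value 0001-01-01
}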
44 changes: 44 additions & 0 deletions internal/app/feed/mock_feeds_test.go
@@ -78,6 +78,16 @@ func initMockFeedsSet1(httpClient *http.Client) {
Get("/feed/action_assignees").
Reply(200).
File("mocks/set_1/feed_action_assignees_1.json")

gock.New("http://localhost:9999").
Get("/feed/issues").
Reply(200).
File("mocks/set_1/feed_issues_1.json")

gock.New("http://localhost:9999/feed/issues?limit=20&next_page_token=QGI5Nzk5NzMtZWMxMy00YzVmLTk1NDQtMTI0YjQ1M2I2OWYp").
Get("/feed/issues").
Reply(200).
File("mocks/set_1/feed_issues_2.json")
}

func initMockFeedsSet2(httpClient *http.Client) {
@@ -153,6 +163,16 @@ func initMockFeedsSet2(httpClient *http.Client) {
Get("/feed/action_assignees").
Reply(200).
File("mocks/set_2/feed_action_assignees_2.json")

gock.New("http://localhost:9999").
Get("/feed/issues").
Reply(200).
File("mocks/set_1/feed_issues_1.json")

gock.New("http://localhost:9999/feed/issues?limit=20&next_page_token=QGI5Nzk5NzMtZWMxMy00YzVmLTk1NDQtMTI0YjQ1M2I2OWYp").
Get("/feed/issues").
Reply(200).
File("mocks/set_1/feed_issues_2.json")
}

func initMockFeedsSet3(httpClient *http.Client) {
@@ -222,6 +242,30 @@ func initMockFeedsSet3(httpClient *http.Client) {
Get("/feed/action_assignees").
Reply(200).
File("mocks/set_1/feed_action_assignees_1.json")

gock.New("http://localhost:9999").
Get("/feed/issues").
Reply(200).
File("mocks/set_1/feed_issues_1.json")

gock.New("http://localhost:9999/feed/issues?limit=20&next_page_token=QGI5Nzk5NzMtZWMxMy00YzVmLTk1NDQtMTI0YjQ1M2I2OWYp").
Get("/feed/issues").
Reply(200).
File("mocks/set_1/feed_issues_2.json")
}

func initMockIssuesFeed(httpClient *http.Client) {
gock.InterceptClient(httpClient)

gock.New("http://localhost:9999").
Get("/feed/issues").
Reply(200).
File("mocks/set_1/feed_issues_1.json")

gock.New("http://localhost:9999/feed/issues?limit=20&next_page_token=QGI5Nzk5NzMtZWMxMy00YzVmLTk1NDQtMTI0YjQ1M2I2OWYp").
Get("/feed/issues").
Reply(200).
File("mocks/set_1/feed_issues_2.json")
}

func resetMocks(httpClient *http.Client) {