chore: add list address dig holders in osmosis chain #639

Merged: 3 commits, Feb 15, 2024

Changes from 2 commits
2 changes: 1 addition & 1 deletion .github/workflows/golangci-lint.yml
@@ -24,5 +24,5 @@ jobs:
  uses: golangci/golangci-lint-action@v3
  with:
    # Optional: version of golangci-lint to use in form of v1.2 or v1.2.3 or `latest` to use the latest version
-   version: latest
+   version: v1.55.2
    args: --timeout 10m
133 changes: 133 additions & 0 deletions scripts/export_dig_holder.go
@@ -0,0 +1,133 @@
package main

import (
    "bufio"
    "encoding/csv"
    "encoding/json"
    "fmt"
    "os"
    "slices"
    "strconv"
    "strings"
)

var mapAddr = make(map[string]int)

const DigIbcDenom = "ibc/307E5C96C8F60D1CBEE269A9A86C0834E1DB06F2B3788AE4F716EDB97A48B97D"

// Function to read a large file and find the text "udig"
func findTextInLargeFile(filePath string, searchText string) bool {
    file, err := os.Open(filePath)
    if err != nil {
        fmt.Println("File not found.")
        return false
    }
    defer file.Close()

    const chunkSize = 1024 * 1024 * 10 // 10 MB chunk size, adjust as needed
    reader := bufio.NewReader(file)
    buffer := make([]byte, chunkSize)

    for {
        n, err := reader.Read(buffer)
        if err != nil && err.Error() != "EOF" {
            fmt.Println("Error reading file:", err)
            break
        }
        if n == 0 {
            break
        }
        chunk := string(buffer[:n])
        if strings.Contains(chunk, searchText) {
            tmps := strings.Split(chunk, "},{\"address")
            for _, tmp := range tmps {
                s := "{\"address" + tmp + "}"
                var result map[string]interface{}
                if err := json.Unmarshal([]byte(s), &result); err != nil {
                    continue
                }
                coins, ok := result["coins"].([]interface{})
                if !ok {
                    continue
                }
                balance := 0
                for _, coin := range coins {
                    coinMap, ok := coin.(map[string]interface{})
                    if !ok {
                        continue
                    }
                    if coinMap["denom"] == DigIbcDenom {
                        amountStr, ok := coinMap["amount"].(string)
                        if !ok {
                            fmt.Println("Amount is not a string")
                            continue
                        }
                        amount, err := strconv.ParseFloat(amountStr, 64)
                        if err != nil {
                            fmt.Println("Error converting amount to float:", err)
                            continue
                        }
                        balance = int(amount)
                        break
                    }
                }
                address, ok := result["address"].(string)
                if balance > 0 && ok && mapAddr[address] == 0 {
                    mapAddr[address] = balance
                }
            }
        }
        if err != nil {
            break
        }
    }
    return true
Comment on lines +19 to +84
The findTextInLargeFile function silently swallows JSON unmarshalling errors: it just continues the loop without logging them. Consider adding error logging for debugging purposes.

- if err := json.Unmarshal([]byte(s), &result); err != nil {
-     continue
+ if err := json.Unmarshal([]byte(s), &result); err != nil {
+     fmt.Println("Error unmarshalling JSON:", err)
+     continue


}

func main() {

    // snapshot time -> 13-Feb-2024 12:40
    filePath := "/Users/hoank/export.json" // path of genesis file in your server
The file path in the filePath variable is hardcoded. Consider making it configurable through command-line arguments or environment variables.

- filePath := "/Users/hoank/export.json" // path of genesis file in your server
+ // Use flag package or os.Getenv to make the file path configurable

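One possible shape for this, as a rough sketch only (the -genesis flag and GENESIS_EXPORT_PATH variable are illustrative names, not part of this PR):

package main

import (
    "flag"
    "fmt"
    "os"
)

func main() {
    // Fall back to an environment variable when the flag is not provided.
    defaultPath := os.Getenv("GENESIS_EXPORT_PATH")
    filePath := flag.String("genesis", defaultPath, "path to the exported genesis JSON file")
    flag.Parse()

    if *filePath == "" {
        fmt.Println("genesis export path is required: pass -genesis or set GENESIS_EXPORT_PATH")
        os.Exit(1)
    }

    fmt.Println("using genesis export at", *filePath)
    // findTextInLargeFile(*filePath, searchText) would then use this value.
}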

    searchText := "307E5C96C8F60D1CBEE269A9A86C0834E1DB06F2B3788AE4F716EDB97A48B97D"
    found := findTextInLargeFile(filePath, searchText)
    fmt.Printf("Text '%s' found: %t\n", searchText, found)

    module_address := []string{
        "osmo1szvslwsxf3y2s4lt3c7e7mm92zgy44j8krruht5zzanmhrjwyc4qqpt5nz", //pool #621 dig/osmo
        "osmo1rqamy6jc3f0rwrg5xz8hy8q7n932t2488f2gqg3d0cadvd3uqaxq4wazn8", // contract address
        "osmo18rqwcrvsfyy9s2tlfvmchclxf2cfw2hqvrjvkuljcrhq2vpg4suse8h6tj", // pool #620
        "osmo125dgaejga0q9wc6jcpf75lh6aeeyvrdz88xvzd", // ibc-accout
    }
Comment on lines +95 to +100
The module_address slice contains hardcoded addresses. If these addresses are subject to change, consider externalizing them to a configuration file or environment variables.

- module_address := []string{
-     "osmo1szvslwsxf3y2s4lt3c7e7mm92zgy44j8krruht5zzanmhrjwyc4qqpt5nz", //pool #621 dig/osmo
-     ...
+ // Consider loading these addresses from a configuration file or environment variables

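As a rough sketch of one option (the DIG_MODULE_ADDRESSES variable name is illustrative, not part of this PR; a JSON or YAML config file would work just as well):

package main

import (
    "fmt"
    "os"
    "strings"
)

// loadModuleAddresses reads the excluded module/pool addresses from a
// comma-separated environment variable instead of hardcoding them.
func loadModuleAddresses() []string {
    raw := os.Getenv("DIG_MODULE_ADDRESSES")
    if raw == "" {
        return nil
    }
    var addrs []string
    for _, a := range strings.Split(raw, ",") {
        if a = strings.TrimSpace(a); a != "" {
            addrs = append(addrs, a)
        }
    }
    return addrs
}

func main() {
    moduleAddress := loadModuleAddresses()
    fmt.Println("excluded addresses:", moduleAddress)
}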


    csvFilePath := "output.csv"
    file, err := os.Create(csvFilePath)
    if err != nil {
        fmt.Println("Error creating CSV file:", err)
        return
    }
    defer file.Close()

    writer := csv.NewWriter(file)
    defer writer.Flush()

    // Optionally write headers, if needed
    err = writer.Write([]string{"Address", "Balance"})
    if err != nil {
        return
Comment on lines +113 to +116
The error returned when writing headers to the CSV file is not logged: the code simply returns. If an error occurs, it should be logged or handled appropriately.

- err = writer.Write([]string{"Address", "Balance"})
- if err != nil {
-     return
+ err = writer.Write([]string{"Address", "Balance"})
+ if err != nil {
+     fmt.Println("Error writing headers to CSV:", err)
+     return


    }
    totalBalance := 0
    for addr, balance := range mapAddr {
        totalBalance += balance
        if slices.Contains(module_address, addr) {
            continue
Comment on lines +121 to +122
slices.Contains is used to filter out specific module addresses. Ensure this is the intended behavior, and consider whether there is a more efficient way to handle exclusions, especially if the list grows.

Consider using a map for module_address to get O(1) lookups instead of O(n) with slices.Contains.
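
A rough sketch of the map-based check (the addresses and balances below are placeholders, not the real snapshot data):

package main

import "fmt"

func main() {
    moduleAddress := []string{"osmo1pool...", "osmo1contract..."} // placeholder addresses
    mapAddr := map[string]int{"osmo1pool...": 50, "osmo1holder...": 120}

    // Build the exclusion set once; each lookup inside the loop is then O(1).
    moduleSet := make(map[string]struct{}, len(moduleAddress))
    for _, addr := range moduleAddress {
        moduleSet[addr] = struct{}{}
    }

    for addr, balance := range mapAddr {
        if _, isModule := moduleSet[addr]; isModule {
            continue // skip module/pool accounts
        }
        fmt.Println(addr, balance)
    }
}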

        }
        row := []string{addr, fmt.Sprint(balance)}
        err := writer.Write(row)
        if err != nil {
            return
        }
    }
    fmt.Println("total", len(mapAddr))
    fmt.Printf("Data written to %s\n", csvFilePath)
    fmt.Println("total balance", totalBalance)
}