package tool

v0.29.1

Warning: This package is not in the latest version of its module.
Published: Feb 15, 2024 License: AGPL-3.0 Imports: 14 Imported by: 0

Documentation

Overview

Package tool exports tool subcommands
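
Each exported variable below is a ready-to-use *cobra.Command. A minimal sketch of attaching them to a parent command and executing it; the import path, the surrounding root command, and the parent command's description are assumptions for illustration (flag registration for these subcommands is not shown in this documentation and is assumed to happen within the package):

	package main

	import (
		"log"

		"github.com/spf13/cobra"

		// Hypothetical import path for this package; substitute the real module path.
		tool "example.com/your-relay/cmd/tool"
	)

	func main() {
		// Group the exported subcommands under a "tool" parent command.
		toolCmd := &cobra.Command{
			Use:   "tool",
			Short: "database and data-API utilities",
		}
		toolCmd.AddCommand(
			tool.Migrate,
			tool.ArchiveExecutionPayloads,
			tool.DataAPIExportBids,
			tool.DataAPIExportPayloads,
		)

		rootCmd := &cobra.Command{Use: "relay"}
		rootCmd.AddCommand(toolCmd)

		if err := rootCmd.Execute(); err != nil {
			log.Fatal(err)
		}
	}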

Index

Constants

This section is empty.

Variables

View Source
var ArchiveExecutionPayloads = &cobra.Command{
	Use:   "archive-execution-payloads",
	Short: "export execution payloads from the DB to a CSV or JSON file and archive by deleting the payloads",
	Run: func(cmd *cobra.Command, args []string) {
		if len(outFiles) == 0 {
			log.Fatal("no output files specified")
		}
		log.Infof("exporting execution payloads to %s", strings.Join(outFiles, ", "))

		if idLast == 0 && dateEnd == "" {
			log.Fatal("must specify --id-to or --date-end")
		}

		dbURL, err := url.Parse(postgresDSN)
		if err != nil {
			log.WithError(err).Fatalf("couldn't read db URL")
		}
		log.Infof("Connecting to Postgres database at %s%s ...", dbURL.Host, dbURL.Path)
		db, err := database.NewDatabaseService(postgresDSN)
		if err != nil {
			log.WithError(err).Fatalf("Failed to connect to Postgres database at %s%s", dbURL.Host, dbURL.Path)
		}

		if dateStart != "" {
			// resolve dateStart (inclusive) to the first execution payload id inserted on or after that date
			query := `SELECT id FROM ` + vars.TableExecutionPayload + ` WHERE inserted_at::date >= date '` + dateStart + `' ORDER BY id ASC LIMIT 1;`
			err = db.DB.QueryRow(query).Scan(&idFirst)
			if err != nil {
				log.WithError(err).Fatalf("failed to find start id for date %s", dateStart)
			}
		}
		if dateEnd != "" {
			// resolve dateEnd (exclusive) to the last execution payload id inserted before that date
			query := `SELECT id FROM ` + vars.TableExecutionPayload + ` WHERE inserted_at::date < date '` + dateEnd + `' ORDER BY id DESC LIMIT 1;`
			err = db.DB.QueryRow(query).Scan(&idLast)
			if err != nil {
				log.WithError(err).Fatalf("failed to find end id for date %s", dateEnd)
			}
		}
		log.Infof("exporting ids %d to %d", idFirst, idLast)

		deliveredPayloads, err := db.GetExecutionPayloads(idFirst, idLast)
		if err != nil {
			log.WithError(err).Fatal("error getting execution payloads")
		}

		log.Infof("got %d payloads", len(deliveredPayloads))
		if len(deliveredPayloads) == 0 {
			return
		}

		writeToFile := func(outFile string) {
			f, err := os.Create(outFile)
			if err != nil {
				log.WithError(err).Fatal("failed to open file")
			}
			defer f.Close()

			if strings.HasSuffix(outFile, ".csv") {
				// CSV output: write the header row, then one row per payload
				w := csv.NewWriter(f)
				defer w.Flush()
				if err := w.Write(database.ExecutionPayloadEntryCSVHeader); err != nil {
					log.WithError(err).Fatal("error writing record to file")
				}
				for _, record := range deliveredPayloads {
					if err := w.Write(record.ToCSVRecord()); err != nil {
						log.WithError(err).Fatal("error writing record to file")
					}
				}
			} else {
				// any other extension: encode all payloads as a single JSON document
				encoder := json.NewEncoder(f)
				err = encoder.Encode(deliveredPayloads)
				if err != nil {
					log.WithError(err).Fatal("failed to write json to file")
				}
			}
			log.Infof("Wrote %d entries to %s", len(deliveredPayloads), outFile)
		}

		for _, outFile := range outFiles {
			writeToFile(outFile)
		}

		if doDelete {
			log.Infof("deleting archived payloads from DB")
			err = db.DeleteExecutionPayloads(idFirst, idLast)
			if err != nil {
				log.WithError(err).Fatal("error deleting execution payloads")
			}
		}

		log.Infof("all done")
	},
}
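
In the command above, dateStart is treated as inclusive and dateEnd as exclusive: each lookup converts a date into an id boundary, and GetExecutionPayloads / DeleteExecutionPayloads then operate on that [idFirst, idLast] range. A minimal standalone sketch of the same boundary lookup written as a parameterized query, assuming github.com/jmoiron/sqlx with a Postgres driver; the function name and id type are placeholders:

	// firstIDOnOrAfter returns the smallest id in table whose inserted_at date is
	// on or after start, mirroring the dateStart lookup in the command above.
	func firstIDOnOrAfter(db *sqlx.DB, table, start string) (uint64, error) {
		var id uint64
		query := `SELECT id FROM ` + table + ` WHERE inserted_at::date >= $1::date ORDER BY id ASC LIMIT 1;`
		err := db.QueryRow(query, start).Scan(&id)
		return id, err
	}
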
View Source
var DataAPIExportBids = &cobra.Command{
	Use: "data-api-export-bids",
	Run: func(cmd *cobra.Command, args []string) {
		if len(outFiles) == 0 {
			outFnBase := fmt.Sprintf("builder-submissions_slot-%d-to-%d", slotFrom, slotTo)
			outFiles = append(outFiles, outFnBase+".csv")
			outFiles = append(outFiles, outFnBase+".json")
		}
		log.Infof("exporting data-api bids to %s", strings.Join(outFiles, ", "))

		if slotFrom == 0 || slotTo == 0 {
			log.Fatal("must specify --slot-from and --slot-to")
		}

		dbURL, err := url.Parse(postgresDSN)
		if err != nil {
			log.WithError(err).Fatalf("couldn't read db URL")
		}
		log.Infof("Connecting to Postgres database at %s%s ...", dbURL.Host, dbURL.Path)
		db, err := database.NewDatabaseService(postgresDSN)
		if err != nil {
			log.WithError(err).Fatalf("Failed to connect to Postgres database at %s%s", dbURL.Host, dbURL.Path)
		}

		log.Info("Connected to Postgres database, starting queries")
		log.Infof("exporting slots %d to %d (%d slots in total)...", slotFrom, slotTo, slotTo-slotFrom+1)

		bids, err := db.GetBuilderSubmissionsBySlots(slotFrom, slotTo)
		if err != nil {
			log.WithError(err).Fatal("failed getting bids")
		}

		log.Infof("got %d bids", len(bids))
		entries := make([]common.BidTraceV2WithTimestampJSON, len(bids))
		for i, bid := range bids {
			entries[i] = database.BuilderSubmissionEntryToBidTraceV2WithTimestampJSON(bid)
		}

		if len(entries) == 0 {
			return
		}
		// drop the raw DB rows and hint the GC to reclaim them before writing the output files
		bids = nil
		runtime.GC()

		writeToFile := func(outFile string) {
			f, err := os.Create(outFile)
			if err != nil {
				log.WithError(err).Fatal("failed to open file")
			}
			defer f.Close()

			if strings.HasSuffix(outFile, ".csv") {
				// CSV output: write the header row, then one row per bid
				w := csv.NewWriter(f)
				defer w.Flush()
				if err := w.Write(entries[0].CSVHeader()); err != nil {
					log.WithError(err).Fatal("error writing record to file")
				}
				for _, record := range entries {
					if err := w.Write(record.ToCSVRecord()); err != nil {
						log.WithError(err).Fatal("error writing record to file")
					}
				}

			} else {
				// any other extension: encode all bids as a single JSON document
				encoder := json.NewEncoder(f)
				err = encoder.Encode(entries)
				if err != nil {
					log.WithError(err).Fatal("failed to write json to file")
				}
			}

			log.Infof("Wrote %d entries to %s", len(entries), outFile)
			runtime.GC()
		}

		for _, outFile := range outFiles {
			writeToFile(outFile)
		}
	},
}
View Source
var DataAPIExportPayloads = &cobra.Command{
	Use:   "data-api-export-payloads",
	Short: "export delivered payloads to the proposer from the DB to a CSV or JSON file",
	Run: func(cmd *cobra.Command, args []string) {
		if len(outFiles) == 0 {
			log.Fatal("no output files specified")
		}
		log.Infof("exporting data-api payloads to %s", strings.Join(outFiles, ", "))

		if idLast == 0 && dateEnd == "" {
			log.Fatal("must specify --id-to or --date-end")
		}

		dbURL, err := url.Parse(postgresDSN)
		if err != nil {
			log.WithError(err).Fatalf("couldn't read db URL")
		}
		log.Infof("Connecting to Postgres database at %s%s ...", dbURL.Host, dbURL.Path)
		db, err := database.NewDatabaseService(postgresDSN)
		if err != nil {
			log.WithError(err).Fatalf("Failed to connect to Postgres database at %s%s", dbURL.Host, dbURL.Path)
		}

		if dateStart != "" {
			// resolve dateStart (inclusive) to the first delivered payload id inserted on or after that date
			query := `SELECT id FROM ` + vars.TableDeliveredPayload + ` WHERE inserted_at::date >= date '` + dateStart + `' ORDER BY id ASC LIMIT 1;`
			err = db.DB.QueryRow(query).Scan(&idFirst)
			if err != nil {
				log.WithError(err).Fatalf("failed to find start id for date %s", dateStart)
			}
		}
		if dateEnd != "" {
			// resolve dateEnd (exclusive) to the last delivered payload id inserted before that date
			query := `SELECT id FROM ` + vars.TableDeliveredPayload + ` WHERE inserted_at::date < date '` + dateEnd + `' ORDER BY id DESC LIMIT 1;`
			err = db.DB.QueryRow(query).Scan(&idLast)
			if err != nil {
				log.WithError(err).Fatalf("failed to find end id for date %s", dateEnd)
			}
		}
		log.Infof("exporting ids %d to %d", idFirst, idLast)

		deliveredPayloads, err := db.GetDeliveredPayloads(idFirst, idLast)
		if err != nil {
			log.WithError(err).Fatal("error getting recent payloads")
		}

		log.Infof("got %d payloads", len(deliveredPayloads))
		entries := make([]common.BidTraceV2JSON, len(deliveredPayloads))
		for i, payload := range deliveredPayloads {
			entries[i] = database.DeliveredPayloadEntryToBidTraceV2JSON(payload)
		}

		if len(entries) == 0 {
			return
		}

		writeToFile := func(outFile string) {
			f, err := os.Create(outFile)
			if err != nil {
				log.WithError(err).Fatal("failed to open file")
			}
			defer f.Close()

			if strings.HasSuffix(outFile, ".csv") {
				// CSV output: write the header row, then one row per entry
				w := csv.NewWriter(f)
				defer w.Flush()
				if err := w.Write(entries[0].CSVHeader()); err != nil {
					log.WithError(err).Fatal("error writing record to file")
				}
				for _, record := range entries {
					if err := w.Write(record.ToCSVRecord()); err != nil {
						log.WithError(err).Fatal("error writing record to file")
					}
				}
			} else {
				// any other extension: encode all entries as a single JSON document
				encoder := json.NewEncoder(f)
				err = encoder.Encode(entries)
				if err != nil {
					log.WithError(err).Fatal("failed to write json to file")
				}
			}
			log.Infof("Wrote %d entries to %s", len(entries), outFile)
		}

		for _, outFile := range outFiles {
			writeToFile(outFile)
		}
	},
}
View Source
var Migrate = &cobra.Command{
	Use:   "migrate",
	Short: "migrate the database to the latest schema",
	Run: func(cmd *cobra.Command, args []string) {

		dbURL, err := url.Parse(postgresDSN)
		if err != nil {
			log.WithError(err).Fatalf("couldn't read db URL")
		}
		log.Infof("Connecting to Postgres database at %s%s ...", dbURL.Host, dbURL.Path)
		db, err := sqlx.Connect("postgres", postgresDSN)
		if err != nil {
			log.WithError(err).Fatalf("Failed to connect to Postgres database at %s%s", dbURL.Host, dbURL.Path)
		}

		log.Infof("Migrating database ...")
		migrate.SetTable(vars.TableMigrations)
		numAppliedMigrations, err := migrate.Exec(db.DB, "postgres", migrations.Migrations, migrate.Up)
		if err != nil {
			log.WithError(err).Fatalf("Failed to migrate database")
		}
		log.WithField("num_applied_migrations", numAppliedMigrations).Info("Migrations applied successfully")
	},
}
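
The same migration step can be embedded in another Go program (for example at service startup) using the migrate API shown above (SetTable, Exec, Up). A minimal sketch, assuming a Postgres DSN and a migration source analogous to migrations.Migrations; the function and parameter names are placeholders:

	// runMigrations applies all pending "up" migrations against the given DSN and
	// returns how many were applied.
	func runMigrations(dsn string, source migrate.MigrationSource) (int, error) {
		db, err := sqlx.Connect("postgres", dsn)
		if err != nil {
			return 0, err
		}
		defer db.Close()

		// Reuse the same bookkeeping table as the Migrate command above.
		migrate.SetTable(vars.TableMigrations)
		return migrate.Exec(db.DB, "postgres", source, migrate.Up)
	}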

Functions

This section is empty.

Types

This section is empty.
