Diffstat (limited to 'pkg/localstore')
-rw-r--r--   pkg/localstore/export.go    241
-rw-r--r--   pkg/localstore/import.go    484
-rw-r--r--   pkg/localstore/store.go     638
3 files changed, 1363 insertions(+), 0 deletions(-)
diff --git a/pkg/localstore/export.go b/pkg/localstore/export.go
new file mode 100644
index 0000000..fa92810
--- /dev/null
+++ b/pkg/localstore/export.go
@@ -0,0 +1,241 @@
+package localstore
+
+import (
+ csvwriter "airlines/pkg/csvWriter"
+ "errors"
+ "fmt"
+ "os"
+ "path/filepath"
+ "strconv"
+ "strings"
+ "time"
+)
+
+func (s *Store) ExportUsersCSV(w *csvwriter.CsvWriter) error {
+ s.mu.RLock()
+ defer s.mu.RUnlock()
+
+ if err := w.Write([]string{"id", "nick", "name", "surname", "fathersname", "sex", "birthday", "total_flights", "total_codes", "total_countries", "total_cards"}); err != nil {
+ return err
+ }
+
+ for i := 1; i < len(s.users); i++ {
+ u := s.users[i]
+ if u == nil {
+ continue
+ }
+ bday := ""
+ if !u.Birthday.IsZero() && u.Birthday.Year() != 1 {
+ bday = u.Birthday.UTC().Format("2006-01-02")
+ }
+ row := []string{
+ strconv.FormatUint(u.ID, 10),
+ u.Nick,
+ u.Name,
+ u.Surname,
+ u.Fathersname,
+ strconv.Itoa(int(u.Sex)),
+ bday,
+ strconv.Itoa(len(s.userFlights[u.ID])),
+ strconv.Itoa(len(s.codesByUser[u.ID])),
+ strconv.Itoa(len(s.countriesByUser[u.ID])),
+ strconv.Itoa(len(s.cardsByUser[u.ID])),
+ }
+ if err := w.Write(row); err != nil {
+ return err
+ }
+ }
+
+ return w.Sync()
+}
+
+func (s *Store) ExportCardsCSV(w *csvwriter.CsvWriter) error {
+ s.mu.RLock()
+ defer s.mu.RUnlock()
+
+ if err := w.Write([]string{"id", "prefix", "number", "bonusprogramm", "user_id"}); err != nil {
+ return err
+ }
+ for i := 1; i < len(s.cards); i++ {
+ c := s.cards[i]
+ if c == nil {
+ continue
+ }
+ row := []string{
+ strconv.FormatUint(c.ID, 10),
+ c.Prefix,
+ strconv.FormatUint(c.Number, 10),
+ c.Bonusprogramm,
+ strconv.FormatUint(c.UserID, 10),
+ }
+ if err := w.Write(row); err != nil {
+ return err
+ }
+ }
+ return w.Sync()
+}
+
+func (s *Store) ExportFlightsCSV(w *csvwriter.CsvWriter) error {
+ s.mu.RLock()
+ defer s.mu.RUnlock()
+
+ if err := w.Write([]string{
+ "id", "number", "from", "to", "fromlat", "fromlon", "tolat", "tolon",
+ "dep_date", "has_time", "dep_time", "dep_iso",
+ }); err != nil {
+ return err
+ }
+ for i := 1; i < len(s.flights); i++ {
+ f := s.flights[i]
+ if f == nil {
+ continue
+ }
+ depDate := f.Date.UTC().Format("2006-01-02")
+ depTime := ""
+ depISO := ""
+ if f.HasTime {
+ depTime = f.Date.Format("15:04:05")
+ depISO = f.Date.Format(time.RFC3339)
+ }
+ row := []string{
+ strconv.FormatUint(f.ID, 10),
+ f.Number,
+ f.From,
+ f.To,
+ fmt.Sprint(f.FromCoords.Lat),
+ fmt.Sprint(f.FromCoords.Long),
+ fmt.Sprint(f.ToCoords.Lat),
+ fmt.Sprint(f.ToCoords.Long),
+ depDate,
+ strconv.FormatBool(f.HasTime),
+ depTime,
+ depISO,
+ }
+ if err := w.Write(row); err != nil {
+ return err
+ }
+ }
+ return w.Sync()
+}
+
+func (s *Store) ExportUserFlightsCSV(w *csvwriter.CsvWriter) error {
+ s.mu.RLock()
+ defer s.mu.RUnlock()
+
+ if err := w.Write([]string{"user_id", "flight_id"}); err != nil {
+ return err
+ }
+ for uid, set := range s.userFlights {
+ for fid := range set {
+ if fid == 0 || int(fid) >= len(s.flights) || s.flights[fid] == nil {
+ continue
+ }
+ if err := w.Write([]string{strconv.FormatUint(uid, 10), strconv.FormatUint(fid, 10)}); err != nil {
+ return err
+ }
+ }
+ }
+ return w.Sync()
+}
+
+func (s *Store) ExportCardFlightsCSV(w *csvwriter.CsvWriter) error {
+ s.mu.RLock()
+ defer s.mu.RUnlock()
+
+ if err := w.Write([]string{"card_id", "flight_id"}); err != nil {
+ return err
+ }
+ for cid, set := range s.cardFlights {
+ for fid := range set {
+ if fid == 0 || int(fid) >= len(s.flights) || s.flights[fid] == nil {
+ continue
+ }
+ if err := w.Write([]string{strconv.FormatUint(cid, 10), strconv.FormatUint(fid, 10)}); err != nil {
+ return err
+ }
+ }
+ }
+ return w.Sync()
+}
+
+
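+// ExportAllCSVs writes users.csv, cards.csv, flights.csv, user_flights.csv and
+// card_flights.csv into dir, creating the directory if needed. A minimal usage
+// sketch (the path is illustrative):
+//
+//    st := NewLocalStore()
+//    // ... SaveUser / SaveCard / SaveFlight calls ...
+//    if err := st.ExportAllCSVs("/tmp/airlines-dump"); err != nil {
+//        log.Fatal(err)
+//    }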
+func (s *Store) ExportAllCSVs(dir string) error {
+ if dir == "" {
+ return errors.New("empty directory path")
+ }
+ if err := os.MkdirAll(dir, 0o755); err != nil {
+ return err
+ }
+ if !strings.HasSuffix(dir, string(filepath.Separator)) {
+ dir += string(filepath.Separator)
+ }
+
+ // users.csv
+ uw, err := csvwriter.NewCsvWriter(dir + "users.csv")
+ if err != nil {
+ return err
+ }
+ if err := s.ExportUsersCSV(uw); err != nil {
+ _ = uw.Close()
+ return err
+ }
+ if err := uw.Close(); err != nil {
+ return err
+ }
+
+ // cards.csv
+ cw, err := csvwriter.NewCsvWriter(dir + "cards.csv")
+ if err != nil {
+ return err
+ }
+ if err := s.ExportCardsCSV(cw); err != nil {
+ _ = cw.Close()
+ return err
+ }
+ if err := cw.Close(); err != nil {
+ return err
+ }
+
+ // flights.csv
+ fw, err := csvwriter.NewCsvWriter(dir + "flights.csv")
+ if err != nil {
+ return err
+ }
+ if err := s.ExportFlightsCSV(fw); err != nil {
+ _ = fw.Close()
+ return err
+ }
+ if err := fw.Close(); err != nil {
+ return err
+ }
+
+ // user_flights.csv
+ ufw, err := csvwriter.NewCsvWriter(dir + "user_flights.csv")
+ if err != nil {
+ return err
+ }
+ if err := s.ExportUserFlightsCSV(ufw); err != nil {
+ _ = ufw.Close()
+ return err
+ }
+ if err := ufw.Close(); err != nil {
+ return err
+ }
+
+ // card_flights.csv
+ cfw, err := csvwriter.NewCsvWriter(dir + "card_flights.csv")
+ if err != nil {
+ return err
+ }
+ if err := s.ExportCardFlightsCSV(cfw); err != nil {
+ _ = cfw.Close()
+ return err
+ }
+ if err := cfw.Close(); err != nil {
+ return err
+ }
+
+ return nil
+}
diff --git a/pkg/localstore/import.go b/pkg/localstore/import.go
new file mode 100644
index 0000000..eb008ba
--- /dev/null
+++ b/pkg/localstore/import.go
@@ -0,0 +1,484 @@
+package localstore
+
+import (
+ "encoding/csv"
+ "errors"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+ "strconv"
+ "strings"
+ "time"
+
+ "airlines/pkg/model"
+
+ "github.com/schollz/progressbar/v3"
+)
+
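+// ImportAllCSVs rebuilds users, cards, flights and their relations from the
+// CSV files produced by ExportAllCSVs. codesByUser and countriesByUser cannot
+// be derived from these files and are only initialized; the dedup indices
+// (nickToUID, nameToUID, cardToCID, flightToFID, ...) are not rebuilt here.
+// A minimal usage sketch (the path is illustrative):
+//
+//    st := NewLocalStore()
+//    if err := st.ImportAllCSVs("/tmp/airlines-dump"); err != nil {
+//        log.Fatal(err)
+//    }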
+func (s *Store) ImportAllCSVs(dir string) error {
+ if dir == "" {
+ return errors.New("empty directory path")
+ }
+ if !strings.HasSuffix(dir, string(filepath.Separator)) {
+ dir += string(filepath.Separator)
+ }
+
+ // lock for writes while rebuilding everything
+ s.mu.Lock()
+ defer s.mu.Unlock()
+
+ // reset containers
+ s.users = nil
+ s.cards = nil
+ s.flights = nil
+ if s.userFlights == nil {
+ s.userFlights = make(map[uint64]map[uint64]struct{})
+ } else {
+ for k := range s.userFlights {
+ delete(s.userFlights, k)
+ }
+ }
+ if s.cardFlights == nil {
+ s.cardFlights = make(map[uint64]map[uint64]struct{})
+ } else {
+ for k := range s.cardFlights {
+ delete(s.cardFlights, k)
+ }
+ }
+ // (Re)build helper indices when possible
+ if s.cardsByUser == nil {
+ s.cardsByUser = make(map[uint64]map[uint64]struct{})
+ } else {
+ for k := range s.cardsByUser {
+ delete(s.cardsByUser, k)
+ }
+ }
+ // codesByUser / countriesByUser cannot be reconstructed from these CSVs;
+ // initialize them if nil so code that reads them does not panic.
+ if s.codesByUser == nil {
+ s.codesByUser = make(map[uint64]map[string]struct{})
+ }
+ if s.countriesByUser == nil {
+ s.countriesByUser = make(map[uint64]map[string]struct{})
+ }
+
+ // 1) users.csv
+ if err := s.loadUsersCSV(dir + "users.csv"); err != nil {
+ return fmt.Errorf("load users.csv: %w", err)
+ }
+ fmt.Println("loaed users")
+
+ // 2) cards.csv
+ if err := s.loadCardsCSV(dir + "cards.csv"); err != nil {
+ return fmt.Errorf("load cards.csv: %w", err)
+ }
+
+ // 3) flights.csv
+ if err := s.loadFlightsCSV(dir + "flights.csv"); err != nil {
+ return fmt.Errorf("load flights.csv: %w", err)
+ }
+
+ // 4) user_flights.csv
+ if err := s.loadUserFlightsCSV(dir + "user_flights.csv"); err != nil {
+ return fmt.Errorf("load user_flights.csv: %w", err)
+ }
+
+ // 5) card_flights.csv
+ if err := s.loadCardFlightsCSV(dir + "card_flights.csv"); err != nil {
+ return fmt.Errorf("load card_flights.csv: %w", err)
+ }
+
+ return nil
+}
+
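+// loadUsersCSV reads users.csv as written by ExportUsersCSV; the trailing
+// total_* columns are ignored on import. An illustrative row (values are
+// hypothetical):
+//
+//    1,ivan80,IVAN,IVANOV,IVANOVICH,1,1980-02-03,12,3,5,2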
+func (s *Store) loadUsersCSV(path string) error {
+ r, closer, err := openCSV(path)
+ if err != nil {
+ return err
+ }
+ defer closer.Close()
+
+ // header
+ if _, err := r.Read(); err != nil {
+ return fmt.Errorf("users header: %w", err)
+ }
+
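+ // the row total passed to progressbar.Default here (and in the other loaders)
+ // is only a rough estimate used for progress display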
+ bar := progressbar.Default(int64(150000), "reading users")
+
+ for {
+ bar.Add(1)
+ rec, err := r.Read()
+ if errors.Is(err, io.EOF) {
+ break
+ }
+ if err != nil {
+ return fmt.Errorf("users read: %w", err)
+ }
+ // columns: id, nick, name, surname, fathersname, sex, birthday, total_flights, total_codes, total_countries, total_cards
+ if len(rec) < 11 {
+ return fmt.Errorf("users row has %d columns, expected >=11", len(rec))
+ }
+
+ id, err := parseUint(rec[0])
+ if err != nil {
+ return fmt.Errorf("user id: %w", err)
+ }
+ sexInt, err := parseInt(rec[5])
+ if err != nil {
+ return fmt.Errorf("user sex: %w", err)
+ }
+
+ var bday time.Time
+ if strings.TrimSpace(rec[6]) != "" {
+ // "2006-01-02" in UTC
+ t, err := time.Parse("2006-01-02", rec[6])
+ if err != nil {
+ return fmt.Errorf("user birthday: %w", err)
+ }
+ bday = t.UTC()
+ } else {
+ // keep zero time; or:
+ // bday = model.SentinelBirthday() // <-- if you prefer sentinel
+ }
+
+ u := &model.User{
+ ID: id,
+ Nick: rec[1],
+ Name: rec[2],
+ Surname: rec[3],
+ Fathersname: strings.TrimSpace(rec[4]),
+ Sex: model.Sex(sexInt), // adjust if your type differs
+ Birthday: bday,
+ }
+
+ s.putUser(u)
+ }
+ return nil
+}
+
+func (s *Store) loadCardsCSV(path string) error {
+ r, closer, err := openCSV(path)
+ if err != nil {
+ return err
+ }
+ defer closer.Close()
+
+ // header
+ if _, err := r.Read(); err != nil {
+ return fmt.Errorf("cards header: %w", err)
+ }
+
+ bar := progressbar.Default(int64(177000), "reading cards")
+ for {
+ bar.Add(1)
+ rec, err := r.Read()
+ if errors.Is(err, io.EOF) {
+ break
+ }
+ if err != nil {
+ return fmt.Errorf("cards read: %w", err)
+ }
+ // columns: id, prefix, number, bonusprogramm, user_id
+ if len(rec) < 5 {
+ return fmt.Errorf("cards row has %d columns, expected >=5", len(rec))
+ }
+
+ id, err := parseUint(rec[0])
+ if err != nil {
+ return fmt.Errorf("card id: %w", err)
+ }
+ num, err := parseUint(rec[2])
+ if err != nil {
+ return fmt.Errorf("card number: %w", err)
+ }
+ uid, err := parseUint(rec[4])
+ if err != nil {
+ return fmt.Errorf("card user_id: %w", err)
+ }
+
+ c := &model.Card{
+ ID: id,
+ Prefix: rec[1],
+ Number: num,
+ Bonusprogramm: rec[3],
+ UserID: uid,
+ }
+ s.putCard(c)
+
+ // index: cardsByUser
+ if _, ok := s.cardsByUser[uid]; !ok {
+ s.cardsByUser[uid] = make(map[uint64]struct{})
+ }
+ s.cardsByUser[uid][id] = struct{}{}
+ }
+ return nil
+}
+
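+// loadFlightsCSV reads flights.csv as written by ExportFlightsCSV; dep_iso is
+// preferred when has_time is true. An illustrative row (values are hypothetical):
+//
+//    7,SU100,SVO,JFK,55.97,37.41,40.64,-73.78,2019-05-01,true,08:30:00,2019-05-01T08:30:00Z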
+func (s *Store) loadFlightsCSV(path string) error {
+ r, closer, err := openCSV(path)
+ if err != nil {
+ return err
+ }
+ defer closer.Close()
+
+ // header
+ if _, err := r.Read(); err != nil {
+ return fmt.Errorf("flights header: %w", err)
+ }
+
+ bar := progressbar.Default(int64(2000000), "reading flights")
+ for {
+ bar.Add(1)
+ rec, err := r.Read()
+ if errors.Is(err, io.EOF) {
+ break
+ }
+ if err != nil {
+ return fmt.Errorf("flights read: %w", err)
+ }
+ // columns:
+ // id, number, from, to, fromlat, fromlon, tolat, tolon, dep_date, has_time, dep_time, dep_iso
+ if len(rec) < 12 {
+ return fmt.Errorf("flights row has %d columns, expected >=12", len(rec))
+ }
+
+ id, err := parseUint(rec[0])
+ if err != nil {
+ return fmt.Errorf("flight id: %w", err)
+ }
+
+ fromLat, err := parseFloat(rec[4])
+ if err != nil {
+ return fmt.Errorf("fromlat: %w", err)
+ }
+ fromLon, err := parseFloat(rec[5])
+ if err != nil {
+ return fmt.Errorf("fromlon: %w", err)
+ }
+ toLat, err := parseFloat(rec[6])
+ if err != nil {
+ return fmt.Errorf("tolat: %w", err)
+ }
+ toLon, err := parseFloat(rec[7])
+ if err != nil {
+ return fmt.Errorf("tolon: %w", err)
+ }
+
+ depDateStr := strings.TrimSpace(rec[8]) // "2006-01-02"
+ hasTime, err := strconv.ParseBool(rec[9])
+ if err != nil {
+ return fmt.Errorf("has_time: %w", err)
+ }
+
+ var dep time.Time
+ if hasTime {
+ // When exported with time present, dep_iso is RFC3339; prefer it for full fidelity.
+ depISO := strings.TrimSpace(rec[11])
+ if depISO != "" {
+ t, err := time.Parse(time.RFC3339, depISO)
+ if err != nil {
+ return fmt.Errorf("dep_iso: %w", err)
+ }
+ dep = t
+ } else {
+ // Fallback: combine dep_date + dep_time in local (treat as UTC if not specified)
+ depTime := strings.TrimSpace(rec[10]) // "15:04:05"
+ t, err := time.Parse("2006-01-02 15:04:05", depDateStr+" "+depTime)
+ if err != nil {
+ return fmt.Errorf("dep_date+dep_time: %w", err)
+ }
+ dep = t.UTC()
+ }
+ } else {
+ // Date only → set at UTC midnight of that date
+ if depDateStr == "" {
+ return fmt.Errorf("dep_date is empty while has_time=false")
+ }
+ t, err := time.Parse("2006-01-02", depDateStr)
+ if err != nil {
+ return fmt.Errorf("dep_date: %w", err)
+ }
+ dep = t.UTC()
+ }
+
+ f := &model.Flight{
+ ID: id,
+ Number: rec[1],
+ From: rec[2],
+ To: rec[3],
+ FromCoords: model.LatLong{
+ Lat: fromLat,
+ Long: fromLon,
+ },
+ ToCoords: model.LatLong{
+ Lat: toLat,
+ Long: toLon,
+ },
+ Date: dep,
+ HasTime: hasTime,
+ }
+
+ s.putFlight(f)
+ }
+ return nil
+}
+
+func (s *Store) loadUserFlightsCSV(path string) error {
+ r, closer, err := openCSV(path)
+ if err != nil {
+ return err
+ }
+ defer closer.Close()
+
+ // header
+ if _, err := r.Read(); err != nil {
+ return fmt.Errorf("user_flights header: %w", err)
+ }
+
+ bar := progressbar.Default(int64(3200000), "reading u-flights")
+ for {
+ bar.Add(1)
+ rec, err := r.Read()
+ if errors.Is(err, io.EOF) {
+ break
+ }
+ if err != nil {
+ return fmt.Errorf("user_flights read: %w", err)
+ }
+ // columns: user_id, flight_id
+ if len(rec) < 2 {
+ return fmt.Errorf("user_flights row has %d columns, expected >=2", len(rec))
+ }
+ uid, err := parseUint(rec[0])
+ if err != nil {
+ return fmt.Errorf("user_id: %w", err)
+ }
+ fid, err := parseUint(rec[1])
+ if err != nil {
+ return fmt.Errorf("flight_id: %w", err)
+ }
+
+ // guard against missing references (mirror your exporter’s checks)
+ if !s.validFlightID(fid) {
+ continue
+ }
+ if _, ok := s.userFlights[uid]; !ok {
+ s.userFlights[uid] = make(map[uint64]struct{})
+ }
+ s.userFlights[uid][fid] = struct{}{}
+ }
+ return nil
+}
+
+func (s *Store) loadCardFlightsCSV(path string) error {
+ r, closer, err := openCSV(path)
+ if err != nil {
+ return err
+ }
+ defer closer.Close()
+
+ // header
+ if _, err := r.Read(); err != nil {
+ return fmt.Errorf("card_flights header: %w", err)
+ }
+
+ for {
+ rec, err := r.Read()
+ if errors.Is(err, io.EOF) {
+ break
+ }
+ if err != nil {
+ return fmt.Errorf("card_flights read: %w", err)
+ }
+ // columns: card_id, flight_id
+ if len(rec) < 2 {
+ return fmt.Errorf("card_flights row has %d columns, expected >=2", len(rec))
+ }
+ cid, err := parseUint(rec[0])
+ if err != nil {
+ return fmt.Errorf("card_id: %w", err)
+ }
+ fid, err := parseUint(rec[1])
+ if err != nil {
+ return fmt.Errorf("flight_id: %w", err)
+ }
+
+ if !s.validFlightID(fid) {
+ continue
+ }
+ if _, ok := s.cardFlights[cid]; !ok {
+ s.cardFlights[cid] = make(map[uint64]struct{})
+ }
+ s.cardFlights[cid][fid] = struct{}{}
+ }
+ return nil
+}
+
+// --- helpers ---
+
+func openCSV(path string) (*csv.Reader, io.Closer, error) {
+ f, err := os.Open(path)
+ if err != nil {
+ return nil, nil, err
+ }
+ r := csv.NewReader(f)
+ r.ReuseRecord = true
+ // r.FieldsPerRecord = -1 // uncomment to allow a variable number of columns per row
+ return r, f, nil
+}
+
+func parseUint(s string) (uint64, error) {
+ return strconv.ParseUint(strings.TrimSpace(s), 10, 64)
+}
+func parseInt(s string) (int64, error) {
+ return strconv.ParseInt(strings.TrimSpace(s), 10, 64)
+}
+func parseFloat(s string) (float64, error) {
+ if strings.TrimSpace(s) == "" {
+ return 0, nil
+ }
+ return strconv.ParseFloat(strings.TrimSpace(s), 64)
+}
+
+// ensureLen grows slice so that index id is addressable (IDs are 1-based; index 0 stays unused).
+func ensureLen[T any](slice []*T, id uint64) []*T {
+ needed := int(id) + 1 // smallest length for which slice[id] is a valid index
+ if len(slice) < needed {
+ newSlice := make([]*T, needed)
+ copy(newSlice, slice)
+ return newSlice
+ }
+ return slice
+}
+
+func (s *Store) putUser(u *model.User) {
+ if u == nil {
+ return
+ }
+ s.users = ensureLen[model.User](s.users, u.ID)
+ s.users[u.ID] = u
+}
+
+func (s *Store) putCard(c *model.Card) {
+ if c == nil {
+ return
+ }
+ s.cards = ensureLen[model.Card](s.cards, c.ID)
+ s.cards[c.ID] = c
+}
+
+func (s *Store) putFlight(f *model.Flight) {
+ if f == nil {
+ return
+ }
+ s.flights = ensureLen[model.Flight](s.flights, f.ID)
+ s.flights[f.ID] = f
+}
+
+func (s *Store) validFlightID(fid uint64) bool {
+ return fid != 0 && int(fid) < len(s.flights) && s.flights[fid] != nil
+}
diff --git a/pkg/localstore/store.go b/pkg/localstore/store.go
new file mode 100644
index 0000000..151b2b4
--- /dev/null
+++ b/pkg/localstore/store.go
@@ -0,0 +1,638 @@
+package localstore
+
+import (
+ "errors"
+ "strings"
+ "sync"
+ "time"
+ "unicode/utf8"
+
+ "airlines/pkg/model"
+)
+
+type userNameKey struct {
+ Surname string
+ Name string
+ Fathersname string // "" allowed
+ BirthYMD int32 // 0 if unknown
+}
+
+type userInitKey struct {
+ Surname string
+ Name string
+ Init string // one letter
+ BirthYMD int32
+}
+
+type cardKey struct {
+ Prefix string
+ Bonus string // may be ""
+ Number uint64
+}
+
+type cardPairKey struct {
+ Prefix string
+ Number uint64
+}
+
+type flightKey struct {
+ Number string
+ From string
+ To string
+ DateYMD int32
+ HasTime bool
+ Sec int32 // seconds since local midnight if HasTime
+}
+
+type flightDayKey struct {
+ Number string
+ From string
+ To string
+ DateYMD int32
+}
+
+
+type Store struct {
+ mu sync.RWMutex
+
+ users []*model.User
+ cards []*model.Card
+ flights []*model.Flight
+
+ nickToUID map[string]uint64
+ nameToUID map[userNameKey]uint64
+ nameInitToUID map[userInitKey]uint64
+
+ cardToCID map[cardKey]uint64
+ cardPairToCID map[cardPairKey]uint64
+
+ flightToFID map[flightKey]uint64
+ flightByDay map[flightDayKey]uint64
+
+ userFlights map[uint64]map[uint64]struct{} // user_id -> set(flight_id)
+ cardFlights map[uint64]map[uint64]struct{} // card_id -> set(flight_id)
+
+ codesByUser map[uint64]map[string]struct{} // user_id -> set(code)
+ countriesByUser map[uint64]map[string]struct{} // user_id -> set(country)
+ cardsByUser map[uint64]map[uint64]struct{} // user_id -> set(card_id)
+}
+
+func NewLocalStore() *Store {
+ return &Store{
+ users: make([]*model.User, 1),
+ cards: make([]*model.Card, 1),
+ flights: make([]*model.Flight, 1),
+ nickToUID: make(map[string]uint64, 1<<12),
+ nameToUID: make(map[userNameKey]uint64, 1<<12),
+ cardToCID: make(map[cardKey]uint64, 1<<14),
+ cardPairToCID: make(map[cardPairKey]uint64, 1<<14),
+ flightToFID: make(map[flightKey]uint64, 1<<15),
+ flightByDay: make(map[flightDayKey]uint64, 1<<15),
+ userFlights: make(map[uint64]map[uint64]struct{}, 1<<12),
+ cardFlights: make(map[uint64]map[uint64]struct{}, 1<<12),
+ nameInitToUID: make(map[userInitKey]uint64, 1<<12),
+ codesByUser: make(map[uint64]map[string]struct{}, 1<<12),
+ countriesByUser: make(map[uint64]map[string]struct{}, 1<<12),
+ cardsByUser: make(map[uint64]map[uint64]struct{}, 1<<12),
+ }
+}
+
+
+func isZeroCoord(c model.LatLong) bool { return c.Lat == 0 && c.Long == 0 }
+
+func ymdUTC(t time.Time) int32 {
+ if t.IsZero() || t.Year() == model.SentinelBirthday().Year() {
+ return 0
+ }
+ u := time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, time.UTC)
+ return int32(u.Year()*10000 + int(u.Month())*100 + u.Day())
+}
+
+func secSinceMidnight(t time.Time) int32 {
+ h, m, s := t.Clock()
+ return int32(h*3600 + m*60 + s)
+}
+
+func ensureSet(m map[uint64]map[uint64]struct{}, k uint64) map[uint64]struct{} {
+ s, ok := m[k]
+ if !ok {
+ s = make(map[uint64]struct{}, 8)
+ m[k] = s
+ }
+ return s
+}
+
+/* ============================== users ============================= */
+
+func fatherInitial(s string) string {
+ s = strings.TrimSpace(s)
+ if s == "" {
+ return ""
+ }
+ r, _ := utf8.DecodeRuneInString(s)
+ if r == utf8.RuneError {
+ return ""
+ }
+ return string(r) // your pipeline keeps it UPPER
+}
+
+func addUserInitIndex(m map[userInitKey]uint64, u *model.User) {
+ k := userInitKey{Surname: u.Surname, Name: u.Name, Init: fatherInitial(u.Fathersname), BirthYMD: ymdUTC(u.Birthday)}
+ if _, exists := m[k]; !exists {
+ m[k] = u.ID
+ }
+}
+
+func delUserInitIndex(m map[userInitKey]uint64, u *model.User) {
+ k := userInitKey{Surname: u.Surname, Name: u.Name, Init: fatherInitial(u.Fathersname), BirthYMD: ymdUTC(u.Birthday)}
+ delete(m, k)
+}
+
+// mergeUserFields upgrades an existing user in place: fathersname initial → full
+// (same initial), missing birthday, nick and sex.
+func (s *Store) mergeUserFields(id uint64, in *model.User) *model.User {
+ ex := s.users[id]
+ // fathersname: initial -> full (same initial), move indexes
+ if ex.Fathersname != "" && len([]rune(ex.Fathersname)) == 1 &&
+ in.Fathersname != "" && fatherInitial(ex.Fathersname) == fatherInitial(in.Fathersname) &&
+ ex.Fathersname != in.Fathersname {
+ oldNameKey := userNameKey{Surname: ex.Surname, Name: ex.Name, Fathersname: ex.Fathersname, BirthYMD: ymdUTC(ex.Birthday)}
+ if _, ok := s.nameToUID[oldNameKey]; ok {
+ delete(s.nameToUID, oldNameKey)
+ }
+ delUserInitIndex(s.nameInitToUID, ex)
+ ex.Fathersname = in.Fathersname
+ s.nameToUID[userNameKey{Surname: ex.Surname, Name: ex.Name, Fathersname: ex.Fathersname, BirthYMD: ymdUTC(ex.Birthday)}] = id
+ addUserInitIndex(s.nameInitToUID, ex)
+ }
+ // birthday upgrade
+ if ymdUTC(ex.Birthday) == 0 && ymdUTC(in.Birthday) != 0 {
+ oldNameKey := userNameKey{Surname: ex.Surname, Name: ex.Name, Fathersname: ex.Fathersname, BirthYMD: 0}
+ if _, ok := s.nameToUID[oldNameKey]; ok {
+ delete(s.nameToUID, oldNameKey)
+ }
+ delUserInitIndex(s.nameInitToUID, ex)
+ ex.Birthday = in.Birthday
+ s.nameToUID[userNameKey{Surname: ex.Surname, Name: ex.Name, Fathersname: ex.Fathersname, BirthYMD: ymdUTC(ex.Birthday)}] = id
+ addUserInitIndex(s.nameInitToUID, ex)
+ }
+ // nick/sex
+ if ex.Nick == "" && in.Nick != "" {
+ ex.Nick = in.Nick
+ s.nickToUID[in.Nick] = id
+ }
+ if ex.Sex == model.SexUnknown && in.Sex != model.SexUnknown {
+ ex.Sex = in.Sex
+ }
+ return ex
+}
+
+// SaveUser deduplicates users: first by nick, then by the exact
+// (surname, name, fathersname, birthday) tuple, then by fathersname initial
+// (tried with both the incoming birthday and a zero birthday), and finally by
+// a few relaxed name keys before creating a new record.
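+//
+// An illustrative sketch (names and the date are hypothetical; callers are
+// expected to pass names already upper-cased, as elsewhere in this package):
+//
+//    st := NewLocalStore()
+//    a, _ := st.SaveUser(&model.User{Surname: "IVANOV", Name: "IVAN", Fathersname: "P"})
+//    b, _ := st.SaveUser(&model.User{Surname: "IVANOV", Name: "IVAN", Fathersname: "PETROVICH",
+//        Birthday: time.Date(1980, 2, 3, 0, 0, 0, 0, time.UTC)})
+//    // a == b: the initial-only record is upgraded to the full fathersname
+//    // and the missing birthday is filled in.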
+func (s *Store) SaveUser(u *model.User) (*model.User, error) {
+ if u == nil {
+ return nil, errors.New("nil user")
+ }
+ // normalize (names already UPPER)
+ u.Nick = strings.TrimSpace(u.Nick)
+ u.Name = strings.TrimSpace(u.Name)
+ u.Surname = strings.TrimSpace(u.Surname)
+ u.Fathersname = strings.Trim(strings.TrimSpace(u.Fathersname), ".,")
+ if u.Birthday.IsZero() {
+ u.Birthday = model.SentinelBirthday()
+ }
+ inBirth := ymdUTC(u.Birthday)
+ inKey := userNameKey{Surname: u.Surname, Name: u.Name, Fathersname: u.Fathersname, BirthYMD: inBirth}
+
+ s.mu.Lock()
+ defer s.mu.Unlock()
+
+ // 1) by Nick
+ if u.Nick != "" {
+ if id, ok := s.nickToUID[u.Nick]; ok {
+ return s.mergeUserFields(id, u), nil
+ }
+ }
+
+ // 2) exact tuple
+ if id, ok := s.nameToUID[inKey]; ok {
+ if u.Nick != "" && s.users[id].Nick == "" {
+ s.users[id].Nick = u.Nick
+ s.nickToUID[u.Nick] = id
+ }
+ return s.mergeUserFields(id, u), nil
+ }
+
+ // 3) initial-based match (try with incoming birth, then with 0)
+ init := fatherInitial(u.Fathersname)
+ tryInits := []userInitKey{
+ {Surname: u.Surname, Name: u.Name, Init: init, BirthYMD: inBirth},
+ }
+ if inBirth != 0 {
+ tryInits = append(tryInits, userInitKey{Surname: u.Surname, Name: u.Name, Init: init, BirthYMD: 0})
+ }
+ for _, ik := range tryInits {
+ if id, ok := s.nameInitToUID[ik]; ok {
+ ex := s.users[id]
+
+ // If ex has initial-only and incoming has full (same initial) → upgrade fathers + move indexes
+ if ex.Fathersname == fatherInitial(ex.Fathersname) &&
+ u.Fathersname != "" &&
+ fatherInitial(u.Fathersname) == fatherInitial(ex.Fathersname) {
+ // move name index
+ oldNameKey := userNameKey{Surname: ex.Surname, Name: ex.Name, Fathersname: ex.Fathersname, BirthYMD: ymdUTC(ex.Birthday)}
+ delete(s.nameToUID, oldNameKey)
+ // remove old init index (birth may differ)
+ delUserInitIndex(s.nameInitToUID, ex)
+
+ ex.Fathersname = u.Fathersname
+
+ newNameKey := userNameKey{Surname: ex.Surname, Name: ex.Name, Fathersname: ex.Fathersname, BirthYMD: ymdUTC(ex.Birthday)}
+ s.nameToUID[newNameKey] = id
+ addUserInitIndex(s.nameInitToUID, ex)
+ }
+
+ // Upgrade birthday if needed
+ if ymdUTC(ex.Birthday) == 0 && inBirth != 0 {
+ // move name key 0 -> inBirth
+ oldNameKey := userNameKey{Surname: ex.Surname, Name: ex.Name, Fathersname: ex.Fathersname, BirthYMD: 0}
+ if _, ok2 := s.nameToUID[oldNameKey]; ok2 {
+ delete(s.nameToUID, oldNameKey)
+ }
+ // move init index 0 -> inBirth
+ delUserInitIndex(s.nameInitToUID, ex)
+ ex.Birthday = u.Birthday
+ s.nameToUID[userNameKey{Surname: ex.Surname, Name: ex.Name, Fathersname: ex.Fathersname, BirthYMD: inBirth}] = id
+ addUserInitIndex(s.nameInitToUID, ex)
+ }
+
+ // nick/sex upgrades
+ if u.Nick != "" && ex.Nick == "" {
+ ex.Nick = u.Nick
+ s.nickToUID[u.Nick] = id
+ }
+ if ex.Sex == model.SexUnknown && u.Sex != model.SexUnknown {
+ ex.Sex = u.Sex
+ }
+ return ex, nil
+ }
+ }
+
+ // 4) relaxed: fathersname empty, same birth
+ if id, ok := s.nameToUID[userNameKey{Surname: u.Surname, Name: u.Name, Fathersname: "", BirthYMD: inBirth}]; ok {
+ ex := s.users[id]
+ if ex.Fathersname == "" && u.Fathersname != "" {
+ delete(s.nameToUID, userNameKey{Surname: ex.Surname, Name: ex.Name, Fathersname: "", BirthYMD: inBirth})
+ ex.Fathersname = u.Fathersname
+ s.nameToUID[userNameKey{Surname: ex.Surname, Name: ex.Name, Fathersname: ex.Fathersname, BirthYMD: inBirth}] = id
+ addUserInitIndex(s.nameInitToUID, ex)
+ }
+ if u.Nick != "" && ex.Nick == "" {
+ ex.Nick = u.Nick
+ s.nickToUID[u.Nick] = id
+ }
+ if ex.Sex == model.SexUnknown && u.Sex != model.SexUnknown {
+ ex.Sex = u.Sex
+ }
+ if ymdUTC(ex.Birthday) == 0 && inBirth != 0 {
+ ex.Birthday = u.Birthday
+ }
+ return ex, nil
+ }
+
+ // 5) same fathersname, no birth
+ if id, ok := s.nameToUID[userNameKey{Surname: u.Surname, Name: u.Name, Fathersname: u.Fathersname, BirthYMD: 0}]; ok {
+ delete(s.nameToUID, userNameKey{Surname: u.Surname, Name: u.Name, Fathersname: u.Fathersname, BirthYMD: 0})
+ ex := s.users[id]
+ ex.Birthday = u.Birthday
+ s.nameToUID[inKey] = id
+ delUserInitIndex(s.nameInitToUID, ex)
+ addUserInitIndex(s.nameInitToUID, ex)
+ if u.Nick != "" && ex.Nick == "" {
+ ex.Nick = u.Nick
+ s.nickToUID[u.Nick] = id
+ }
+ if ex.Sex == model.SexUnknown && u.Sex != model.SexUnknown {
+ ex.Sex = u.Sex
+ }
+ return ex, nil
+ }
+
+ // 6) fully unspecific existing (fathers="", birth=0)
+ if id, ok := s.nameToUID[userNameKey{Surname: u.Surname, Name: u.Name, Fathersname: "", BirthYMD: 0}]; ok {
+ ex := s.users[id]
+ delete(s.nameToUID, userNameKey{Surname: ex.Surname, Name: ex.Name, Fathersname: "", BirthYMD: 0})
+ if ex.Fathersname == "" && u.Fathersname != "" {
+ ex.Fathersname = u.Fathersname
+ }
+ if ymdUTC(ex.Birthday) == 0 && inBirth != 0 {
+ ex.Birthday = u.Birthday
+ }
+ s.nameToUID[userNameKey{Surname: ex.Surname, Name: ex.Name, Fathersname: ex.Fathersname, BirthYMD: ymdUTC(ex.Birthday)}] = id
+ addUserInitIndex(s.nameInitToUID, ex)
+ if u.Nick != "" && ex.Nick == "" {
+ ex.Nick = u.Nick
+ s.nickToUID[u.Nick] = id
+ }
+ if ex.Sex == model.SexUnknown && u.Sex != model.SexUnknown {
+ ex.Sex = u.Sex
+ }
+ return ex, nil
+ }
+
+ // 7) create
+ u.ID = uint64(len(s.users))
+ s.users = append(s.users, u)
+ if u.Nick != "" {
+ s.nickToUID[u.Nick] = u.ID
+ }
+ s.nameToUID[inKey] = u.ID
+ addUserInitIndex(s.nameInitToUID, u)
+ return u, nil
+}
+
+/* ============================== cards ============================= */
+/*
+Match order:
+ 1) exact (Prefix, Number, Bonus)
+ 2) pair (Prefix, Number) → if stored bonus=="" and incoming bonus!="", upgrade in place (move triple index)
+ 3) else create new
+Never steal UserID: only set if existing has 0 and incoming non-zero.
+*/
+
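+// An illustrative sketch of the match order above (prefix, number and the
+// bonus programme names are hypothetical):
+//
+//    st := NewLocalStore()
+//    a, _ := st.SaveCard(&model.Card{Prefix: "SU", Number: 44321})                                       // creates a new record
+//    b, _ := st.SaveCard(&model.Card{Prefix: "SU", Number: 44321, Bonusprogramm: "AEROFLOT", UserID: 7}) // pair match: bonus and user filled in, b == a
+//    c, _ := st.SaveCard(&model.Card{Prefix: "SU", Number: 44321, Bonusprogramm: "S7"})                  // different non-empty bonus: separate record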
+func (s *Store) SaveCard(c *model.Card) (*model.Card, error) {
+ if c == nil {
+ return nil, errors.New("nil card")
+ }
+ c.Prefix = strings.TrimSpace(c.Prefix)
+ c.Bonusprogramm = strings.TrimSpace(c.Bonusprogramm)
+ if c.Prefix == "" {
+ return nil, errors.New("invalid card: empty prefix")
+ }
+
+ tri := cardKey{Prefix: c.Prefix, Number: c.Number, Bonus: c.Bonusprogramm}
+ pair := cardPairKey{Prefix: c.Prefix, Number: c.Number}
+
+ s.mu.Lock()
+ defer s.mu.Unlock()
+
+ // exact triple
+ if id, ok := s.cardToCID[tri]; ok {
+ ex := s.cards[id]
+ if ex.UserID == 0 && c.UserID != 0 {
+ ex.UserID = c.UserID
+ if s.cardsByUser[ex.UserID] == nil {
+ s.cardsByUser[ex.UserID] = make(map[uint64]struct{}, 1024)
+ }
+ s.cardsByUser[ex.UserID][ex.ID] = struct{}{}
+ }
+ return ex, nil
+ }
+
+ // by pair
+ if id, ok := s.cardPairToCID[pair]; ok {
+ ex := s.cards[id]
+ // link user once; skip the index while no user is known
+ if ex.UserID == 0 && c.UserID != 0 {
+ ex.UserID = c.UserID
+ }
+ if ex.UserID != 0 {
+ if s.cardsByUser[ex.UserID] == nil {
+ s.cardsByUser[ex.UserID] = make(map[uint64]struct{}, 1024)
+ }
+ s.cardsByUser[ex.UserID][ex.ID] = struct{}{}
+ }
+ switch {
+ case ex.Bonusprogramm == "" && c.Bonusprogramm != "":
+ // move triple index from empty -> new bonus
+ oldTri := cardKey{Prefix: ex.Prefix, Number: ex.Number, Bonus: ex.Bonusprogramm}
+ delete(s.cardToCID, oldTri)
+ ex.Bonusprogramm = c.Bonusprogramm
+ newTri := cardKey{Prefix: ex.Prefix, Number: ex.Number, Bonus: ex.Bonusprogramm}
+ s.cardToCID[newTri] = id
+ return ex, nil
+ case ex.Bonusprogramm == "" && c.Bonusprogramm == "":
+ return ex, nil
+ case ex.Bonusprogramm != "" && c.Bonusprogramm == "":
+ return ex, nil
+ case ex.Bonusprogramm != "" && c.Bonusprogramm != "" && ex.Bonusprogramm != c.Bonusprogramm:
+ // different program → create new card record
+ default:
+ return ex, nil
+ }
+ }
+
+ // create
+ c.ID = uint64(len(s.cards))
+ s.cards = append(s.cards, c)
+ s.cardPairToCID[pair] = c.ID
+ s.cardToCID[tri] = c.ID // even if bonus == "", we still index triple
+
+ if c.UserID != 0 {
+ if s.cardsByUser[c.UserID] == nil {
+ s.cardsByUser[c.UserID] = make(map[uint64]struct{}, 1024)
+ }
+ s.cardsByUser[c.UserID][c.ID] = struct{}{}
+ }
+
+ return c, nil
+}
+
+/* ============================== flights =========================== */
+/*
+Identity:
+ - date-only: (Number, From, To, DateYMD, false, 0)
+ - timed : (Number, From, To, DateYMD, true, SecSinceMidnight)
+Upgrade:
+ - if a date-only exists and a timed arrives for the same day, upgrade in place
+Merge:
+ - coords: fill when missing
+ - relations: add (dedup via sets)
+*/
+
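+// An illustrative sketch of the upgrade path above (flight number, airports
+// and the date are hypothetical):
+//
+//    st := NewLocalStore()
+//    day := time.Date(2019, 5, 1, 0, 0, 0, 0, time.UTC)
+//    a, _ := st.SaveFlight(&model.Flight{Number: "SU100", From: "SVO", To: "JFK", Date: day})
+//    b, _ := st.SaveFlight(&model.Flight{Number: "SU100", From: "SVO", To: "JFK",
+//        Date: day.Add(8 * time.Hour), HasTime: true})
+//    // a == b: the date-only record is upgraded in place and now carries the 08:00 time.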
+func (s *Store) SaveFlight(f *model.Flight) (*model.Flight, error) {
+ if f == nil {
+ return nil, errors.New("nil flight")
+ }
+ f.Number = strings.TrimSpace(f.Number)
+ f.From = strings.TrimSpace(f.From)
+ f.To = strings.TrimSpace(f.To)
+
+ // normalize day
+ dayUTC := time.Date(f.Date.Year(), f.Date.Month(), f.Date.Day(), 0, 0, 0, 0, time.UTC)
+ ymd := ymdUTC(dayUTC)
+
+ var pKey flightKey
+ if f.HasTime {
+ pKey = flightKey{Number: f.Number, From: f.From, To: f.To, DateYMD: ymd, HasTime: true, Sec: secSinceMidnight(f.Date)}
+ } else {
+ f.Date = dayUTC // store as date-only
+ pKey = flightKey{Number: f.Number, From: f.From, To: f.To, DateYMD: ymd, HasTime: false, Sec: 0}
+ }
+ dayKey := flightDayKey{Number: f.Number, From: f.From, To: f.To, DateYMD: ymd}
+
+ s.mu.Lock()
+ defer s.mu.Unlock()
+
+ // 1) exact (precise) key
+ if id, ok := s.flightToFID[pKey]; ok {
+ ex := s.flights[id]
+ s.mergeFlightFields(id, ex, f)
+ return ex, nil
+ }
+
+ // 2) same day exists -> maybe upgrade date-only to timed
+ if id, ok := s.flightByDay[dayKey]; ok {
+ ex := s.flights[id]
+ exKey := s.keyOfFlight(ex)
+ if !ex.HasTime && f.HasTime {
+ // move map key to timed
+ delete(s.flightToFID, exKey)
+ ex.HasTime = true
+ // set clock from incoming (keep same calendar date)
+ ex.Date = time.Date(dayUTC.Year(), dayUTC.Month(), dayUTC.Day(),
+ f.Date.Hour(), f.Date.Minute(), f.Date.Second(), f.Date.Nanosecond(), f.Date.Location())
+ s.flightToFID[s.keyOfFlight(ex)] = id
+ // day index already points to best precision
+ }
+ // merge fields/relations
+ s.mergeFlightFields(id, ex, f)
+ return ex, nil
+ }
+
+ // 3) brand new
+ f.ID = uint64(len(s.flights))
+ s.flights = append(s.flights, f)
+ s.flightToFID[pKey] = f.ID
+ s.flightByDay[dayKey] = f.ID
+
+ // if s.countriesByUser[f.UserID] == nil {
+ // s.countriesByUser[f.UserID] = make(map[string]struct{}, 1024)
+ // }
+
+ // v := s.countriesByUser[f.UserID]
+ // dd, _ := airports.LookupIATA(f.From)
+ // v[dd.Country] = struct{}{}
+ // s.countriesByUser[f.UserID] = v
+
+ if f.Code != "" {
+ if s.codesByUser[f.UserID] == nil {
+ s.codesByUser[f.UserID] = make(map[string]struct{}, 1024)
+ }
+ codesByUser := s.codesByUser[f.UserID]
+ codesByUser[f.Code] = struct{}{}
+ s.codesByUser[f.UserID] = codesByUser
+ }
+
+ // relations
+ if f.UserID != 0 {
+ ensureSet(s.userFlights, f.UserID)[f.ID] = struct{}{}
+ }
+ if f.CardID != 0 {
+ ensureSet(s.cardFlights, f.CardID)[f.ID] = struct{}{}
+ }
+ return f, nil
+}
+
+func (s *Store) keyOfFlight(f *model.Flight) flightKey {
+ ymd := ymdUTC(time.Date(f.Date.Year(), f.Date.Month(), f.Date.Day(), 0, 0, 0, 0, time.UTC))
+ if f.HasTime {
+ return flightKey{Number: f.Number, From: f.From, To: f.To, DateYMD: ymd, HasTime: true, Sec: secSinceMidnight(f.Date)}
+ }
+ return flightKey{Number: f.Number, From: f.From, To: f.To, DateYMD: ymd, HasTime: false, Sec: 0}
+}
+
+func (s *Store) mergeFlightFields(id uint64, ex, in *model.Flight) {
+ // coords: fill when empty
+ if isZeroCoord(ex.FromCoords) && !isZeroCoord(in.FromCoords) {
+ ex.FromCoords = in.FromCoords
+ }
+ if isZeroCoord(ex.ToCoords) && !isZeroCoord(in.ToCoords) {
+ ex.ToCoords = in.ToCoords
+ }
+ // relations
+ if in.UserID != 0 {
+ ensureSet(s.userFlights, in.UserID)[id] = struct{}{}
+ }
+ if in.CardID != 0 {
+ ensureSet(s.cardFlights, in.CardID)[id] = struct{}{}
+ }
+ if in.Code != "" && ex.Code == "" {
+ ex.Code = in.Code
+
+ // if s.codesByUser[in.UserID] == nil {
+ // s.codesByUser[in.UserID] = make(map[string]struct{}, 1024)
+ // }
+ // codesByUser := s.codesByUser[in.UserID]
+ // codesByUser[in.Code] = struct{}{}
+ // s.codesByUser[in.UserID] = codesByUser
+ }
+}
+
+/* ============================== finders =========================== */
+
+func (s *Store) FindUserByNick(nick string) (*model.User, bool) {
+ s.mu.RLock()
+ defer s.mu.RUnlock()
+ id, ok := s.nickToUID[strings.TrimSpace(nick)]
+ if !ok || id == 0 || int(id) >= len(s.users) {
+ return nil, false
+ }
+ return s.users[id], true
+}
+
+func (s *Store) FindUserByName(name, surname, fathers string, bday time.Time) (*model.User, bool) {
+ key := userNameKey{
+ Surname: strings.TrimSpace(surname),
+ Name: strings.TrimSpace(name),
+ Fathersname: strings.TrimSpace(fathers),
+ BirthYMD: ymdUTC(bday),
+ }
+ s.mu.RLock()
+ defer s.mu.RUnlock()
+ id, ok := s.nameToUID[key]
+ if !ok || id == 0 || int(id) >= len(s.users) {
+ return nil, false
+ }
+ return s.users[id], true
+}
+
+func (s *Store) FindCard(prefix string, number uint64, bonus string) (*model.Card, bool) {
+ tri := cardKey{Prefix: strings.TrimSpace(prefix), Number: number, Bonus: strings.TrimSpace(bonus)}
+ s.mu.RLock()
+ defer s.mu.RUnlock()
+ if id, ok := s.cardToCID[tri]; ok && id != 0 && int(id) < len(s.cards) {
+ return s.cards[id], true
+ }
+ // fall back to pair if no exact
+ pair := cardPairKey{Prefix: strings.TrimSpace(prefix), Number: number}
+ if id, ok := s.cardPairToCID[pair]; ok && id != 0 && int(id) < len(s.cards) {
+ return s.cards[id], true
+ }
+ return nil, false
+}
+
+func (s *Store) FindFlight(number, from, to string, date time.Time, hasTime bool) (*model.Flight, bool) {
+ number = strings.TrimSpace(number)
+ from = strings.TrimSpace(from)
+ to = strings.TrimSpace(to)
+
+ ymd := ymdUTC(time.Date(date.Year(), date.Month(), date.Day(), 0, 0, 0, 0, time.UTC))
+ var k flightKey
+ if hasTime {
+ k = flightKey{Number: number, From: from, To: to, DateYMD: ymd, HasTime: true, Sec: secSinceMidnight(date)}
+ } else {
+ k = flightKey{Number: number, From: from, To: to, DateYMD: ymd, HasTime: false, Sec: 0}
+ }
+
+ s.mu.RLock()
+ defer s.mu.RUnlock()
+ if id, ok := s.flightToFID[k]; ok && id != 0 && int(id) < len(s.flights) {
+ return s.flights[id], true
+ }
+ // day-level fallback (returns best precision for the day if exact key absent)
+ if id, ok := s.flightByDay[flightDayKey{Number: number, From: from, To: to, DateYMD: ymd}]; ok && id != 0 && int(id) < len(s.flights) {
+ return s.flights[id], true
+ }
+ return nil, false
+}