package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"log"
	"os"
	"path/filepath"
	"regexp"
	"time"

	"github.com/gregjones/httpcache"
	"github.com/gregjones/httpcache/diskcache"
	"github.com/peterbourgon/diskv"
)

// albumsDataTest holds the albums decoded from test/data/albums.json.
var albumsDataTest []Album

// testGetter is an HTTP client backed by an on-disk cache, so tests do not hit
// the network for responses that were already fetched once.
var testGetter HTTPGetter

// StripNonCsvField returns a copy of a containing only the fields that appear
// in the CSV export.
func StripNonCsvField(a Album) Album {
	return Album{
		ID:           a.ID,
		ISBN:         a.ISBN,
		Series:       a.Series,
		Title:        a.Title,
		Num:          a.Num,
		NumA:         a.NumA,
		State:        a.State,
		Editor:       a.Editor,
		Collection:   a.Collection,
		SatID:        a.SatID,
		LegalDeposit: a.LegalDeposit,
		PrintDate:    a.PrintDate,
		PurchaseDate: a.PurchaseDate,
	}
}

// formatDate writes t as "MM/YYYY;". A zero date (Unix epoch) is written as an
// empty field, except the first time one is seen: *fakeError injects a single
// deliberately malformed date ("00/0000") into the generated CSV.
func formatDate(w io.Writer, t time.Time, fakeError *bool) {
	if t.Equal(time.Unix(0, 0)) {
		if *fakeError {
			fmt.Fprintf(w, "00/0000;")
			*fakeError = false
		} else {
			fmt.Fprintf(w, ";")
		}
	} else {
		fmt.Fprintf(w, "%s;", t.Format("01/2006"))
	}
}

// rxSatID splits a SatID of the form "<letters>-<alphanumerics>".
var rxSatID = regexp.MustCompile(`\A([a-zA-Z]+)-([a-zA-Z0-9]+)\z`)

// GetCsvData renders albumsDataTest as the semicolon-separated CSV expected by
// the importer and returns it as an in-memory buffer.
func GetCsvData() *bytes.Buffer {
	res := &bytes.Buffer{}
	fmt.Fprintf(res, "%s\n", "IdAlbum;ISBN;Serie;Num;NumA;Titre;Editeur;Collection;EO;DL;AI;Cote;Etat;DateAchat;PrixAchat;Note;Scenariste;Dessinateur;Wishlist;AVendre;Perso1;Perso2;Perso3;Perso4;Format;Suivi;Commentaire;Table")
	fakeWrongTime := true
	for _, a := range albumsDataTest {
		fmt.Fprintf(res, "%d;%s;\"%s\";", a.ID, a.ISBN, a.Series)
		if a.Num < 0 {
			fmt.Fprintf(res, ";")
		} else {
			fmt.Fprintf(res, "%d;", a.Num)
		}
		fmt.Fprintf(res, "\"%s\";\"%s\";\"%s\";\"%s\";1;", a.NumA, a.Title, a.Editor, a.Collection)
		formatDate(res, a.LegalDeposit, &fakeWrongTime)
		formatDate(res, a.PrintDate, &fakeWrongTime)
		m := rxSatID.FindStringSubmatch(a.SatID)
		if m == nil {
			panic(fmt.Sprintf("Could not read regexp %#v", a))
		}
		fmt.Fprintf(res, ";%d;%s;;;%s;;;;%s;%s;;;\"foo\";;;ALBUM\n",
			a.State, a.PurchaseDate.Format("02/01/2006"), "", m[1], m[2])
	}
	return res
}

func init() {
	start := time.Now()

	// Load the reference albums from the JSON test fixture.
	albumsPath := filepath.Join("test", "data", "albums.json")
	f, err := os.Open(albumsPath)
	if err != nil {
		panic(fmt.Sprintf("Could not open '%s': %s", albumsPath, err))
	}
	defer closeOrPanic(f, albumsPath)

	dec := json.NewDecoder(f)
	if err = dec.Decode(&albumsDataTest); err != nil {
		panic(fmt.Sprintf("Could not parse '%s': %s", albumsPath, err))
	}
	log.Printf("loaded test data in %s", time.Since(start))

	// Build the disk-backed HTTP cache used by the tests.
	start = time.Now()
	dv := diskv.New(diskv.Options{
		BasePath:     filepath.Join("test", "data", "web-cache"),
		CacheSizeMax: 100 * 1024 * 1024, // 100MB
	})
	testGetter = httpcache.NewTransport(diskcache.NewWithDiskv(dv)).Client()
	log.Printf("loaded httpcache in %s", time.Since(start))

	// Regenerate albums.csv only when it is missing or older than albums.json.
	jsonInfo, err := os.Stat(albumsPath)
	if err != nil {
		panic(fmt.Sprintf("Could not stat %s: %s", albumsPath, err))
	}

	csvPath := filepath.Join("test", "data", "albums.csv")
	csvInfo, err := os.Stat(csvPath)
	if err != nil && !os.IsNotExist(err) {
		panic(fmt.Sprintf("Could not stat %s: %s", csvPath, err))
	}
	if err == nil && csvInfo.ModTime().After(jsonInfo.ModTime()) {
		// The CSV is newer than the JSON fixture: no need to update it.
		return
	}

	newFile, err := os.Create(csvPath)
	if err != nil {
		panic(fmt.Sprintf("Could not create %s: %s", csvPath, err))
	}
	defer closeOrPanic(newFile, csvPath)

	if _, err = io.Copy(newFile, GetCsvData()); err != nil {
		panic(fmt.Sprintf("Could not save csv data: %s", err))
	}
}
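
// The helper below is an illustrative sketch only, not part of the original
// test fixtures: it assumes that dumping the generated CSV to stdout is useful
// when inspecting the test data by hand. It relies solely on GetCsvData and
// the imports already present in this file.
func dumpCsvData() {
	// GetCsvData returns a *bytes.Buffer, so it can be streamed to any io.Writer.
	if _, err := io.Copy(os.Stdout, GetCsvData()); err != nil {
		log.Printf("could not dump csv data: %s", err)
	}
}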