Makes HTTP requests cached for tests

This commit is contained in:
2016-01-20 13:28:07 +01:00
parent 261c43184e
commit ea62247dad
5 changed files with 23 additions and 20 deletions

View File

@@ -81,7 +81,7 @@ func (c *AlbumCoverCache) fetch(a *Album) (io.ReadCloser, error) {
}
f, err := os.Create(c.coverPath(a))
if err != nil {
resp.Body.Close()
closeOrPanic(resp.Body, "GET:"+a.CoverURL)
return nil, err
}

View File

@@ -24,28 +24,18 @@ func (g *errorGetter) Get(URL string) (*http.Response, error) {
}
func (s *AlbumCoverCacheSuite) TestCanFetchCache(c *C) {
data := []*Album{
&Album{
ID: 41693,
CoverURL: "http://www.bedetheque.com/media/Couvertures/Couv_41693.jpg",
},
&Album{
ID: 1285,
CoverURL: "http://www.bedetheque.com/media/Couvertures/OumpahPahLepeauxrouge.jpg",
},
}
tmpdir, err := ioutil.TempDir("", "satbdexplorer-tests-cache")
c.Assert(err, IsNil)
defer os.RemoveAll(tmpdir)
cache, err := NewAlbumCoverCache(tmpdir, 10, 10*time.Second)
cache.getter = testGetter
c.Assert(err, IsNil)
var resData = []bytes.Buffer{}
for _, a := range data {
cover, err := cache.GetCover(a)
for _, a := range albumsDataTest {
cover, err := cache.GetCover(&a)
var buf bytes.Buffer
if c.Check(err, IsNil) == true {
_, err := io.Copy(&buf, cover)
@@ -59,8 +49,8 @@ func (s *AlbumCoverCacheSuite) TestCanFetchCache(c *C) {
// now we check that we get it again, but from the disk, not
// hitting the web
for i, a := range data {
cover, err := cache.GetCover(a)
for i, a := range albumsDataTest {
cover, err := cache.GetCover(&a)
var buf bytes.Buffer
if c.Check(err, IsNil) == true {
_, err := io.Copy(&buf, cover)
@@ -74,8 +64,8 @@ func (s *AlbumCoverCacheSuite) TestCanFetchCache(c *C) {
	// now if we hit the TTL, we will refetch and get an error
cache.TTL = 0
for _, a := range data {
cover, err := cache.GetCover(a)
for _, a := range albumsDataTest {
cover, err := cache.GetCover(&a)
c.Check(cover, IsNil)
c.Check(err, ErrorMatches, "I will always have an error")
}

View File

@@ -32,7 +32,7 @@ func (g *AlbumDescriptionGetter) Get(a *Album) error {
if err != nil {
return err
}
defer resp.Body.Close()
defer closeOrPanic(resp.Body, "GET:http://"+URL)
doc, err := goquery.NewDocumentFromReader(resp.Body)
if err != nil {

View File

@@ -9,10 +9,16 @@ import (
"path/filepath"
"regexp"
"time"
"github.com/gregjones/httpcache"
"github.com/gregjones/httpcache/diskcache"
"github.com/peterbourgon/diskv"
)
var albumsDataTest []Album
var testGetter HTTPGetter
// StripNonCsvField strips all the fields which are not in the Csv
func StripNonCsvField(a Album) Album {
return Album{
@@ -89,4 +95,11 @@ func init() {
if err = dec.Decode(&albumsDataTest); err != nil {
panic(fmt.Sprintf("Could not parse '%s': %s", albumsPath, err))
}
dv := diskv.New(diskv.Options{
BasePath: filepath.Join("testdata", "web-cache"),
CacheSizeMax: 100 * 1024 * 1024, // 100MB
})
testGetter = httpcache.NewTransport(diskcache.NewWithDiskv(dv)).Client()
}

View File

@@ -39,7 +39,7 @@ func Execute() error {
errors <- err
return
}
defer csvFile.Close()
defer closeOrLog(csvFile, "Could not close '"+os.Args[1]+"': %s")
csvReader, err := NewAlbumCsvReader(csvFile)
if err != nil {
errors <- err