# Coverage for portality/models/cache.py: 89% (63 statements)
# Extracted from a coverage.py v6.4.2 report, created at 2022-07-19 18:38 +0100
""" The cache contains file metadata for the sitemap, journal csv, and the public data dump. It also holds site stats
for the front page """

from portality.dao import DomainObject
from datetime import datetime
from portality.core import app
class Cache(DomainObject):
    """Cache of precomputed site artefacts.

    Each cached item is a single record stored under a well-known fixed id:
    "site_statistics" (front-page stats), "csv" (journal CSV URL),
    "sitemap" (sitemap URL) and "public_data_dump" (dump file metadata).
    """
    __type__ = "cache"

    @classmethod
    def get_site_statistics(cls):
        """Return the cached site statistics dict, or None if absent/stale.

        In READ_ONLY_MODE the cached stats are returned even when stale,
        because the cache would not be allowed to store regenerated ones.
        """
        rec = cls.pull("site_statistics")

        # the cache is returnable only if it exists and is still in date
        returnable = rec is not None and not rec.is_stale()

        # if the cache exists and is in date (or is otherwise returnable), then explicitly build the
        # cache object and return it. If we are in read-only mode, then we always return the current
        # stats since the cache won't be allowed to store the regenerated ones.
        # (Explicit None check replaces the old `except AttributeError: pass`,
        # which also silently masked genuine attribute errors on rec.data.)
        if rec is not None and (returnable or app.config.get("READ_ONLY_MODE", False)):
            return {
                "journals": rec.data.get("journals"),
                "countries": rec.data.get("countries"),
                "abstracts": rec.data.get("abstracts"),
                "no_apc": rec.data.get("no_apc"),
                "new_journals": rec.data.get("new_journals")
            }

        # if we get to here, then we don't return the cache
        return None

    @classmethod
    def cache_site_statistics(cls, stats):
        """Store *stats* (dict of front-page statistics) under the fixed id."""
        cobj = cls(**stats)
        cobj.set_id("site_statistics")
        cobj.save()

    @classmethod
    def cache_csv(cls, url):
        """Record the URL of the latest journal CSV export."""
        cobj = cls(**{
            "url": url
        })
        cobj.set_id("csv")
        cobj.save()

    @classmethod
    def get_latest_csv(cls):
        """Return the cached CSV record, or None if never cached."""
        return cls.pull("csv")

    @classmethod
    def cache_sitemap(cls, url):
        """Record the URL of the latest sitemap.

        NOTE(review): the value is stored under the key "filename" (read back
        by get_latest_sitemap), unlike cache_csv which uses "url" — kept as-is
        for compatibility with existing records.
        """
        cobj = cls(**{
            "filename" : url
        })
        cobj.set_id("sitemap")
        cobj.save()

    @classmethod
    def get_latest_sitemap(cls):
        """Return the URL of the latest sitemap, or None if not cached."""
        rec = cls.pull("sitemap")
        if rec is None:
            return None
        return rec.get("filename")

    @classmethod
    def cache_public_data_dump(cls, article_url, article_size, journal_url, journal_size):
        """Record location and size of the latest public data dump files."""
        cobj = cls(**{
            "article": { "url" : article_url, "size" : article_size },
            "journal": { "url" : journal_url, "size" : journal_size }
        })
        cobj.set_id("public_data_dump")
        cobj.save()

    @classmethod
    def get_public_data_dump(cls):
        """Return the cached public data dump record, or None if never cached."""
        return cls.pull("public_data_dump")

    def is_stale(self):
        """True if this record is older than SITE_STATISTICS_TIMEOUT seconds.

        A record with no last_updated date is treated as infinitely old.
        """
        lu = self.last_updated or '1970-01-01T00:00:00Z'

        lu = datetime.strptime(lu, "%Y-%m-%dT%H:%M:%SZ")
        dt = datetime.utcnow() - lu

        # timedelta.total_seconds() has existed since Python 2.7; the old
        # Python 2.6 manual fallback was dead code and has been removed.
        return dt.total_seconds() > app.config.get("SITE_STATISTICS_TIMEOUT")

    def marked_regen(self):
        """True if this cache record has been flagged for regeneration."""
        return self.data.get("regen", False)