~fabrixxm/confy

ref: 0.6.0 confy/src/local.py -rw-r--r-- 10.1 KiB
3659db31fabrixxm Confy 0.6.0 5 months ago
                                                                                
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
# local.py
#
# Copyright 2020 Fabio
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import types
import os
import json
import hashlib
import urllib.request
import sqlite3
import importlib.util
import datetime
import time

from gi.repository import GLib

from .settings import Settings
from .fetcher import Fetcher
from . import remotes


### init and migration machinery

class LocalDBDoesNotExistsException(Exception):
    """Raised when no cached event database exists and the device is offline."""

class LocalDBNotOpen(Exception):
    """Raised when an operation requires an open event database but none is."""

class DownloadCancelled(Exception):
    """Raised when a schedule download is cancelled by the user."""

# Absolute path of the currently open event database file (None when closed).
dbfile = None
# Open sqlite3 connection to the event database (None when closed).
db = None
# Per-user data directory for this application (set by init()).
appdir = None
# Per-user cache directory for databases, images and menu files (set by init()).
cachedir = None


def getDb():
    """Return the module-level event database connection (None when closed)."""
    return db

def init(app_id:str):
    """Initialize per-user data/cache directories and settings.

    `app_id` is the application identifier, used as the directory name
    under the XDG data and cache roots.
    """
    global appdir, cachedir

    appdir = os.path.join(GLib.get_user_data_dir(), app_id)
    # exist_ok avoids the check-then-create race of the original
    # `if not os.path.exists(...): os.makedirs(...)` pattern.
    os.makedirs(appdir, exist_ok=True)

    cachedir = os.path.join(GLib.get_user_cache_dir(), app_id)
    os.makedirs(cachedir, exist_ok=True)

    Settings.init(app_id)

def import_migration(filename:str) -> types.ModuleType:
    """Load and return the migration module stored at `filename`.

    Uses os.path.splitext so only the final extension is stripped for
    the module name; the original `replace(".py", "")` would also mangle
    any ".py" occurring earlier in the file name.

    Raises ImportError if no loader can be built for `filename`.
    """
    modulename = os.path.splitext(os.path.basename(filename))[0]
    spec = importlib.util.spec_from_file_location(modulename, filename)
    if spec is None or spec.loader is None:
        raise ImportError("cannot load migration from {}".format(filename))
    migration = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(migration)
    return migration

def migrate():
    """Apply pending schema migrations to the open module-level db.

    Reads the current schema number from the `meta` table, runs the
    `up(cursor)` of every migrations/NNN_*.py with a higher number,
    then stores the new schema number and commits.

    Returns True if at least one migration was applied.
    Re-raises (after rollback) any error raised by a migration.
    """
    schema = -1
    migrated = False
    try:
        # A fresh database has no `meta` table yet: any error here just
        # leaves schema at -1 so every migration file runs.
        c = db.execute("SELECT value FROM meta WHERE key='schema'" )
        info = c.fetchone()
        if info is not None:
            schema = int(info['value'])
    except Exception as e:
        print("... " , e)

    # Migration files are named "NNN_description.py"; sorting the names
    # also orders them by schema number.
    migrationsdir = os.path.join(os.path.dirname(__file__), "migrations")
    files = sorted([ f for f in os.listdir(migrationsdir) if f.endswith(".py") ])

    try:
        c = db.cursor()
        for f in files:
            newschema = int(f.split("_")[0])
            if newschema > schema:
                ## TODO: how to log using glib?
                print("Migrating to {}".format(f.replace(".py", "")))
                import_migration(os.path.join(migrationsdir,f)).up(c)
                schema = newschema
                migrated = True
    except Exception as e:
        # Undo the partially-applied migration before propagating.
        db.rollback()
        raise e from None

    db.execute("INSERT OR REPLACE INTO meta (key, value) VALUES ('schema', ?)", (schema,))
    db.commit()

    return migrated

# event database

def openconf(conf, is_online=True):
    """Open the event database for `conf` (a models.Conference).

    After opening, fills in any Meta fields (title, start, end) that are
    still empty, taking the values from the conference object.

    Returns the Fetcher driving the download, or None when no download
    was started.
    """

    def _update_meta(*args):
        from .models import Meta
        with Meta() as m:
            # Only fill fields that are still falsy; values already
            # stored in the event database are never overwritten.
            m.title = m.title or conf.title
            m.start = m.start or conf.start
            m.end = m.end or conf.end

    fetcher = opendb(conf.url, is_online)
    if fetcher is None:
        # Cached data only: update the metadata right away.
        _update_meta()
    else:
        # Defer the metadata update until the download completes.
        fetcher.connect("done", _update_meta)

    return fetcher


def opendb(url, is_online=True):
    """Open the event database cached for `url`.

    If `is_online`, the original pentabarf is fetched and the cache is
    updated when the schema was just migrated or the cached copy has
    expired.

    Returns a Fetcher instance if a download was started, else None.
    Raises LocalDBDoesNotExistsException when offline with no cache.
    """
    # Fix: in the original code this docstring sat AFTER the `global`
    # statement, so it was a plain (dead) string expression, not the
    # function's __doc__.  The two `global` statements are also merged.
    global db, dbfile

    # Cache file name is derived from the url so each event gets its own db.
    dbhashname = hashlib.md5(url.encode('utf8')).hexdigest()
    dbfilename = os.path.join(cachedir, dbhashname+".db")
    if not os.path.exists(dbfilename) and not is_online:
        db = None
        raise LocalDBDoesNotExistsException(_("Device disconnected and no cached data available."))

    print("open db", dbfilename)
    db = sqlite3.connect(dbfilename, detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES)
    db.row_factory = sqlite3.Row

    migrated = migrate()

    dbfile = dbfilename

    from .models import Meta

    # Refresh when the cached copy is older than the configured TTL.
    is_time_to_update = (time.time() - Meta().last_update) > Settings.instance().get_event_cache()

    if is_online and (migrated or is_time_to_update):
        return updatedb(url)

    return None


def updatedb(from_url):
    """Refresh the event cache from the schedule at `from_url`.

    Returns the Fetcher instance performing the update.
    Raises LocalDBNotOpen when no event database is currently open.
    """
    if db is None:
        raise LocalDBNotOpen()
    return remotes.update_schedule(from_url)


def close():
    """Close the event database and reset the module-level handles."""
    global db, dbfile
    dbfile = None
    # Detach the handle first, then close it if it was open.
    conn, db = db, None
    if conn is not None:
        conn.close()



## schedules list (from giggity and user defined)
def update_menu() -> Fetcher:
    """Start fetching the remote giggity menu json into the cache file."""
    target = os.path.join(cachedir, "ggmenu.json")
    return Fetcher("https://ggt.gaa.st/menu.json", target)


def open_menu(is_online):
    """Refresh the menu cache file when it is missing or expired.

    Returns the Fetcher performing the refresh, or None when the cache
    is still valid or we are offline.
    """
    cachefile = os.path.join(cachedir, "ggmenu.json")
    print("menu cache file:", cachefile)

    if not is_online:
        return None

    if os.path.exists(cachefile):
        age = time.time() - os.path.getmtime(cachefile)
        if age <= Settings.instance().get_list_cache():
            # Cache still fresh: nothing to fetch.
            return None

    return update_menu()


def get_menu():
    """Return menu entries from the giggity cache plus user entries.

    Entries are sorted by their 'start' date, most recent first.
    """
    ggcachefile = os.path.join(cachedir, "ggmenu.json")
    userfile = os.path.join(cachedir, "usermenu.json")

    schedules = []
    # utf-8 explicitly: JSON is UTF-8 by spec, while open()'s default
    # encoding depends on the locale and could fail on non-ASCII titles.
    if os.path.exists(ggcachefile):
        with open(ggcachefile, "r", encoding="utf-8") as f:
            menu = json.load(f)
        schedules += menu['schedules']

    if os.path.exists(userfile):
        with open(userfile, "r", encoding="utf-8") as f:
            schedules += json.load(f)

    def _sort_key(elm):
        # 'start' is a "YYYY-MM-DD" string; split/int also tolerates
        # non-zero-padded month/day values.
        start = elm['start']
        return datetime.date(*[int(s) for s in start.split("-")])

    return sorted(schedules, key=_sort_key, reverse=True)


def _add_user_menu_ready(sender, local_file_path, cbk):
    """Finish adding a user event once its schedule has been fetched.

    Appends the event's metadata to the user menu file, invokes `cbk`
    with the Meta instance, then closes the event database.

    `sender` and `local_file_path` match the Fetcher "done" signal
    signature; both are unused here.
    """
    userfile = os.path.join(cachedir, "usermenu.json")
    from .models import Meta
    m = Meta()
    item = m.to_json()

    # Ensure the stored Meta has a non-None title (saved back to the
    # event database) before handing it to the callback.
    if m.title is None:
        m.title = ""
        m.save()

    schedules = []
    if os.path.exists(userfile):
        with open(userfile, "r") as f:
            schedules = json.load(f)

    schedules.append(item)
    with open(userfile, "w") as f:
        json.dump(schedules, f)

    cbk(m)
    close()


class MenuItemAlreadyExistsException(Exception):
    """Raised when an event being added is already present in the menu.

    The pre-existing menu entry is available as `obj`.
    """

    def __init__(self, message, obj):
        super().__init__(message)
        self.obj = obj


def add_user_menu(url, cbk):
    """Add and fetch a schedule from `url`.

    Returns the Fetcher performing the download, or None when the data
    was already cached. When ready, `cbk` is called with the metadata.

    Raises MenuItemAlreadyExistsException if the url is already listed.
    """
    # Refuse to add the same event twice.
    duplicates = [entry for entry in get_menu() if entry['url'] == url]
    if duplicates:
        raise MenuItemAlreadyExistsException(_("Event already in list"), duplicates[0])

    fetcher = opendb(url, True)  # assume we are online at this point
    if fetcher is None:
        _add_user_menu_ready(None, None, cbk)
    else:
        fetcher.connect("done", _add_user_menu_ready, cbk)

    return fetcher


def update_user_menu(data):
    """Update (or add) a user menu entry.

    `data` is a dict; the existing entry with the same 'url' value is
    replaced, or `data` is appended when no match is found.

    Raises Exception when the user menu file does not exist (should be
    unreachable: only entries with user=True are editable, and adding
    one creates this file).
    """
    userfile = os.path.join(cachedir, "usermenu.json")
    if not os.path.exists(userfile):
        raise Exception("User menu file not found. This should not happen.")

    # utf-8 explicitly: JSON is UTF-8 by spec, regardless of the locale.
    with open(userfile, "r", encoding="utf-8") as f:
        schedules = json.load(f)

    # Locate the entry with the same url, if any.
    idx = next((i for i, s in enumerate(schedules) if s['url'] == data['url']), None)
    if idx is None:
        schedules.append(data)
    else:
        schedules[idx] = data

    with open(userfile, "w", encoding="utf-8") as f:
        json.dump(schedules, f)


def delete_user_menu(data):
    """Remove a user menu entry.

    `data` is a dict; the entry whose 'url' matches data['url'] is
    removed (the file is rewritten even when no match is found).

    Raises Exception when the user menu file does not exist (should be
    unreachable: only entries with user=True are editable, and adding
    one creates this file).
    """
    userfile = os.path.join(cachedir, "usermenu.json")
    if not os.path.exists(userfile):
        raise Exception("User menu file not found. This should not happen.")

    # utf-8 explicitly: JSON is UTF-8 by spec, regardless of the locale.
    with open(userfile, "r", encoding="utf-8") as f:
        schedules = json.load(f)

    # Locate and drop the entry with the same url, if any.
    idx = next((i for i, s in enumerate(schedules) if s['url'] == data['url']), None)
    if idx is not None:
        del schedules[idx]

    with open(userfile, "w", encoding="utf-8") as f:
        json.dump(schedules, f)


# async image fetcher
def get_image_async(fileurl, cbk):
    """Save a remote image in the local cache.

    Returns the Fetcher performing the download, or None when `fileurl`
    is None, the file is already cached, or the fetch could not be
    started. On a cache hit, `cbk(None, localfile)` is called
    synchronously; otherwise the Fetcher calls it when done.
    """
    if fileurl is None:
        return None
    localfilename = hashlib.md5(fileurl.encode('utf8')).hexdigest()
    localdir = os.path.join(cachedir, "images")
    # exist_ok avoids the check-then-create race of the original code.
    os.makedirs(localdir, exist_ok=True)
    localfile = os.path.join(localdir, localfilename)
    if os.path.exists(localfile):
        # Already cached: deliver synchronously.
        cbk(None, localfile)
        return None
    try:
        return Fetcher(fileurl, localfile, cbk)
    except Exception:
        # Best effort: a failed fetch just means no image is shown.
        return None


# cache handling
def get_cache_size():
    """Return the total size in bytes of cached files (dbs and images).

    JSON menu files and symbolic links are excluded from the count.
    """
    total = 0
    for dirpath, _dirnames, filenames in os.walk(cachedir):
        candidates = (
            os.path.join(dirpath, name)
            for name in filenames
            if not name.endswith(".json")
        )
        total += sum(os.path.getsize(p) for p in candidates if not os.path.islink(p))
    return total

def clear_cache():
    """Delete cached files, keeping the currently open database and the
    json menu files."""
    for dirpath, _dirnames, filenames in os.walk(cachedir):
        for name in filenames:
            if name.endswith(".json"):
                continue
            path = os.path.join(dirpath, name)
            if path != dbfile:
                os.remove(path)