Fix incorrect dates; add final_flush, move committing after serializing

Xavier Bustamante Talavera 2019-02-04 18:20:50 +01:00
parent 2cbaf14c45
commit d6ca5e2922
9 changed files with 83 additions and 51 deletions

View File

@@ -1,9 +1,29 @@
import citext
from sqlalchemy import event
from sqlalchemy.dialects import postgresql
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql import expression
from sqlalchemy_utils import view
from teal.db import SchemaSQLAlchemy
from teal.db import SchemaSQLAlchemy, SchemaSession
class DhSession(SchemaSession):
def final_flush(self):
"""A regular flush that performs expensive final operations
through Devicehub (like saving searches), so it is thought
to be used once in each request, at the very end before
a commit.
"""
# This was done before with an ``before_commit`` sqlalchemy event
# however it is too fragile it does not detect previously-flushed
# things
# This solution makes this more aware to the user, although
# has the same problem. This is not final solution.
# todo a solution would be for this session to save, on every
# flush, all the new / dirty interesting things in a variable
# until DeviceSearch is executed
from ereuse_devicehub.resources.device.search import DeviceSearch
DeviceSearch.update_modified_devices(session=self)
class SQLAlchemy(SchemaSQLAlchemy):
@@ -23,6 +43,9 @@ class SQLAlchemy(SchemaSQLAlchemy):
if common_schema:
self.drop_schema(schema='common')
def create_session(self, options):
return sessionmaker(class_=DhSession, db=self, **options)
def create_view(name, selectable):
"""Creates a view.

View File

@@ -8,7 +8,6 @@ import ereuse_utils.cli
from ereuse_utils.session import DevicehubClient
from flask.globals import _app_ctx_stack, g
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import event
from teal.teal import Teal
from ereuse_devicehub.auth import Auth
@@ -47,16 +46,10 @@ class Devicehub(Teal):
self.id = inventory
"""The Inventory ID of this instance. In Teal is the app.schema."""
self.dummy = Dummy(self)
self.before_request(self.register_db_events_listeners)
self.cli.command('regenerate-search')(self.regenerate_search)
self.cli.command('init-db')(self.init_db)
self.before_request(self._prepare_request)
def register_db_events_listeners(self):
"""Registers the SQLAlchemy event listeners."""
# todo can I make it with a global Session only?
event.listen(db.session, 'before_commit', DeviceSearch.update_modified_devices)
# noinspection PyMethodOverriding
@click.option('--name', '-n',
default='Test 1',
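
The block removed above is the event-listener approach that the new `final_flush` comment refers to. A generic sketch of that pattern, with illustrative names, showing why it is fragile:

```python
from sqlalchemy import event
from sqlalchemy.orm import Session


def update_search(session: Session):
    # Placeholder for DeviceSearch.update_modified_devices: inspect what
    # is still pending and record the changed objects.
    pending = set(session.new) | set(session.dirty)
    print('objects still pending at commit time:', pending)


# ``before_commit`` fires before the flush that commit() itself runs, but
# *after* any flushes done earlier in the request, so objects already
# flushed by then never show up in session.new / session.dirty -- the
# fragility the ``final_flush`` comment above mentions.
event.listen(Session, 'before_commit', update_search)
```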

View File

@@ -141,9 +141,9 @@ class Dummy:
assert len(inventory['items'])
i, _ = user.get(res=Device, query=[('search', 'intel')])
assert len(i['items']) == 12
assert 12 == len(i['items'])
i, _ = user.get(res=Device, query=[('search', 'pc')])
assert len(i['items']) == 14
assert 14 == len(i['items'])
# Let's create a set of events for the pc device
# Make device Ready

View File

@@ -10,7 +10,7 @@ import teal.db
from boltons import urlutils
from citext import CIText
from flask import current_app as app, g
from sqlalchemy import BigInteger, Boolean, CheckConstraint, Column, DateTime, Enum as DBEnum, \
from sqlalchemy import BigInteger, Boolean, CheckConstraint, Column, Enum as DBEnum, \
Float, ForeignKey, Integer, Interval, JSON, Numeric, SmallInteger, Unicode, event, orm
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.ext.declarative import declared_attr
@@ -378,9 +378,10 @@ class Step(db.Model):
type = Column(Unicode(STR_SM_SIZE), nullable=False)
num = Column(SmallInteger, primary_key=True)
severity = Column(teal.db.IntEnum(Severity), default=Severity.Info, nullable=False)
start_time = Column(DateTime, nullable=False)
start_time = Column(db.TIMESTAMP(timezone=True), nullable=False)
start_time.comment = Event.start_time.comment
end_time = Column(DateTime, CheckConstraint('end_time > start_time'), nullable=False)
end_time = Column(db.TIMESTAMP(timezone=True), CheckConstraint('end_time > start_time'),
nullable=False)
end_time.comment = Event.end_time.comment
erasure = relationship(EraseBasic,
@@ -1187,7 +1188,7 @@ class Trade(JoinedTableMixin, EventWithMultipleDevices):
This class and its inheritors
extend `Schema's Trade <http://schema.org/TradeAction>`_.
"""
shipping_date = Column(DateTime)
shipping_date = Column(db.TIMESTAMP(timezone=True))
shipping_date.comment = """
When are the devices going to be ready for shipping?
"""

View File

@@ -23,9 +23,10 @@ class EventView(View):
Model = db.Model._decl_class_registry.data[json['type']]()
event = Model(**e)
db.session.add(event)
db.session.commit()
db.session().final_flush()
ret = self.schema.jsonify(event)
ret.status_code = 201
db.session.commit()
return ret
def one(self, id: UUID):
@@ -84,7 +85,8 @@ class SnapshotView(View):
snapshot.events |= rates
db.session.add(snapshot)
db.session.commit()
db.session().final_flush()
ret = self.schema.jsonify(snapshot) # transform it back
ret.status_code = 201
db.session.commit()
return ret
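
The reordering in these views is the third part of the commit title: serialize first, commit last. By default SQLAlchemy expires all instances on commit, so dumping the object after `commit()` would trigger extra queries (or fail on a detached instance); flushing via `final_flush()`, building the response, and only then committing keeps one flush and one commit per request. A hedged sketch of the resulting pattern, where `db`, `schema`, and `Model` are placeholders for the objects used in the diff:

```python
def post_resource(db, schema, Model, payload):
    """The create pattern after this commit, with placeholder arguments."""
    obj = Model(**payload)
    db.session.add(obj)
    # One flush per request, plus the expensive extras (search update).
    db.session().final_flush()
    # Serialize while the instance is still loaded in the session...
    ret = schema.jsonify(obj)
    ret.status_code = 201
    # ...and commit only after the response body has been built.
    db.session.commit()
    return ret
```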

View File

@@ -34,9 +34,10 @@ class LotView(View):
l = request.get_json()
lot = Lot(**l)
db.session.add(lot)
db.session.commit()
db.session().final_flush()
ret = self.schema.jsonify(lot)
ret.status_code = 201
db.session.commit()
return ret
def patch(self, id):
@@ -144,17 +145,21 @@ class LotBaseChildrenView(View):
def post(self, id: uuid.UUID):
lot = self.get_lot(id)
self._post(lot, self.get_ids())
db.session.commit()
db.session().final_flush()
ret = self.schema.jsonify(lot)
ret.status_code = 201
db.session.commit()
return ret
def delete(self, id: uuid.UUID):
lot = self.get_lot(id)
self._delete(lot, self.get_ids())
db.session().final_flush()
response = self.schema.jsonify(lot)
db.session.commit()
return self.schema.jsonify(lot)
return response
def _post(self, lot: Lot, ids: Set[uuid.UUID]):
raise NotImplementedError

View File

@@ -32,8 +32,10 @@ class TagView(View):
tags_id, _ = g.tag_provider.post('/', {}, query=[('num', num)])
tags = [Tag(id=tag_id, provider=g.inventory.tag_provider) for tag_id in tags_id]
db.session.add_all(tags)
db.session().final_flush()
response = things_response(self.schema.dump(tags, many=True, nested=1), code=201)
db.session.commit()
return things_response(self.schema.dump(tags, many=True, nested=1), code=201)
return response
def _post_one(self):
# todo do we use this?
@@ -42,6 +44,7 @@ class TagView(View):
if tag.like_etag():
raise CannotCreateETag(tag.id)
db.session.add(tag)
db.session().final_flush()
db.session.commit()
return Response(status=201)
@@ -69,6 +72,7 @@ class TagDeviceView(View):
raise LinkedToAnotherDevice(tag.device_id)
else:
tag.device_id = device_id
db.session().final_flush()
db.session.commit()
return Response(status=204)

View File

@@ -16,17 +16,17 @@ components:
manufacturer: c1mr
events:
- type: EraseSectors
startTime: 2018-06-01T08:12:06
endTime: 2018-06-01T09:12:06
startTime: '2018-06-01T08:12:06+02:00'
endTime: '2018-06-01T09:12:06+02:00'
steps:
- type: StepZero
severity: Info
startTime: 2018-06-01T08:15:00
endTime: 2018-06-01T09:16:00
startTime: '2018-06-01T08:15:00+02:00'
endTime: '2018-06-01T09:16:00+02:00'
- type: StepRandom
severity: Info
startTime: 2018-06-01T08:16:00
endTime: 2018-06-01T09:17:00
startTime: '2018-06-01T08:16:00+02:00'
endTime: '2018-06-01T09:17:00+02:00'
- type: Processor
serialNumber: p1s
model: p1ml

View File

@@ -298,7 +298,9 @@ def test_erase_privacy_standards(user: UserClient):
privacy properties.
"""
s = file('erase-sectors.snapshot')
assert '2018-06-01T09:12:06+02:00' == s['components'][0]['events'][0]['endTime']
snapshot = snapshot_and_check(user, s, (EraseSectors.t,), perform_second_snapshot=True)
assert '2018-06-01T07:12:06+00:00' == snapshot['events'][0]['endTime']
storage, *_ = snapshot['components']
assert storage['type'] == 'SolidStateDrive', 'Components must be ordered by input order'
storage, _ = user.get(res=m.Device, item=storage['id']) # Let's get storage events too
@@ -306,13 +308,15 @@ def test_erase_privacy_standards(user: UserClient):
erasure1, _snapshot1, erasure2, _snapshot2 = storage['events']
assert erasure1['type'] == erasure2['type'] == 'EraseSectors'
assert _snapshot1['type'] == _snapshot2['type'] == 'Snapshot'
assert snapshot == user.get(res=Event, item=_snapshot2['id'])[0]
get_snapshot, _ = user.get(res=Event, item=_snapshot2['id'])
assert get_snapshot['events'][0]['endTime'] == '2018-06-01T07:12:06+00:00'
assert snapshot == get_snapshot
erasure, _ = user.get(res=Event, item=erasure1['id'])
assert len(erasure['steps']) == 2
assert erasure['steps'][0]['startTime'] == '2018-06-01T08:15:00+00:00'
assert erasure['steps'][0]['endTime'] == '2018-06-01T09:16:00+00:00'
assert erasure['steps'][1]['startTime'] == '2018-06-01T08:16:00+00:00'
assert erasure['steps'][1]['endTime'] == '2018-06-01T09:17:00+00:00'
assert erasure['steps'][0]['startTime'] == '2018-06-01T06:15:00+00:00'
assert erasure['steps'][0]['endTime'] == '2018-06-01T07:16:00+00:00'
assert erasure['steps'][1]['startTime'] == '2018-06-01T06:16:00+00:00'
assert erasure['steps'][1]['endTime'] == '2018-06-01T07:17:00+00:00'
assert erasure['device']['id'] == storage['id']
step1, step2 = erasure['steps']
assert step1['type'] == 'StepZero'
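
The updated expectations follow directly from the timezone change: the fixture sends CEST (`+02:00`) times and the API now returns them normalized to UTC. A quick sanity check of the conversion, in plain Python 3.7+ and independent of Devicehub:

```python
from datetime import datetime, timezone

sent = datetime.fromisoformat('2018-06-01T09:12:06+02:00')
returned = sent.astimezone(timezone.utc)
assert returned.isoformat() == '2018-06-01T07:12:06+00:00'
```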