diff --git a/CHANGELOG.md b/CHANGELOG.md index 141fb297..6f86a69d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,7 @@ ml). ## [1.0.5-beta] - [addend] #124 adding endpoint for extract the internal stats of use +- [addend] #122 system for verify all documents that it's produced from devicehub ## [1.0.4-beta] - [addend] #95 adding endpoint for check the hash of one report diff --git a/ereuse_devicehub/__init__.py b/ereuse_devicehub/__init__.py index a20c909b..53f0b176 100644 --- a/ereuse_devicehub/__init__.py +++ b/ereuse_devicehub/__init__.py @@ -1 +1 @@ -__version__ = "1.0.4-beta" +__version__ = "1.0.5-beta" diff --git a/ereuse_devicehub/migrations/versions/6a2a939d5668_drop_unique_org_for_tag.py b/ereuse_devicehub/migrations/versions/6a2a939d5668_drop_unique_org_for_tag.py new file mode 100644 index 00000000..17e0290e --- /dev/null +++ b/ereuse_devicehub/migrations/versions/6a2a939d5668_drop_unique_org_for_tag.py @@ -0,0 +1,62 @@ +"""drop unique org for tag + +Revision ID: 6a2a939d5668 +Revises: eca457d8b2a4 +Create Date: 2021-02-25 18:47:47.441195 + +""" +from alembic import op +import sqlalchemy as sa +from alembic import context + + +# revision identifiers, used by Alembic. +revision = '6a2a939d5668' +down_revision = 'eca457d8b2a4' +branch_labels = None +depends_on = None + + +def get_inv(): + INV = context.get_x_argument(as_dictionary=True).get('inventory') + if not INV: + raise ValueError("Inventory value is not specified") + return INV + + +def upgrade_data(): + con = op.get_bind() + tags = con.execute(f"select id from {get_inv()}.tag") + i = 1 + for c in tags: + id_tag = c.id + internal_id = i + i += 1 + sql = f"update {get_inv()}.tag set internal_id='{internal_id}' where id='{id_tag}';" + con.execute(sql) + + sql = f"CREATE SEQUENCE {get_inv()}.tag_internal_id_seq START {i};" + con.execute(sql) + + +def upgrade(): + op.drop_constraint('one tag id per organization', 'tag', schema=f'{get_inv()}') + op.drop_constraint('one secondary tag per organization', 'tag', schema=f'{get_inv()}') + op.create_primary_key('one tag id per owner', 'tag', ['id', 'owner_id'], schema=f'{get_inv()}'), + op.create_unique_constraint('one secondary tag per owner', 'tag', ['secondary', 'owner_id'], schema=f'{get_inv()}'), + op.add_column('tag', sa.Column('internal_id', sa.BigInteger(), nullable=True, + comment='The identifier of the tag for this database. Used only\n internally for software; users should not use this.\n'), schema=f'{get_inv()}') + + upgrade_data() + + op.alter_column('tag', sa.Column('internal_id', sa.BigInteger(), nullable=False, + comment='The identifier of the tag for this database. 
Used only\n internally for software; users should not use this.\n'), schema=f'{get_inv()}') + + +def downgrade(): + op.drop_constraint('one tag id per owner', 'tag', schema=f'{get_inv()}') + op.drop_constraint('one secondary tag per owner', 'tag', schema=f'{get_inv()}') + op.create_primary_key('one tag id per organization', 'tag', ['id', 'org_id'], schema=f'{get_inv()}'), + op.create_unique_constraint('one secondary tag per organization', 'tag', ['secondary', 'org_id'], schema=f'{get_inv()}'), + op.drop_column('tag', 'internal_id', schema=f'{get_inv()}') + op.execute(f"DROP SEQUENCE {get_inv()}.tag_internal_id_seq;") diff --git a/ereuse_devicehub/resources/action/__init__.py b/ereuse_devicehub/resources/action/__init__.py index 13538f90..f85c32dc 100644 --- a/ereuse_devicehub/resources/action/__init__.py +++ b/ereuse_devicehub/resources/action/__init__.py @@ -250,6 +250,11 @@ class MakeAvailable(ActionDef): SCHEMA = schemas.MakeAvailable +class TradeDef(ActionDef): + VIEW = None + SCHEMA = schemas.Trade + + class CancelTradeDef(ActionDef): VIEW = None SCHEMA = schemas.CancelTrade diff --git a/ereuse_devicehub/resources/documents/device_row.py b/ereuse_devicehub/resources/documents/device_row.py index 74a322be..cb5e82d8 100644 --- a/ereuse_devicehub/resources/documents/device_row.py +++ b/ereuse_devicehub/resources/documents/device_row.py @@ -50,8 +50,7 @@ class DeviceRow(OrderedDict): self['Tag 2 Type'] = self['Tag 2 ID'] = self['Tag 2 Organization'] = '' self['Tag 3 Type'] = self['Tag 3 ID'] = self['Tag 3 Organization'] = '' for i, tag in zip(range(1, 3), device.tags): - # TODO @cayop we need redefined how save the Tag Type info - self['Tag {} Type'.format(i)] = 'unamed' + self['Tag {} Type'.format(i)] = 'unamed' if tag.provider else 'named' self['Tag {} ID'.format(i)] = tag.id self['Tag {} Organization'.format(i)] = tag.org.name diff --git a/ereuse_devicehub/resources/documents/documents.py b/ereuse_devicehub/resources/documents/documents.py index 692e6992..cc47fd30 100644 --- a/ereuse_devicehub/resources/documents/documents.py +++ b/ereuse_devicehub/resources/documents/documents.py @@ -27,7 +27,7 @@ from ereuse_devicehub.resources.documents.device_row import (DeviceRow, StockRow InternalStatsRow) from ereuse_devicehub.resources.lot import LotView from ereuse_devicehub.resources.lot.models import Lot -from ereuse_devicehub.resources.hash_reports import insert_hash, ReportHash +from ereuse_devicehub.resources.hash_reports import insert_hash, ReportHash, verify_hash class Format(enum.Enum): @@ -80,6 +80,7 @@ class DocumentView(DeviceView): res = flask_weasyprint.render_pdf( flask_weasyprint.HTML(string=template), download_filename='{}.pdf'.format(type) ) + insert_hash(res.data) else: res = flask.make_response(template) return res @@ -186,7 +187,9 @@ class LotsDocumentView(LotView): cw.writerow(l.keys()) first = False cw.writerow(l.values()) - output = make_response(data.getvalue()) + bfile = data.getvalue().encode('utf-8') + output = make_response(bfile) + insert_hash(bfile) output.headers['Content-Disposition'] = 'attachment; filename=lots-info.csv' output.headers['Content-type'] = 'text/csv' return output @@ -223,7 +226,9 @@ class StockDocumentView(DeviceView): cw.writerow(d.keys()) first = False cw.writerow(d.values()) - output = make_response(data.getvalue()) + bfile = data.getvalue().encode('utf-8') + output = make_response(bfile) + insert_hash(bfile) output.headers['Content-Disposition'] = 'attachment; filename=devices-stock.csv' output.headers['Content-type'] = 'text/csv' return 
output @@ -247,12 +252,32 @@ class StampsView(View): This view render one public ans static page for see the links for to do the check of one csv file """ - def get(self): + def get_url_path(self): url = urlutils.URL(request.url) url.normalize() url.path_parts = url.path_parts[:-2] + ['check', ''] - url_path = url.to_text() - return flask.render_template('documents/stamp.html', rq_url=url_path) + return url.to_text() + + def get(self): + result = ('', '') + return flask.render_template('documents/stamp.html', rq_url=self.get_url_path(), + result=result) + + def post(self): + result = ('', '') + if 'docUpload' in request.files: + file_check = request.files['docUpload'] + bad = 'There are no coincidences. The attached file data does not come \ + from our backend or it has been subsequently modified.' + ok = '100% coincidence. The attached file contains data 100% existing in \ + to our backend' + result = ('Bad', bad) + if file_check.mimetype in ['text/csv', 'application/pdf']: + if verify_hash(file_check): + result = ('Ok', ok) + + return flask.render_template('documents/stamp.html', rq_url=self.get_url_path(), + result=result) class InternalStatsView(DeviceView): @@ -345,7 +370,7 @@ class DocumentDef(Resource): self.add_url_rule('/check/', defaults={}, view_func=check_view, methods=get) stamps_view = StampsView.as_view('StampsView', definition=self, auth=app.auth) - self.add_url_rule('/stamps/', defaults={}, view_func=stamps_view, methods=get) + self.add_url_rule('/stamps/', defaults={}, view_func=stamps_view, methods={'GET', 'POST'}) internalstats_view = InternalStatsView.as_view( 'InternalStatsView', definition=self, auth=app.auth) diff --git a/ereuse_devicehub/resources/documents/templates/documents/stamp.html b/ereuse_devicehub/resources/documents/templates/documents/stamp.html index c204c2ff..d07efc49 100644 --- a/ereuse_devicehub/resources/documents/templates/documents/stamp.html +++ b/ereuse_devicehub/resources/documents/templates/documents/stamp.html @@ -38,6 +38,17 @@
[11 added lines of stamp.html markup, not recovered from the extraction: a multipart/form-data form that POSTs the chosen file as "docUpload" back to this stamps URL, and "alert alert-info" / "alert alert-danger" blocks that display the verification result]
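The stamp check added in this patch is a plain hash round trip: the CSV and PDF documents that documents.py serves are fingerprinted with SHA3-256 and stored through insert_hash, and the stamps POST handler recomputes the same digest over the uploaded file and asks verify_hash whether it is already known. The snippet below is only a minimal, self-contained sketch of that idea; the real helpers in hash_reports.py (next hunk) persist the digest in the ReportHash table and read a Werkzeug FileStorage stream, and the sample CSV bytes here are made up.

import hashlib

known_hashes = set()  # stands in for the ReportHash table

def insert_hash(bfile: bytes) -> None:
    # Called when Devicehub renders a CSV or PDF document.
    known_hashes.add(hashlib.sha3_256(bfile).hexdigest())

def verify_hash(uploaded: bytes) -> bool:
    # Called by the stamps POST handler on the uploaded file.
    return hashlib.sha3_256(uploaded).hexdigest() in known_hashes

report = 'System ID;Device Type\n1;Laptop\n'.encode('utf-8')
insert_hash(report)                     # done at export time
assert verify_hash(report)              # untouched file: verified
assert not verify_hash(report + b'x')   # any modification breaks the stamp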
diff --git a/ereuse_devicehub/resources/hash_reports.py b/ereuse_devicehub/resources/hash_reports.py index f84e4b18..f44a79be 100644 --- a/ereuse_devicehub/resources/hash_reports.py +++ b/ereuse_devicehub/resources/hash_reports.py @@ -32,3 +32,8 @@ def insert_hash(bfile): db.session.add(db_hash) db.session.commit() db.session.flush() + + +def verify_hash(bfile): + hash3 = hashlib.sha3_256(bfile.read()).hexdigest() + return ReportHash.query.filter(ReportHash.hash3 == hash3).count() diff --git a/ereuse_devicehub/resources/tag/__init__.py b/ereuse_devicehub/resources/tag/__init__.py index e6418e54..2423d80a 100644 --- a/ereuse_devicehub/resources/tag/__init__.py +++ b/ereuse_devicehub/resources/tag/__init__.py @@ -48,6 +48,10 @@ class TagDef(Resource): 'device/<{0.ID_CONVERTER.value}:device_id>'.format(DeviceDef), view_func=device_view, methods={'PUT'}) + self.add_url_rule('/<{0.ID_CONVERTER.value}:tag_id>/'.format(self) + + 'device/<{0.ID_CONVERTER.value}:device_id>'.format(DeviceDef), + view_func=device_view, + methods={'DELETE'}) @option('-u', '--owner', help=OWNER_H) @option('-o', '--org', help=ORG_H) diff --git a/ereuse_devicehub/resources/tag/model.py b/ereuse_devicehub/resources/tag/model.py index 8aae5670..92c8d5d4 100644 --- a/ereuse_devicehub/resources/tag/model.py +++ b/ereuse_devicehub/resources/tag/model.py @@ -3,7 +3,7 @@ from typing import Set from boltons import urlutils from flask import g -from sqlalchemy import BigInteger, Column, ForeignKey, UniqueConstraint +from sqlalchemy import BigInteger, Column, ForeignKey, UniqueConstraint, Sequence from sqlalchemy.dialects.postgresql import UUID from sqlalchemy.orm import backref, relationship, validates from teal.db import DB_CASCADE_SET_NULL, Query, URL @@ -15,6 +15,7 @@ from ereuse_devicehub.resources.agent.models import Organization from ereuse_devicehub.resources.device.models import Device from ereuse_devicehub.resources.models import Thing from ereuse_devicehub.resources.user.models import User +from ereuse_devicehub.resources.utils import hashcode class Tags(Set['Tag']): @@ -25,17 +26,23 @@ class Tags(Set['Tag']): return ', '.join(format(tag, format_spec) for tag in self).strip() + + class Tag(Thing): + internal_id = Column(BigInteger, Sequence('tag_internal_id_seq'), unique=True, nullable=False) + internal_id.comment = """The identifier of the tag for this database. Used only + internally for software; users should not use this. 
+ """ id = Column(db.CIText(), primary_key=True) id.comment = """The ID of the tag.""" owner_id = Column(UUID(as_uuid=True), ForeignKey(User.id), + primary_key=True, nullable=False, default=lambda: g.user.id) owner = relationship(User, primaryjoin=owner_id == User.id) org_id = Column(UUID(as_uuid=True), ForeignKey(Organization.id), - primary_key=True, # If we link with the Organization object this instance # will be set as persistent and added to session # which is something we don't want to enforce by default @@ -97,8 +104,8 @@ class Tag(Thing): return url __table_args__ = ( - UniqueConstraint(id, org_id, name='one tag id per organization'), - UniqueConstraint(secondary, org_id, name='one secondary tag per organization') + UniqueConstraint(id, owner_id, name='one tag id per owner'), + UniqueConstraint(secondary, owner_id, name='one secondary tag per organization') ) @property @@ -109,7 +116,7 @@ class Tag(Thing): def url(self) -> urlutils.URL: """The URL where to GET this device.""" # todo this url only works for printable internal tags - return urlutils.URL(url_for_resource(Tag, item_id=self.id)) + return urlutils.URL(url_for_resource(Tag, item_id=self.code)) @property def printable(self) -> bool: @@ -125,6 +132,23 @@ class Tag(Thing): """Return a SQLAlchemy filter expression for printable queries.""" return cls.org_id == Organization.get_default_org_id() + @property + def code(self) -> str: + return hashcode.encode(self.internal_id) + + def delete(self): + """Deletes the tag. + + This method removes the tag if is named tag and don't have any linked device. + """ + if self.device: + raise TagLinked(self) + if self.provider: + # if is an unnamed tag not delete + raise TagUnnamed(self.id) + + db.session.delete(self) + def __repr__(self) -> str: return ''.format(self) @@ -133,3 +157,15 @@ class Tag(Thing): def __format__(self, format_spec: str) -> str: return '{0.org.name} {0.id}'.format(self) + + +class TagLinked(ValidationError): + def __init__(self, tag): + message = 'The tag {} is linked to device {}.'.format(tag.id, tag.device.id) + super().__init__(message, field_names=['device']) + + +class TagUnnamed(ValidationError): + def __init__(self, id): + message = 'This tag {} is unnamed tag. 
It is imposible delete.'.format(id) + super().__init__(message, field_names=['device']) diff --git a/ereuse_devicehub/resources/tag/schema.py b/ereuse_devicehub/resources/tag/schema.py index 7db4fe0d..e1c8b608 100644 --- a/ereuse_devicehub/resources/tag/schema.py +++ b/ereuse_devicehub/resources/tag/schema.py @@ -28,3 +28,4 @@ class Tag(Thing): secondary = SanitizedStr(lower=True, description=m.Tag.secondary.comment) printable = Boolean(dump_only=True, decsription=m.Tag.printable.__doc__) url = URL(dump_only=True, description=m.Tag.url.__doc__) + code = SanitizedStr(dump_only=True, description=m.Tag.internal_id.comment) diff --git a/ereuse_devicehub/resources/tag/view.py b/ereuse_devicehub/resources/tag/view.py index 2376635e..404ec0c1 100644 --- a/ereuse_devicehub/resources/tag/view.py +++ b/ereuse_devicehub/resources/tag/view.py @@ -6,18 +6,29 @@ from teal.resource import View, url_for_resource from ereuse_devicehub import auth from ereuse_devicehub.db import db from ereuse_devicehub.query import things_response +from ereuse_devicehub.resources.utils import hashcode from ereuse_devicehub.resources.device.models import Device from ereuse_devicehub.resources.tag import Tag class TagView(View): + def one(self, code): + """Gets the device from the named tag, /tags/namedtag.""" + internal_id = hashcode.decode(code.upper()) or -1 + tag = Tag.query.filter_by(internal_id=internal_id).one() # type: Tag + if not tag.device: + raise TagNotLinked(tag.id) + return redirect(location=url_for_resource(Device, tag.device.id)) + @auth.Auth.requires_auth def post(self): """Creates a tag.""" num = request.args.get('num', type=int) if num: + # create unnamed tag res = self._create_many_regular_tags(num) else: + # create named tag res = self._post_one() return res @@ -42,7 +53,6 @@ class TagView(View): return response def _post_one(self): - # todo do we use this? 
t = request.get_json() tag = Tag(**t) if tag.like_etag(): @@ -52,34 +62,69 @@ class TagView(View): db.session.commit() return Response(status=201) + @auth.Auth.requires_auth + def delete(self, id): + tag = Tag.from_an_id(id).filter_by(owner=g.user).one() + tag.delete() + db.session().final_flush() + db.session.commit() + return Response(status=204) + class TagDeviceView(View): """Endpoints to work with the device of the tag; /tags/23/device.""" def one(self, id): """Gets the device from the tag.""" + if request.authorization: + return self.one_authorization(id) + tag = Tag.from_an_id(id).one() # type: Tag if not tag.device: raise TagNotLinked(tag.id) - if not request.authorization: - return redirect(location=url_for_resource(Device, tag.device.id)) + return redirect(location=url_for_resource(Device, tag.device.id)) + + @auth.Auth.requires_auth + def one_authorization(self, id): + tag = Tag.from_an_id(id).filter_by(owner=g.user).one() # type: Tag + if not tag.device: + raise TagNotLinked(tag.id) return app.resources[Device.t].schema.jsonify(tag.device) # noinspection PyMethodOverriding + @auth.Auth.requires_auth def put(self, tag_id: str, device_id: str): """Links an existing tag with a device.""" - tag = Tag.from_an_id(tag_id).one() # type: Tag + # tag = Tag.from_an_id(tag_id).one() # type: Tag + tag = Tag.from_an_id(tag_id).filter_by(owner=g.user).one() # type: Tag if tag.device_id: if tag.device_id == device_id: return Response(status=204) else: raise LinkedToAnotherDevice(tag.device_id) else: + # Check if this device exist for this owner + Device.query.filter_by(owner=g.user).filter_by(id=device_id).one() tag.device_id = device_id + db.session().final_flush() db.session.commit() return Response(status=204) + @auth.Auth.requires_auth + def delete(self, tag_id: str, device_id: str): + tag = Tag.from_an_id(tag_id).filter_by(owner=g.user).one() # type: Tag + device = Device.query.filter_by(owner=g.user).filter_by(id=device_id).one() + if tag.provider: + # if is an unamed tag not do nothing + return Response(status=204) + + if tag.device == device: + tag.device_id = None + db.session().final_flush() + db.session.commit() + return Response(status=204) + def get_device_from_tag(id: str): """Gets the device by passing a tag id. 
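The public tag code used by the new TagView.one and by the Tag.code property comes from the Hashids helper configured in resources/utils.py in the next hunk: Tag.code encodes internal_id, and the view upper-cases the incoming path segment (the alphabet is capital letters and digits) and decodes it back before querying. A short sketch of that round trip, assuming the hashids package; the salt here is a placeholder for the TAG_HASH setting.

from hashids import Hashids

ALPHABET = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ'
# Placeholder salt; the real helper reads it from the TAG_HASH setting.
hashcode = Hashids('example-salt', min_length=5, alphabet=ALPHABET)

code = hashcode.encode(1)   # a short code (>= 5 chars of ALPHABET), analogous to Tag.code
assert hashcode.decode(code) == (1,)   # decode yields a tuple of ints
assert hashcode.decode('') == ()       # empty or invalid codes decode to an empty tuple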
diff --git a/ereuse_devicehub/resources/utils.py b/ereuse_devicehub/resources/utils.py new file mode 100644 index 00000000..3d02d610 --- /dev/null +++ b/ereuse_devicehub/resources/utils.py @@ -0,0 +1,6 @@ +from hashids import Hashids +from decouple import config + +ALPHABET = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ' +SECRET = config('TAG_HASH', '') +hashcode = Hashids(SECRET, min_length=5, alphabet=ALPHABET) diff --git a/tests/files/proposal_extended_csv_report.csv b/tests/files/proposal_extended_csv_report.csv index 63709330..cf1e88a7 100644 --- a/tests/files/proposal_extended_csv_report.csv +++ b/tests/files/proposal_extended_csv_report.csv @@ -1,3 +1,3 @@ System ID;DocumentID;Public Link;Tag 1 Type;Tag 1 ID;Tag 1 Organization;Tag 2 Type;Tag 2 ID;Tag 2 Organization;Tag 3 Type;Tag 3 ID;Tag 3 Organization;Device Hardware ID;Device Type;Device Chassis;Device Serial Number;Device Model;Device Manufacturer;Registered in;Registered (process);Updated in (software);Updated in (web);Physical state;Trading state;Processor;RAM (MB);Data Storage Size (MB);Processor 1;Processor 1 Manufacturer;Processor 1 Model;Processor 1 Serial Number;Processor 1 Number of cores;Processor 1 Speed (GHz);Benchmark Processor 1 (points);Benchmark ProcessorSysbench Processor 1 (points);Processor 2;Processor 2 Manufacturer;Processor 2 Model;Processor 2 Serial Number;Processor 2 Number of cores;Processor 2 Speed (GHz);Benchmark Processor 2 (points);Benchmark ProcessorSysbench Processor 2 (points);RamModule 1;RamModule 1 Manufacturer;RamModule 1 Model;RamModule 1 Serial Number;RamModule 1 Size (MB);RamModule 1 Speed (MHz);RamModule 2;RamModule 2 Manufacturer;RamModule 2 Model;RamModule 2 Serial Number;RamModule 2 Size (MB);RamModule 2 Speed (MHz);RamModule 3;RamModule 3 Manufacturer;RamModule 3 Model;RamModule 3 Serial Number;RamModule 3 Size (MB);RamModule 3 Speed (MHz);RamModule 4;RamModule 4 Manufacturer;RamModule 4 Model;RamModule 4 Serial Number;RamModule 4 Size (MB);RamModule 4 Speed (MHz);DataStorage 1;DataStorage 1 Manufacturer;DataStorage 1 Model;DataStorage 1 Serial Number;DataStorage 1 Size (MB);Erasure DataStorage 1;Erasure DataStorage 1 Serial Number;Erasure DataStorage 1 Size (MB);Erasure DataStorage 1 Software;Erasure DataStorage 1 Result;Erasure DataStorage 1 Type;Erasure DataStorage 1 Method;Erasure DataStorage 1 Elapsed (hours);Erasure DataStorage 1 Date;Erasure DataStorage 1 Steps;Erasure DataStorage 1 Steps Start Time;Erasure DataStorage 1 Steps End Time;Benchmark DataStorage 1 Read Speed (MB/s);Benchmark DataStorage 1 Writing speed (MB/s);Test DataStorage 1 Software;Test DataStorage 1 Type;Test DataStorage 1 Result;Test DataStorage 1 Power on (hours used);Test DataStorage 1 Lifetime remaining (percentage);DataStorage 2;DataStorage 2 Manufacturer;DataStorage 2 Model;DataStorage 2 Serial Number;DataStorage 2 Size (MB);Erasure DataStorage 2;Erasure DataStorage 2 Serial Number;Erasure DataStorage 2 Size (MB);Erasure DataStorage 2 Software;Erasure DataStorage 2 Result;Erasure DataStorage 2 Type;Erasure DataStorage 2 Method;Erasure DataStorage 2 Elapsed (hours);Erasure DataStorage 2 Date;Erasure DataStorage 2 Steps;Erasure DataStorage 2 Steps Start Time;Erasure DataStorage 2 Steps End Time;Benchmark DataStorage 2 Read Speed (MB/s);Benchmark DataStorage 2 Writing speed (MB/s);Test DataStorage 2 Software;Test DataStorage 2 Type;Test DataStorage 2 Result;Test DataStorage 2 Power on (hours used);Test DataStorage 2 Lifetime remaining (percentage);DataStorage 3;DataStorage 3 Manufacturer;DataStorage 3 
Model;DataStorage 3 Serial Number;DataStorage 3 Size (MB);Erasure DataStorage 3;Erasure DataStorage 3 Serial Number;Erasure DataStorage 3 Size (MB);Erasure DataStorage 3 Software;Erasure DataStorage 3 Result;Erasure DataStorage 3 Type;Erasure DataStorage 3 Method;Erasure DataStorage 3 Elapsed (hours);Erasure DataStorage 3 Date;Erasure DataStorage 3 Steps;Erasure DataStorage 3 Steps Start Time;Erasure DataStorage 3 Steps End Time;Benchmark DataStorage 3 Read Speed (MB/s);Benchmark DataStorage 3 Writing speed (MB/s);Test DataStorage 3 Software;Test DataStorage 3 Type;Test DataStorage 3 Result;Test DataStorage 3 Power on (hours used);Test DataStorage 3 Lifetime remaining (percentage);DataStorage 4;DataStorage 4 Manufacturer;DataStorage 4 Model;DataStorage 4 Serial Number;DataStorage 4 Size (MB);Erasure DataStorage 4;Erasure DataStorage 4 Serial Number;Erasure DataStorage 4 Size (MB);Erasure DataStorage 4 Software;Erasure DataStorage 4 Result;Erasure DataStorage 4 Type;Erasure DataStorage 4 Method;Erasure DataStorage 4 Elapsed (hours);Erasure DataStorage 4 Date;Erasure DataStorage 4 Steps;Erasure DataStorage 4 Steps Start Time;Erasure DataStorage 4 Steps End Time;Benchmark DataStorage 4 Read Speed (MB/s);Benchmark DataStorage 4 Writing speed (MB/s);Test DataStorage 4 Software;Test DataStorage 4 Type;Test DataStorage 4 Result;Test DataStorage 4 Power on (hours used);Test DataStorage 4 Lifetime remaining (percentage);Motherboard 1;Motherboard 1 Manufacturer;Motherboard 1 Model;Motherboard 1 Serial Number;Display 1;Display 1 Manufacturer;Display 1 Model;Display 1 Serial Number;GraphicCard 1;GraphicCard 1 Manufacturer;GraphicCard 1 Model;GraphicCard 1 Serial Number;GraphicCard 1 Memory (MB);GraphicCard 2;GraphicCard 2 Manufacturer;GraphicCard 2 Model;GraphicCard 2 Serial Number;GraphicCard 2 Memory (MB);NetworkAdapter 1;NetworkAdapter 1 Manufacturer;NetworkAdapter 1 Model;NetworkAdapter 1 Serial Number;NetworkAdapter 2;NetworkAdapter 2 Manufacturer;NetworkAdapter 2 Model;NetworkAdapter 2 Serial Number;SoundCard 1;SoundCard 1 Manufacturer;SoundCard 1 Model;SoundCard 1 Serial Number;SoundCard 2;SoundCard 2 Manufacturer;SoundCard 2 Model;SoundCard 2 Serial Number;Device Rate;Device Range;Processor Rate;Processor Range;RAM Rate;RAM Range;Data Storage Rate;Data Storage Range;Price;Benchmark RamSysbench (points) -1;;http://localhost/devices/1;unamed;foo;FooOrg;;;;;;;laptop-asustek_computer_inc-1001pxd-b8oaas048285-14:da:e9:42:f6:7b;Laptop;Netbook;b8oaas048285;1001pxd;asustek computer inc.;Thu Nov 12 19:53:01 2020;Workbench 11.0a2;2020-11-12 19:54:03.959185+01:00;;;;intel atom cpu n455 @ 2.66ghz;1024;238475;Processor 4: model intel atom cpu n455 @ 2.66ghz, S/N None;intel corp.;intel atom cpu n455 @ 2.66ghz;;1;2.667;6666.24;164.0803;;;;;;;;;RamModule 8: model None, S/N None;;;;1024;667;;;;;;;;;;;;;;;;;;;HardDrive 9: model hts54322, S/N e2024242cv86mm;hitachi;hts54322;e2024242cv86mm;238475;harddrive-hitachi-hts54322-e2024242cv86mm;e2024242cv86mm;238475;Workbench 11.0a2;Success;EraseBasic;Shred;1:16:49;2020-11-12 19:53:01.899092+01:00;✓ – StepRandom 1:16:49;2018-07-03 11:15:22.257059+02:00;2018-07-03 12:32:11.843190+02:00;66.2;21.8;Workbench 11.0a2;Short;Failure;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;Motherboard 10: model 1001pxd, S/N eee0123456720;asustek computer inc.;1001pxd;eee0123456720;;;;;GraphicCard 5: model atom processor d4xx/d5xx/n4xx/n5xx integrated graphics controller, S/N None;intel corporation;atom processor d4xx/d5xx/n4xx/n5xx integrated graphics 
controller;;256;;;;;;NetworkAdapter 2: model ar9285 wireless network adapter, S/N 74:2f:68:8b:fd:c9;qualcomm atheros;ar9285 wireless network adapter;74:2f:68:8b:fd:c9;NetworkAdapter 3: model ar8152 v2.0 fast ethernet, S/N 14:da:e9:42:f6:7b;qualcomm atheros;ar8152 v2.0 fast ethernet;14:da:e9:42:f6:7b;SoundCard 6: model nm10/ich7 family high definition audio controller, S/N None;intel corporation;nm10/ich7 family high definition audio controller;;;;;;1.75;LOW;1.55;LOW;1.53;LOW;3.76;HIGH;52.50 €;15.7188 +1;;http://localhost/devices/1;named;foo;FooOrg;;;;;;;laptop-asustek_computer_inc-1001pxd-b8oaas048285-14:da:e9:42:f6:7b;Laptop;Netbook;b8oaas048285;1001pxd;asustek computer inc.;Thu Nov 12 19:53:01 2020;Workbench 11.0a2;2020-11-12 19:54:03.959185+01:00;;;;intel atom cpu n455 @ 2.66ghz;1024;238475;Processor 4: model intel atom cpu n455 @ 2.66ghz, S/N None;intel corp.;intel atom cpu n455 @ 2.66ghz;;1;2.667;6666.24;164.0803;;;;;;;;;RamModule 8: model None, S/N None;;;;1024;667;;;;;;;;;;;;;;;;;;;HardDrive 9: model hts54322, S/N e2024242cv86mm;hitachi;hts54322;e2024242cv86mm;238475;harddrive-hitachi-hts54322-e2024242cv86mm;e2024242cv86mm;238475;Workbench 11.0a2;Success;EraseBasic;Shred;1:16:49;2020-11-12 19:53:01.899092+01:00;✓ – StepRandom 1:16:49;2018-07-03 11:15:22.257059+02:00;2018-07-03 12:32:11.843190+02:00;66.2;21.8;Workbench 11.0a2;Short;Failure;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;Motherboard 10: model 1001pxd, S/N eee0123456720;asustek computer inc.;1001pxd;eee0123456720;;;;;GraphicCard 5: model atom processor d4xx/d5xx/n4xx/n5xx integrated graphics controller, S/N None;intel corporation;atom processor d4xx/d5xx/n4xx/n5xx integrated graphics controller;;256;;;;;;NetworkAdapter 2: model ar9285 wireless network adapter, S/N 74:2f:68:8b:fd:c9;qualcomm atheros;ar9285 wireless network adapter;74:2f:68:8b:fd:c9;NetworkAdapter 3: model ar8152 v2.0 fast ethernet, S/N 14:da:e9:42:f6:7b;qualcomm atheros;ar8152 v2.0 fast ethernet;14:da:e9:42:f6:7b;SoundCard 6: model nm10/ich7 family high definition audio controller, S/N None;intel corporation;nm10/ich7 family high definition audio controller;;;;;;1.75;LOW;1.55;LOW;1.53;LOW;3.76;HIGH;52.50 €;15.7188 11;;http://localhost/devices/11;;;;;;;;;;laptop-asustek_computer_inc-1001pxd-b8oaas048287-14:da:e9:42:f6:7c;Laptop;Netbook;b8oaas048287;1001pxd;asustek computer inc.;Thu Nov 12 19:53:02 2020;Workbench 11.0b11;2020-11-12 19:53:02.225373+01:00;;;;intel atom cpu n455 @ 1.66ghz;2048;558558;Processor 15: model intel atom cpu n455 @ 1.66ghz, S/N None;intel corp.;intel atom cpu n455 @ 1.66ghz;;1;1.667;6666.24;164.0803;;;;;;;;;RamModule 18: model None, S/N None;;;;1024;667;RamModule 19: model 48594d503131325336344350362d53362020, S/N 4f43487b;hynix semiconductor;48594d503131325336344350362d53362020;4f43487b;1024;667;;;;;;;;;;;;;HardDrive 20: model hts54322, S/N e2024242cv86hj;hitachi;hts54322;e2024242cv86hj;238475;harddrive-hitachi-hts54322-e2024242cv86hj;e2024242cv86hj;238475;Workbench 11.0b11;Success;EraseBasic;Shred;1:16:49;2020-11-12 19:53:02.175189+01:00;✓ – StepRandom 1:16:49;2018-07-03 11:15:22.257059+02:00;2018-07-03 12:32:11.843190+02:00;66.2;21.8;Workbench 11.0b11;Extended;Failure;;;DataStorage 21: model wdc wd1600bevt-2, S/N wd-wx11a80w7430;western digital;wdc wd1600bevt-2;wd-wx11a80w7430;160041;datastorage-western_digital-wdc_wd1600bevt-2-wd-wx11a80w7430;wd-wx11a80w7430;160041;Workbench 11.0b11;Failure;EraseBasic;Shred;0:45:36;2020-11-12 19:53:02.176882+01:00;✓ – StepRandom 0:45:36;2019-10-23 
09:49:54.410830+02:00;2019-10-23 10:35:31.400587+02:00;41.6;17.3;Workbench 11.0b11;Short;Success;5293;195 days, 12:00:00;SolidStateDrive 22: model wdc wd1600bevt-2, S/N wd-wx11a80w7430;western digital;wdc wd1600bevt-2;wd-wx11a80w7430;160042;solidstatedrive-western_digital-wdc_wd1600bevt-2-wd-wx11a80w7430;wd-wx11a80w7430;160042;Workbench 11.0b11;Success;EraseSectors;Badblocks;1:46:03;2020-11-12 19:53:02.180043+01:00;✓ – StepRandom 0:46:03,✓ – StepZero 1:00:00;2019-08-19 18:48:19.690458+02:00,2019-08-19 19:34:22.690458+02:00;2019-08-19 19:34:22.930562+02:00,2019-08-19 20:34:22.930562+02:00;41.1;17.1;Workbench 11.0b11;Short;Success;5231;194 days, 17:00:00;;;;;;;;;;;;;;;;;;;;;;;;;Motherboard 23: model 1001pxd, S/N eee0123456789;asustek computer inc.;1001pxd;eee0123456789;;"auo ""auo""";auo lcd monitor;;GraphicCard 16: model atom processor d4xx/d5xx/n4xx/n5xx integrated graphics controller, S/N None;intel corporation;atom processor d4xx/d5xx/n4xx/n5xx integrated graphics controller;;256;;;;;;NetworkAdapter 13: model ar9285 wireless network adapter, S/N 74:2f:68:8b:fd:c8;qualcomm atheros;ar9285 wireless network adapter;74:2f:68:8b:fd:c8;NetworkAdapter 14: model ar8152 v2.0 fast ethernet, S/N 14:da:e9:42:f6:7c;qualcomm atheros;ar8152 v2.0 fast ethernet;14:da:e9:42:f6:7c;SoundCard 7: model usb 2.0 uvc vga webcam, S/N 0x0001;azurewave;usb 2.0 uvc vga webcam;0x0001;SoundCard 17: model nm10/ich7 family high definition audio controller, S/N None;intel corporation;nm10/ich7 family high definition audio controller;;1.72;LOW;1.31;LOW;1.99;LOW;3.97;HIGH;51.60 €;15.7188 diff --git a/tests/test_action.py b/tests/test_action.py index 62338b7f..119cc5a0 100644 --- a/tests/test_action.py +++ b/tests/test_action.py @@ -753,6 +753,8 @@ def test_deallocate_bad_dates(user: UserClient): def test_trade2(action_model_state: Tuple[Type[models.Action], states.Trading], user: UserClient): """Tests POSTing all Trade actions.""" # todo missing None states.Trading for after cancelling renting, for example + # import pdb; pdb.set_trace() + # Remove this test action_model, state = action_model_state snapshot, _ = user.post(file('basic.snapshot'), res=models.Snapshot) action = { diff --git a/tests/test_documents.py b/tests/test_documents.py index 5ac747b5..1f8907f8 100644 --- a/tests/test_documents.py +++ b/tests/test_documents.py @@ -1,8 +1,9 @@ import csv import hashlib from datetime import datetime -from io import StringIO +from io import StringIO, BytesIO from pathlib import Path +from flask import url_for import pytest from werkzeug.exceptions import Unauthorized @@ -463,6 +464,159 @@ def test_get_document_lots(user: UserClient, user2: UserClient): assert export2_csv[1][3] == 'comments,lot3,testcomment-lot3,' +@pytest.mark.mvp +def test_verify_stamp(user: UserClient, client: Client): + """Test verify stamp of one export device information in a csv file.""" + snapshot, _ = user.post(file('basic.snapshot'), res=Snapshot) + csv_str, _ = user.get(res=documents.DocumentDef.t, + item='devices/', + accept='text/csv', + query=[('filter', {'type': ['Computer']})]) + + response, _ = client.post(res=documents.DocumentDef.t, + item='stamps/', + content_type='multipart/form-data', + accept='text/html', + data={'docUpload': [(BytesIO(bytes(csv_str, 'utf-8')), 'example.csv')]}, + status=200) + assert "alert alert-info" in response + assert not "alert alert-danger" in response + + response, _ = client.post(res=documents.DocumentDef.t, + item='stamps/', + content_type='multipart/form-data', + accept='text/html', + data={'docUpload': 
[(BytesIO(b'abc'), 'example.csv')]}, + status=200) + + assert not "alert alert-info" in response + assert "alert alert-danger" in response + + response, _ = client.get(res=documents.DocumentDef.t, + item='stamps/', + accept='text/html', + status=200) + + assert not "alert alert-info" in response + assert not "alert alert-danger" in response + + +@pytest.mark.mvp +def test_verify_stamp_log_info(user: UserClient, client: Client): + """Test verify stamp of one export lots-info in a csv file.""" + + l, _ = user.post({'name': 'Lot1', 'description': 'comments,lot1,testcomment-lot1,'}, res=Lot) + l, _ = user.post({'name': 'Lot2', 'description': 'comments,lot2,testcomment-lot2,'}, res=Lot) + + csv_str, _ = user.get(res=documents.DocumentDef.t, + item='lots/', + accept='text/csv') + + response, _ = client.post(res=documents.DocumentDef.t, + item='stamps/', + content_type='multipart/form-data', + accept='text/html', + data={'docUpload': [(BytesIO(bytes(csv_str, 'utf-8')), + 'example.csv')]}, + status=200) + assert "alert alert-info" in response + + +@pytest.mark.mvp +def test_verify_stamp_devices_stock(user: UserClient, client: Client): + """Test verify stamp of one export device information in a csv file.""" + + snapshot, _ = user.post(file('basic.snapshot'), res=Snapshot) + + csv_str, _ = user.get(res=documents.DocumentDef.t, + item='stock/', + accept='text/csv', + query=[('filter', {'type': ['Computer']})]) + + response, _ = client.post(res=documents.DocumentDef.t, + item='stamps/', + content_type='multipart/form-data', + accept='text/html', + data={'docUpload': [(BytesIO(bytes(csv_str, 'utf-8')), + 'example.csv')]}, + status=200) + assert "alert alert-info" in response + + +@pytest.mark.mvp +def test_verify_stamp_csv_actions(user: UserClient, client: Client): + """Test verify stamp of one export device information in a csv file with others users.""" + acer = file('acer.happy.battery.snapshot') + snapshot, _ = user.post(acer, res=Snapshot) + device_id = snapshot['device']['id'] + post_request = {"transaction": "ccc", "name": "John", "endUsers": 1, + "devices": [device_id], "description": "aaa", + "finalUserCode": "abcdefjhi", + "startTime": "2020-11-01T02:00:00+00:00", + "endTime": "2020-12-01T02:00:00+00:00" + } + + user.post(res=Allocate, data=post_request) + hdd = [c for c in acer['components'] if c['type'] == 'HardDrive'][0] + hdd_action = [a for a in hdd['actions'] if a['type'] == 'TestDataStorage'][0] + hdd_action['lifetime'] += 1000 + acer.pop('elapsed') + acer['licence_version'] = '1.0.0' + snapshot, _ = client.post(acer, res=Live) + + csv_str, _ = user.get(res=documents.DocumentDef.t, + item='actions/', + accept='text/csv', + query=[('filter', {'type': ['Computer']})]) + + response, _ = client.post(res=documents.DocumentDef.t, + item='stamps/', + content_type='multipart/form-data', + accept='text/html', + data={'docUpload': [(BytesIO(bytes(csv_str, 'utf-8')), + 'example.csv')]}, + status=200) + assert "alert alert-info" in response + + +@pytest.mark.mvp +def test_verify_stamp_erasure_certificate(user: UserClient, client: Client): + """Test verify stamp of one export certificate in PDF.""" + s = file('erase-sectors.snapshot') + snapshot, response = user.post(s, res=Snapshot) + + doc, _ = user.get(res=documents.DocumentDef.t, + item='erasures/', + query=[('filter', {'id': [snapshot['device']['id']]})], + accept=ANY) + + response, _ = client.post(res=documents.DocumentDef.t, + item='stamps/', + content_type='multipart/form-data', + accept='text/html', + data={'docUpload': 
[(BytesIO(bytes(doc, 'utf-8')), + 'example.csv')]}, + status=200) + assert "alert alert-danger" in response + + doc, _ = user.get(res=documents.DocumentDef.t, + item='erasures/', + query=[ + ('filter', {'id': [snapshot['device']['id']]}), + ('format', 'PDF') + ], + accept='application/pdf') + + response, _ = client.post(res=documents.DocumentDef.t, + item='stamps/', + content_type='multipart/form-data', + accept='text/html', + data={'docUpload': [(BytesIO(doc), + 'example.csv')]}, + status=200) + assert "alert alert-info" in response + + @pytest.mark.mvp def test_get_document_internal_stats(user: UserClient, user2: UserClient): """Tests for get teh internal stats.""" diff --git a/tests/test_tag.py b/tests/test_tag.py index b0336021..f6402bd4 100644 --- a/tests/test_tag.py +++ b/tests/test_tag.py @@ -8,7 +8,7 @@ from pytest import raises from teal.db import MultipleResourcesFound, ResourceNotFound, UniqueViolation, DBError from teal.marshmallow import ValidationError -from ereuse_devicehub.client import UserClient +from ereuse_devicehub.client import UserClient, Client from ereuse_devicehub.db import db from ereuse_devicehub.devicehub import Devicehub from ereuse_devicehub.resources.action.models import Snapshot @@ -33,6 +33,68 @@ def test_create_tag(user: UserClient): tag = Tag.query.one() assert tag.id == 'bar-1' assert tag.provider == URL('http://foo.bar') + res, _ = user.get(res=Tag, item=tag.code, status=422) + assert res['type'] == 'TagNotLinked' + + +@pytest.mark.mvp +@pytest.mark.usefixtures(conftest.app_context.__name__) +def test_create_tag_with_device(user: UserClient): + """Creates a tag specifying linked with one device.""" + pc = Desktop(serial_number='sn1', chassis=ComputerChassis.Tower, owner_id=user.user['id']) + db.session.add(pc) + db.session.commit() + tag = Tag(id='bar', owner_id=user.user['id']) + db.session.add(tag) + db.session.commit() + data = '{tag_id}/device/{device_id}'.format(tag_id=tag.id, device_id=pc.id) + user.put({}, res=Tag, item=data, status=204) + user.get(res=Tag, item='{}/device'.format(tag.id)) + user.delete({}, res=Tag, item=data, status=204) + res, _ = user.get(res=Tag, item='{}/device'.format(tag.id), status=422) + assert res['type'] == 'TagNotLinked' + + +@pytest.mark.mvp +@pytest.mark.usefixtures(conftest.app_context.__name__) +def test_delete_tags(user: UserClient, client: Client): + """Delete a named tag.""" + # Delete Tag Named + pc = Desktop(serial_number='sn1', chassis=ComputerChassis.Tower, owner_id=user.user['id']) + db.session.add(pc) + db.session.commit() + tag = Tag(id='bar', owner_id=user.user['id'], device_id=pc.id) + db.session.add(tag) + db.session.commit() + tag = Tag.query.one() + assert tag.id == 'bar' + # Is not possible delete one tag linked to one device + res, _ = user.delete(res=Tag, item=tag.id, status=422) + msg = 'The tag bar is linked to device' + assert msg in res['message'][0] + + tag.device_id = None + db.session.add(tag) + db.session.commit() + # Is not possible delete one tag from an anonymous user + client.delete(res=Tag, item=tag.id, status=401) + + # Is possible delete one normal tag + user.delete(res=Tag, item=tag.id) + user.get(res=Tag, item=tag.id, status=404) + + # Delete Tag UnNamed + org = Organization(name='bar', tax_id='bartax') + tag = Tag(id='bar-1', org=org, provider=URL('http://foo.bar'), owner_id=user.user['id']) + db.session.add(tag) + db.session.commit() + tag = Tag.query.one() + assert tag.id == 'bar-1' + res, _ = user.delete(res=Tag, item=tag.id, status=422) + msg = 'This tag {} is unnamed tag. 
It is imposible delete.'.format(tag.id) + assert msg in res['message'] + tag = Tag.query.one() + assert tag.id == 'bar-1' @pytest.mark.mvp @@ -51,13 +113,15 @@ def test_create_tag_default_org(user: UserClient): @pytest.mark.mvp @pytest.mark.usefixtures(conftest.app_context.__name__) -def test_create_tag_no_slash(): - """Checks that no tags can be created that contain a slash.""" - with raises(ValidationError): - Tag('/') - - with raises(ValidationError): - Tag('bar', secondary='/') +def test_create_same_tag_default_org_two_users(user: UserClient, user2: UserClient): + """Creates a tag using the default organization.""" + tag = Tag(id='foo-1', owner_id=user.user['id']) + tag2 = Tag(id='foo-1', owner_id=user2.user['id']) + db.session.add(tag) + db.session.add(tag2) + db.session.commit() + assert tag.org.name == 'FooOrg' # as defined in the settings + assert tag2.org.name == 'FooOrg' # as defined in the settings @pytest.mark.mvp @@ -75,7 +139,19 @@ def test_create_two_same_tags(user: UserClient): db.session.add(Tag(id='foo-bar', owner_id=user.user['id'])) org2 = Organization(name='org 2', tax_id='tax id org 2') db.session.add(Tag(id='foo-bar', org=org2, owner_id=user.user['id'])) - db.session.commit() + with raises(DBError): + db.session.commit() + + +@pytest.mark.mvp +@pytest.mark.usefixtures(conftest.app_context.__name__) +def test_create_tag_no_slash(): + """Checks that no tags can be created that contain a slash.""" + with raises(ValidationError): + Tag('/') + + with raises(ValidationError): + Tag('bar', secondary='/') @pytest.mark.mvp @@ -131,17 +207,39 @@ def test_tag_get_device_from_tag_endpoint_no_tag(user: UserClient): @pytest.mark.mvp -def test_tag_get_device_from_tag_endpoint_multiple_tags(app: Devicehub, user: UserClient): - """As above, but when there are two tags with the same ID, the +@pytest.mark.usefixtures(conftest.app_context.__name__) +def test_tag_get_device_from_tag_endpoint_multiple_tags(app: Devicehub, user: UserClient, user2: UserClient, client: Client): + """As above, but when there are two tags with the secondary ID, the system should not return any of both (to be deterministic) so it should raise an exception. 
""" - with app.app_context(): - db.session.add(Tag(id='foo-bar', owner_id=user.user['id'])) - org2 = Organization(name='org 2', tax_id='tax id org 2') - db.session.add(Tag(id='foo-bar', org=org2, owner_id=user.user['id'])) + db.session.add(Tag(id='foo', secondary='bar', owner_id=user.user['id'])) + db.session.commit() + + db.session.add(Tag(id='foo', secondary='bar', owner_id=user2.user['id'])) + db.session.commit() + + db.session.add(Tag(id='foo2', secondary='bar', owner_id=user.user['id'])) + with raises(DBError): db.session.commit() - user.get(res=Tag, item='foo-bar/device', status=MultipleResourcesFound) + db.session.rollback() + + tag1 = Tag.from_an_id('foo').filter_by(owner_id=user.user['id']).one() + tag2 = Tag.from_an_id('foo').filter_by(owner_id=user2.user['id']).one() + pc1 = Desktop(serial_number='sn1', chassis=ComputerChassis.Tower, owner_id=user.user['id']) + pc2 = Desktop(serial_number='sn2', chassis=ComputerChassis.Tower, owner_id=user2.user['id']) + pc1.tags.add(tag1) + pc2.tags.add(tag2) + db.session.add(pc1) + db.session.add(pc2) + db.session.commit() + computer, _ = user.get(res=Tag, item='foo/device') + assert computer['serialNumber'] == 'sn1' + computer, _ = user2.get(res=Tag, item='foo/device') + assert computer['serialNumber'] == 'sn2' + + _, status = client.get(res=Tag, item='foo/device', status=MultipleResourcesFound) + assert status.status_code == 422 @pytest.mark.mvp @@ -216,8 +314,7 @@ def test_tag_secondary_workbench_link_find(user: UserClient): t = Tag('foo', secondary='bar', owner_id=user.user['id']) db.session.add(t) db.session.flush() - assert Tag.from_an_id('bar').one() == t - assert Tag.from_an_id('foo').one() == t + assert Tag.from_an_id('bar').one() == Tag.from_an_id('foo').one() with pytest.raises(ResourceNotFound): Tag.from_an_id('nope').one()