import csv
import datetime
import enum
import json
import time
import uuid
from collections import OrderedDict
from io import StringIO
from typing import Callable, Iterable, Tuple

import boltons
import flask
import flask_weasyprint
import teal.marshmallow
from boltons import urlutils
from flask import current_app as app
from flask import g, make_response, request
from flask.json import jsonify
from teal.cache import cache
from teal.resource import Resource, View

from ereuse_devicehub import auth
from ereuse_devicehub.db import db
from ereuse_devicehub.resources.action import models as evs
from ereuse_devicehub.resources.action.models import Trade
from ereuse_devicehub.resources.deliverynote.models import Deliverynote
from ereuse_devicehub.resources.device import models as devs
from ereuse_devicehub.resources.device.models import Device
from ereuse_devicehub.resources.device.views import DeviceView
from ereuse_devicehub.resources.documents.device_row import (
    ActionRow,
    DeviceRow,
    StockRow,
)
from ereuse_devicehub.resources.enums import SessionType
from ereuse_devicehub.resources.hash_reports import ReportHash, insert_hash, verify_hash
from ereuse_devicehub.resources.lot import LotView
from ereuse_devicehub.resources.lot.models import Lot
from ereuse_devicehub.resources.user.models import Session


class Format(enum.Enum):
    HTML = 'HTML'
    PDF = 'PDF'


class DocumentView(DeviceView):
    class FindArgs(DeviceView.FindArgs):
        format = teal.marshmallow.EnumField(Format, missing=None)

    def get(self, id):
        """Get a collection of resources or a specific one.
        ---
        parameters:
          - name: id
            in: path
            description: The identifier of the resource.
            type: string
            required: false
        responses:
          200:
            description: Return the collection or the specific one.
        """
        args = self.QUERY_PARSER.parse(
            self.find_args, flask.request, locations=('querystring',)
        )
        ids = []
        if 'filter' in request.args:
            filters = json.loads(request.args.get('filter', {}))
            ids = filters.get('ids', [])

        if not ids and not id:
            msg = 'Document must be an ID or UUID.'
            raise teal.marshmallow.ValidationError(msg)

        if id:
            try:
                id = uuid.UUID(id)
            except ValueError:
                try:
                    ids.append(int(id))
                except ValueError:
                    msg = 'Document must be an ID or UUID.'
                    raise teal.marshmallow.ValidationError(msg)
                else:
                    query = devs.Device.query.filter(Device.id.in_(ids))
            else:
                query = evs.Action.query.filter_by(id=id)
        else:
            query = devs.Device.query.filter(Device.id.in_(ids))

        # if id:
        #     # todo we assume we can pass both device id and action id
        #     # for certificates... how is it going to end up being?
        #     try:
        #         id = uuid.UUID(id)
        #     except ValueError:
        #         try:
        #             id = int(id)
        #         except ValueError:
        #             raise teal.marshmallow.ValidationError('Document must be an ID or UUID.')
        #         else:
        #             query = devs.Device.query.filter_by(id=id)
        #     else:
        #         query = evs.Action.query.filter_by(id=id)
        # else:
        #     flask.current_app.auth.requires_auth(lambda: None)()  # todo not nice
        #     query = self.query(args)

        type = urlutils.URL(flask.request.url).path_parts[-2]
        if type == 'erasures':
            template = self.erasure(query)
        if args.get('format') == Format.PDF:
            res = flask_weasyprint.render_pdf(
                flask_weasyprint.HTML(string=template),
                download_filename='{}.pdf'.format(type),
            )
            insert_hash(res.data)
        else:
            res = flask.make_response(template)
        return res

    @staticmethod
    def erasure(query: db.Query):
        def erasures():
            for model in query:
                if isinstance(model, devs.Computer):
                    for erasure in model.privacy:
                        yield erasure
                elif isinstance(model, devs.DataStorage):
                    erasure = model.privacy
                    if erasure:
                        yield erasure
                else:
                    assert isinstance(model, evs.EraseBasic)
                    yield model

        url_pdf = boltons.urlutils.URL(flask.request.url)
        url_pdf.query_params['format'] = 'PDF'
        params = {
            'title': 'Device Sanitization',
            'erasures': tuple(erasures()),
            'url_pdf': url_pdf.to_text(),
        }
        return flask.render_template('documents/erasure.html', **params)
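
# Usage sketch for DocumentView above (hypothetical URLs, assuming this
# resource is mounted under a /documents prefix):
#
#   GET .../erasures/?filter={"ids": [42]}       -> HTML erasure certificate
#   GET .../erasures/<action-uuid>?format=PDF    -> the same certificate as PDF
#
# The PDF payload is registered with insert_hash(), so it can later be
# verified through the check/stamps endpoints defined below.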


class DevicesDocumentView(DeviceView):
    @cache(datetime.timedelta(minutes=1))
    def find(self, args: dict):
        query = self.query(args)
        ids = []
        if 'filter' in request.args:
            filters = json.loads(request.args.get('filter', {}))
            ids = filters.get('ids', [])
            query = self.query(args).filter(Device.id.in_(ids))
        return self.generate_post_csv(query)

    def generate_post_csv(self, query):
        """Get device query and put information in csv format."""
        data = StringIO()
        cw = csv.writer(
            data,
            delimiter=';',
            lineterminator="\n",
            quotechar='"',
            quoting=csv.QUOTE_ALL,
        )
        first = True
        document_ids = self.get_documents_id()
        for device in query:
            d = DeviceRow(device, document_ids)
            if first:
                cw.writerow(d.keys())
                first = False
            cw.writerow(d.values())
        bfile = data.getvalue().encode('utf-8')
        output = make_response(bfile)
        insert_hash(bfile)
        output.headers['Content-Disposition'] = 'attachment; filename=export.csv'
        output.headers['Content-type'] = 'text/csv'
        return output

    def get_documents_id(self):
        # documentIds = {dev_id: document_id, ...}
        deliverys = Deliverynote.query.all()
        documentIds = {x.id: d.document_id for d in deliverys for x in d.lot.devices}
        return documentIds
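
# Usage sketch for DevicesDocumentView above (hypothetical URL, assuming the
# same /documents prefix):
#
#   GET .../devices/?filter={"ids": [1, 2, 3]}   -> export.csv
#
# The generated CSV bytes are passed to insert_hash() before being returned,
# so the download can later be verified with the check/stamps endpoints.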


class ActionsDocumentView(DeviceView):
    @cache(datetime.timedelta(minutes=1))
    def find(self, args: dict):
        filters = json.loads(request.args.get('filter', {}))
        ids = filters.get('ids', [])
        query = self.query(args).filter(Device.id.in_(ids))
        return self.generate_post_csv(query)

    def generate_post_csv(self, query):
        """Get device query and put information in csv format."""
        data = StringIO()
        cw = csv.writer(
            data,
            delimiter=';',
            lineterminator="\n",
            quotechar='"',
            quoting=csv.QUOTE_ALL,
        )
        first = True
        devs_id = []
        for device in query:
            devs_id.append(device.id)
            for allocate in device.get_metrics():
                d = ActionRow(allocate)
                if first:
                    cw.writerow(d.keys())
                    first = False
                cw.writerow(d.values())
        query_trade = Trade.query.filter(
            Trade.devices.any(Device.id.in_(devs_id))
        ).all()

        lot_id = request.args.get('lot')
        if lot_id and not query_trade:
            lot = Lot.query.filter_by(id=lot_id).one()
            if hasattr(lot, "trade") and lot.trade:
                if g.user in [lot.trade.user_from, lot.trade.user_to]:
                    query_trade = [lot.trade]

        for trade in query_trade:
            data_rows = trade.get_metrics()
            for row in data_rows:
                d = ActionRow(row)
                if first:
                    cw.writerow(d.keys())
                    first = False
                cw.writerow(d.values())

        bfile = data.getvalue().encode('utf-8')
        output = make_response(bfile)
        insert_hash(bfile)
        output.headers[
            'Content-Disposition'
        ] = 'attachment; filename=actions_export.csv'
        output.headers['Content-type'] = 'text/csv'
        return output
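
# Usage sketch for ActionsDocumentView above (hypothetical URL and values):
#
#   GET .../actions/?filter={"ids": [1, 2]}&lot=<lot-uuid>   -> actions_export.csv
#
# The export contains one ActionRow per metric of every selected device plus
# one row per metric of any related Trade; the optional ?lot= argument is only
# used as a fallback when no Trade is found through the devices themselves.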


class LotsDocumentView(LotView):
    def find(self, args: dict):
        query = (x for x in self.query(args) if x.owner_id == g.user.id)
        return self.generate_lots_csv(query)

    def generate_lots_csv(self, query):
        """Get lot query and put information in csv format."""
        data = StringIO()
        cw = csv.writer(data)
        first = True
        for lot in query:
            _lot = LotRow(lot)
            if first:
                cw.writerow(_lot.keys())
                first = False
            cw.writerow(_lot.values())
        bfile = data.getvalue().encode('utf-8')
        output = make_response(bfile)
        insert_hash(bfile)
        output.headers['Content-Disposition'] = 'attachment; filename=lots-info.csv'
        output.headers['Content-type'] = 'text/csv'
        return output


class LotRow(OrderedDict):
    def __init__(self, lot: Lot) -> None:
        super().__init__()
        self.lot = lot
        # General information about lot
        self['Id'] = lot.id.hex
        self['Name'] = lot.name
        self['Registered in'] = format(lot.created, '%c')
        try:
            self['Description'] = lot.description
        except Exception:
            self['Description'] = ''
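
# LotRow above flattens a Lot into the columns written by LotsDocumentView
# (default csv dialect, comma separated). Illustrative row, with made-up
# values:
#
#   Id,Name,Registered in,Description
#   0f3a...,Donation 2021,Mon Mar  1 10:00:00 2021,Incoming donation lot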


class StockDocumentView(DeviceView):
    # @cache(datetime.timedelta(minutes=1))
    def find(self, args: dict):
        query = (x for x in self.query(args) if x.owner_id == g.user.id)
        return self.generate_post_csv(query)

    def generate_post_csv(self, query):
        """Get device query and put information in csv format."""
        data = StringIO()
        cw = csv.writer(
            data,
            delimiter=';',
            lineterminator="\n",
            quotechar='"',
            quoting=csv.QUOTE_ALL,
        )
        first = True
        for device in query:
            d = StockRow(device)
            if first:
                cw.writerow(d.keys())
                first = False
            cw.writerow(d.values())
        bfile = data.getvalue().encode('utf-8')
        output = make_response(bfile)
        insert_hash(bfile)
        output.headers['Content-Disposition'] = 'attachment; filename=devices-stock.csv'
        output.headers['Content-type'] = 'text/csv'
        return output


class CheckView(View):
    model = ReportHash

    def get(self):
        qry = dict(request.values)
        hash3 = qry.get('hash')

        result = False
        if hash3 and ReportHash.query.filter_by(hash3=hash3).count():
            result = True
        return jsonify(result)
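
# Usage sketch for CheckView above (hypothetical URL and hash value):
#
#   GET .../check/?hash=<hash-of-a-downloaded-document>
#
# Returns JSON true when the hash was previously stored by insert_hash()
# (i.e. the document was generated by this backend), false otherwise.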


class StampsView(View):
    """Render a public, static page with the links needed to verify
    the integrity of an exported file (e.g. a CSV).
    """

    def get_url_path(self):
        url = urlutils.URL(request.url)
        url.normalize()
        url.path_parts = url.path_parts[:-2] + ['check', '']
        return url.to_text()

    def get(self):
        result = ('', '')
        return flask.render_template(
            'documents/stamp.html', rq_url=self.get_url_path(), result=result
        )

    def post(self):
        result = ('', '')
        if 'docUpload' in request.files:
            file_check = request.files['docUpload']
            bad = (
                'No match found. The data in the attached file does not come '
                'from our backend or it has been modified afterwards.'
            )
            ok = (
                '100% match. The attached file contains data that fully '
                'exists in our backend.'
            )
            result = ('Bad', bad)
            mime = [
                'text/csv',
                'application/pdf',
                'text/plain',
                'text/markdown',
                'image/jpeg',
                'image/png',
                'text/html',
                'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
                'application/vnd.oasis.opendocument.spreadsheet',
                'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
                'application/msword',
            ]
            if file_check.mimetype in mime:
                if verify_hash(file_check):
                    result = ('Ok', ok)

        return flask.render_template(
            'documents/stamp.html', rq_url=self.get_url_path(), result=result
        )
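
# Usage sketch for StampsView above (hypothetical URL):
#
#   GET  .../stamps/                 -> renders documents/stamp.html
#   POST .../stamps/  (multipart form with a 'docUpload' file field)
#
# The uploaded file must have one of the accepted MIME types; verify_hash()
# decides whether the result shown on the page is 'Ok' or 'Bad'.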


class WbConfDocumentView(DeviceView):
    def get(self, wbtype: str):
        if wbtype.lower() not in ['usodyrate', 'usodywipe']:
            return jsonify('')

        data = {
            'token': self.get_token(),
            'host': app.config['HOST'],
            'inventory': app.config['SCHEMA'],
        }
        data['erase'] = False
        # data['erase'] = True if wbtype == 'usodywipe' else False

        env = flask.render_template('documents/wbSettings.ini', **data)
        output = make_response(env)
        output.headers['Content-Disposition'] = 'attachment; filename=settings.ini'
        output.headers['Content-type'] = 'text/plain'
        return output

    def get_token(self):
        if not g.user.sessions:
            ses = Session(user=g.user)
            db.session.add(ses)
            db.session.commit()

        tk = ''
        now = time.time()
        for s in g.user.sessions:
            if s.type == SessionType.Internal and (s.expired == 0 or s.expired > now):
                tk = s.token
                break

        assert tk != ''

        token = auth.Auth.encode(tk)
        return token
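
# Usage sketch for WbConfDocumentView above (hypothetical URL):
#
#   GET .../wbconf/usodyrate   (or .../wbconf/usodywipe)   -> settings.ini
#
# The response is the documents/wbSettings.ini template rendered with the
# host, the schema ('inventory') and an encoded internal session token of the
# current user.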


class DocumentDef(Resource):
    __type__ = 'Document'
    SCHEMA = None
    VIEW = None  # We do not want to create the default /documents endpoint
    AUTH = False

    def __init__(
        self,
        app,
        import_name=__name__,
        static_folder='static',
        static_url_path=None,
        template_folder='templates',
        url_prefix=None,
        subdomain=None,
        url_defaults=None,
        root_path=None,
        cli_commands: Iterable[Tuple[Callable, str or None]] = tuple(),
    ):
        super().__init__(
            app,
            import_name,
            static_folder,
            static_url_path,
            template_folder,
            url_prefix,
            subdomain,
            url_defaults,
            root_path,
            cli_commands,
        )
        d = {'id': None}
        get = {'GET'}

        view = DocumentView.as_view('main', definition=self, auth=app.auth)

        # TODO @cayop These two lines are never reached
        if self.AUTH:
            view = app.auth.requires_auth(view)

        self.add_url_rule('/erasures/', defaults=d, view_func=view, methods=get)
        self.add_url_rule(
            '/erasures/<{}:{}>'.format(self.ID_CONVERTER.value, self.ID_NAME),
            view_func=view,
            methods=get,
        )

        devices_view = DevicesDocumentView.as_view(
            'devicesDocumentView', definition=self, auth=app.auth
        )
        devices_view = app.auth.requires_auth(devices_view)

        stock_view = StockDocumentView.as_view('stockDocumentView', definition=self)
        stock_view = app.auth.requires_auth(stock_view)

        self.add_url_rule('/devices/', defaults=d, view_func=devices_view, methods=get)

        lots_view = LotsDocumentView.as_view('lotsDocumentView', definition=self)
        lots_view = app.auth.requires_auth(lots_view)
        self.add_url_rule('/lots/', defaults=d, view_func=lots_view, methods=get)

        stock_view = StockDocumentView.as_view(
            'stockDocumentView', definition=self, auth=app.auth
        )
        stock_view = app.auth.requires_auth(stock_view)
        self.add_url_rule('/stock/', defaults=d, view_func=stock_view, methods=get)

        check_view = CheckView.as_view('CheckView', definition=self, auth=app.auth)
        self.add_url_rule('/check/', defaults={}, view_func=check_view, methods=get)

        stamps_view = StampsView.as_view('StampsView', definition=self, auth=app.auth)
        self.add_url_rule(
            '/stamps/', defaults={}, view_func=stamps_view, methods={'GET', 'POST'}
        )

        actions_view = ActionsDocumentView.as_view(
            'ActionsDocumentView', definition=self, auth=app.auth
        )
        actions_view = app.auth.requires_auth(actions_view)
        self.add_url_rule('/actions/', defaults=d, view_func=actions_view, methods=get)

        wbconf_view = WbConfDocumentView.as_view(
            'WbConfDocumentView', definition=self, auth=app.auth
        )
        wbconf_view = app.auth.requires_auth(wbconf_view)
        self.add_url_rule('/wbconf/<string:wbtype>', view_func=wbconf_view, methods=get)