Compare commits

..

1 Commit

Author: Cayo Puigdefabregas | SHA1: e4c6ddca28 | Message: benchmarks | Date: 2021-12-02 12:11:50 +01:00
443 changed files with 7098 additions and 142502 deletions


@ -1,15 +0,0 @@
{
"presets": [
[
"@babel/preset-env",
{
"targets": {
"edge": "17",
"firefox": "60",
"chrome": "67",
"safari": "11.1"
}
}
]
]
}


@ -1,5 +0,0 @@
ereuse_devicehub/static/vendor
ereuse_devicehub/static/js/print.pdf.js
ereuse_devicehub/static/js/qrcode.js
*.build.js
*.min.js


@ -1,37 +0,0 @@
{
"env": {
"browser": true,
"es2021": true,
"jquery": true
},
"extends": [
"airbnb",
"prettier"
],
"plugins": [
"prettier"
],
"parserOptions": {
"ecmaVersion": "latest"
},
"rules": {
"quotes": ["error","double"],
"no-use-before-define": "off",
"no-unused-vars": "warn",
"no-undef": "warn",
"camelcase": "off",
"no-console": "off",
"no-plusplus": "off",
"no-param-reassign": "off",
"no-new": "warn",
"strict": "off",
"class-methods-use-this": "off",
"eqeqeq": "warn",
"radix": "warn",
"max-classes-per-file": "warn"
},
"globals": {
"API_URLS": true,
"Api": true
}
}


@ -1,38 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Desktop (please complete the following information):**
- OS: [e.g. iOS]
- Browser [e.g. chrome, safari]
- Version [e.g. 22]
**Smartphone (please complete the following information):**
- Device: [e.g. iPhone6]
- OS: [e.g. iOS8.1]
- Browser [e.g. stock browser, safari]
- Version [e.g. 22]
**Additional context**
Add any other context about the problem here.


@ -1,20 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.


@ -1,27 +0,0 @@
## Description
Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change.
Fixes # (issue)
## Type of change
Please delete options that are not relevant.
- [ ] Bug fix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
- [ ] This change requires a documentation update
## How Has This Been Tested?
Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration
- [ ] Test A
- [ ] Test B
## Checklist:
- [ ] I have performed a self-review of my own code
- [ ] I have added tests that prove my fix is effective or that my feature works
## TODO
- [x] something that was recently finished
- [ ] something you are working on
- [ ] something else you are working on


@ -1,55 +0,0 @@
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.
# ESLint is a tool for identifying and reporting on patterns
# found in ECMAScript/JavaScript code.
# More details at https://github.com/eslint/eslint
# and https://eslint.org
name: ESLint
on:
push:
branches: [master, testing]
pull_request_target:
branches: [master, testing]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v1
with:
node-version: '16'
- name: Install dependencies
run: npm install
- name: Run linters
uses: wearerequired/lint-action@v1
with:
eslint: true
prettier: false
commit_message: "Fix code style issues with ${linter}"
auto_fix: true
commit: true
github_token: "${{ secrets.GITHUB_TOKEN }}"
git_name: "Lint Action"
- name: Save Code Linting Report JSON
# npm script for ESLint
# eslint --output-file eslint_report.json --format json src
# See https://eslint.org/docs/user-guide/command-line-interface#options
run: npm run lint:report
# Continue to the next step even if this fails
continue-on-error: true
- name: Annotate Code Linting Results
uses: ataylorme/eslint-annotate-action@1.2.0
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
report-json: "eslint_report.json"
only-pr-files: true
- name: Upload ESLint report
uses: actions/upload-artifact@v2
with:
name: eslint_report.json
path: eslint_report.json


@ -32,22 +32,24 @@ jobs:
strategy:
max-parallel: 4
matrix:
python-version: [3.9]
python-version: [3.7]
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
uses: actions/setup-python@v1
with:
python-version: ${{ matrix.python-version }}
cache: 'pip'
- name: Install dependencies
run: |
sudo apt-get update -qy
sudo apt-get -y install postgresql-client --no-install-recommends
sudo apt-get -y install postgresql-client
python -m pip install --upgrade pip
pip install -r requirements-dev.txt
pip install virtualenv
virtualenv env
source env/bin/activate
pip install flake8 pytest coverage
pip install -r requirements.txt
- name: Prepare database
@ -62,21 +64,10 @@ jobs:
psql -h "localhost" -U "$POSTGRES_USER" -d "$POSTGRES_DB" -c "CREATE EXTENSION citext SCHEMA public;"
psql -h "localhost" -U "$POSTGRES_USER" -d "$POSTGRES_DB" -c "CREATE EXTENSION pg_trgm SCHEMA public;"
- name: Lint with flake8
run: |
# stop the build if:
# - E9,F63,F7,F82: Python syntax errors or undefined names
# - E501: line longer than 120 characters
# - C901: complexity greater than 10
# - F401: modules imported but unused
# See: https://flake8.pycqa.org/en/latest/user/error-codes.html
flake8 . --select=E9,F63,F7,F82,E501,C901,F401
flake8 . --exit-zero
- name: Run Tests
run: |
export SECRET_KEY=`python3 -c 'import secrets; print(secrets.token_hex())'`
coverage run --source='ereuse_devicehub' -m pytest -m mvp --maxfail=5 tests/
source env/bin/activate
coverage run --source='ereuse_devicehub' env/bin/pytest -m mvp --maxfail=5 tests/
coverage report --include='ereuse_devicehub/*'
coverage xml


@ -1,76 +0,0 @@
name: Selenium
on:
pull_request:
types: [ready_for_review, review_requested]
jobs:
build:
runs-on: ubuntu-latest
# Service containers to run with `container-job`
services:
# Label used to access the service container
postgres:
# Docker Hub image
image: postgres:11
ports:
- 5432:5432
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
env:
POSTGRES_DB: dh_test
POSTGRES_USER: dhub
POSTGRES_PASSWORD: ereuse
strategy:
max-parallel: 4
matrix:
python-version: [3.9]
steps:
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
cache: 'pip'
- name: Install dependencies
run: |
sudo apt-get update -qy
sudo apt-get -y install postgresql-client --no-install-recommends
python -m pip install --upgrade pip
pip install -r requirements-dev.txt
pip install -r requirements.txt
pip install -e .
mkdir bin
wget https://github.com/mozilla/geckodriver/releases/download/v0.30.0/geckodriver-v0.30.0-linux64.tar.gz
tar xf geckodriver-v0.30.0-linux64.tar.gz -C bin/
- name: Prepare database
env:
POSTGRES_DB: dh_test
POSTGRES_USER: dhub
POSTGRES_PASSWORD: ereuse
run: |
export PGPASSWORD=$POSTGRES_PASSWORD
psql -h "localhost" -U "$POSTGRES_USER" -d "$POSTGRES_DB" -c "CREATE EXTENSION pgcrypto SCHEMA public;"
psql -h "localhost" -U "$POSTGRES_USER" -d "$POSTGRES_DB" -c "CREATE EXTENSION ltree SCHEMA public;"
psql -h "localhost" -U "$POSTGRES_USER" -d "$POSTGRES_DB" -c "CREATE EXTENSION citext SCHEMA public;"
psql -h "localhost" -U "$POSTGRES_USER" -d "$POSTGRES_DB" -c "CREATE EXTENSION pg_trgm SCHEMA public;"
- name: Selenium tests
env:
SECRET_KEY: 'f00046306835001b55c230092e3a7990485beda0bc3bf732088d1ba1b5b74110e22e3f9ec3a24890272554b37d4'
DB_DATABASE: dh_test
FLASK_APP: examples/app.py
dhi: dbtest
run: |
alembic -x inventory=dbtest upgrade head
dh dummy --yes
flask run & pytest tests/test_selenium.py

.gitignore

@ -119,24 +119,3 @@ ENV/
# Temporal dir
tmp/
# NPM modules
node_modules/
yarn.lock
# ESLint Report
eslint_report.json
# modules/
tmp/
.env*
bin/
env*
examples/create-db2.sh
package-lock.json
snapshots/
!examples/snapshots
modules/
# emacs
*~


@ -1,31 +0,0 @@
repos:
- repo: https://github.com/psf/black
rev: 22.6.0
hooks:
- id: black
- repo: https://github.com/PyCQA/isort
rev: 5.10.1
hooks:
- id: isort
- repo: https://github.com/PyCQA/flake8
rev: 4.0.1
hooks:
- id: flake8
- repo: https://github.com/conorfalvey/check_pdb_hook
rev: 0.0.9
hooks:
- id: check_pdb_hook
- repo: local
hooks:
- id: build-js
name: build-js
# pre-commit passes the files included in the commit as parameters,
# so the babel command should be wrapped to ignore these files in the
# package.json script
entry: npm run babel
language: node
files: ^ereuse_devicehub/static/js/main_inventory.js
- repo: https://github.com/jazzband/pip-tools
rev: 6.8.0
hooks:
- id: pip-compile


@ -1,3 +0,0 @@
{
"printWidth": 250
}


@ -5,272 +5,26 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
## master
[1.0.11-beta]
## testing
## [2.5.3] - 2023-05-13
- [added] #450 add new datawipe in csv.
- [changed] #447 Share a lot between 2 users, one is owner the other is read only.
- [changed] #448 enhancements in export lots.
- [changed] #449 remove button of submit in filter of list of devices.
- [changed] #452 New version of settings for workbench.
- [fixed] #445 required file for new documents but optional for edit document.
- [fixed] #446 Fix id_supplier and id_internal in export devices.
- [fixed] #451 fix new datawipe in certificate erasure.
- [fixed] #453 fix value method in certificate erasure.
- [fixed] #454 remove validation of email for placeholders type mobile.
- [fixed] #455 add placeholders in csv metrics and pdf certificate.
- [fixed] #456 upload placeholders with type datastorage.
- [fixed] #457 change format erase datawipe.
- [fixed] #458 not datawipe for placeholders computers.
## [2.5.2] - 2023-04-20
- [added] #414 add new vars in the settings file for wb.
- [added] #440 add lots in export devices.
- [added] #441 allow remove documents.
- [added] #442 allow edit documents.
- [added] #443 add documents to devices.
- [added] #444 add new columns in list of documents.
- [changed] #439 move teal as internal module.
- [fixed] #437 replace names erasure by sanitization in templates.
## [2.5.1] - 2023-03-17
- [changed] #423 new hid.
- [changed] #426 new version of public page of device.
- [changed] #427 update links of terms and conditions.
- [changed] #428 only the data storage allows synchronize, the rest are duplicated.
- [changed] #430 new version of erasure certificate.
- [fixed] #416 fix dhid in snapshot logs.
- [fixed] #419 fix settings version and template.
- [fixed] #420 fix not all lots appearing in the dropdown menu for selecting a lot.
- [fixed] #421 fix remove a placeholder from one old trade lot.
- [fixed] #422 fix simple datatables.
- [fixed] #424 fix new hid.
- [fixed] #431 fix forms for customer details.
- [fixed] #432 fix erasure certificate for servers.
- [fixed] #433 fix getting the last incoming to show customer data in certificate.
- [fixed] #434 fix reopen transfer.
- [fixed] #436 fix hid in erasure certificate.
## [2.5.0] - 2022-11-30
- [added] #407 erasure section with tabs in top.
- [added] #411 add new generic device as Other.
- [changed] #409 add backend pagination instead of javascript.
- [changed] #410 change the top search for advanced search.
- [fixed] #412 show in snapshots log, type upload correctly.
- [fixed] #413 put order in documents.
- [fixed] #415 put prefix of lot in result of search.
## [2.4.3] - 2022-11-18
- [added] #386 add registration module.
- [added] #387 add template settings for Secure Erasure.
- [added] #397 add obada standard export.
- [added] #402 add reset password module.
- [added] #406 add orphans disks page.
- [changed] #391 add dhid in table and export of Erasure section.
- [changed] #395 change response for the new api to workbench.
- [changed] #396 modularize commands.
- [fixed] #388 lock update different motherboard with the same id.
- [fixed] #389 some datastorage without placeholder.
- [fixed] #390 fix image in form edit device.
- [fixed] #398 placeholder in new components.
- [fixed] #399 add api_host in config.
- [fixed] #401 db_host need to be api address.
- [fixed] #403 change delimiter in obada export.
- [fixed] #404 javascript select all devices.
- [fixed] #405 update pillow.
## [2.4.2] - 2022-10-18
- [added] #373 Enhancement - UX Lots.
- [added] #377 add prefix in lots in device list.
- [added] #378 add new button transfer.
- [added] #381 add servers erase and show storage disk in list of device.
- [added] #383 new setup page and add server_erase in placeholder.
- [added] #384 add redirect snapshot to twin public page.
- [changed] #371 changes phid.
- [changed] #372 remove logo.
- [changed] #374 changes links UI management and Data Storage Erasure.
- [changed] #375 changes columns in snapshot logs.
- [changed] #379 changes representation date times.
- [fixed] #380 fix layout print label.
- [fixed] #382 fix template device list.
- [fixed] #385 components in unbinding process.
## [2.4.1] - 2022-10-05
- [added] #365 Manage dependencies using pip-tools.
- [added] #368 add migrations of monitors and mobiles.
- [changed] #371 changes about phid, incremental per user.
- [fixed] #364 bad redirect to all devices.
- [fixed] #367 column PHID Erasure host.
- [fixed] #369 bug in test data storage.
- [fixed] #370 print label in details of the label.
## [2.4.0] - 2022-09-23
- [added] #312 Placeholder: new, edit, update. (manually and with excel).
- [added] #316 Placeholder: binding/unbinding. (manually).
- [added] #319 Add command report cli.
- [added] #326 settings for user demo.
- [added] #327 add Binding.
- [added] #328 export placeholders.
- [added] #330 workbench page.
- [added] #334 backup dhid and phid.
- [added] #340 add part number for placeholders.
- [added] #349 add a new columns in report.
- [added] #356 new export hdds.
- [added] #362 add new columns in a snapshot log.
- [changed] #329 update Binding.
- [changed] #331 update workbench page.
- [changed] #338 change labels when add a new device.
- [changed] #339 change description upload placeholders page.
- [changed] #342 change concepts for binding, (Twin).
- [changed] #344 add "Ods File" as description in Placeholders Logs.
- [changed] #345 remove generation concept of device.
- [changed] #346 change editable device page.
- [changed] #347 change snapshot instead of abstract and placeholder instead of real.
- [changed] #348 change buttons new device.
- [changed] #355 changes links.
- [changed] #357 change button "New Actions".
- [changed] #358 change report device.
- [changed] #360 add placeholder device in lot instead of devices.
- [changed] #361 change message in form add device.
- [fixed] #313 Bump numpy from 1.21.6 to 1.22.0.
- [fixed] #314 bugs create placeholder from lot.
- [fixed] #317 bugs about exports placeholders.
- [fixed] #318 bugs about unlink tag of device.
- [fixed] #321 bugs in labels of serial number.
- [fixed] #322 validation of imei for mobile.
- [fixed] #323 bug export devices.
- [fixed] #335 bugs in excel phid with nan.
- [fixed] #336 bugs Unassigned is visualized in all device view.
- [fixed] #337 bugs upload csv placeholders.
- [fixed] #343 force Phid to be a string.
- [fixed] #350 bugs in certificates.
- [fixed] #351 bugs devices without phid.
- [fixed] #352 export certificate for placeholders.
- [fixed] #353 get the last update of the one device twin.
- [fixed] #354 titles of table.
- [fixed] #359 fix backup dhid.
- [fixed] #363 problems with render add documents in a transfer lot.
## [2.3.0] - 2022-07-12
- [added] #281 Add selenium test.
- [added] #305 Add button to download ISO Workbench.
- [added] #306 Add link to download JSON snapshot.
- [added] #308 Add sentry.
- [changed] #302 Add system uuid to check the identity of one device.
- [fixed] #309 Column lifecycle status is always empty.
**IMPORTANT**: PR #302 involves some changes in the deployment process:
```bash
# First, run script `extract_uuids.sh` before applying alembic migrations (e.g. with schema `dbtest`)
sh scripts/extract_uuids.sh
# Then, apply alembic migrations
alembic -x inventory=dbtest upgrade head
```
**NOTE**: If you forget (or don't need) to run this script before applying the new migration, it will still work, but no devices will be updated.
## [2.2.0] - 2022-06-24
- [changed] #304 change anchor of link devices lots.
- [fixed] #315 create in a lot a new placeholder.
## [2.2.0 rc2] - 2022-06-22
- [added] #299 Multiselect with Shift.
- [added] #300 Add Sid in label.
- [added] #301 Add logo in label.
- [added] #303 Add export Lots.
- [added] #303 Add export relating lots with devices.
- [added] #303 Make it possible to add and remove one device in one lot transfer.
## [2.2.0 rc1] - 2022-06-07
- [added] #212 Server side render parser Workbench Snapshots.
- [added] #225 List of snapshots.
- [added] #265 Add feature for download Workbench settings.
- [added] #268 Add column created in device list.
- [added] #270 Add tags in device list.
- [added] #271 Add view for show all devices.
- [added] #272 Show lots on deviceList.
- [added] #273 Allow search/filter lots on lots management component.
- [added] #274 Add columns status in device list.
- [added] #277 Add development build & pre-commit build.
- [added] #289 Add transfer.
- [added] #290 Add advanced search.
- [added] #291 SnapshotLog in old api.
- [added] #292 Add delivery note and receiver note.
- [changed] #275 remove all components in the filter of the device list.
- [changed] #282 upgrade dependencies pyjwt from 2.0.0a1 to 2.4.0.
- [changed] #283 Change visual format for dates in device list.
- [changed] #293 add options in select number of items per page. (50, 100)
- [fixed] #263 Fix select All devices options in select filter.
- [fixed] #267 ESLint ignore builded JS files.
- [fixed] #269 Allocate bugs.
- [fixed] #276 Create Computer Monitor instead of Monitor in form of create a new device.
- [fixed] #280 fix enums in migration process.
- [fixed] #284 Allocate bugs.
- [fixed] #285 lots search not working.
- [fixed] #287 apply button out of card.
## [2.1.1] - 2022-05-11
Hot fix release.
- [fixed] #256 JS support to old browsers using babel.
- [fixed] #266 Fix error when trade.document.url is None on device_list.html
## [2.1.0] - 2022-05-11
- [added] #219 Add functionality to searchbar (Lots and devices).
- [added] #222 Allow user to update its password.
- [added] #233 Filter in out trades from lots selector.
- [added] #236 Allow select multiple devices in multiple pages.
- [added] #237 Confirmation dialog on apply lots changes.
- [added] #238 Customize labels.
- [added] #242 Add icons in list of devices.
- [added] #244 Select full devices.
- [added] #257 Add functionality to search generic categories like all components.
- [added] #252 new tabs lots and public link in details of one device.
- [changed] #211 Print DHID-QR label for selected devices.
- [changed] #218 Add reactivity to device lots.
- [changed] #220 Add reactive lots list.
- [changed] #232 Set max lots list to 20.
- [changed] #235 Hide trade buttons.
- [changed] #239 Change Tags for Unique Identifier.
- [changed] #247 Change colors.
- [changed] #253 Drop download public links.
- [fixed] #214 Login workflow
- [fixed] #221 Fix responsive issues on frontend.
- [fixed] #223 fix trade lots modal.
- [fixed] #224 fix clickable lots selector not working when click in text.
- [fixed] #254 Fix minor types in frontend.
- [fixed] #255 Fix status column on device list.
## [2.0.0] - 2022-03-15
First server render HTML version. Completely rewrites views of angular JS client on flask.
- [added] #193 render on backend devices and lots
- [added] #195 render on backend tags system
- [added] #196 render on backend action system
- [added] #201 render on backend Data Wipe action
- [added] #203 render on backend Trade action
- [added] #204 render on backend export files
- [added] #205 UX improvements
- [added] #208 render on backend filter for type of devices in the general list
- [changed] #191 pass to drop teal and use the pure flask and use render from flask
- [changed] #207 Create automatic tag only for Computers.
- [changed] #209 adding a new device in a lot if it is created from a lot
- [fixed] #206 fix 2 bugs about visibility devices when you are not the owner
[1.0.12-beta]
## [1.0.12-beta]
- [changed] #187 now is possible duplicate slots of RAM.
- [changed] #188 Excel report devices allow to see device to old owners.
- [changes] #187 now is possible duplicate slots of RAM.
- [changes] #188 Excel report devices allow to see device to old owners.
## [1.0.11-beta]
- [added] #186 adding property power_on_hours.
- [addend] #186 adding property power_on_hours.
## [1.0.10-beta]
- [added] #170 can delete/deactivate devices.
- [addend] #170 can delete/deactivate devices.
- [bugfix] #168 can to do a trade without devices.
- [added] #167 new actions of status devices: use, recycling, refurbish and management.
- [changes] #177 new structure of trade.
- [bugfix] #184 clean nested of schemas of lot
- [added] #182 adding power on hours
- [changed] #177 new structure of trade.
- [fixed] #168 can to do a trade without devices.
- [fixed] #184 clean nested of schemas of lot
## [1.0.9-beta]
- [added] #159 external document as proof of erase of disk
@ -278,7 +32,7 @@ First server render HTML version. Completely rewrites views of angular JS client
## [1.0.8-beta]
- [fixed] #161 fixing DataStorage with bigInteger
- [bugfix] #161 fixing DataStorage with bigInteger
## [1.0.7-beta]
- [added] #158 support for encrypted snapshots data
@ -286,26 +40,26 @@ First server render HTML version. Completely rewrites views of angular JS client
- [added] #140 adding endpoint for download the settings for usb workbench
## [1.0.6-beta]
- [fixed] #143 biginteger instead of integer in TestDataStorage
- [bugfix] #143 biginteger instead of integer in TestDataStorage
## [1.0.5-beta]
- [added] #124 adding endpoint for extract the internal stats of use
- [added] #122 system for verify all documents that it's produced from devicehub
- [added] #127 add one code for every named tag
- [added] #131 add one code for every device
- [fixed] #138 search device with devicehubId
- [bugfix] #138 search device with devicehubId
## [1.0.4-beta]
- [added] #95 adding endpoint for check the hash of one report
- [added] #98 adding endpoint for insert a new live
- [added] #98 adding endpoint for get all licences in one query
- [added] #102 adding endpoint for download metrics
- [changed] #114 clean blockchain of all models
- [changed] #118 deactivate manual merge
- [changed] #118 clean datas of public information of devices
- [fixed] #100 fixing bug of scheme live
- [fixed] #101 fixing bug when 2 users have one device and launch one live
- [removed] #114 remove proof system
- [bugfix] #100 fixing bug of scheme live
- [bugfix] #101 fixing bug when 2 users have one device and launch one live
- [changes] #114 clean blockchain of all models
- [changes] #118 deactivate manual merge
- [changes] #118 clean datas of public information of devices
- [remove] #114 remove proof system
## [1.0.3-beta]
- [added] #85 add mac of network adapter to device hid
@ -313,6 +67,6 @@ First server render HTML version. Completely rewrites views of angular JS client
## [1.0.2-beta]
- [added] #87 allocate, deallocate and live actions
- [added] #83 add owner_id in all kind of device
- [fixed] #89 save json on disk only for shapshots
- [added] #83 add owner_id in all kind of device
- [fixed] #91 The most old time allow is 1970-01-01


@ -1,58 +0,0 @@
# Contributing to devicehub
## Writing code
### Javascript and compatibility with "old" browsers
**Warning:** This project is using the babel compiler. You need to run an additional build step to build the js file:
```bash
npm install
npm run babel
```
NOTE: If you prefer, you can use yarn instead; it's compatible.
NOTE2: This only affects the file `ereuse_devicehub/static/js/main_inventory.js`.
### Coding style
#### Python style
- Unless otherwise specified, follow [PEP 8](https://www.python.org/dev/peps/pep-0008). Use [flake8](https://pypi.org/project/flake8/) to check for problems in this area.
- Use [isort](https://github.com/PyCQA/isort#readme) to automate import sorting.
To automate this work, just configure `pre-commit` hooks in your development environment:
```bash
# on your virtual environment
pip install -r requirements-dev.txt
pre-commit install
```
#### HTML (templates)
- Template file names should be all lowercase, using underscores instead of camelCase.
Do this: `device_detail.html`
Don't do this: `DeviceDetail.html`, `Device-detail.html`
## Adding a new dependency to the project
This project tracks its packages using pip-tools, which can be installed by running:
```
pip install pip-tools
```
Whenever you would install a new package using `pip install <package-name>`:
1. Put the package name into `requirements.in` instead.
```
# requirements.in
...
new_package
```
2. Compile the requirements
```
pip-compile requirements.in --output-file=requirements.txt
```
3. Then install upgraded dependencies:
```
pip install -U -r requirements.txt
```


@ -1,43 +0,0 @@
# Definitions
* A dpp is two hash strings joined by the character ":"
We call the first string chid and the second phid.
* The chid and phid are hash strings of certain values.
We call the set of these values Documents.
Here we define these values.
## Chid
The chid is the part of the dpp that defines a device, be it a computer,
a hard drive, etc. The chid is the most important part of a dpp since
anyone who comes across a device should be able to reproduce it.
The chid is made up of four values:
* type
* manufacturer
* model
* serial_number
type represents the device type according to the devicehub.
These values are always represented in lowercase.
These values have to be ordered and concatenated with the character "-"
So:
{type}-{manufacturer}-{model}-{serial_number}
For example:
```
harddrive-seagate-st500lt0121dg15-s3p9a81f
```
In computer types this combination is not perfect and **can lead to collisions**.
That is why we need a value that is reliable and comes from the manufacturer.
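As an illustration of the format above, here is a minimal sketch in Python of how the chid document could be assembled; the hash function applied to it (SHA3-256 below) is only an assumption for the example, not something fixed by this document:
```python
import hashlib


def chid_document(type_, manufacturer, model, serial_number):
    """Join the four ordered, lowercased values with the character "-"."""
    return "-".join(v.strip().lower() for v in (type_, manufacturer, model, serial_number))


def chid(type_, manufacturer, model, serial_number):
    """Hash the chid document (SHA3-256 here is only an assumption)."""
    doc = chid_document(type_, manufacturer, model, serial_number)
    return hashlib.sha3_256(doc.encode("utf-8")).hexdigest()


# The hard drive from the example above:
print(chid_document("HardDrive", "Seagate", "ST500LT0121DG15", "S3P9A81F"))
# -> harddrive-seagate-st500lt0121dg15-s3p9a81f
```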
## Phid
The values of the phid do not have to be reproducible. For this reason, each inventory can establish its own values and its order as a document.
It is important that each inventory stores the document as a string so that it can reproduce exactly the document that was hashed. This way the document is verifiable.
In the case of the DeviceHub, we use as the phid document all the values that the Workbench collects that describe the hardware's own data.
These data change depending on the version of the Workbench used.
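Following the same idea, here is a minimal sketch of how a phid could be derived while keeping the hashed document verifiable; the field set and the hash function (SHA3-256) are inventory-specific assumptions, not fixed by this document:
```python
import hashlib
import json


def make_phid(workbench_values: dict):
    """Serialize the inventory-chosen values once and keep that exact string."""
    document = json.dumps(workbench_values, sort_keys=True)
    phid = hashlib.sha3_256(document.encode("utf-8")).hexdigest()
    # Store `document` verbatim: verifying means re-hashing this exact string.
    return phid, document


def verify_phid(phid: str, stored_document: str) -> bool:
    """Check that the stored document still reproduces the phid."""
    return hashlib.sha3_256(stored_document.encode("utf-8")).hexdigest() == phid
```
The dpp itself is then just the two hashes joined with ":", as defined at the top of this file.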


@ -1,49 +0,0 @@
project := dkr-dsg.ac.upc.edu/ereuse
branch := `git branch --show-current`
commit := `git log -1 --format=%h`
#tag := ${branch}__${commit}
tag := latest
# docker images
devicehub_image := ${project}/devicehub:${tag}
postgres_image := ${project}/postgres:${tag}
# 2. Create a virtual environment.
docker_build:
docker build -f docker/devicehub.Dockerfile -t ${devicehub_image} .
# DEBUG
#docker build -f docker/devicehub.Dockerfile -t ${devicehub_image} . --progress=plain --no-cache
docker build -f docker/postgres.Dockerfile -t ${postgres_image} .
# DEBUG
#docker build -f docker/postgres.Dockerfile -t ${postgres_image} . --progress=plain --no-cache
@printf "\n##########################\n"
@printf "\ndevicehub image: ${devicehub_image}\n"
@printf "postgres image: ${postgres_image}\n"
@printf "\ndocker images built\n"
@printf "\n##########################\n\n"
docker_publish:
docker push ${devicehub_image}
docker push ${postgres_image}
.PHONY: docker
docker:
$(MAKE) docker_build
$(MAKE) docker_publish
@printf "\ndocker images published\n"
# manage 2 kinds of deployments with docker compose
dc_up_devicehub:
docker compose -f docker-compose_devicehub.yml up || true
dc_down_devicehub:
docker compose -f docker-compose_devicehub.yml down -v || true
dc_up_devicehub_dpp:
docker compose -f docker-compose_devicehub-dpp.yml up || true
dc_down_devicehub_dpp:
docker compose -f docker-compose_devicehub-dpp.yml down -v || true

README.md

@ -1,122 +0,0 @@
# Devicehub
Devicehub is a distributed IT Asset Management System focused on reusing digital devices, created under the [eReuse.org](https://www.ereuse.org) initiative.
This README explains how to install and use Devicehub. [The documentation](http://devicehub.ereuse.org) explains the concepts, usage and the API it provides.
Devicehub is built with [Teal](https://github.com/ereuse/teal) and [Flask](http://flask.pocoo.org).
Devicehub relies on the existence of an [API_DLT connector](https://gitlab.com/dsg-upc/ereuse-dpp) verifiable data registry service, where specific operations are recorded to keep an external track record (ledger).
# Installing
Please visit the [Manual Installation](README_MANUAL_INSTALLATION.md) instructions to understand the detailed steps to install it locally or deploy it on a server. However, we recommend the following Docker deployment process.
# Docker
There is a Docker compose file for an automated deployment. Two instances of DeviceHub will be deployed. The following steps describe how to run and use it.
1. Download the sources:
```
git clone https://github.com/eReuse/devicehub-teal.git -b oidc4vp
cd devicehub-teal
```
2. If you want to initialise one of the DeviceHub instances (running on port 5000) with sample device snapshots, copy it/them into that directory, e.g.
```
cp snapshot01.json examples/snapshots/
```
Otherwise, the device inventory of your DeviceHub instance will be empty and ready to add new devices. For that (no snapshot import), you need to change the var to 'n' in the **.env** file
```
IMPORT_SNAPSHOTS='n'
```
To register new devices, the [workbench software](https://github.com/eReuse/workbench) can be run on a device to generate its hardware snapshot that can be uploaded to one of the two DeviceHub instances.
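For reference, once an instance is running, a snapshot JSON can also be pushed to it over HTTP. The sketch below is only illustrative: the endpoint path, auth header and token are hypothetical placeholders rather than the documented Devicehub API, so adapt them to your instance.
```python
# Sketch only: endpoint path and auth scheme are hypothetical placeholders.
import json

import requests

DEVICEHUB_URL = "http://localhost:5000"  # one of the two instances
TOKEN = "<your-api-token>"               # hypothetical credential

with open("snapshot01.json") as f:
    snapshot = json.load(f)

response = requests.post(
    f"{DEVICEHUB_URL}/api/snapshots/",   # hypothetical endpoint path
    json=snapshot,
    headers={"Authorization": f"Basic {TOKEN}"},
)
response.raise_for_status()
```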
3. Set up the environment variables in the .env file. You can find an example in examples/env.example.
If you don't have any, you can copy that example and modify the basic vars:
```
cp examples/env.example .env
```
You can use these parameters as default for a local test, but default values may not be suitable for an internet-exposed service for security reasons. However, these eight variables need to be initialised:
```
API_DLT
API_DLT_TOKEN
API_RESOLVER
ABAC_TOKEN
ABAC_USER
ABAC_URL
SERVER_ID_FEDERATED
CLIENT_ID_FEDERATED
```
The first six values should come from an already operational [API_DLT connector](https://gitlab.com/dsg-upc/ereuse-dpp) service instance.
For the last two values check [manual install step 9](https://github.com/eReuse/devicehub-teal/blob/oidc4vp/README_MANUAL_INSTALLATION.md#installing) for more details.
4. Build and run the docker containers:
```
./launcher.sh
```
To stop these docker containers, you can use Ctrl+C. You'll maintain the data and infrastructure state if you run "compose up" again.
On the terminal screen, you can follow the installation steps. If there are any problems, error messages will appear here. The appearance of several warnings is normal and can be ignored.
If in the last line you see text like this, *exited with code*:
```
devicehub-teal-devicehub-id-client-1 exited with code 1
```
it means the installation failed.
If the deployment was end-to-end successful (two running Devicehub instances successfully connected to the DLT backend selected in the .env file), you can see this text in the last lines:
```
devicehub-teal-devicehub-id-client-1 | * Running on http://172.28.0.2:5000/ (Press CTRL+C to quit)
devicehub-teal-devicehub-id-server-1 | * Running on all addresses.
devicehub-teal-devicehub-id-server-1 | WARNING: This is a development server. Do not use it in a production deployment.
devicehub-teal-devicehub-id-server-1 | * Running on http://172.28.0.5:5000/ (Press CTRL+C to quit)
```
That means the two Devicehub instances are running in their containers, which can be reached as http://localhost:5000/ and http://localhost:5001/
Once the DeviceHub instances are running, you might want to register a user binding to the DLT with the following commands (here, it assumes you want to execute it on devicehub-id-client, you might also want to do it in devicehub-id-server). Change the variables accordingly
```
FILE=my_users_devicehub.json
DOCKER_SERVICE=devicehub-id-server
docker compose cp /path/to/${FILE} ${DOCKER_SERVICE}:/tmp/
docker compose exec ${DOCKER_SERVICE} flask dlt_register_user /tmp/${FILE}
```
**my_users_devicehub.json** is a custom file which is similar to the one provided in `examples/users_devicehub.json`
5. To shut down the services and remove the corresponding data, you can use:
```
docker compose down -v
```
If you want to enter a shell inside a **new instance of the container**:
```
docker run -it --entrypoint= ${target_docker_image} bash
```
If you want to enter a shell on an **already running container**:
```
docker exec -it ${target_docker_image} bash
```
To know the valid value for ${target_docker_image} you can use:
```
docker ps
```
6. These are the details for use in this implementation:
Devicehub with URL (http://localhost:5000) is the identity provider of OIDC and has a user defined in the **.env** file with the SERVER_ID_EMAIL_DEMO var.
Devicehub with URL (http://localhost:5001) is the client identity of OIDC and has a user defined in the **.env** file with the SERVER_ID_EMAIL_DEMO var.
You can change these values in the *.env* file
7. If you want to use Workbench for these DeviceHub instances, you need to go to
```
http://localhost:5001/workbench/
```
with the demo user and then download the settings and ISO files. Follow the instructions on the [help](https://help.usody.com/en/setup/setup-pendrive/) page.

README.rst

@ -0,0 +1,159 @@
Devicehub
#########
Devicehub is a distributed IT Asset Management System focused on reusing
devices, created under the project
`eReuse.org <https://www.ereuse.org>`__.
This README explains how to install and use Devicehub.
`The documentation <http://devicehub.ereuse.org>`_ explains the concepts
and the API.
Devicehub is built with `Teal <https://github.com/ereuse/teal>`__ and
`Flask <http://flask.pocoo.org>`__.
Installing
**********
The requirements are:
- Python 3.7.3 or higher. In Debian 10 this is ``# apt install python3``.
- `PostgreSQL 11 or higher <https://www.postgresql.org/download/>`__.
- Weasyprint
`dependencies <http://weasyprint.readthedocs.io/en/stable/install.html>`__.
Install Devicehub with *pip*:
``pip3 install -U -r requirements.txt -e .``.
Running
*******
Create a PostgreSQL database called *devicehub* by running
`create-db <examples/create-db.sh>`__:
- In Linux, execute the following two commands (adapt them to your distro):
1. ``sudo su - postgres``.
2. ``bash examples/create-db.sh devicehub dhub``, and password
``ereuse``.
- In MacOS: ``bash examples/create-db.sh devicehub dhub``, and password
``ereuse``.
Configure the project using an environment file (you can use the provided example as a quickstart):
.. code:: bash
$ cp examples/env.example .env
Use the ``dh`` tool to set up one or multiple inventories.
Create the tables in the database by executing:
.. code:: bash
$ export dhi=dbtest; dh inv add --common --name dbtest
Finally, run the app:
.. code:: bash
$ export dhi=dbtest;dh run --debugger
The bdist_wheel error can happen when you work with a *virtual environment*.
To fix it, install the wheel package in the *virtual environment*:
``pip3 install wheel``
Multiple instances
------------------
Devicehub can run as a single inventory or with multiple inventories,
each inventory being an instance of the ``devicehub``. To add a new inventory
execute:
.. code:: bash
$ export dhi=dbtest; dh inv add --name dbtest
Note: The ``dh`` command is like ``flask``, but
it allows you to create and delete instances, and interface to them
directly.
Testing
*******
1. ``git clone`` this project.
2. Create a database for testing executing ``create-db.sh`` like the
normal installation but changing the first parameter from
``devicehub`` to ``dh_test``: ``create-db.sh dh_test dhub`` and
password ``ereuse``.
3. Execute at the root folder of the project ``python3 setup.py test``.
Migrations
**********
At this stage, migration files are created manually.
Set up the database:
.. code:: bash
$ sudo su - postgres
$ bash $PATH_TO_DEVIHUBTEAL/examples/create-db.sh devicehub dhub
Initialize the database:
.. code:: bash
$ export dhi=dbtest; dh inv add --common --name dbtest
This command will create the schemas and tables in the specified database.
Then we need to stamp the initial migration.
.. code:: bash
$ alembic stamp head
This command will set the revision **fbb7e2a0cde0_initial** as our initial migration.
For more info on migration stamping please see https://alembic.sqlalchemy.org/en/latest/cookbook.html
Whenever a change is needed, e.g. to create a new schema, alter an existing table or column, or perform any
other operation on tables, create a new revision file:
.. code:: bash
$ alembic revision -m "A table change"
This command will create a new revision file with name `<revision_id>_a_table_change`.
Edit the generated file with the necessary operations to perform the migration:
.. code:: bash
$ alembic edit <revision_id>
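A minimal sketch of what such a revision file could contain (the table and column names below are hypothetical, and real migrations in this project also have to respect the schema selected with ``-x inventory=...``):
.. code:: python

    """A table change

    Revision ID: <revision_id>
    Revises: fbb7e2a0cde0
    """
    import sqlalchemy as sa
    from alembic import op

    revision = '<revision_id>'
    down_revision = 'fbb7e2a0cde0'


    def upgrade():
        # Hypothetical example: add a nullable column to an existing table.
        op.add_column('device', sa.Column('notes', sa.Text(), nullable=True))


    def downgrade():
        # Revert the change so the migration can be rolled back.
        op.drop_column('device', 'notes')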
Apply migrations using:
.. code:: bash
$ alembic -x inventory=dbtest upgrade head
Then, to go back to a previous db version:
.. code:: bash
$ alembic -x inventory=dbtest downgrade <revision_id>
To see a full list of migrations use
.. code:: bash
$ alembic history
Generating the docs
*******************
1. ``git clone`` this project.
2. Install plantuml. In Debian 9 is ``# apt install plantuml``.
3. Execute ``pip3 install -e .[docs]`` in the project root folder.
4. Go to ``<project root folder>/docs`` and execute ``make html``.
Repeat this step to generate new docs.
To auto-generate the docs do ``pip3 install -e .[docs-auto]``, then
execute, in the root folder of the project
``sphinx-autobuild docs docs/_build/html``.


@ -1,187 +0,0 @@
# Devicehub
Devicehub is a distributed IT Asset Management System focused on reusing devices, created under the project [eReuse.org](https://www.ereuse.org)
This README explains how to install and use Devicehub. [The documentation](http://devicehub.ereuse.org) explains the concepts and the API.
Devicehub is built with [Teal](https://github.com/ereuse/teal) and [Flask](http://flask.pocoo.org).
# Installing
The requirements are:
0. Required
- python3.9
- [PostgreSQL 11 or higher](https://www.postgresql.org/download/).
- Weasyprint [dependencies](http://weasyprint.readthedocs.io/en/stable/install.html)
1. Generate a clone of the repository.
```
git clone git@github.com:eReuse/devicehub-teal.git -b oidc4vp
cd devicehub-teal
```
2. Create a virtual environment and install Devicehub with *pip*.
```
python3.9 -m venv env
source env/bin/activate
sh examples/pip_install.sh
```
3. Create a PostgreSQL database called *devicehub* by running [create-db](examples/create-db.sh):
- In Linux, execute the following two commands (adapt them to your distro):
1. `sudo su - postgres`.
2. `bash examples/create-db.sh devicehub dhub`, and password `ereuse`.
- In MacOS: `bash examples/create-db.sh devicehub dhub`, and password `ereuse`.
Configure project using environment file (you can use provided example as quickstart):
```bash
$ cp examples/env.example .env
```
You can use these parameters as default for a local test, but default values may not be suitable for an internet-exposed service for security reasons. However, these six variables need to be initialized:
```
API_DLT
API_DLT_TOKEN
API_RESOLVER
ABAC_TOKEN
ABAC_USER
ABAC_URL
```
These values should come from an already operational [API_DLT connector](https://gitlab.com/dsg-upc/ereuse-dpp) service instance.
4. Run the alembic migrations from the project root.
```
alembic -x inventory=dbtest upgrade head
```
5. Run alembic from the oidc module.
```
cd ereuse_devicehub/modules/oidc
alembic -x inventory=dbtest upgrade head
```
6. Run alembic from the dpp module.
```
cd ereuse_devicehub/modules/dpp/
alembic -x inventory=dbtest upgrade head
```
7. Add a suitable app.py file.
```
cp examples/app.py .
```
8. Generate a minimal data structure.
```
flask initdata
```
9. Add a new server to the 'api resolver' to be able to integrate it into the federation.
The domain name for this new server has to be unique. When installing two instances their domain name must differ: e.g. dpp.mydomain1.cxm, dpp.mydomain2.cxm.
If your domain is dpp.mydomain.cxm:
```
flask dlt_insert_members http://dpp.mydomain.cxm
```
modify the .env file as indicated in point 3.
Add the corresponding 'DH' in ID_FEDERATED.
example: ID_FEDERATED='DH10'
10. Do a rsync api resolve.
```
flask dlt_rsync_members
```
11. Register a new user in devicehub.
```
flask adduser email@example.org password
```
12. Register a new user to the DLT.
```
flask dlt_register_user examples/users_devicehub.json
```
You need to define your users in the file **users_devicehub.json**.
13. Finally, run the app:
```bash
$ flask run --debugger
```
The bdist_wheel error can happen when you work with a *virtual environment*.
To fix it, install the wheel package in the *virtual environment*:
`pip3 install wheel`
# Testing
1. `git clone` this project.
2. Create a database for testing executing `create-db.sh` like the normal installation but changing the first parameter from `devicehub` to `dh_test`: `create-db.sh dh_test dhub` and password `ereuse`.
3. Execute at the root folder of the project `python3 setup.py test`.
# Upgrade a deployment
To upgrade an instance of devicehub you need to do:
```bash
$ cd $PATH_TO_DEVIHUBTEAL
$ source venv/bin/activate
$ git pull
$ alembic -x inventory=dbtest upgrade head
```
If all migrations pass successfully, then it is necessary to restart the devicehub.
Normally you can use a little script to restart or run it.
```
# systemctl stop gunicorn_devicehub.socket
# systemctl stop gunicorn_devicehub.service
# systemctl start gunicorn_devicehub.service
```
# OpenId Connect:
We want to interconnect two devicehub instances already installed. One has a set of devices (OIDC client), the other has a set of users (OIDC identity server). Let's assume their domains are: dpp.mydomain1.cxm, dpp.mydomain2.cxm
20. In order to connect the two devicehub instances, it is necessary to:
* 20.1. Register a user in the devicehub instance acting as OIDC identity server.
* 20.2. Fill in the openid connect form.
* 20.3. Add in the OIDC client inventory the data of client_id, client_secret.
For 20.1. This can be achieved on the terminal on the devicehub instance acting as OIDC identity server.
```
flask adduser email@example.org password
```
* 20.2. This is an example of how to fill in the form.
In the web interface of the OIDC identity service, click on the profile of the just added user, select "My Profile" and click on "OpenID Connect":
Then we can go to the "OpenID Connect" panel and fill out the form:
The important thing about this form is:
* "Client URL" The URL of the OIDC Client instance, as registered in point 12. dpp.mydomain1.cxm in our example.
* "Allowed Scope" has to have these three words:
```
openid profile rols
```
* "Redirect URIs" it has to be the URL that was put in "Client URL" plus "/allow_code"
* "Allowed Grant Types" has to be "authorization_code"
* "Allowed Response Types" has to be "code"
* "Token Endpoint Auth Method" has to be "Client Secret Basic"
After clicking on "Submit" the "OpenID Connect" tab of the user profile should now include details for "client_id" and "client_secret".
* 20.3. In the OIDC client inventory run: (in our example: url_domain is dpp.mydomain2.cxm, client_id and client_secret as resulting from the previous step)
```
flask add_client_oidc url_domain client_id client_secret
```
After this step, both servers must be connected. Opening one DPP page on dpp.mydomain1.cxm (OIDC Client) the user can choose to authenticate using dpp.mydomain2.cxm (OIDC Server).
## Generating the docs
1. `git clone` this project.
2. Install plantuml. In Debian 9 is `# apt install plantuml`.
3. Execute `pip3 install -e .[docs]` in the project root folder.
4. Go to `<project root folder>/docs` and execute `make html`. Repeat this step to generate new docs.
To auto-generate the docs do `pip3 install -e .[docs-auto]`, then execute, in the root folder of the project `sphinx-autobuild docs docs/_build/html`.

Binary file not shown. (Added; size: 215 KiB)

Binary file not shown. (Added; size: 214 KiB)


@ -0,0 +1,257 @@
* Serving Flask app "ereuse_devicehub" (lazy loading)
* Environment: production
WARNING: Do not use the development server in a production environment.
Use a production WSGI server instead.
* Debug mode: on
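The per-request profiles below have the shape of the output printed by werkzeug's ProfilerMiddleware (cProfile stats per request, sorted by internal time and call count, restricted to 30 rows). The file itself does not say how it was produced, so the following is only a sketch of one way to generate this kind of trace, assuming the app factory used in examples/app.py:
```python
# Sketch only: the repository does not state how these traces were produced.
from werkzeug.middleware.profiler import ProfilerMiddleware

from ereuse_devicehub.devicehub import Devicehub  # assumed, as in examples/app.py

app = Devicehub(inventory='dbtest')
app.wsgi_app = ProfilerMiddleware(
    app.wsgi_app,
    sort_by=('time', 'calls'),  # -> "Ordered by: internal time, call count"
    restrictions=(30,),         # -> "List reduced ... due to restriction <30>"
)

if __name__ == '__main__':
    app.run(debug=True)
```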
--------------------------------------------------------------------------------
PATH: '/versions/'
6874 function calls (6593 primitive calls) in 1.066 seconds
Ordered by: internal time, call count
List reduced from 856 to 30 due to restriction <30>
ncalls tottime percall cumtime percall filename:lineno(function)
9 1.049 0.117 1.049 0.117 {method 'acquire' of '_thread.RLock' objects}
9 0.004 0.000 0.004 0.000 {method 'execute' of 'psycopg2.extensions.cursor' objects}
3 0.002 0.001 0.002 0.001 {method 'rollback' of 'psycopg2.extensions.connection' objects}
1 0.002 0.002 0.002 0.002 {built-in method psycopg2._psycopg._connect}
12/3 0.000 0.000 0.000 0.000 /usr/lib/python3.7/sre_parse.py:469(_parse)
322 0.000 0.000 0.000 0.000 /usr/lib/python3.7/weakref.py:435(__contains__)
65/2 0.000 0.000 0.000 0.000 {built-in method _abc._abc_subclasscheck}
10 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/type_api.py:479(_cached_result_processor)
243/241 0.000 0.000 0.000 0.000 {built-in method builtins.getattr}
25/6 0.000 0.000 0.000 0.000 /usr/lib/python3.7/sre_parse.py:174(getwidth)
94/82 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py:836(__get__)
47 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/event/attr.py:166(update_subclass)
435 0.000 0.000 0.000 0.000 {built-in method builtins.isinstance}
22/3 0.000 0.000 0.000 0.000 /usr/lib/python3.7/sre_compile.py:71(_compile)
5 0.000 0.000 0.003 0.001 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/base.py:1155(_execute_context)
2 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/werkzeug/routing.py:1443(match)
114 0.000 0.000 0.000 0.000 /usr/lib/python3.7/weakref.py:395(__getitem__)
47 0.000 0.000 0.001 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/event/attr.py:227(__init__)
103 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/werkzeug/routing.py:753(match)
37/28 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py:909(__getattr__)
73 0.000 0.000 0.000 0.000 {built-in method builtins.hasattr}
47 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/event/attr.py:160(_assign_cls_collection)
9 0.000 0.000 0.001 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py:1716(_label_select_column)
50 0.000 0.000 0.001 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/event/base.py:86(<genexpr>)
9 0.000 0.000 0.000 0.000 {method 'cursor' of 'psycopg2.extensions.connection' objects}
27 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/werkzeug/local.py:160(top)
10 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py:826(visit_column)
77 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/session.py:1339(_add_bind)
32/4 0.000 0.000 0.001 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py:84(_compiler_dispatch)
5 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py:2899(_froms)
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
PATH: '/devices/'
2869 function calls (2841 primitive calls) in 0.951 seconds
Ordered by: internal time, call count
List reduced from 664 to 30 due to restriction <30>
ncalls tottime percall cumtime percall filename:lineno(function)
3 0.924 0.308 0.924 0.308 {method 'acquire' of '_thread.RLock' objects}
1 0.012 0.012 0.012 0.012 {method 'rollback' of 'psycopg2.extensions.connection' objects}
2 0.005 0.003 0.005 0.003 {method 'execute' of 'psycopg2.extensions.cursor' objects}
1 0.003 0.003 0.003 0.003 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/event/attr.py:279(exec_once)
1 0.002 0.002 0.002 0.002 {built-in method psycopg2._psycopg._connect}
77 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/session.py:1339(_add_bind)
83 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/inspection.py:37(inspect)
2 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/werkzeug/routing.py:1443(match)
72 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/werkzeug/routing.py:753(match)
27 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/werkzeug/local.py:160(top)
11 0.000 0.000 0.000 0.000 {method 'sub' of 're.Pattern' objects}
1 0.000 0.000 0.924 0.924 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/base.py:401(_inspect_mapped_class)
154 0.000 0.000 0.000 0.000 {built-in method builtins.isinstance}
1 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/session.py:639(__init__)
1 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/flask_sqlalchemy/__init__.py:917(get_tables_for_bind)
86 0.000 0.000 0.000 0.000 {built-in method builtins.getattr}
31 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/werkzeug/local.py:68(__getattr__)
8 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py:826(visit_column)
3 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py:2899(_froms)
2 0.000 0.000 0.006 0.003 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/base.py:1048(_execute_clauseelement)
2 0.000 0.000 0.006 0.003 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/base.py:1155(_execute_context)
1 0.000 0.000 0.006 0.006 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/pool.py:693(__connect)
2 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py:221(iterate)
80 0.000 0.000 0.000 0.000 {method 'search' of 're.Pattern' objects}
7 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py:1716(_label_select_column)
158 0.000 0.000 0.000 0.000 {method 'get' of 'dict' objects}
2 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/flask_sqlalchemy/__init__.py:109(_calling_context)
2 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/default.py:622(_init_compiled)
1 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/loading.py:337(_instance_processor)
8 0.000 0.000 0.000 0.000 /usr/lib/python3.7/weakref.py:356(__init__)
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
PATH: '/lots/'
827262 function calls (801219 primitive calls) in 1.097 seconds
Ordered by: internal time, call count
List reduced from 1182 to 30 due to restriction <30>
ncalls tottime percall cumtime percall filename:lineno(function)
3932 0.094 0.000 0.102 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/event/attr.py:330(__init__)
468 0.043 0.000 0.512 0.001 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/strategies.py:39(_register_attribute)
66949 0.036 0.000 0.042 0.000 {built-in method builtins.isinstance}
6200 0.034 0.000 0.066 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/event/registry.py:67(_stored_in_collection)
6201/6200 0.028 0.000 0.264 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/event/registry.py:193(listen)
2797 0.025 0.000 0.039 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py:221(iterate)
4269 0.025 0.000 0.040 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/elements.py:3030(__init__)
18666/10636 0.024 0.000 0.045 0.000 {built-in method builtins.hasattr}
16009/7963 0.024 0.000 0.053 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py:909(__getattr__)
4067 0.024 0.000 0.129 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/attributes.py:1701(register_attribute_impl)
4067 0.019 0.000 0.043 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/attributes.py:436(__init__)
6200 0.019 0.000 0.312 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/event/api.py:33(listen)
16707 0.018 0.000 0.018 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/annotation.py:99(__hash__)
2032 0.017 0.000 0.139 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/mapper.py:2501(visit_binary)
4236 0.015 0.000 0.122 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/annotation.py:102(__eq__)
3380 0.014 0.000 0.051 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/attributes.py:761(__init__)
2 0.014 0.007 0.014 0.007 {method 'execute' of 'psycopg2.extensions.cursor' objects}
6200 0.014 0.000 0.030 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/event/api.py:22(_event_key)
78379 0.013 0.000 0.013 0.000 {method 'pop' of 'dict' objects}
27663 0.013 0.000 0.017 0.000 {built-in method builtins.getattr}
4269 0.012 0.000 0.072 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/default_comparator.py:40(_boolean_compare)
4206 0.012 0.000 0.020 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/elements.py:3050(__bool__)
3436 0.011 0.000 0.020 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py:881(expire_instance)
6200 0.010 0.000 0.202 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/event/registry.py:237(base_listen)
8572/4269 0.009 0.000 0.109 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/operators.py:358(__eq__)
4269 0.009 0.000 0.082 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/type_api.py:63(operate)
10464 0.009 0.000 0.015 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/base.py:220(manager_of_class)
7736 0.009 0.000 0.009 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/instrumentation.py:193(_attr_has_impl)
4504 0.009 0.000 0.020 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/mapper.py:1971(_log)
1 0.009 0.009 0.009 0.009 {method 'rollback' of 'psycopg2.extensions.connection' objects}
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
PATH: '/versions/'
9206 function calls (9031 primitive calls) in 0.154 seconds
Ordered by: internal time, call count
List reduced from 999 to 30 due to restriction <30>
ncalls tottime percall cumtime percall filename:lineno(function)
1 0.109 0.109 0.109 0.109 {method 'rollback' of 'psycopg2.extensions.connection' objects}
3 0.024 0.008 0.024 0.008 {method 'execute' of 'psycopg2.extensions.cursor' objects}
8 0.007 0.001 0.007 0.001 {built-in method posix.stat}
1 0.001 0.001 0.001 0.001 {built-in method _socket.getaddrinfo}
318 0.001 0.000 0.001 0.000 /usr/lib/python-exec/python3.7/../../../lib/python3.7/os.py:676(__getitem__)
2 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/urllib3/exceptions.py:20(__init__)
332 0.000 0.000 0.002 0.000 /usr/lib/python-exec/python3.7/../../../lib/python3.7/_collections_abc.py:742(__iter__)
11 0.000 0.000 0.001 0.000 /usr/lib/python3.7/queue.py:121(put)
630/629 0.000 0.000 0.000 0.000 {method 'decode' of 'bytes' objects}
60/5 0.000 0.000 0.000 0.000 {built-in method _abc._abc_subclasscheck}
312 0.000 0.000 0.000 0.000 /usr/lib/python-exec/python3.7/../../../lib/python3.7/os.py:699(__iter__)
727 0.000 0.000 0.001 0.000 {built-in method builtins.isinstance}
620 0.000 0.000 0.000 0.000 /usr/lib/python-exec/python3.7/../../../lib/python3.7/os.py:758(decode)
2 0.000 0.000 0.002 0.001 /usr/lib/python3.7/urllib/request.py:2489(getproxies_environment)
318 0.000 0.000 0.000 0.000 /usr/lib/python-exec/python3.7/../../../lib/python3.7/os.py:754(encode)
112/111 0.000 0.000 0.000 0.000 {built-in method builtins.len}
2 0.000 0.000 0.000 0.000 /usr/lib/python3.7/socket.py:139(__init__)
342 0.000 0.000 0.000 0.000 {method 'encode' of 'str' objects}
217 0.000 0.000 0.000 0.000 {method 'get' of 'dict' objects}
26 0.000 0.000 0.000 0.000 /usr/lib/python-exec/python3.7/../../../lib/python3.7/abc.py:137(__instancecheck__)
51 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/werkzeug/local.py:160(top)
16 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py:826(visit_column)
172 0.000 0.000 0.000 0.000 {built-in method builtins.getattr}
3 0.000 0.000 0.025 0.008 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/base.py:1155(_execute_context)
1 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/flask_sqlalchemy/__init__.py:917(get_tables_for_bind)
6 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py:2899(_froms)
77 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/session.py:1339(_add_bind)
14 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py:1716(_label_select_column)
4 0.000 0.000 0.007 0.002 <frozen importlib._bootstrap_external>:1356(find_spec)
2 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/loading.py:337(_instance_processor)
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
PATH: '/lots/'
78032 function calls (74983 primitive calls) in 0.415 seconds
Ordered by: internal time, call count
List reduced from 1209 to 30 due to restriction <30>
ncalls tottime percall cumtime percall filename:lineno(function)
58 0.225 0.004 0.227 0.004 {method 'execute' of 'psycopg2.extensions.cursor' objects}
4 0.068 0.017 0.078 0.020 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py:3607(_populate_column_collection)
58 0.008 0.000 0.015 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py:221(iterate)
34 0.005 0.000 0.005 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/elements.py:697(__getattr__)
1 0.005 0.005 0.005 0.005 {method 'rollback' of 'psycopg2.extensions.connection' objects}
55 0.003 0.000 0.007 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/loading.py:337(_instance_processor)
129 0.003 0.000 0.006 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py:2899(_froms)
55 0.002 0.000 0.009 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/query.py:3898(row_processor)
58 0.002 0.000 0.002 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/flask_sqlalchemy/__init__.py:109(_calling_context)
2496/1771 0.002 0.000 0.243 0.000 {built-in method builtins.getattr}
3395 0.002 0.000 0.002 0.000 {built-in method builtins.isinstance}
58 0.001 0.000 0.242 0.004 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/base.py:1155(_execute_context)
663 0.001 0.000 0.002 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/interfaces.py:512(_get_context_loader)
58 0.001 0.000 0.004 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/default.py:622(_init_compiled)
60 0.001 0.000 0.004 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/schema.py:976(__init__)
269 0.001 0.000 0.002 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/base.py:38(<listcomp>)
52 0.001 0.000 0.237 0.005 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/strategies.py:730(_emit_lazyload)
60 0.001 0.000 0.012 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/schema.py:1521(_make_proxy)
92 0.001 0.000 0.015 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/loading.py:35(instances)
58 0.001 0.000 0.253 0.004 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/base.py:1048(_execute_clauseelement)
445/363 0.001 0.000 0.083 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py:836(__get__)
4351 0.001 0.000 0.001 0.000 {method 'get' of 'dict' objects}
37/25 0.001 0.000 0.251 0.010 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/marshmallow/marshalling.py:90(serialize)
52 0.001 0.000 0.218 0.004 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/ext/baked.py:356(__iter__)
128 0.001 0.000 0.002 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py:826(visit_column)
58 0.001 0.000 0.001 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py:263(traverse_using)
305 0.001 0.000 0.001 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/werkzeug/local.py:160(top)
103/22 0.001 0.000 0.001 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py:245(get_cls_kwargs)
893 0.001 0.000 0.001 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/result.py:643(_getter)
1086 0.001 0.000 0.001 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/schema.py:1574(get_children)
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
PATH: '/devices/'
3735722 function calls (3415930 primitive calls) in 4.524 seconds
Ordered by: internal time, call count
List reduced from 1551 to 30 due to restriction <30>
ncalls tottime percall cumtime percall filename:lineno(function)
1807 0.774 0.000 0.799 0.000 {method 'execute' of 'psycopg2.extensions.cursor' objects}
3849 0.136 0.000 0.468 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/loading.py:337(_instance_processor)
3947 0.086 0.000 0.302 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py:221(iterate)
227782 0.073 0.000 0.077 0.000 {built-in method builtins.isinstance}
811/630 0.070 0.000 0.077 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py:909(__getattr__)
4222 0.067 0.000 0.165 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py:2899(_froms)
136047/99198 0.061 0.000 3.692 0.000 {built-in method builtins.getattr}
45026 0.060 0.000 0.080 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/interfaces.py:512(_get_context_loader)
64724/61730 0.060 0.000 3.163 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/attributes.py:267(__get__)
1298/30 0.057 0.000 4.153 0.138 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/marshmallow/marshalling.py:90(serialize)
43993 0.045 0.000 0.238 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/interfaces.py:550(create_row_processor)
66367 0.044 0.000 0.103 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/result.py:643(_getter)
5417 0.042 0.000 0.129 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sortedcontainers/sortedset.py:108(__init__)
1807 0.039 0.000 0.118 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/default.py:622(_init_compiled)
188239/60525 0.037 0.000 0.081 0.000 /usr/lib/python3.7/json/encoder.py:333(_iterencode_dict)
14593 0.037 0.000 0.059 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/result.py:588(_key_fallback)
1807 0.036 0.000 0.050 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/flask_sqlalchemy/__init__.py:109(_calling_context)
1807 0.034 0.000 1.155 0.001 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/base.py:1155(_execute_context)
22324 0.033 0.000 0.055 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/werkzeug/local.py:160(top)
12519 0.030 0.000 0.083 0.000 {built-in method builtins.sorted}
19326/1469 0.030 0.000 4.133 0.003 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/marshmallow/fields.py:229(serialize)
19326 0.030 0.000 3.691 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/marshmallow/utils.py:338(_get_value_for_key)
3947 0.029 0.000 0.039 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py:263(traverse_using)
9085 0.029 0.000 0.068 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/base.py:38(<listcomp>)
66367 0.027 0.000 0.130 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/result.py:724(_getter)
4669/4549 0.026 0.000 0.840 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/loading.py:35(instances)
5417 0.026 0.000 0.065 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sortedcontainers/sortedlist.py:320(update)
148287 0.026 0.000 0.026 0.000 {method 'get' of 'dict' objects}
13774 0.025 0.000 0.048 0.000 /home/cayo/ribaguifi/ereuse/dhub/ereuse_devicehub/resources/action/models.py:240(__lt__)
5532/300 0.024 0.000 3.586 0.012 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/teal/marshmallow.py:212(serialize)
--------------------------------------------------------------------------------
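The per-request reports above follow the format printed by Werkzeug's `ProfilerMiddleware` (the `PATH: ...` header, "Ordered by: internal time, call count" and the `restriction <30>` note). A minimal, non-authoritative sketch of how this kind of output can be reproduced locally; the app object and endpoints here are assumptions, not taken from these benchmark files:

```python
# Sketch: wrap a Flask WSGI app with Werkzeug's profiler so that every request
# prints a cProfile report like the ones above (top 30 rows, sorted by internal
# time and call count). `app` is a plain Flask instance, not necessarily the
# one used to produce these benchmarks.
import sys

from flask import Flask
from werkzeug.middleware.profiler import ProfilerMiddleware

app = Flask(__name__)

app.wsgi_app = ProfilerMiddleware(
    app.wsgi_app,
    stream=sys.stdout,          # where each report is written
    sort_by=("time", "calls"),  # "Ordered by: internal time, call count"
    restrictions=(30,),         # "List reduced ... due to restriction <30>"
)

if __name__ == "__main__":
    # hit endpoints such as /lots/ or /devices/ and read the reports on stdout
    app.run(debug=True)
```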

View File

@ -0,0 +1,257 @@
* Serving Flask app "ereuse_devicehub" (lazy loading)
* Environment: production
WARNING: Do not use the development server in a production environment.
Use a production WSGI server instead.
* Debug mode: on
--------------------------------------------------------------------------------
PATH: '/versions/'
6344 function calls (6063 primitive calls) in 1.183 seconds
Ordered by: internal time, call count
List reduced from 849 to 30 due to restriction <30>
ncalls tottime percall cumtime percall filename:lineno(function)
9 1.164 0.129 1.164 0.129 {method 'acquire' of '_thread.RLock' objects}
9 0.004 0.000 0.005 0.001 {method 'execute' of 'psycopg2.extensions.cursor' objects}
3 0.003 0.001 0.003 0.001 {method 'rollback' of 'psycopg2.extensions.connection' objects}
1 0.003 0.003 0.003 0.003 {built-in method psycopg2._psycopg._connect}
12/3 0.000 0.000 0.000 0.000 /usr/lib/python3.7/sre_parse.py:469(_parse)
65/2 0.000 0.000 0.000 0.000 {built-in method _abc._abc_subclasscheck}
72/60 0.000 0.000 0.001 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py:836(__get__)
433 0.000 0.000 0.000 0.000 {built-in method builtins.isinstance}
77 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/session.py:1339(_add_bind)
195/193 0.000 0.000 0.000 0.000 {built-in method builtins.getattr}
22/3 0.000 0.000 0.000 0.000 /usr/lib/python3.7/sre_compile.py:71(_compile)
11 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/base.py:37(_from_objects)
5 0.000
--------------------------------------------------------------------------------
PATH: '/lots/'
821894 function calls (796177 primitive calls) in 1.204 seconds
Ordered by: internal time, call count
List reduced from 1187 to 30 due to restriction <30>
ncalls tottime percall cumtime percall filename:lineno(function)
6149 0.080 0.000 0.081 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/event/registry.py:165(_key)
66417 0.044 0.000 0.050 0.000 {built-in method builtins.isinstance}
467 0.043 0.000 0.537 0.001 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/strategies.py:39(_register_attribute)
6149 0.036 0.000 0.150 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/event/registry.py:67(_stored_in_collection)
4269 0.032 0.000 0.052 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/elements.py:3030(__init__)
6150/6149 0.032 0.000 0.285 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/event/registry.py:193(listen)
15744/7854 0.031 0.000 0.065 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py:909(__getattr__)
2797 0.028 0.000 0.054 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py:221(iterate)
18395/10521 0.026 0.000 0.048 0.000 {built-in method builtins.hasattr}
3990 0.025 0.000 0.135 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/attributes.py:1701(register_attribute_impl)
2032 0.022 0.000 0.174 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/mapper.py:2501(visit_binary)
3901 0.019 0.000 0.028 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/event/attr.py:330(__init__)
16706 0.019 0.000 0.019 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/annotation.py:99(__hash__)
3990 0.019 0.000 0.043 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/attributes.py:436(__init__)
89 0.019 0.000 0.023 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/relationships.py:2405(visit_binary)
4236 0.017 0.000 0.151 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/annotation.py:102(__eq__)
6149 0.015 0.000 0.033 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/event/api.py:22(_event_key)
3308 0.014 0.000 0.051 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/attributes.py:761(__init__)
27512 0.014 0.000 0.019 0.000 {built-in method builtins.getattr}
4206 0.014 0.000 0.024 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/elements.py:3050(__bool__)
4269 0.014 0.000 0.092 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/default_comparator.py:40(_boolean_compare)
78080 0.013 0.000 0.013 0.000 {method 'pop' of 'dict' objects}
4269 0.012 0.000 0.104 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/type_api.py:63(operate)
11500 0.012 0.000 0.012 0.000 {method 'popleft' of 'collections.deque' objects}
3424 0.012 0.000 0.020 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py:881(expire_instance)
6149 0.011 0.000 0.215 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/event/registry.py:237(base_listen)
8572/4269 0.011 0.000 0.135 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/operators.py:358(__eq__)
6149 0.011 0.000 0.329 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/event/api.py:33(listen)
10298 0.010 0.000 0.017 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/base.py:220(manager_of_class)
7582 0.010 0.000 0.010 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/instrumentation.py:193(_attr_has_impl)
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
PATH: '/devices/'
2869 function calls (2841 primitive calls) in 1.116 seconds
Ordered by: internal time, call count
List reduced from 664 to 30 due to restriction <30>
0.000 0.004 0.001 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/base.py:1155(_execute_context)
2 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/werkzeug/routing.py:1443(match)
103 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/werkzeug/routing.py:753(match)
1 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/flask_sqlalchemy/__init__.py:917(get_tables_for_bind)
37/28 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py:909(__getattr__)
92 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/inspection.py:37(inspect)
73 0.000 0.000 0.000 0.000 {built-in method builtins.hasattr}
184 0.000 0.000 0.000 0.000 /usr/lib/python3.7/sre_parse.py:164(__getitem__)
27 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/werkzeug/local.py:160(top)
106 0.000 0.000 0.000 0.000 /usr/lib/python3.7/weakref.py:435(__contains__)
9 0.000 0.000 0.001 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py:1716(_label_select_column)
10 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py:826(visit_column)
32/4 0.000 0.000 0.001 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py:84(_compiler_dispatch)
9 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py:772(visit_label)
5 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py:2899(_froms)
64 0.000 0.000 0.000 0.000 /usr/lib/python3.7/weakref.py:395(__getitem__)
21/8 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py:245(get_cls_kwargs)
9 0.000 0.000 0.000 0.000 {method 'cursor' of 'psycopg2.extensions.connection' objects}
--------------------------------------------------------------------------------
ncalls tottime percall cumtime percall filename:lineno(function)
3 1.073 0.358 1.073 0.358 {method 'acquire' of '_thread.RLock' objects}
1 0.025 0.025 0.025 0.025 {method 'rollback' of 'psycopg2.extensions.connection' objects}
2 0.007 0.003 0.007 0.003 {method 'execute' of 'psycopg2.extensions.cursor' objects}
1 0.004 0.004 0.004 0.004 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/event/attr.py:279(exec_once)
1 0.003 0.003 0.003 0.003 {built-in method psycopg2._psycopg._connect}
1 0.001 0.001 0.026 0.026 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/pool.py:716(_finalize_fairy)
1 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/elements.py:1663(get_children)
1 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/flask/app.py:2117(do_teardown_request)
1 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/result.py:316(<listcomp>)
1 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/flask_sqlalchemy/__init__.py:917(get_tables_for_bind)
77 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/session.py:1339(_add_bind)
1 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/flask_sqlalchemy/__init__.py:925(get_binds)
2 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/werkzeug/routing.py:1443(match)
27 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/werkzeug/local.py:160(top)
8 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py:826(visit_column)
72 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/werkzeug/routing.py:753(match)
31 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/werkzeug/local.py:68(__getattr__)
11 0.000 0.000 0.000 0.000 {method 'sub' of 're.Pattern' objects}
154 0.000 0.000 0.000 0.000 {built-in method builtins.isinstance}
83 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/inspection.py:37(inspect)
2 0.000 0.000 0.007 0.004 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/base.py:1155(_execute_context)
158 0.000 0.000 0.000 0.000 {method 'get' of 'dict' objects}
3 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py:2899(_froms)
86 0.000 0.000 0.000 0.000 {built-in method builtins.getattr}
1 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/session.py:639(__init__)
7 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py:1716(_label_select_column)
2 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/default.py:622(_init_compiled)
80 0.000 0.000 0.000 0.000 {method 'search' of 're.Pattern' objects}
2 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py:221(iterate)
2 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/flask_sqlalchemy/__init__.py:109(_calling_context)
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
PATH: '/versions/'
9122 function calls (9001 primitive calls) in 0.204 seconds
Ordered by: internal time, call count
List reduced from 999 to 30 due to restriction <30>
ncalls tottime percall cumtime percall filename:lineno(function)
8 0.090 0.011 0.090 0.011 {built-in method posix.stat}
3 0.048 0.016 0.048 0.016 {method 'execute' of 'psycopg2.extensions.cursor' objects}
1 0.017 0.017 0.017 0.017 {built-in method _socket.getaddrinfo}
1 0.013 0.013 0.019 0.019 <frozen importlib._bootstrap_external>:914(get_data)
2 0.008 0.004 0.008 0.004 {function socket.close at 0x7fd08d1efd40}
1 0.006 0.006 0.006 0.006 {method 'read' of '_io.FileIO' objects}
2 0.005 0.002 0.005 0.002 /usr/lib/python3.7/socket.py:139(__init__)
1 0.004 0.004 0.004 0.004 {method 'rollback' of 'psycopg2.extensions.connection' objects}
2 0.001 0.000 0.001 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/query.py:3898(row_processor)
318 0.000 0.000 0.001 0.000 /usr/lib/python-exec/python3.7/../../../lib/python3.7/os.py:676(__getitem__)
332 0.000 0.000 0.002 0.000 /usr/lib/python-exec/python3.7/../../../lib/python3.7/_collections_abc.py:742(__iter__)
630/629 0.000 0.000 0.000 0.000 {method 'decode' of 'bytes' objects}
312 0.000 0.000 0.000 0.000 /usr/lib/python-exec/python3.7/../../../lib/python3.7/os.py:699(__iter__)
32/4 0.000 0.000 0.000 0.000 {built-in method _abc._abc_subclasscheck}
727 0.000 0.000 0.000 0.000 {built-in method builtins.isinstance}
2 0.000 0.000 0.002 0.001 /usr/lib/python3.7/urllib/request.py:2489(getproxies_environment)
2 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/loading.py:337(_instance_processor)
16 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py:826(visit_column)
620 0.000 0.000 0.000 0.000 /usr/lib/python-exec/python3.7/../../../lib/python3.7/os.py:758(decode)
318 0.000 0.000 0.000 0.000 /usr/lib/python-exec/python3.7/../../../lib/python3.7/os.py:754(encode)
6 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py:2899(_froms)
14 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py:772(visit_label)
14 0.000 0.000 0.001 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py:1716(_label_select_column)
342 0.000 0.000 0.000 0.000 {method 'encode' of 'str' objects}
3 0.000 0.000 0.050 0.017 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/base.py:1155(_execute_context)
51 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/werkzeug/local.py:160(top)
39/3 0.000 0.000 0.001 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py:84(_compiler_dispatch)
77 0.000 0.000 0.000 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/session.py:1339(_add_bind)
1 0.000 0.000 0.000 0.000 {built-in method marshal.loads}
172 0.000 0.000 0.000 0.000 {built-in method builtins.getattr}
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
PATH: '/devices/'
3749097 function calls (3426518 primitive calls) in 8.391 seconds
Ordered by: internal time, call count
List reduced from 1528 to 30 due to restriction <30>
ncalls tottime percall cumtime percall filename:lineno(function)
1829 4.330 0.002 4.362 0.002 {method 'execute' of 'psycopg2.extensions.cursor' objects}
44950 0.147 0.000 0.164 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/interfaces.py:512(_get_context_loader)
3871 0.143 0.000 0.568 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/loading.py:337(_instance_processor)
4047 0.090 0.000 0.318 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py:221(iterate)
719 0.081 0.000 0.088 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/elements.py:890(__init__)
228904 0.077 0.000 0.080 0.000 {built-in method builtins.isinstance}
4285 0.069 0.000 0.172 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py:2899(_froms)
189751/61029 0.067 0.000 0.120 0.000 /usr/lib/python3.7/json/encoder.py:333(_iterencode_dict)
137316/99651 0.063 0.000 7.521 0.000 {built-in method builtins.getattr}
64723/61729 0.060 0.000 6.845 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/attributes.py:267(__get__)
1298/30 0.058 0.000 7.993 0.266 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/marshmallow/marshalling.py:90(serialize)
44052 0.047 0.000 0.327 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/interfaces.py:550(create_row_processor)
66094 0.046 0.000 0.108 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/result.py:643(_getter)
5416 0.044 0.000 0.128 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sortedcontainers/sortedset.py:108(__init__)
1829 0.040 0.000 0.056 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/flask_sqlalchemy/__init__.py:109(_calling_context)
1829 0.039 0.000 4.747 0.003 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/base.py:1155(_execute_context)
1829 0.038 0.000 0.126 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/default.py:622(_init_compiled)
14593 0.038 0.000 0.061 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/result.py:588(_key_fallback)
22870 0.034 0.000 0.056 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/werkzeug/local.py:160(top)
4047 0.032 0.000 0.043 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py:263(traverse_using)
19422 0.031 0.000 7.233 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/marshmallow/utils.py:338(_get_value_for_key)
9227 0.031 0.000 0.073 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/base.py:38(<listcomp>)
19422/1469 0.030 0.000 7.972 0.005 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/marshmallow/fields.py:229(serialize)
1829 0.030 0.000 0.032 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py:632(construct_params)
12540 0.030 0.000 0.091 0.000 {built-in method builtins.sorted}
4715/4595 0.028 0.000 1.076 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/loading.py:35(instances)
13738 0.028 0.000 0.048 0.000 /home/cayo/ribaguifi/ereuse/dhub/ereuse_devicehub/resources/action/models.py:240(__lt__)
3715 0.027 0.000 0.035 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py:1497(_truncated_identifier)
66094 0.027 0.000 0.134 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/result.py:724(_getter)
1829 0.027 0.000 5.041 0.003 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/base.py:1048(_execute_clauseelement)
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
PATH: '/lots/'
22194313 function calls (19571472 primitive calls) in 28.238 seconds
Ordered by: internal time, call count
List reduced from 1554 to 30 due to restriction <30>
ncalls tottime percall cumtime percall filename:lineno(function)
10276 7.129 0.001 7.271 0.001 {method 'execute' of 'psycopg2.extensions.cursor' objects}
22077 0.874 0.000 2.719 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/loading.py:337(_instance_processor)
23003 0.512 0.000 1.742 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py:221(iterate)
1317963 0.435 0.000 0.447 0.000 {built-in method builtins.isinstance}
24077 0.390 0.000 0.949 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py:2899(_froms)
256711 0.358 0.000 0.408 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/interfaces.py:512(_get_context_loader)
414404/395611 0.352 0.000 20.872 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/attributes.py:267(__get__)
792239/576826 0.348 0.000 24.675 0.000 {built-in method builtins.getattr}
7583/25 0.336 0.000 27.409 1.096 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/marshmallow/marshalling.py:90(serialize)
1478163/360759 0.278 0.000 0.600 0.000 /usr/lib/python3.7/json/encoder.py:333(_iterencode_dict)
252403 0.260 0.000 1.325 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/interfaces.py:550(create_row_processor)
376026 0.244 0.000 0.592 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/result.py:643(_getter)
30982 0.241 0.000 0.749 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sortedcontainers/sortedset.py:108(__init__)
10276 0.210 0.000 0.644 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/default.py:622(_init_compiled)
83633 0.209 0.000 0.348 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/result.py:588(_key_fallback)
10276 0.209 0.000 0.295 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/flask_sqlalchemy/__init__.py:109(_calling_context)
10273 0.197 0.000 1.212 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/orm/query.py:3898(row_processor)
10276 0.194 0.000 9.188 0.001 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/base.py:1155(_execute_context)
133552 0.186 0.000 0.309 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/werkzeug/local.py:160(top)
113482/475 0.181 0.000 27.400 0.058 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/marshmallow/fields.py:229(serialize)
23003 0.178 0.000 0.236 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py:263(traverse_using)
113482 0.177 0.000 23.755 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/marshmallow/utils.py:338(_get_value_for_key)
71690 0.176 0.000 0.501 0.000 {built-in method builtins.sorted}
51856 0.167 0.000 0.392 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/sql/base.py:38(<listcomp>)
91604 0.151 0.000 0.286 0.000 /home/cayo/ribaguifi/ereuse/dhub/ereuse_devicehub/resources/action/models.py:240(__lt__)
1007699/360723 0.151 0.000 0.546 0.000 /usr/lib/python3.7/json/encoder.py:277(_iterencode_list)
376026 0.148 0.000 0.740 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sqlalchemy/engine/result.py:724(_getter)
33713/150 0.147 0.000 27.395 0.183 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/teal/marshmallow.py:212(serialize)
30982 0.146 0.000 0.391 0.000 /home/cayo/ribaguifi/ereuse/dhub/env/lib/python3.7/site-packages/sortedcontainers/sortedlist.py:320(update)
850451 0.143 0.000 0.143 0.000 {method 'get' of 'dict' objects}
--------------------------------------------------------------------------------

View File

@ -1,49 +0,0 @@
# Setup developement project
## Installing
Complete these steps from [README - Installing](README.md#installing).
## Setup project
Create a PostgreSQL database called devicehub by running [create-db](examples/create-db.sh):
- Start the PostgreSQL server
- `bash examples/create-db.sh devicehub dhub` (database `devicehub`, user `dhub`, password `ereuse`)
- `cp examples/env.example .env`
Create a secret key and add it to `.env`:
```bash
echo "SECRET_KEY=$(python3 -c 'import secrets; print(secrets.token_hex())')" >> .env
```
Use the `dh` tool to set up one or more inventories. Create the tables in the database by executing:
```bash
export dhi=dbtest; dh inv add --common --name dbtest
```
Create a demo dataset:
```bash
export dhi=dbtest; dh dummy
```
## Run project
Run the app
```bash
export FLASK_APP=app.py; export FLASK_ENV=development; flask run --debugger
```
Finally, log in at `localhost:5000/login/` with:
- User: user@dhub.com
- Pass: 1234
## Troubleshooting
- If the `dh` command throws an error, install these dependencies in your distro:
- `sudo apt install -y libpango1.0-0 libcairo2 libpq-dev`
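As a quick smoke test of the setup above, you can request the `/versions/` endpoint. This is only a sketch: it assumes the development server from the previous step is listening on `localhost:5000` and that `/versions/` answers with JSON without a logged-in session.

```python
# Minimal smoke test, assuming the development server above runs on
# localhost:5000 and that /versions/ is reachable without logging in
# (both are assumptions).
import requests

resp = requests.get("http://localhost:5000/versions/", timeout=10)
resp.raise_for_status()
print("Devicehub answered:", resp.json())
```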

View File

@ -1 +0,0 @@
docker-compose_devicehub-dpp.yml

View File

@ -1,103 +0,0 @@
version: "3.9"
services:
devicehub-id-server:
init: true
image: dkr-dsg.ac.upc.edu/ereuse/devicehub:latest
environment:
- DB_USER=${DB_USER}
- DB_PASSWORD=${DB_PASSWORD}
- DB_HOST=postgres-id-server
- DB_DATABASE=${DB_DATABASE}
- HOST=${HOST}
- EMAIL_DEMO=${SERVER_ID_EMAIL_DEMO}
- PASSWORD_DEMO=${PASSWORD_DEMO}
- JWT_PASS=${JWT_PASS}
- SECRET_KEY=${SECRET_KEY}
- API_DLT=${API_DLT}
- API_RESOLVER=${API_RESOLVER}
- API_DLT_TOKEN=${API_DLT_TOKEN}
- DEVICEHUB_HOST=${SERVER_ID_DEVICEHUB_HOST}
- ID_FEDERATED=${SERVER_ID_FEDERATED}
- URL_MANUALS=${URL_MANUALS}
- ID_SERVICE=${SERVER_ID_SERVICE}
- AUTHORIZED_CLIENT_URL=${CLIENT_ID_DEVICEHUB_HOST}
- DPP_MODULE=y
- IMPORT_SNAPSHOTS=${IMPORT_SNAPSHOTS}
ports:
- 5000:5000
volumes:
- ${SNAPSHOTS_PATH:-./examples/snapshots}:/mnt/snapshots:ro
- shared:/shared:rw
- app_id_server:/opt/devicehub:rw
postgres-id-server:
image: dkr-dsg.ac.upc.edu/ereuse/postgres:latest
# 4. To create the database.
# 5. Give permissions to the corresponding users in the database.
# extra src https://github.com/docker-library/docs/blob/master/postgres/README.md#environment-variables
environment:
- POSTGRES_PASSWORD=${DB_PASSWORD}
- POSTGRES_USER=${DB_USER}
- POSTGRES_DB=${DB_DATABASE}
# DEBUG
#ports:
# - 5432:5432
# TODO persistence
#volumes:
# - pg_data:/var/lib/postgresql/data
devicehub-id-client:
init: true
image: dkr-dsg.ac.upc.edu/ereuse/devicehub:latest
environment:
- DB_USER=${DB_USER}
- DB_PASSWORD=${DB_PASSWORD}
- DB_HOST=postgres-id-client
- DB_DATABASE=${DB_DATABASE}
- HOST=${HOST}
- EMAIL_DEMO=${CLIENT_ID_EMAIL_DEMO}
- PASSWORD_DEMO=${PASSWORD_DEMO}
- JWT_PASS=${JWT_PASS}
- SECRET_KEY=${SECRET_KEY}
- API_DLT=${API_DLT}
- API_RESOLVER=${API_RESOLVER}
- API_DLT_TOKEN=${API_DLT_TOKEN}
- DEVICEHUB_HOST=${CLIENT_ID_DEVICEHUB_HOST}
- SERVER_ID_HOST=${SERVER_ID_DEVICEHUB_HOST}
- ID_FEDERATED=${CLIENT_ID_FEDERATED}
- URL_MANUALS=${URL_MANUALS}
- ID_SERVICE=${CLIENT_ID_SERVICE}
- DPP_MODULE=y
- IMPORT_SNAPSHOTS=${IMPORT_SNAPSHOTS}
ports:
- 5001:5000
volumes:
- ${SNAPSHOTS_PATH:-./examples/snapshots}:/mnt/snapshots:ro
- shared:/shared:ro
- app_id_client:/opt/devicehub:rw
postgres-id-client:
image: dkr-dsg.ac.upc.edu/ereuse/postgres:latest
# 4. To create the database.
# 5. Give permissions to the corresponding users in the database.
# extra src https://github.com/docker-library/docs/blob/master/postgres/README.md#environment-variables
environment:
- POSTGRES_PASSWORD=${DB_PASSWORD}
- POSTGRES_USER=${DB_USER}
- POSTGRES_DB=${DB_DATABASE}
# DEBUG
#ports:
# - 5432:5432
# TODO persistence
#volumes:
# - pg_data:/var/lib/postgresql/data
# TODO https://testdriven.io/blog/dockerizing-django-with-postgres-gunicorn-and-nginx/
#nginx
volumes:
shared:
app_id_client:
app_id_server:

View File

@ -1,54 +0,0 @@
version: "3.9"
services:
devicehub:
init: true
image: dkr-dsg.ac.upc.edu/ereuse/devicehub:dpp__c6ec6658
environment:
- DB_USER=${DB_USER}
- DB_PASSWORD=${DB_PASSWORD}
- DB_HOST=postgres
- DB_DATABASE=${DB_DATABASE}
- HOST=${HOST}
- EMAIL_DEMO=${EMAIL_DEMO}
- PASSWORD_DEMO=${PASSWORD_DEMO}
- JWT_PASS=${JWT_PASS}
- SECRET_KEY=${SECRET_KEY}
- DEVICEHUB_HOST=${DEVICEHUB_HOST}
- URL_MANUALS=${URL_MANUALS}
- DPP_MODULE=n
- IMPORT_SNAPSHOTS=${IMPORT_SNAPSHOTS}
- DEPLOYMENT=${DEPLOYMENT}
ports:
- 5000:5000
volumes:
- ${SNAPSHOTS_PATH:-./examples/snapshots}:/mnt/snapshots:ro
- shared:/shared:rw
- app:/opt/devicehub:rw
postgres:
image: dkr-dsg.ac.upc.edu/ereuse/postgres:dpp__c6ec6658
# 4. To create the database.
# 5. Give permissions to the corresponding users in the database.
# extra src https://github.com/docker-library/docs/blob/master/postgres/README.md#environment-variables
environment:
- POSTGRES_PASSWORD=${DB_PASSWORD}
- POSTGRES_USER=${DB_USER}
- POSTGRES_DB=${DB_DATABASE}
volumes:
- pg_data:/var/lib/postgresql/data
# DEBUG
#ports:
# - 5432:5432
nginx:
image: nginx
ports:
- 8080:8080
volumes:
- ./docker/nginx-devicehub.nginx.conf:/etc/nginx/nginx.conf:ro
volumes:
shared:
pg_data:
app:

View File

@ -1,32 +0,0 @@
FROM debian:bullseye-slim
RUN apt update && apt-get install --no-install-recommends -y \
python3-minimal \
python3-pip \
python-is-python3 \
python3-psycopg2 \
python3-dev \
libpq-dev \
build-essential \
libpangocairo-1.0-0 \
curl \
jq \
time \
netcat
WORKDIR /opt/devicehub
# this is exactly the same as examples/pip_install.sh except the last command
# it has been kept as a separate step to improve Docker layer caching
RUN pip install --upgrade pip
RUN pip install alembic==1.8.1 anytree==2.8.0 apispec==0.39.0 atomicwrites==1.4.0 blinker==1.5 boltons==23.0.0 cairocffi==1.4.0 cairosvg==2.5.2 certifi==2022.9.24 cffi==1.15.1 charset-normalizer==2.0.12 click==6.7 click-spinner==0.1.8 colorama==0.3.9 colour==0.1.5 cssselect2==0.7.0 defusedxml==0.7.1 et-xmlfile==1.1.0 flask==1.0.2 flask-cors==3.0.10 flask-login==0.5.0 flask-sqlalchemy==2.5.1 flask-weasyprint==0.4 flask-wtf==1.0.0 hashids==1.2.0 html5lib==1.1 idna==3.4 inflection==0.5.1 itsdangerous==2.0.1 jinja2==3.0.3 mako==1.2.3 markupsafe==2.1.1 marshmallow==3.0.0b11 marshmallow-enum==1.4.1 more-itertools==8.12.0 numpy==1.22.0 odfpy==1.4.1 openpyxl==3.0.10 pandas==1.3.5 passlib==1.7.1 phonenumbers==8.9.11 pillow==9.2.0 pint==0.9 psycopg2-binary==2.8.3 py-dmidecode==0.1.0 pycparser==2.21 pyjwt==2.4.0 pyphen==0.13.0 python-dateutil==2.7.3 python-decouple==3.3 python-dotenv==0.14.0 python-editor==1.0.4 python-stdnum==1.9 pytz==2022.2.1 pyyaml==5.4 requests==2.27.1 requests-mock==1.5.2 requests-toolbelt==0.9.1 six==1.16.0 sortedcontainers==2.1.0 sqlalchemy==1.3.24 sqlalchemy-citext==1.3.post0 sqlalchemy-utils==0.33.11 tinycss2==1.1.1 tqdm==4.32.2 urllib3==1.26.12 weasyprint==44 webargs==5.5.3 webencodings==0.5.1 werkzeug==2.0.3 wtforms==3.0.1 xlrd==2.0.1 cryptography==39.0.1 Authlib==1.2.1 gunicorn==21.2.0
RUN pip install -i https://test.pypi.org/simple/ ereuseapitest==0.0.14
COPY . .
# this may override, inside the container, an app.py you already have
COPY examples/app.py .
RUN pip install -e .
COPY docker/devicehub.entrypoint.sh /
ENTRYPOINT sh /devicehub.entrypoint.sh

View File

@ -1,12 +0,0 @@
.git
.env
# TODO: kept commented so the entrypoint under docker/ can still be copied
#docker
Makefile
# Emacs backup files
*~
.\#*
# Vim swap files
*.swp
*.swo

View File

@ -1,228 +0,0 @@
#!/bin/sh
set -e
set -u
# DEBUG
set -x
# 3. Generate an environment .env file.
gen_env_vars() {
CONFIG_OIDC="${CONFIG_OIDC:-y}"
# specific dpp env vars
if [ "${DPP_MODULE}" = 'y' ]; then
dpp_env_vars="$(cat <<END
API_DLT='${API_DLT}'
API_DLT_TOKEN='${API_DLT_TOKEN}'
API_RESOLVER='${API_RESOLVER}'
ID_FEDERATED='${ID_FEDERATED}'
END
)"
fi
# generate config using env vars from docker
cat > .env <<END
${dpp_env_vars:-}
DB_USER='${DB_USER}'
DB_PASSWORD='${DB_PASSWORD}'
DB_HOST='${DB_HOST}'
DB_DATABASE='${DB_DATABASE}'
URL_MANUALS='${URL_MANUALS}'
HOST='${HOST}'
SCHEMA='dbtest'
DB_SCHEMA='dbtest'
EMAIL_DEMO='${EMAIL_DEMO}'
PASSWORD_DEMO='${PASSWORD_DEMO}'
JWT_PASS=${JWT_PASS}
SECRET_KEY=${SECRET_KEY}
END
}
wait_for_postgres() {
# old one was
#sleep 4
default_postgres_port=5432
# thanks https://testdriven.io/blog/dockerizing-django-with-postgres-gunicorn-and-nginx/
while ! nc -z ${DB_HOST} ${default_postgres_port}; do
sleep 0.5
done
}
init_data() {
# 7. Run alembic for the project.
alembic -x inventory=dbtest upgrade head
# 8. Run alembic from the oidc module.
cd ereuse_devicehub/modules/oidc
alembic -x inventory=dbtest upgrade head
cd -
# 9. Run alembic from the dpp module.
cd ereuse_devicehub/modules/dpp/
alembic -x inventory=dbtest upgrade head
cd -
# 11. Generate a minimal data structure.
# TODO it has some errors (?)
flask initdata || true
if [ "${EREUSE_PILOT:-}" = 'y' ]; then
flask dlt_register_user /opt/devicehub/users_devicehub.json || true
fi
}
big_error() {
local message="${@}"
echo "###############################################" >&2
echo "# ERROR: ${message}" >&2
echo "###############################################" >&2
exit 1
}
handle_federated_id() {
# devicehub host and id federated checker
# a double slash ('//getAll') is not accepted by this service, so the trailing slash of API_RESOLVER is stripped
EXPECTED_ID_FEDERATED="$(curl -s "${API_RESOLVER%/}/getAll" \
| jq -r '.url | to_entries | .[] | select(.value == "'"${DEVICEHUB_HOST}"'") | .key' \
| head -n 1)"
# if it is a new DEVICEHUB_HOST, register it
if [ -z "${EXPECTED_ID_FEDERATED}" ]; then
# TODO better docker compose run command
cmd="docker compose run --entrypoint= devicehub flask dlt_insert_members ${DEVICEHUB_HOST}"
big_error "No FEDERATED ID maybe you should run \`${cmd}\`"
fi
# if it is not a new DEVICEHUB_HOST, check consistency:
# if there is already an ID in the DLT, it must match our internal ID
if [ ! "${EXPECTED_ID_FEDERATED}" = "${ID_FEDERATED}" ]; then
big_error "ID_FEDERATED should be ${EXPECTED_ID_FEDERATED} instead of ${ID_FEDERATED}"
fi
# not needed, but reserved
# EXPECTED_DEVICEHUB_HOST="$(curl -s "${API_RESOLVER%/}/getAll" \
# | jq -r '.url | to_entries | .[] | select(.key == "'"${ID_FEDERATED}"'") | .value' \
# | head -n 1)"
# if [ ! "${EXPECTED_DEVICEHUB_HOST}" = "${DEVICEHUB_HOST}" ]; then
# big_error "ERROR: DEVICEHUB_HOST should be ${EXPECTED_DEVICEHUB_HOST} instead of ${DEVICEHUB_HOST}"
# fi
}
config_oidc() {
# TODO test allowing more than 1 client
if [ "${ID_SERVICE}" = "server_id" ]; then
client_description="client identity from docker compose demo"
# in AUTHORIZED_CLIENT_URL we remove anything before ://
flask add_contract_oidc \
"${EMAIL_DEMO}" \
"${client_description}" \
"${AUTHORIZED_CLIENT_URL}" \
> /shared/client_id_${AUTHORIZED_CLIENT_URL#*://}
elif [ "${ID_SERVICE}" = "client_id" ]; then
# in DEVICEHUB_HOST we remove anything before ://
client_id_config="/shared/client_id_${DEVICEHUB_HOST#*://}"
client_id=
client_secret=
# wait until the file generated by the server_id is readable
while true; do
if [ -f "${client_id_config}" ]; then
client_id="$(cat "${client_id_config}" | jq -r '.client_id')"
client_secret="$(cat "${client_id_config}" | jq -r '.client_secret')"
if [ "${client_id}" ] && [ "${client_secret}" ]; then
break
fi
fi
sleep 1
done
flask add_client_oidc \
"${SERVER_ID_HOST}" \
"${client_id}" \
"${client_secret}"
else
big_error "Something went wrong ${ID_SERVICE} is not server_id nor client_id"
fi
}
config_dpp_part1() {
# 12. Add a new server to the 'api resolver'
handle_federated_id
# 13. Do a rsync api resolve
flask dlt_rsync_members
# 14. Register a new user to the DLT
#flask dlt_register_user "${EMAIL_DEMO}" ${PASSWORD_DEMO} Operator
}
config_phase() {
init_flagfile='docker__already_configured'
if [ ! -f "${init_flagfile}" ]; then
# 7, 8, 9, 11
init_data
if [ "${DPP_MODULE}" = 'y' ]; then
# 12, 13, 14
config_dpp_part1
fi
# non DL user (only for the inventory)
# flask adduser user2@dhub.com ${PASSWORD_DEMO}
# # 15. Add inventory snapshots for user "${EMAIL_DEMO}".
if [ "${IMPORT_SNAPSHOTS}" = 'y' ]; then
mkdir -p ereuse_devicehub/commands/snapshot_files
cp /mnt/snapshots/snapshot*.json ereuse_devicehub/commands/snapshot_files/
/usr/bin/time flask snapshot "${EMAIL_DEMO}" ${PASSWORD_DEMO}
fi
if [ "${CONFIG_OIDC}" = 'y' ]; then
# 16.
# commented because this fails with wrong DLT credentials
#flask check_install "${EMAIL_DEMO}" "${PASSWORD_DEMO}"
# 20. config server or client ID
config_oidc
fi
# keep the next command as the last operation of this if block
touch "${init_flagfile}"
fi
}
main() {
gen_env_vars
wait_for_postgres
config_phase
# 17. Use gunicorn
# thanks https://akira3030.github.io/formacion/articulos/python-flask-gunicorn-docker.html
if [ "${DEPLOYMENT:-}" = "PROD" ]; then
# TODO workers 1 because we have a shared secret in RAM
gunicorn --access-logfile - --error-logfile - --workers 1 -b :5000 app:app
else
# run development server
FLASK_DEBUG=1 flask run --host=0.0.0.0 --port 5000
fi
# DEBUG
#sleep infinity
}
main "${@}"

View File

@ -1,32 +0,0 @@
user www-data;
worker_processes auto;
pid /run/nginx.pid;
error_log /var/log/nginx/error.log;
include /etc/nginx/modules-enabled/*.conf;
events {
worker_connections 768;
# multi_accept on;
}
http {
#upstream socket_backend {
# server unix:/socket/gunicorn.sock fail_timeout=0;
#}
server {
listen 8080;
listen [::]:8080;
#server_name devicehub.example.org;
location / {
# TODO env var on proxy_pass
proxy_pass http://devicehub:5000/;
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_redirect off;
}
}
}

View File

@ -1,8 +0,0 @@
FROM postgres:15.4-bookworm
# this was the latest tag as of 2023-09-14 13:01:38
#FROM postgres:latest
# Add a SQL script that will be executed upon container startup
COPY docker/postgres.setupdb.sql /docker-entrypoint-initdb.d/
EXPOSE 5432

View File

@ -1,5 +0,0 @@
-- 6. Create the necessary extensions.
CREATE EXTENSION pgcrypto SCHEMA public;
CREATE EXTENSION ltree SCHEMA public;
CREATE EXTENSION citext SCHEMA public;
CREATE EXTENSION pg_trgm SCHEMA public;

View File

@ -30,6 +30,7 @@ from teal.enums import Country, Currency, Layouts, Subdivision
from teal.marshmallow import EnumField
from ereuse_devicehub.marshmallow import NestedOn
from ereuse_devicehub.resources.schemas import Thing
project = 'Devicehub'
copyright = '2020, eReuse.org team'
@ -55,7 +56,7 @@ extensions = [
'sphinx.ext.viewcode',
'sphinxcontrib.plantuml',
'sphinx.ext.autosectionlabel',
'sphinx.ext.autodoc',
'sphinx.ext.autodoc'
]
# Add any paths that contain templates here, relative to this directory.
@ -125,12 +126,15 @@ latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
@ -140,20 +144,18 @@ latex_elements = {
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
master_doc,
'Devicehub.tex',
'Devicehub Documentation',
'eReuse.org team',
'manual',
),
(master_doc, 'Devicehub.tex', 'Devicehub Documentation',
'eReuse.org team', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, 'devicehub', 'Devicehub Documentation', [author], 1)]
man_pages = [
(master_doc, 'devicehub', 'Devicehub Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
@ -161,15 +163,9 @@ man_pages = [(master_doc, 'devicehub', 'Devicehub Documentation', [author], 1)]
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
'Devicehub',
'Devicehub Documentation',
author,
'Devicehub',
'One line description of project.',
'Miscellaneous',
),
(master_doc, 'Devicehub', 'Devicehub Documentation',
author, 'Devicehub', 'One line description of project.',
'Miscellaneous'),
]
# -- Extension configuration -------------------------------------------------
@ -203,7 +199,6 @@ class DhlistDirective(Directive):
This requires :py:class:`ereuse_devicehub.resources.schemas.SchemaMeta`.
You will find in that module more information.
"""
has_content = False
# Definition of passed-in options
@ -221,7 +216,7 @@ class DhlistDirective(Directive):
sections = []
sections.append(self.links(things)) # Make index
for thng in things:
for thng in things: # type: Thing
# Generate a section for each class, with a title,
# fields description and a paragraph
section = n.section(ids=[self._id(thng)])
@ -233,9 +228,7 @@ class DhlistDirective(Directive):
for key, f in thng._own:
name = n.field_name(text=f.data_key or key)
body = [
self.parse(
'{} {}'.format(self.type(f), f.metadata.get('description', ''))
)
self.parse('{} {}'.format(self.type(f), f.metadata.get('description', '')))
]
if isinstance(f, EnumField):
body.append(self._parse_enum_field(f))
@ -251,7 +244,6 @@ class DhlistDirective(Directive):
def _parse_enum_field(self, f):
from ereuse_devicehub.resources.device import states
if issubclass(f.enum, (Subdivision, Currency, Country, Layouts, states.State)):
return self.parse(f.enum.__doc__)
else:
@ -306,7 +298,7 @@ class DhlistDirective(Directive):
def parse(self, text) -> n.container:
"""Parses text possibly containing ReST stuff and adds it in
a node."""
a node."""
p = n.container('')
self.state.nested_parse(StringList(string2lines(inspect.cleandoc(text))), 0, p)
return p

View File

@ -9,12 +9,6 @@ dags-with-materialized-paths-using-postgres-ltree/>`_ you have
a low-level technical implementation of how lots and their
relationships are mapped.
Getting lots
************
You can get the list of lots with ``GET /lots/``.
There is one optional filter, ``type``, which only accepts these 3 values: ``temporary``, ``incoming`` and ``outgoing``.
Create lots
***********
You create a lot by ``POST /lots/`` a `JSON Lot object <https://
@ -34,6 +28,7 @@ And for devices is all the same:
``POST /lots/<parent-lot-id>/devices/?id=<device-id-1>&id=<device-id-2>``;
idem for removing devices.
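A minimal client sketch of the calls above, assuming a local Devicehub at
``http://localhost:5000``, a valid ``Authorization`` header and a Lot JSON
carrying at least a ``name`` (these details are assumptions, not guaranteed
by this page)::

    import requests

    BASE = "http://localhost:5000"                 # assumption: local instance
    HEADERS = {"Authorization": "Basic <token>"}   # assumption: your own token

    # List lots, optionally filtered by type (temporary, incoming or outgoing)
    lots = requests.get(BASE + "/lots/", params={"type": "temporary"}, headers=HEADERS)
    print(lots.json())

    # Create a lot and read its id from the response (field name assumed)
    new_lot = requests.post(BASE + "/lots/", json={"name": "Spring donation"}, headers=HEADERS)
    lot_id = new_lot.json()["id"]

    # Add two devices to the lot; removing them uses the same URL with DELETE
    requests.post(
        BASE + "/lots/{}/devices/".format(lot_id),
        params=[("id", 1), ("id", 2)],
        headers=HEADERS,
    )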
Sharing lots
************
Sharing a lot means giving certain permissions to users, like reading

View File

@ -1 +1 @@
__version__ = "2.5.3"
__version__ = "1.0.12-beta"

View File

@ -1,113 +0,0 @@
import json
from binascii import Error as asciiError
from flask import Blueprint
from flask import current_app as app
from flask import g, jsonify, request
from flask.views import View
from flask.wrappers import Response
from marshmallow.exceptions import ValidationError
from werkzeug.exceptions import Unauthorized
from ereuse_devicehub.auth import Auth
from ereuse_devicehub.db import db
from ereuse_devicehub.parser.models import SnapshotsLog
from ereuse_devicehub.parser.parser import ParseSnapshotLsHw
from ereuse_devicehub.parser.schemas import Snapshot_lite
from ereuse_devicehub.resources.action.views.snapshot import (
SnapshotMixin,
move_json,
save_json,
)
from ereuse_devicehub.resources.enums import Severity
api = Blueprint('api', __name__, url_prefix='/api')
class LoginMixin(View):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.authenticate()
def authenticate(self):
unauthorized = Unauthorized('Provide a suitable token.')
basic_token = request.headers.get('Authorization', " ").split(" ")
if not len(basic_token) == 2:
raise unauthorized
token = basic_token[1]
try:
token = Auth.decode(token)
except asciiError:
raise unauthorized
self.user = Auth().authenticate(token)
g.user = self.user
class InventoryView(LoginMixin, SnapshotMixin):
methods = ['POST']
def dispatch_request(self):
snapshot_json = json.loads(request.data)
self.tmp_snapshots = app.config['TMP_SNAPSHOTS']
self.path_snapshot = save_json(snapshot_json, self.tmp_snapshots, g.user.email)
snapshot_json = self.validate(snapshot_json)
if type(snapshot_json) == Response:
return snapshot_json
self.snapshot_json = ParseSnapshotLsHw(snapshot_json).get_snapshot()
snapshot = self.build()
snapshot.device.set_hid()
snapshot.device.binding.device.set_hid()
db.session.add(snapshot)
snap_log = SnapshotsLog(
description='Ok',
snapshot_uuid=snapshot.uuid,
severity=Severity.Info,
sid=snapshot.sid,
version=str(snapshot.version),
snapshot=snapshot,
)
snap_log.save()
db.session().final_flush()
db.session.commit()
url = "https://{}/".format(app.config['HOST'])
public_url = "{}{}".format(url.strip("/"), snapshot.device.url.to_text())
self.response = jsonify(
{
'dhid': snapshot.device.dhid,
'url': url,
'public_url': public_url,
}
)
self.response.status_code = 201
move_json(self.tmp_snapshots, self.path_snapshot, g.user.email)
return self.response
def validate(self, snapshot_json):
self.schema = Snapshot_lite()
try:
return self.schema.load(snapshot_json)
except ValidationError as err:
txt = "{}".format(err)
uuid = snapshot_json.get('uuid')
sid = snapshot_json.get('sid')
version = snapshot_json.get('version')
error = SnapshotsLog(
description=txt,
snapshot_uuid=uuid,
severity=Severity.Error,
sid=sid,
version=str(version),
)
error.save(commit=True)
# raise err
self.response = jsonify(err)
self.response.status_code = 400
return self.response
api.add_url_rule('/inventory/', view_func=InventoryView.as_view('inventory'))
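A minimal client sketch for this endpoint (assuming the requests library; the host is illustrative and TOKEN is the value printed by the get_token command shown further below):

import json

import requests

TOKEN = '<value printed by the get_token command>'  # illustrative placeholder

with open('snapshot.json') as f:  # a snapshot accepted by Snapshot_lite
    snapshot = json.load(f)

r = requests.post(
    'https://devicehub.example.com/api/inventory/',
    json=snapshot,
    # authenticate() above only requires two space-separated parts,
    # with the encoded token as the second one
    headers={'Authorization': 'Basic {}'.format(TOKEN)},
)
print(r.status_code, r.json())  # 201 plus dhid, url and public_url on success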

View File

@ -1,9 +1,9 @@
from sqlalchemy.exc import DataError
from teal.auth import TokenAuth
from teal.db import ResourceNotFound
from werkzeug.exceptions import Unauthorized
from ereuse_devicehub.resources.user.models import Session, User
from ereuse_devicehub.teal.auth import TokenAuth
from ereuse_devicehub.teal.db import ResourceNotFound
from ereuse_devicehub.resources.user.models import User, Session
class Auth(TokenAuth):


@ -1,24 +1,22 @@
import os
import click.testing
import ereuse_utils
import flask.cli
import ereuse_devicehub.ereuse_utils
from ereuse_devicehub.config import DevicehubConfig
from ereuse_devicehub.devicehub import Devicehub
import sys
sys.ps1 = '\001\033[92m\002>>> \001\033[0m\002'
sys.ps2 = '\001\033[94m\002... \001\033[0m\002'
import os, readline, atexit
sys.ps2= '\001\033[94m\002... \001\033[0m\002'
import os, readline, rlcompleter, atexit
history_file = os.path.join(os.environ['HOME'], '.python_history')
try:
readline.read_history_file(history_file)
readline.read_history_file(history_file)
except IOError:
pass
pass
readline.parse_and_bind("tab: complete")
readline.parse_and_bind('"\e[5~": history-search-backward')
readline.parse_and_bind('"\e[6~": history-search-forward')
@ -31,7 +29,6 @@ readline.parse_and_bind('"\e[1;5D": backward-word')
readline.set_history_length(100000)
atexit.register(readline.write_history_file, history_file)
class DevicehubGroup(flask.cli.FlaskGroup):
# todo users cannot make cli to use a custom db this way!
CONFIG = DevicehubConfig
@ -52,37 +49,26 @@ class DevicehubGroup(flask.cli.FlaskGroup):
def get_version(ctx, param, value):
if not value or ctx.resilient_parsing:
return
click.echo(
'Devicehub {}'.format(
ereuse_devicehub.ereuse_utils.version('ereuse-devicehub')
),
color=ctx.color,
)
click.echo('Devicehub {}'.format(ereuse_utils.version('ereuse-devicehub')), color=ctx.color)
flask.cli.get_version(ctx, param, value)
@click.option(
'--version',
help='Devicehub version.',
expose_value=False,
callback=get_version,
is_flag=True,
is_eager=True,
)
@click.group(
cls=DevicehubGroup,
context_settings=Devicehub.cli_context_settings,
add_version_option=False,
help="""Manages the Devicehub of the inventory {}.
@click.option('--version',
help='Devicehub version.',
expose_value=False,
callback=get_version,
is_flag=True,
is_eager=True)
@click.group(cls=DevicehubGroup,
context_settings=Devicehub.cli_context_settings,
add_version_option=False,
help="""Manages the Devicehub of the inventory {}.
Use 'export dhi=xx' to set the inventory that this CLI
manages. For example 'export dhi=db1' and then executing
'dh tag add' adds a tag in the db1 database. Operations
that affect the common database (like creating a user)
are not affected by this.
""".format(
os.environ.get('dhi')
),
)
""".format(os.environ.get('dhi')))
def cli():
pass


@ -1,14 +1,11 @@
from inspect import isclass
from typing import Dict, Iterable, Type, Union
from ereuse_devicehub.ereuse_utils.test import JSON, Res
from flask.testing import FlaskClient
from flask_wtf.csrf import generate_csrf
from ereuse_utils.test import JSON, Res
from teal.client import Client as TealClient, Query, Status
from werkzeug.exceptions import HTTPException
from ereuse_devicehub.resources import models, schemas
from ereuse_devicehub.teal.client import Client as TealClient
from ereuse_devicehub.teal.client import Query, Status
ResourceLike = Union[Type[Union[models.Thing, schemas.Thing]], str]
@ -16,156 +13,110 @@ ResourceLike = Union[Type[Union[models.Thing, schemas.Thing]], str]
class Client(TealClient):
"""A client suited for Devicehub main usage."""
def __init__(
self,
application,
response_wrapper=None,
use_cookies=False,
allow_subdomain_redirects=False,
):
super().__init__(
application, response_wrapper, use_cookies, allow_subdomain_redirects
)
def __init__(self, application,
response_wrapper=None,
use_cookies=False,
allow_subdomain_redirects=False):
super().__init__(application, response_wrapper, use_cookies, allow_subdomain_redirects)
def open(
self,
uri: str,
res: ResourceLike = None,
status: Status = 200,
query: Query = tuple(),
accept=JSON,
content_type=JSON,
item=None,
headers: dict = None,
token: str = None,
**kw,
) -> Res:
def open(self,
uri: str,
res: ResourceLike = None,
status: Status = 200,
query: Query = tuple(),
accept=JSON,
content_type=JSON,
item=None,
headers: dict = None,
token: str = None,
**kw) -> Res:
if isclass(res) and issubclass(res, (models.Thing, schemas.Thing)):
res = res.t
return super().open(
uri, res, status, query, accept, content_type, item, headers, token, **kw
)
return super().open(uri, res, status, query, accept, content_type, item, headers, token,
**kw)
def get(
self,
uri: str = '',
res: ResourceLike = None,
query: Query = tuple(),
status: Status = 200,
item: Union[int, str] = None,
accept: str = JSON,
headers: dict = None,
token: str = None,
**kw,
) -> Res:
def get(self,
uri: str = '',
res: ResourceLike = None,
query: Query = tuple(),
status: Status = 200,
item: Union[int, str] = None,
accept: str = JSON,
headers: dict = None,
token: str = None,
**kw) -> Res:
return super().get(uri, res, query, status, item, accept, headers, token, **kw)
def post(
self,
data: str or dict,
uri: str = '',
res: ResourceLike = None,
query: Query = tuple(),
status: Status = 201,
content_type: str = JSON,
accept: str = JSON,
headers: dict = None,
token: str = None,
**kw,
) -> Res:
return super().post(
data, uri, res, query, status, content_type, accept, headers, token, **kw
)
def post(self,
data: str or dict,
uri: str = '',
res: ResourceLike = None,
query: Query = tuple(),
status: Status = 201,
content_type: str = JSON,
accept: str = JSON,
headers: dict = None,
token: str = None,
**kw) -> Res:
return super().post(data, uri, res, query, status, content_type, accept, headers, token,
**kw)
def patch(
self,
data: str or dict,
uri: str = '',
res: ResourceLike = None,
query: Query = tuple(),
item: Union[int, str] = None,
status: Status = 200,
content_type: str = JSON,
accept: str = JSON,
headers: dict = None,
token: str = None,
**kw,
) -> Res:
return super().patch(
data,
uri,
res,
query,
item,
status,
content_type,
accept,
token,
headers,
**kw,
)
def patch(self,
data: str or dict,
uri: str = '',
res: ResourceLike = None,
query: Query = tuple(),
item: Union[int, str] = None,
status: Status = 200,
content_type: str = JSON,
accept: str = JSON,
headers: dict = None,
token: str = None,
**kw) -> Res:
return super().patch(data, uri, res, query, item, status, content_type, accept, token,
headers, **kw)
def put(
self,
data: str or dict,
uri: str = '',
res: ResourceLike = None,
query: Query = tuple(),
item: Union[int, str] = None,
status: Status = 201,
content_type: str = JSON,
accept: str = JSON,
headers: dict = None,
token: str = None,
**kw,
) -> Res:
return super().put(
data,
uri,
res,
query,
item,
status,
content_type,
accept,
token,
headers,
**kw,
)
def put(self,
data: str or dict,
uri: str = '',
res: ResourceLike = None,
query: Query = tuple(),
item: Union[int, str] = None,
status: Status = 201,
content_type: str = JSON,
accept: str = JSON,
headers: dict = None,
token: str = None,
**kw) -> Res:
return super().put(data, uri, res, query, item, status, content_type, accept, token,
headers, **kw)
def delete(
self,
uri: str = '',
res: ResourceLike = None,
query: Query = tuple(),
status: Status = 204,
item: Union[int, str] = None,
accept: str = JSON,
headers: dict = None,
token: str = None,
**kw,
) -> Res:
return super().delete(
uri, res, query, status, item, accept, headers, token, **kw
)
def delete(self,
uri: str = '',
res: ResourceLike = None,
query: Query = tuple(),
status: Status = 204,
item: Union[int, str] = None,
accept: str = JSON,
headers: dict = None,
token: str = None,
**kw) -> Res:
return super().delete(uri, res, query, status, item, accept, headers, token, **kw)
def login(self, email: str, password: str):
assert isinstance(email, str)
assert isinstance(password, str)
return self.post(
{'email': email, 'password': password}, '/users/login/', status=200
)
return self.post({'email': email, 'password': password}, '/users/login/', status=200)
def get_many(
self,
res: ResourceLike,
resources: Iterable[Union[dict, int]],
key: str = None,
**kw,
) -> Iterable[Union[Dict[str, object], str]]:
def get_many(self,
res: ResourceLike,
resources: Iterable[Union[dict, int]],
key: str = None,
**kw) -> Iterable[Union[Dict[str, object], str]]:
"""Like :meth:`.get` but with many resources."""
return (
self.get(res=res, item=r[key] if key else r, **kw)[0] for r in resources
self.get(res=res, item=r[key] if key else r, **kw)[0]
for r in resources
)
@ -175,119 +126,33 @@ class UserClient(Client):
It will automatically perform login on the first request.
"""
def __init__(
self,
application,
email: str,
password: str,
response_wrapper=None,
use_cookies=False,
allow_subdomain_redirects=False,
):
super().__init__(
application, response_wrapper, use_cookies, allow_subdomain_redirects
)
def __init__(self, application,
email: str,
password: str,
response_wrapper=None,
use_cookies=False,
allow_subdomain_redirects=False):
super().__init__(application, response_wrapper, use_cookies, allow_subdomain_redirects)
self.email = email # type: str
self.password = password # type: str
self.user = None # type: dict
def open(
self,
uri: str,
res: ResourceLike = None,
status: int or HTTPException = 200,
query: Query = tuple(),
accept=JSON,
content_type=JSON,
item=None,
headers: dict = None,
token: str = None,
**kw,
) -> Res:
return super().open(
uri,
res,
status,
query,
accept,
content_type,
item,
headers,
self.user['token'] if self.user else token,
**kw,
)
def open(self,
uri: str,
res: ResourceLike = None,
status: int or HTTPException = 200,
query: Query = tuple(),
accept=JSON,
content_type=JSON,
item=None,
headers: dict = None,
token: str = None,
**kw) -> Res:
return super().open(uri, res, status, query, accept, content_type, item, headers,
self.user['token'] if self.user else token, **kw)
# noinspection PyMethodOverriding
def login(self):
response = super().login(self.email, self.password)
self.user = response[0]
return response
class UserClientFlask:
def __init__(
self,
application,
email: str,
password: str,
response_wrapper=None,
use_cookies=True,
follow_redirects=True,
):
self.email = email
self.password = password
self.follow_redirects = follow_redirects
self.user = None
self.client = FlaskClient(application, use_cookies=use_cookies)
self.client.get('/login/')
data = {
'email': email,
'password': password,
'csrf_token': generate_csrf(),
}
body, status, headers = self.client.post(
'/login/', data=data, follow_redirects=True
)
self.headers = headers
body = next(body).decode("utf-8")
assert "Unassigned" in body
def get(
self,
uri='',
data=None,
follow_redirects=True,
content_type='text/html; charset=utf-8',
decode=True,
**kw,
):
body, status, headers = self.client.get(
uri, data=data, follow_redirects=follow_redirects, headers=self.headers
)
if decode:
body = next(body).decode("utf-8")
return (body, status)
def post(
self,
uri='',
data=None,
follow_redirects=True,
content_type='application/x-www-form-urlencoded',
decode=True,
**kw,
):
body, status, headers = self.client.post(
uri,
data=data,
follow_redirects=follow_redirects,
headers=self.headers,
content_type=content_type,
)
if decode:
body = next(body).decode("utf-8")
return (body, status)
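A small usage sketch of the clients above inside a test (assuming app is an already configured Devicehub instance and the credentials exist; this mirrors how dummy.py uses them):

from ereuse_devicehub.resources.device.models import Device
from ereuse_devicehub.resources.lot.models import Lot

user = UserClient(app, 'user@dhub.com', '1234', response_wrapper=app.response_class)
user.login()                                     # stores the token used by later calls
inventory, _ = user.get(res=Device)              # list this user's devices
lot, _ = user.post({'name': 'My lot'}, res=Lot)  # a 201 status is expected by default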


@ -1,24 +0,0 @@
import click
from ereuse_devicehub.db import db
from ereuse_devicehub.resources.agent.models import Person
from ereuse_devicehub.resources.user.models import User
class AddUser:
def __init__(self, app) -> None:
super().__init__()
self.app = app
self.schema = app.config.get('DB_SCHEMA')
self.app.cli.command('adduser', short_help='add a user.')(self.run)
@click.argument('email')
@click.argument('password')
def run(self, email, password):
name = email.split('@')[0]
user = User(email=email, password=password)
user.individuals.add(Person(name=name))
db.session.add(user)
db.session.commit()
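An illustrative way to invoke this command from tests (assuming app is a Devicehub instance; deployments run the same command through the Flask CLI):

runner = app.test_cli_runner()
result = runner.invoke(args=['adduser', 'new.user@example.com', 's3cr3t'])
assert result.exit_code == 0  # the new user is committed to the database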


@ -1,125 +0,0 @@
"""This command is used for up one snapshot."""
import json
import click
from ereuse_devicehub.resources.action.models import Snapshot
from ereuse_devicehub.resources.user.models import User
class CheckInstall:
"""Command.
This command checks whether the installation was OK and whether the
integration with the DLT API works too.
"""
def __init__(self, app) -> None:
"""Init function."""
super().__init__()
self.app = app
self.schema = app.config.get('DB_SCHEMA')
self.app.cli.command('check_install', short_help='Upload snapshots.')(self.run)
@click.argument('email')
@click.argument('password')
def run(self, email, password):
"""Run command."""
self.email = email
self.password = password
self.OKGREEN = '\033[92m'
# self.WARNING = '\033[93m'
self.FAIL = '\033[91m'
self.ENDC = '\033[0m'
print("\n")
try:
self.check_user()
self.check_snapshot()
except Exception:
txt = "There was an Error in the installation!"
print("\n" + self.FAIL + txt + self.ENDC)
return
txt = "The installation is OK!"
print("\n" + self.OKGREEN + txt + self.ENDC)
def check_user(self):
"""Get datamodel of user."""
self.user = User.query.filter_by(email=self.email).first()
txt = "Register user to the DLT "
try:
assert self.user.api_keys_dlt is not None
token_dlt = self.user.get_dlt_keys(self.password)
assert token_dlt.get('data', {}).get('eth_pub_key') is not None
except Exception:
self.print_fail(txt)
raise Exception(txt)
self.print_ok(txt)
api_token = token_dlt.get('data', {}).get('api_token')
txt = "Register user roles in the DLT "
try:
rols = self.user.get_rols(api_token)
assert self.user.rols_dlt is not None
assert self.user.rols_dlt != []
assert self.user.rols_dlt == json.dumps([x for x, y in rols])
except Exception:
self.print_fail(txt)
raise Exception(txt)
self.print_ok(txt)
def check_snapshot(self):
self.snapshot = Snapshot.query.filter_by(author=self.user).first()
if not self.snapshot:
txt = "Impossible register snapshot "
self.print_fail(txt)
raise Exception(txt)
self.device = self.snapshot.device
txt = "Generate DPP "
try:
assert self.device.chid is not None
assert self.snapshot.json_wb is not None
assert self.snapshot.phid_dpp is not None
except Exception:
self.print_fail(txt)
raise Exception(txt)
self.print_ok(txt)
txt = "Register DPP in the DLT "
try:
assert len(self.device.dpps) > 0
dpp = self.device.dpps[0]
assert type(dpp.timestamp) == int
assert dpp in self.snapshot.dpp
assert dpp.documentId == str(self.snapshot.uuid)
# if 'Device already exists' in DLT before
# device.proofs == 0
# Snapshot.proof == 1 [erase]
# if Device is new in DLT before
# device.proofs == 1
# Snapshot.proof == 1 or 2 [Register, erase]
assert len(self.device.proofs) in [0, 1]
assert len(self.snapshot.proofs) in [0, 1, 2]
except Exception:
self.print_fail(txt)
raise Exception(txt)
self.print_ok(txt)
def print_ok(self, msg):
print(msg + self.OKGREEN + " OK!" + self.ENDC)
def print_fail(self, msg):
print(msg + self.FAIL + " FAIL!" + self.ENDC)


@ -1,41 +0,0 @@
from uuid import uuid4
from boltons.urlutils import URL
from decouple import config
from ereuse_devicehub.db import db
from ereuse_devicehub.resources.agent.models import Person
from ereuse_devicehub.resources.inventory.model import Inventory
from ereuse_devicehub.resources.user.models import User
class InitDatas:
def __init__(self, app) -> None:
super().__init__()
self.app = app
self.schema = app.config.get('DB_SCHEMA')
self.email = config('EMAIL_DEMO')
self.name = self.email.split('@')[0] if self.email else None
self.password = config('PASSWORD_DEMO')
self.app.cli.command(
'initdata', short_help='Save a minimum structure of datas.'
)(self.run)
def run(self):
inv = Inventory(
id=self.schema,
name="usody",
tag_provider=URL('http://localhost:8081'),
tag_token=uuid4(),
org_id=uuid4(),
)
db.session.add(inv)
db.session.commit()
if self.email:
user = User(email=self.email, password=self.password)
user.individuals.add(Person(name=self.name))
db.session.add(user)
db.session.commit()


@ -1,103 +0,0 @@
"""This command is used for up one snapshot."""
import json
# from uuid import uuid4
from io import BytesIO
from os import listdir
from os import remove as remove_file
from os.path import isfile, join
from pathlib import Path
import click
from flask.testing import FlaskClient
from flask_wtf.csrf import generate_csrf
from ereuse_devicehub.resources.user.models import User
class UploadSnapshots:
"""Command.
This command uploads all the snapshots that exist
in the directory snapshot_files.
If a snapshot already exists, it is replaced.
"""
def __init__(self, app) -> None:
"""Init function."""
super().__init__()
self.app = app
self.schema = app.config.get('DB_SCHEMA')
self.app.cli.command('snapshot', short_help='Upload snapshots.')(self.run)
@click.argument('email')
@click.argument('password')
def run(self, email, password=None):
"""Run command."""
self.email = email
self.password = password
self.json_wb = None
self.onlyfiles = []
self.get_user()
self.get_files()
for f in self.onlyfiles:
self.file_snapshot = f
self.open_snapshot()
self.build_snapshot()
self.remove_files()
def get_user(self):
"""Get datamodel of user."""
self.user = User.query.filter_by(email=self.email).one()
self.client = FlaskClient(self.app, use_cookies=True)
self.client.get('/login/')
data = {
'email': self.email,
'password': self.password,
'remember': False,
'csrf_token': generate_csrf(),
}
self.client.post('/login/', data=data, follow_redirects=True)
def remove_files(self):
"""Open snapshot file."""
for f in self.onlyfiles:
remove_file(Path(__file__).parent.joinpath('snapshot_files').joinpath(f))
def open_snapshot(self):
"""Open snapshot file."""
with Path(__file__).parent.joinpath('snapshot_files').joinpath(
self.file_snapshot,
).open() as file_snapshot:
self.json_wb = json.loads(file_snapshot.read())
b_snapshot = bytes(json.dumps(self.json_wb), 'utf-8')
self.file_snap = (BytesIO(b_snapshot), self.file_snapshot)
def build_snapshot(self):
"""Build the devices of snapshot."""
uri = '/inventory/upload-snapshot/'
if not self.json_wb:
return
self.client.get(uri)
data = {
'snapshot': self.file_snap,
'csrf_token': generate_csrf(),
}
self.client.post(uri, data=data, content_type="multipart/form-data")
def get_files(self):
"""Read snaoshot_files dir."""
mypath = Path(__file__).parent.joinpath('snapshot_files')
for f in listdir(mypath):
if not isfile(join(mypath, f)):
continue
if not f[-5:] == ".json":
continue
self.onlyfiles.append(f)


@ -1,20 +0,0 @@
import click
from ereuse_devicehub import auth
from ereuse_devicehub.resources.user.models import User
class GetToken:
def __init__(self, app) -> None:
super().__init__()
self.app = app
self.app.cli.command('get_token', short_help='show the user token.')(
self.run
)
@click.argument('email')
def run(self, email):
user = User.query.filter_by(email=email, active=True, phantom=False).one_or_none()
if user:
print(auth.Auth.encode(user.token))
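An illustrative invocation under the same assumptions as the adduser sketch above; the printed value is what the /api/inventory/ example earlier places after 'Basic' in the Authorization header:

runner = app.test_cli_runner()
result = runner.invoke(args=['get_token', 'user@example.com'])
print(result.output)  # the encoded token, if the user exists and is active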


@ -1,79 +1,57 @@
from distutils.version import StrictVersion
from itertools import chain
from typing import Set
from decouple import config
from ereuse_devicehub.resources import (
action,
agent,
deliverynote,
inventory,
lot,
tag,
user,
)
from teal.auth import TokenAuth
from teal.config import Config
from teal.enums import Currency
from teal.utils import import_resource
from ereuse_devicehub.resources import action, agent, deliverynote, inventory, \
lot, tag, user
from ereuse_devicehub.resources.device import definitions
from ereuse_devicehub.resources.did import did
from ereuse_devicehub.resources.documents import documents
from ereuse_devicehub.resources.tradedocument import definitions as tradedocument
from ereuse_devicehub.resources.enums import PriceSoftware
from ereuse_devicehub.resources.versions import versions
from ereuse_devicehub.resources.licences import licences
from ereuse_devicehub.resources.metric import definitions as metric_def
from ereuse_devicehub.resources.tradedocument import definitions as tradedocument
from ereuse_devicehub.resources.versions import versions
from ereuse_devicehub.teal.auth import TokenAuth
from ereuse_devicehub.teal.config import Config
from ereuse_devicehub.teal.enums import Currency
from ereuse_devicehub.teal.utils import import_resource
class DevicehubConfig(Config):
RESOURCE_DEFINITIONS = set(
chain(
import_resource(definitions),
import_resource(action),
import_resource(user),
import_resource(tag),
import_resource(did),
import_resource(agent),
import_resource(lot),
import_resource(deliverynote),
import_resource(documents),
import_resource(tradedocument),
import_resource(inventory),
import_resource(versions),
import_resource(licences),
import_resource(metric_def),
),
)
PASSWORD_SCHEMES = {'pbkdf2_sha256'}
SECRET_KEY = config('SECRET_KEY')
RESOURCE_DEFINITIONS = set(chain(import_resource(definitions),
import_resource(action),
import_resource(user),
import_resource(tag),
import_resource(agent),
import_resource(lot),
import_resource(deliverynote),
import_resource(documents),
import_resource(tradedocument),
import_resource(inventory),
import_resource(versions),
import_resource(licences),
import_resource(metric_def),
),)
PASSWORD_SCHEMES = {'pbkdf2_sha256'} # type: Set[str]
DB_USER = config('DB_USER', 'dhub')
DB_PASSWORD = config('DB_PASSWORD', 'ereuse')
DB_HOST = config('DB_HOST', 'localhost')
DB_DATABASE = config('DB_DATABASE', 'devicehub')
DB_SCHEMA = config('DB_SCHEMA', 'dbtest')
SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{pw}@{host}/{db}'.format(
user=DB_USER,
pw=DB_PASSWORD,
host=DB_HOST,
db=DB_DATABASE,
) # type: str
SQLALCHEMY_POOL_SIZE = int(config("SQLALCHEMY_POOL_SIZE", 10))
SQLALCHEMY_MAX_OVERFLOW = int(config("SQLALCHEMY_MAX_OVERFLOW", 20))
SQLALCHEMY_TRACK_MODIFICATIONS = bool(config("SQLALCHEMY_TRACK_MODIFICATIONS", False))
SQLALCHEMY_POOL_TIMEOUT = int(config("SQLALCHEMY_POOL_TIMEOUT", 0))
SQLALCHEMY_POOL_RECYCLE = int(config("SQLALCHEMY_POOL_RECYCLE", 3600))
SCHEMA = config('SCHEMA', 'dbtest')
HOST = config('HOST', 'localhost')
API_HOST = config('API_HOST', 'localhost')
HOST = config('HOST', 'localhost')
MIN_WORKBENCH = StrictVersion('11.0a1') # type: StrictVersion
"""The minimum version of ereuse.org workbench that this devicehub
accepts. We recommend not changing this value.
"""
SCHEMA_WORKBENCH = ["1.0.0"]
TMP_SNAPSHOTS = config('TMP_SNAPSHOTS', '/tmp/snapshots')
TMP_LIVES = config('TMP_LIVES', '/tmp/lives')
@ -81,7 +59,11 @@ class DevicehubConfig(Config):
"""This var is for save a snapshots in json format when fail something"""
API_DOC_CONFIG_TITLE = 'Devicehub'
API_DOC_CONFIG_VERSION = '0.2'
API_DOC_CONFIG_COMPONENTS = {'securitySchemes': {'bearerAuth': TokenAuth.API_DOCS}}
API_DOC_CONFIG_COMPONENTS = {
'securitySchemes': {
'bearerAuth': TokenAuth.API_DOCS
}
}
API_DOC_CLASS_DISCRIMINATOR = 'type'
PRICE_SOFTWARE = PriceSoftware.Ereuse
@ -91,34 +73,7 @@ class DevicehubConfig(Config):
"""Admin email"""
EMAIL_ADMIN = config('EMAIL_ADMIN', '')
EMAIL_DEMO = config('EMAIL_DEMO', 'hello@usody.com')
"""Definition of path where save the documents of customers"""
PATH_DOCUMENTS_STORAGE = config('PATH_DOCUMENTS_STORAGE', '/tmp/')
JWT_PASS = config('JWT_PASS', '')
MAIL_SERVER = config('MAIL_SERVER', '')
MAIL_USERNAME = config('MAIL_USERNAME', '')
MAIL_PASSWORD = config('MAIL_PASSWORD', '')
MAIL_PORT = config('MAIL_PORT', 587)
MAIL_USE_TLS = config('MAIL_USE_TLS', True)
MAIL_DEFAULT_SENDER = config('MAIL_DEFAULT_SENDER', '')
API_DLT = config('API_DLT', None)
API_DLT_TOKEN = config('API_DLT_TOKEN', None)
ID_FEDERATED = config('ID_FEDERATED', None)
URL_MANUALS = config('URL_MANUALS', None)
ABAC_TOKEN = config('ABAC_TOKEN', None)
ABAC_COOKIE = config('ABAC_COOKIE', None)
ABAC_URL = config('ABAC_URL', None)
VERIFY_URL = config('VERIFY_URL', None)
"""Definition of oauth jwt details."""
OAUTH2_JWT_ENABLED = config('OAUTH2_JWT_ENABLED', False)
OAUTH2_JWT_ISS = config('OAUTH2_JWT_ISS', '')
OAUTH2_JWT_KEY = config('OAUTH2_JWT_KEY', None)
OAUTH2_JWT_ALG = config('OAUTH2_JWT_ALG', 'HS256')
if API_DLT:
API_DLT = API_DLT.strip("/")
WALLET_INX_EBSI_PLUGIN_TOKEN = config('WALLET_INX_EBSI_PLUGIN_TOKEN', None)
WALLET_INX_EBSI_PLUGIN_URL = config('WALLET_INX_EBSI_PLUGIN_URL', None)
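All of these settings are resolved with python-decouple, so a deployment can override any of them through environment variables or a .env file; a minimal sketch of the mechanism (the values are illustrative):

from decouple import config

DB_HOST = config('DB_HOST', 'localhost')  # taken from the environment, else the default
SQLALCHEMY_POOL_SIZE = int(config('SQLALCHEMY_POOL_SIZE', 10))  # cast exactly as the class above does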


@ -4,8 +4,7 @@ from sqlalchemy.dialects import postgresql
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql import expression
from sqlalchemy_utils import view
from ereuse_devicehub.teal.db import SchemaSession, SchemaSQLAlchemy
from teal.db import SchemaSQLAlchemy, SchemaSession
class DhSession(SchemaSession):
@ -24,7 +23,6 @@ class DhSession(SchemaSession):
# flush, all the new / dirty interesting things in a variable
# until DeviceSearch is executed
from ereuse_devicehub.resources.device.search import DeviceSearch
DeviceSearch.update_modified_devices(session=self)
@ -33,7 +31,6 @@ class SQLAlchemy(SchemaSQLAlchemy):
schema of the database, as it is in the `search_path`
defined in teal.
"""
# todo add here all types of columns used so we don't have to
# manually import them all the time
UUID = postgresql.UUID
@ -63,15 +60,11 @@ def create_view(name, selectable):
# We need to ensure views are created / destroyed before / after
# SchemaSQLAlchemy's listeners execute
# That is why insert=True in 'after_create'
event.listen(
db.metadata, 'after_create', view.CreateView(name, selectable), insert=True
)
event.listen(db.metadata, 'after_create', view.CreateView(name, selectable), insert=True)
event.listen(db.metadata, 'before_drop', view.DropView(name))
return table
db = SQLAlchemy(
session_options={'autoflush': False},
)
db = SQLAlchemy(session_options={'autoflush': False})
f = db.func
exp = expression


@ -5,131 +5,54 @@ from typing import Type
import boltons.urlutils
import click
import click_spinner
from flask import _app_ctx_stack, g
from flask_login import LoginManager, current_user
import ereuse_utils.cli
from ereuse_utils.session import DevicehubClient
from flask.globals import _app_ctx_stack, g
from flask_sqlalchemy import SQLAlchemy
from teal.db import SchemaSQLAlchemy
from teal.teal import Teal
import ereuse_devicehub.ereuse_utils.cli
from ereuse_devicehub.auth import Auth
from ereuse_devicehub.client import Client, UserClient
from ereuse_devicehub.commands.adduser import AddUser
from ereuse_devicehub.commands.check_install import CheckInstall
from ereuse_devicehub.commands.initdatas import InitDatas
from ereuse_devicehub.commands.snapshots import UploadSnapshots
# from ereuse_devicehub.commands.reports import Report
from ereuse_devicehub.commands.users import GetToken
from ereuse_devicehub.config import DevicehubConfig
from ereuse_devicehub.db import db
from ereuse_devicehub.dummy.dummy import Dummy
from ereuse_devicehub.ereuse_utils.session import DevicehubClient
from ereuse_devicehub.resources.device.search import DeviceSearch
from ereuse_devicehub.resources.inventory import Inventory, InventoryDef
from ereuse_devicehub.resources.user.models import User
from ereuse_devicehub.teal.db import ResourceNotFound, SchemaSQLAlchemy
from ereuse_devicehub.teal.teal import Teal
from ereuse_devicehub.templating import Environment
try:
from ereuse_devicehub.modules.oidc.commands.sync_dlt import GetMembers
except Exception:
GetMembers = None
try:
from ereuse_devicehub.modules.dpp.commands.register_user_dlt import RegisterUserDlt
except Exception:
RegisterUserDlt = None
try:
from ereuse_devicehub.modules.oidc.commands.add_member import AddMember
except Exception:
AddMember = None
try:
from ereuse_devicehub.modules.oidc.commands.client_member import AddClientOidc
except Exception:
AddClientOidc = None
try:
from ereuse_devicehub.modules.oidc.commands.insert_member_in_dlt import InsertMember
except Exception:
InsertMember = None
try:
from ereuse_devicehub.modules.oidc.commands.add_contract_oidc import AddContractOidc
except Exception:
AddContractOidc = None
class Devicehub(Teal):
test_client_class = Client
Dummy = Dummy
# Report = Report
jinja_environment = Environment
def __init__(
self,
inventory: str,
config: DevicehubConfig = DevicehubConfig(),
db: SQLAlchemy = db,
import_name=__name__.split('.')[0],
static_url_path=None,
static_folder='static',
static_host=None,
host_matching=False,
subdomain_matching=False,
template_folder='templates',
instance_path=None,
instance_relative_config=False,
root_path=None,
Auth: Type[Auth] = Auth,
):
def __init__(self,
inventory: str,
config: DevicehubConfig = DevicehubConfig(),
db: SQLAlchemy = db,
import_name=__name__.split('.')[0],
static_url_path=None,
static_folder='static',
static_host=None,
host_matching=False,
subdomain_matching=False,
template_folder='templates',
instance_path=None,
instance_relative_config=False,
root_path=None,
Auth: Type[Auth] = Auth):
assert inventory
super().__init__(
config,
db,
inventory,
import_name,
static_url_path,
static_folder,
static_host,
host_matching,
subdomain_matching,
template_folder,
instance_path,
instance_relative_config,
root_path,
False,
Auth,
)
super().__init__(config, db, inventory, import_name, static_url_path, static_folder,
static_host,
host_matching, subdomain_matching, template_folder, instance_path,
instance_relative_config, root_path, False, Auth)
self.id = inventory
"""The Inventory ID of this instance. In Teal is the app.schema."""
self.dummy = Dummy(self)
# self.report = Report(self)
self.get_token = GetToken(self)
self.initdata = InitDatas(self)
self.adduser = AddUser(self)
self.uploadsnapshots = UploadSnapshots(self)
self.checkinstall = CheckInstall(self)
if GetMembers:
self.get_members = GetMembers(self)
if RegisterUserDlt:
self.dlt_register_user = RegisterUserDlt(self)
if AddMember:
self.dlt_insert_members = AddMember(self)
if AddClientOidc:
self.add_client_oidc = AddClientOidc(self)
if InsertMember:
self.dlt_insert_members = InsertMember(self)
if AddContractOidc:
self.add_contract_oidc = AddContractOidc(self)
@self.cli.group(
short_help='Inventory management.',
help='Manages the inventory {}.'.format(os.environ.get('dhi')),
)
@self.cli.group(short_help='Inventory management.',
help='Manages the inventory {}.'.format(os.environ.get('dhi')))
def inv():
pass
@ -138,74 +61,38 @@ class Devicehub(Teal):
inv.command('search')(self.regenerate_search)
self.before_request(self._prepare_request)
self.configure_extensions()
def configure_extensions(self):
# configure Flask-Login
login_manager = LoginManager()
login_manager.init_app(self)
login_manager.login_view = "core.login"
@login_manager.user_loader
def load_user(user_id):
# TODO(@slamora) refactor when the teal library has been dropped.
# `load_user` expects None if the user ID is invalid or the
# session has expired so we need to handle Exception raised
# by teal (it's overriding default behaviour of flask-sqlalchemy
# which already returns None)
try:
return User.query.get(user_id)
except ResourceNotFound:
return None
# noinspection PyMethodOverriding
@click.option(
'--name', '-n', default='Test 1', help='The human name of the inventory.'
)
@click.option(
'--org-name',
'-on',
default='My Organization',
help='The name of the default organization that owns this inventory.',
)
@click.option(
'--org-id', '-oi', default='foo-bar', help='The Tax ID of the organization.'
)
@click.option(
'--tag-url',
'-tu',
type=ereuse_devicehub.ereuse_utils.cli.URL(scheme=True, host=True, path=False),
default='http://example.com',
help='The base url (scheme and host) of the tag provider.',
)
@click.option(
'--tag-token',
'-tt',
type=click.UUID,
default='899c794e-1737-4cea-9232-fdc507ab7106',
help='The token provided by the tag provider. It is an UUID.',
)
@click.option(
'--erase/--no-erase',
default=False,
help='Delete the schema before? '
'If --common is set this includes the common database.',
)
@click.option(
'--common/--no-common',
default=False,
help='Creates common databases. Only execute if the database is empty.',
)
def init_db(
self,
name: str,
org_name: str,
org_id: str,
tag_url: boltons.urlutils.URL,
tag_token: uuid.UUID,
erase: bool,
common: bool,
):
@click.option('--name', '-n',
default='Test 1',
help='The human name of the inventory.')
@click.option('--org-name', '-on',
default='My Organization',
help='The name of the default organization that owns this inventory.')
@click.option('--org-id', '-oi',
default='foo-bar',
help='The Tax ID of the organization.')
@click.option('--tag-url', '-tu',
type=ereuse_utils.cli.URL(scheme=True, host=True, path=False),
default='http://example.com',
help='The base url (scheme and host) of the tag provider.')
@click.option('--tag-token', '-tt',
type=click.UUID,
default='899c794e-1737-4cea-9232-fdc507ab7106',
help='The token provided by the tag provider. It is an UUID.')
@click.option('--erase/--no-erase',
default=False,
help='Delete the schema before? '
'If --common is set this includes the common database.')
@click.option('--common/--no-common',
default=False,
help='Creates common databases. Only execute if the database is empty.')
def init_db(self, name: str,
org_name: str,
org_id: str,
tag_url: boltons.urlutils.URL,
tag_token: uuid.UUID,
erase: bool,
common: bool):
"""Creates an inventory.
This creates the database and adds the inventory to the
@ -220,14 +107,10 @@ class Devicehub(Teal):
with click_spinner.spinner():
if erase:
self.db.drop_all(common_schema=common)
assert not db.has_schema(self.id), 'Schema {} already exists.'.format(
self.id
)
assert not db.has_schema(self.id), 'Schema {} already exists.'.format(self.id)
exclude_schema = 'common' if not common else None
self._init_db(exclude_schema=exclude_schema)
InventoryDef.set_inventory_config(
name, org_name, org_id, tag_url, tag_token
)
InventoryDef.set_inventory_config(name, org_name, org_id, tag_url, tag_token)
DeviceSearch.set_all_devices_tokens_if_empty(self.db.session)
self._init_resources(exclude_schema=exclude_schema)
self.db.session.commit()
@ -242,11 +125,8 @@ class Devicehub(Teal):
return True
@click.confirmation_option(
prompt='Are you sure you want to delete the inventory {}?'.format(
os.environ.get('dhi')
)
)
@click.confirmation_option(prompt='Are you sure you want to delete the inventory {}?'
.format(os.environ.get('dhi')))
def delete_inventory(self):
"""Erases an inventory.
@ -268,12 +148,8 @@ class Devicehub(Teal):
def _prepare_request(self):
"""Prepares request stuff."""
inv = g.inventory = Inventory.current # type: Inventory
g.tag_provider = DevicehubClient(
base_url=inv.tag_provider, token=DevicehubClient.encode_token(inv.tag_token)
)
# NOTE: models init methods expects that current user is
# available on g.user (e.g. to initialize object owner)
g.user = current_user
g.tag_provider = DevicehubClient(base_url=inv.tag_provider,
token=DevicehubClient.encode_token(inv.tag_token))
def create_client(self, email='user@dhub.com', password='1234'):
client = UserClient(self, email, password, response_wrapper=self.response_class)


@ -1,37 +1,40 @@
import itertools
import json
import uuid
import jwt
from pathlib import Path
from typing import Set
import click
import click_spinner
import jwt
import ereuse_utils.cli
import yaml
from ereuse_devicehub.ereuse_utils.test import ANY
from ereuse_devicehub import ereuse_utils
from ereuse_utils.test import ANY
from ereuse_devicehub.client import UserClient
from ereuse_devicehub.db import db
from ereuse_devicehub.parser.models import SnapshotsLog
from ereuse_devicehub.resources.action import models as m
from ereuse_devicehub.resources.agent.models import Person
from ereuse_devicehub.resources.device.models import Device
from ereuse_devicehub.resources.enums import SessionType
from ereuse_devicehub.resources.lot.models import Lot
from ereuse_devicehub.resources.tag.model import Tag
from ereuse_devicehub.resources.user import User
from ereuse_devicehub.resources.user.models import Session
from ereuse_devicehub.resources.enums import SessionType
class Dummy:
TAGS = ('tag1', 'tag2', 'tag3')
TAGS = (
'tag1',
'tag2',
'tag3'
)
"""Tags to create."""
ET = (
('DT-AAAAA', 'A0000000000001'),
('DT-BBBBB', 'A0000000000002'),
('DT-CCCCC', 'A0000000000003'),
('DT-BRRAB', '04970DA2A15984'),
('DT-XXXXX', '04e4bc5af95980'),
('DT-XXXXX', '04e4bc5af95980')
)
"""eTags to create."""
ORG = 'eReuse.org CAT', '-t', 'G-60437761', '-c', 'ES'
@ -40,35 +43,28 @@ class Dummy:
def __init__(self, app) -> None:
super().__init__()
self.app = app
self.app.cli.command('dummy', short_help='Creates dummy devices and users.')(
self.run
)
self.app.cli.command('dummy', short_help='Creates dummy devices and users.')(self.run)
@click.option(
'--tag-url',
'-tu',
type=ereuse_utils.cli.URL(scheme=True, host=True, path=False),
default='http://localhost:8081',
help='The base url (scheme and host) of the tag provider.',
)
@click.option(
'--tag-token',
'-tt',
type=click.UUID,
default='899c794e-1737-4cea-9232-fdc507ab7106',
help='The token provided by the tag provider. It is an UUID.',
)
@click.confirmation_option(
prompt='This command (re)creates the DB from scratch.'
'Do you want to continue?'
)
@click.option('--tag-url', '-tu',
type=ereuse_utils.cli.URL(scheme=True, host=True, path=False),
default='http://localhost:8081',
help='The base url (scheme and host) of the tag provider.')
@click.option('--tag-token', '-tt',
type=click.UUID,
default='899c794e-1737-4cea-9232-fdc507ab7106',
help='The token provided by the tag provider. It is an UUID.')
@click.confirmation_option(prompt='This command (re)creates the DB from scratch.'
'Do you want to continue?')
def run(self, tag_url, tag_token):
runner = self.app.test_cli_runner()
self.app.init_db(
'Dummy', 'ACME', 'acme-id', tag_url, tag_token, erase=True, common=True
)
self.app.init_db('Dummy',
'ACME',
'acme-id',
tag_url,
tag_token,
erase=True,
common=True)
print('Creating stuff...'.ljust(30), end='')
assert SnapshotsLog.query.filter().all() == []
with click_spinner.spinner():
out = runner.invoke('org', 'add', *self.ORG).output
org_id = json.loads(out)['id']
@ -81,84 +77,55 @@ class Dummy:
for id in self.TAGS:
user1.post({'id': id}, res=Tag)
for id, sec in self.ET:
runner.invoke(
'tag',
'add',
id,
'-p',
'https://t.devicetag.io',
'-s',
sec,
'-u',
user1.user["id"],
'-o',
org_id,
)
runner.invoke('tag', 'add', id,
'-p', 'https://t.devicetag.io',
'-s', sec,
'-u', user1.user["id"],
'-o', org_id)
# create tag for pc-laudem
runner.invoke(
'tag',
'add',
'tagA',
'-p',
'https://t.devicetag.io',
'-u',
user1.user["id"],
'-s',
'tagA-secondary',
)
runner.invoke('tag', 'add', 'tagA',
'-p', 'https://t.devicetag.io',
'-u', user1.user["id"],
'-s', 'tagA-secondary')
files = tuple(Path(__file__).parent.joinpath('files').iterdir())
print('done.')
sample_pc = None # We treat this one as a special sample for demonstrations
pcs = set()
pcs = set() # type: Set[int]
with click.progressbar(files, label='Creating devices...'.ljust(28)) as bar:
for path in bar:
with path.open() as f:
snapshot = yaml.load(f)
if snapshot['device']['type'] in ['Desktop', 'Laptop']:
snapshot['device']['system_uuid'] = uuid.uuid4()
s, _ = user1.post(res=m.Snapshot, data=self.json_encode(snapshot))
if s.get('uuid', None) == 'ec23c11b-80b6-42cd-ac5c-73ba7acddbc4':
sample_pc = s['device']['id']
sample_pc_devicehub_id = s['device']['devicehubID']
else:
pcs.add(s['device']['id'])
if (
s.get('uuid', None) == 'de4f495e-c58b-40e1-a33e-46ab5e84767e'
): # oreo
if s.get('uuid', None) == 'de4f495e-c58b-40e1-a33e-46ab5e84767e': # oreo
# Make one hdd ErasePhysical
hdd = next(
hdd for hdd in s['components'] if hdd['type'] == 'HardDrive'
)
user1.post(
{
'type': 'ErasePhysical',
'method': 'Shred',
'device': hdd['id'],
},
res=m.Action,
)
hdd = next(hdd for hdd in s['components'] if hdd['type'] == 'HardDrive')
user1.post({'type': 'ErasePhysical', 'method': 'Shred', 'device': hdd['id']},
res=m.Action)
assert sample_pc
print('PC sample is', sample_pc)
# Link tags and eTags
for tag, pc in zip(
(self.TAGS[1], self.TAGS[2], self.ET[0][0], self.ET[1][1]), pcs
):
for tag, pc in zip((self.TAGS[1], self.TAGS[2], self.ET[0][0], self.ET[1][1]), pcs):
user1.put({}, res=Tag, item='{}/device/{}'.format(tag, pc), status=204)
# Perform generic actions
for pc, model in zip(
pcs, {m.ToRepair, m.Repair, m.ToPrepare, m.Ready, m.ToPrepare, m.Prepare}
):
for pc, model in zip(pcs,
{m.ToRepair, m.Repair, m.ToPrepare, m.Ready, m.ToPrepare,
m.Prepare}):
user1.post({'type': model.t, 'devices': [pc]}, res=m.Action)
# Perform a Sell to several devices
# user1.post(
# {
# 'type': m.Sell.t,
# 'to': user1.user['individuals'][0]['id'],
# 'devices': list(itertools.islice(pcs, len(pcs) // 2))
# },
# res=m.Action)
# {
# 'type': m.Sell.t,
# 'to': user1.user['individuals'][0]['id'],
# 'devices': list(itertools.islice(pcs, len(pcs) // 2))
# },
# res=m.Action)
lot_user, _ = user1.post({'name': 'LoteStephan'}, res=Lot)
@ -168,43 +135,35 @@ class Dummy:
lot_user4, _ = user4.post({'name': 'LoteJordi'}, res=Lot)
lot, _ = user1.post(
{},
res=Lot,
item='{}/devices'.format(lot_user['id']),
query=[('id', pc) for pc in itertools.islice(pcs, 1, 4)],
)
lot, _ = user1.post({},
res=Lot,
item='{}/devices'.format(lot_user['id']),
query=[('id', pc) for pc in itertools.islice(pcs, 1, 4)])
# assert len(lot['devices'])
lot2, _ = user2.post(
{},
res=Lot,
item='{}/devices'.format(lot_user2['id']),
query=[('id', pc) for pc in itertools.islice(pcs, 4, 6)],
)
lot2, _ = user2.post({},
res=Lot,
item='{}/devices'.format(lot_user2['id']),
query=[('id', pc) for pc in itertools.islice(pcs, 4, 6)])
lot3, _ = user3.post(
{},
res=Lot,
item='{}/devices'.format(lot_user3['id']),
query=[('id', pc) for pc in itertools.islice(pcs, 11, 14)],
)
lot3, _ = user3.post({},
res=Lot,
item='{}/devices'.format(lot_user3['id']),
query=[('id', pc) for pc in itertools.islice(pcs, 11, 14)])
lot4, _ = user4.post(
{},
res=Lot,
item='{}/devices'.format(lot_user4['id']),
query=[('id', pc) for pc in itertools.islice(pcs, 14, 16)],
)
lot4, _ = user4.post({},
res=Lot,
item='{}/devices'.format(lot_user4['id']),
query=[('id', pc) for pc in itertools.islice(pcs, 14, 16)])
# Keep this at the bottom
inventory, _ = user1.get(res=Device)
assert len(inventory['items'])
# i, _ = user1.get(res=Device, query=[('search', 'intel')])
# assert len(i['items']) in [14, 12]
# i, _ = user1.get(res=Device, query=[('search', 'pc')])
# assert len(i['items']) in [17, 14]
i, _ = user1.get(res=Device, query=[('search', 'intel')])
assert 12 == len(i['items'])
i, _ = user1.get(res=Device, query=[('search', 'pc')])
assert 14 == len(i['items'])
# Let's create a set of actions for the pc device
# Make device Ready
@ -212,25 +171,23 @@ class Dummy:
user1.post({'type': m.ToPrepare.t, 'devices': [sample_pc]}, res=m.Action)
user1.post({'type': m.Prepare.t, 'devices': [sample_pc]}, res=m.Action)
user1.post({'type': m.Ready.t, 'devices': [sample_pc]}, res=m.Action)
user1.post(
{'type': m.Price.t, 'device': sample_pc, 'currency': 'EUR', 'price': 85},
res=m.Action,
)
user1.post({'type': m.Price.t, 'device': sample_pc, 'currency': 'EUR', 'price': 85},
res=m.Action)
# todo test reserve
# user1.post( # Sell device
# {
# 'type': m.Sell.t,
# 'to': user1.user['individuals'][0]['id'],
# 'devices': [sample_pc]
# },
# res=m.Action)
# {
# 'type': m.Sell.t,
# 'to': user1.user['individuals'][0]['id'],
# 'devices': [sample_pc]
# },
# res=m.Action)
# todo Receive
user1.get(res=Device, item=sample_pc_devicehub_id) # Test
anonymous = self.app.test_client()
html, _ = anonymous.get(res=Device, item=sample_pc_devicehub_id, accept=ANY)
assert 'hewlett-packard' in html
assert 'intel core2 duo cpu' in html
# For netbook: to prepare -> to repair -> to dispose -> disposed
print('⭐ Done.')
@ -246,20 +203,19 @@ class Dummy:
db.session.add(session_external)
db.session.commit()
client = UserClient(
self.app, user.email, password, response_wrapper=self.app.response_class
)
client = UserClient(self.app, user.email, password,
response_wrapper=self.app.response_class)
client.login()
return client
def json_encode(self, dev: str) -> dict:
"""Encode json."""
data = {"type": "Snapshot"}
data['data'] = jwt.encode(
dev,
self.app.config['JWT_PASS'],
algorithm="HS256",
json_encoder=ereuse_utils.JSONEncoder,
data['data'] = jwt.encode(dev,
self.app.config['JWT_PASS'],
algorithm="HS256",
json_encoder=ereuse_utils.JSONEncoder
)
return data


@ -178,7 +178,6 @@
],
"type": "Laptop"
},
"debug": {"lshw": {"configuration": {"uuid": "79c5098f-bc44-4834-8a59-9ea61d956c31"}}},
"elapsed": 14725,
"endTime": "2018-11-24T18:06:37.611704+00:00",
"software": "Workbench",


@ -119,7 +119,6 @@
"manufacturer": "ASUSTeK Computer INC."
}
],
"debug": {"lshw": {"configuration": {"uuid": "645f00bf-1ec0-4fdb-9608-b5ac73e285f6"}}},
"version": "11.0a4",
"elapsed": 6,
"endTime": "2016-11-03T17:17:17.266543+00:00"


@ -148,7 +148,6 @@
"model": "0UG982"
}
],
"debug": {"lshw": {"configuration": {"uuid": "5dcdd380-5a54-48bc-99bf-aff6019e8491"}}},
"version": "11.0a3",
"closed": false,
"elapsed": 1512,


@ -132,6 +132,5 @@
"model": "HP Compaq 8100 Elite SFF",
"manufacturer": "Hewlett-Packard"
},
"debug": {"lshw": {"configuration": {"uuid": "f6cfe48a-93d5-4e94-ab7b-3ee371e4d048"}}},
"version": "11.0a3"
}


@ -170,6 +170,5 @@
},
"software": "Workbench",
"endTime": "2018-07-11T10:30:22.395958+00:00",
"debug": {"lshw": {"configuration": {"uuid": "75dcb454-ae80-4a87-a192-185d3b0250c0"}}},
"elapsed": 2766
}


@ -146,7 +146,6 @@
"pcmcia": 0
}
],
"debug": {"lshw": {"configuration": {"uuid": "fcaf784e-5e57-43a2-b03f-8c56dabd0415"}}},
"uuid": "a01eacdb-db01-43ec-b6fb-a9b8cd21492d",
"type": "Snapshot",
"version": "11.0a4",


@ -4,7 +4,6 @@
"closed": false,
"endTime": "2018-07-11T13:26:29.365504+00:00",
"type": "Snapshot",
"debug": {"lshw": {"configuration": {"uuid": "4f256440-e43f-429a-a2c6-1e8f3365de56"}}},
"device": {
"serialNumber": "PB357N0",
"actions": [


@ -148,7 +148,6 @@
"slots": 4
}
],
"debug": {"lshw": {"configuration": {"uuid": "077cad5d-ae1b-4156-a9a1-98bca6fa5c35"}}},
"version": "11.0a3",
"endTime": "2018-07-11T10:28:55.879745+00:00",
"type": "Snapshot",


@ -136,8 +136,8 @@
],
"elapsed": 203,
"device": {
"manufacturer": "Asus",
"model": "P7P55D",
"manufacturer": null,
"model": null,
"chassis": "Tower",
"type": "Desktop",
"serialNumber": null,
@ -158,7 +158,7 @@
]
},
"version": "11.0a6",
"debug": {"lshw": {"configuration": {"uuid": "59ca9a2a-65bd-4802-89bb-315156a9352b"}}},
"type": "Snapshot",
"closed": true,
"software": "Workbench"


@ -142,7 +142,7 @@
},
"elapsed": 238,
"endTime": "2018-10-15T13:59:37.431309+00:00",
"debug": {"lshw": {"configuration": {"uuid": "43686b8e-e1ae-4e4e-bc51-f98f51e97c2d"}}},
"software": "Workbench",
"type": "Snapshot",
"uuid": "ec23c11b-80b6-42cd-ac5c-73ba7acddbc4",


@ -158,6 +158,5 @@
}
]
},
"debug": {"lshw": {"configuration": {"uuid": "a0cef731-9a78-4087-889c-dfb6ba5c2e9b"}}},
"closed": false
}


@ -114,7 +114,6 @@
}
],
"version": "11.0a3",
"debug": {"lshw": {"configuration": {"uuid": "f2c50acd-501a-4f0b-b07c-58254b2ab8c9"}}},
"device": {
"type": "Desktop",
"model": "HP Compaq 8000 Elite SFF",


@ -1,7 +1,6 @@
{
"closed": false,
"uuid": "f9e5e587-baee-44e1-9a94-255d216bbda9",
"debug": {"lshw": {"configuration": {"uuid": "4d21dd26-aa45-4902-a5f2-8a06e364cf25"}}},
"components": [
{
"actions": [],


@ -131,7 +131,6 @@
"model": "NB200"
},
"uuid": "918726ae-c6bc-40aa-97cf-ad80d69268f9",
"debug": {"lshw": {"configuration": {"uuid": "33627ef0-89a9-4659-bb29-faa936727e0b"}}},
"closed": false,
"type": "Snapshot"
}


@ -1,173 +0,0 @@
import enum
import ipaddress
import json
import locale
from collections import Iterable
from datetime import datetime, timedelta
from decimal import Decimal
from distutils.version import StrictVersion
from functools import wraps
from typing import Generator, Union
from uuid import UUID
class JSONEncoder(json.JSONEncoder):
"""An overloaded JSON Encoder with extra type support."""
def default(self, obj):
if isinstance(obj, enum.Enum):
return obj.name
elif isinstance(obj, datetime):
return obj.isoformat()
elif isinstance(obj, timedelta):
return round(obj.total_seconds())
elif isinstance(obj, UUID):
return str(obj)
elif isinstance(obj, StrictVersion):
return str(obj)
elif isinstance(obj, set):
return list(obj)
elif isinstance(obj, Decimal):
return float(obj)
elif isinstance(obj, Dumpeable):
return obj.dump()
elif isinstance(obj, ipaddress._BaseAddress):
return str(obj)
# Instead of failing, return the string representation by default
return str(obj)
class Dumpeable:
"""Dumps dictionaries and jsons for Devicehub.
A base class to allow subclasses to generate dictionaries
and json suitable for sending to a Devicehub, i.e. preventing
private and constants to be in the JSON and camelCases field names.
"""
ENCODER = JSONEncoder
def dump(self):
"""
Creates a dictionary consisting of the
non-private fields of this instance with camelCase field names.
"""
import inflection
return {
inflection.camelize(name, uppercase_first_letter=False): getattr(self, name)
for name in self._field_names()
if not name.startswith('_') and not name[0].isupper()
}
def _field_names(self):
"""An iterable of the names to dump."""
# Feel free to override this
return vars(self).keys()
def to_json(self):
"""
Creates a JSON representation of the non-private fields of
this class.
"""
return json.dumps(self, cls=self.ENCODER, indent=2)
class DumpeableModel(Dumpeable):
"""A dumpeable for SQLAlchemy models.
Note that this does not avoid recursive relations.
"""
def _field_names(self):
from sqlalchemy import inspect
return (a.key for a in inspect(self).attrs)
def ensure_utf8(app_name_to_show_on_error: str):
"""
Python 3 uses the system's locale encoding by default, but it
needs to be UTF-8 to work correctly.
Anything else can cause problems when reading and writing files
and in the ``.decode()`` method.
An example of how to 'fix' it::
echo 'export LC_CTYPE=en_US.UTF-8' >> .bash_profile
echo 'export LC_ALL=en_US.UTF-8' >> .bash_profile
"""
encoding = locale.getpreferredencoding()
if encoding.lower() != 'utf-8':
raise OSError(
'{} works only in UTF-8, but yours is set at {}'
''.format(app_name_to_show_on_error, encoding)
)
def now() -> datetime:
"""
Returns a 'now' compatible with DeviceHub's API,
i.e. in UTC and without microseconds.
"""
return datetime.utcnow().replace(microsecond=0)
def flatten_mixed(values: Iterable) -> Generator:
"""
Flatten a list containing lists and other elements. This is not deep.
>>> list(flatten_mixed([1, 2, [3, 4]]))
[1, 2, 3, 4]
"""
for x in values:
if isinstance(x, list):
for y in x:
yield y
else:
yield x
def if_none_return_none(f):
"""If the first value is None return None, otherwise execute f."""
@wraps(f)
def wrapper(self, value, *args, **kwargs):
if value is None:
return None
return f(self, value, *args, **kwargs)
return wrapper
def local_ip(
dest='109.69.8.152',
) -> Union[ipaddress.IPv4Address, ipaddress.IPv6Address]:
"""Gets the local IP of the interface that has access to the
Internet.
This is a reliable way to test if a device has an active
connection to the Internet.
This method works by connecting, by default,
to the IP of ereuse01.ereuse.org.
>>> local_ip()
:raise OSError: The device cannot connect to the Internet.
"""
import socket, ipaddress
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect((dest, 80))
ip = s.getsockname()[0]
s.close()
return ipaddress.ip_address(ip)
def version(package_name: str) -> StrictVersion:
"""Returns the version of a package name installed with pip."""
# From https://stackoverflow.com/a/2073599
import pkg_resources
return StrictVersion(pkg_resources.require(package_name)[0].version)
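A small usage sketch of the Dumpeable base class above (the ExampleThing class and its fields are illustrative):

class ExampleThing(Dumpeable):
    def __init__(self):
        self.serial_number = 'ABC123'  # dumped as "serialNumber"
        self._secret = 'hidden'        # private: skipped by dump()

thing = ExampleThing()
print(thing.dump())     # {'serialNumber': 'ABC123'}
print(thing.to_json())  # the same data as a JSON string, via JSONEncoder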


@ -1,301 +0,0 @@
import enum as _enum
import getpass
import itertools
import os
import pathlib
import threading
from contextlib import contextmanager
from time import sleep
from typing import Any, Iterable, Type
from boltons import urlutils
from click import types as click_types
from colorama import Fore
from tqdm import tqdm
from ereuse_devicehub.ereuse_utils import if_none_return_none
COMMON_CONTEXT_S = {'help_option_names': ('-h', '--help')}
"""Common Context settings used for our implementations of the
Click cli.
"""
# Py2/3 compat. Empty conditional to avoid coverage
try:
_unicode = unicode
except NameError:
_unicode = str
class Enum(click_types.Choice):
"""
Enum support for click.
Use it as a collection: @click.option(..., type=cli.Enum(MyEnum)).
Then, this expects you to pass the *name* of a member of the enum.
From `this github issue <https://github.com/pallets/click/issues/
605#issuecomment-277539425>`_.
"""
def __init__(self, enum: Type[_enum.Enum]):
self.__enum = enum
super().__init__(enum.__members__)
def convert(self, value, param, ctx):
return self.__enum[super().convert(value, param, ctx)]
class Path(click_types.Path):
"""Like click.Path but returning ``pathlib.Path`` objects."""
def convert(self, value, param, ctx):
return pathlib.Path(super().convert(value, param, ctx))
class URL(click_types.StringParamType):
"""Returns a bolton's URL."""
name = 'url'
def __init__(
self,
scheme=None,
username=None,
password=None,
host=None,
port=None,
path=None,
query_params=None,
fragment=None,
) -> None:
super().__init__()
"""Creates the type URL. You can require or enforce parts
of the URL by setting parameters of this constructor.
If the param is...
- None, no check is performed (default).
- True, it is then required as part of the URL.
- False, it is then required NOT to be part of the URL.
- Any other value, then such value is required to be in
the URL.
"""
self.attrs = (
('scheme', scheme),
('username', username),
('password', password),
('host', host),
('port', port),
('path', path),
('query_params', query_params),
('fragment', fragment),
)
@if_none_return_none
def convert(self, value, param, ctx):
url = urlutils.URL(super().convert(value, param, ctx))
for name, attr in self.attrs:
if attr is True:
if not getattr(url, name):
self.fail(
'URL {} must contain {} but it does not.'.format(url, name)
)
elif attr is False:
if getattr(url, name):
self.fail('URL {} cannot contain {} but it does.'.format(url, name))
elif attr:
if getattr(url, name) != attr:
self.fail('{} form {} can only be {}'.format(name, url, attr))
return url
def password(service: str, username: str, prompt: str = 'Password:') -> str:
"""Gets a password from the keyring or the terminal."""
import keyring
return keyring.get_password(service, username) or getpass.getpass(prompt)
class Line(tqdm):
spinner_cycle = itertools.cycle(['-', '/', '|', '\\'])
def __init__(
self,
total=None,
desc=None,
leave=True,
file=None,
ncols=None,
mininterval=0.2,
maxinterval=10.0,
miniters=None,
ascii=None,
disable=False,
unit='it',
unit_scale=False,
dynamic_ncols=True,
smoothing=0.3,
bar_format=None,
initial=0,
position=None,
postfix=None,
unit_divisor=1000,
write_bytes=None,
gui=False,
close_message: Iterable = None,
error_message: Iterable = None,
**kwargs,
):
"""This cannot work with iterables. Iterable use is considered
backward-compatibility in tqdm and inconsistent in Line.
Manually call ``update``.
"""
self._close_message = close_message
self._error_message = error_message
if total:
bar_format = '{desc}{percentage:.1f}% |{bar}| {n:1g}/{total:1g} {elapsed}<{remaining}'
super().__init__(
None,
desc,
total,
leave,
file,
ncols,
mininterval,
maxinterval,
miniters,
ascii,
disable,
unit,
unit_scale,
dynamic_ncols,
smoothing,
bar_format,
initial,
position,
postfix,
unit_divisor,
write_bytes,
gui,
**kwargs,
)
def write_at_line(self, *args):
self.clear()
with self._lock:
self.display(''.join(str(arg) for arg in args))
def close_message(self, *args):
self._close_message = args
def error_message(self, *args):
self._error_message = args
def close(self): # noqa: C901
"""
Cleanup and (if leave=False) close the progressbar.
"""
if self.disable:
return
# Prevent multiple closures
self.disable = True
# decrement instance pos and remove from internal set
pos = abs(self.pos)
self._decr_instances(self)
# GUI mode
if not hasattr(self, "sp"):
return
# annoyingly, _supports_unicode isn't good enough
def fp_write(s):
self.fp.write(_unicode(s))
try:
fp_write('')
except ValueError as e:
if 'closed' in str(e):
return
raise # pragma: no cover
with self._lock:
if self.leave:
if self._close_message:
self.display(
''.join(str(arg) for arg in self._close_message), pos=pos
)
elif self.last_print_n < self.n:
# stats for overall rate (no weighted average)
self.avg_time = None
self.display(pos=pos)
if not max(
[abs(getattr(i, "pos", 0)) for i in self._instances] + [pos]
):
# only if not nested (#477)
fp_write('\n')
else:
if self._close_message:
self.display(
''.join(str(arg) for arg in self._close_message), pos=pos
)
else:
self.display(msg='', pos=pos)
if not pos:
fp_write('\r')
@contextmanager
def spin(self, prefix: str):
self._stop_running = threading.Event()
spin_thread = threading.Thread(target=self._spin, args=[prefix])
spin_thread.start()
try:
yield
finally:
self._stop_running.set()
spin_thread.join()
def _spin(self, prefix: str):
while not self._stop_running.is_set():
self.write_at_line(prefix, next(self.spinner_cycle))
sleep(0.50)
@classmethod
@contextmanager
def reserve_lines(self, n):
try:
yield
finally:
self.move_down(n - 1)
@classmethod
def move_down(cls, n: int):
print('\n' * n)
def __exit__(self, *exc):
if exc[0]:
self._close_message = self._error_message
return super().__exit__(*exc)
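# A minimal usage sketch: a Line is driven manually with ``update`` and can
# show a spinner while a long step runs. ``prepare()`` and ``work_chunk()``
# are hypothetical placeholders for the caller's own functions:
#
#     with Line(total=100, desc=title('Erasing disk')) as line:
#         line.close_message(done())
#         line.error_message(danger('erase failed.'))
#         with line.spin(title('Preparing disk')):
#             prepare()
#         for _ in range(100):
#             work_chunk()
#             line.update(1)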
def clear():
os.system('clear')
def title(text: Any, ljust=32) -> str:
# Note that it is 38 px + 1 extra space = 39 min
return str(text).ljust(ljust) + ' '
def danger(text: Any) -> str:
return '{}{}{}'.format(Fore.RED, text, Fore.RESET)
def warning(text: Any) -> str:
return '{}{}{}'.format(Fore.YELLOW, text, Fore.RESET)
def done(text: Any = 'done.') -> str:
return '{}{}{}'.format(Fore.GREEN, text, Fore.RESET)

View File

@ -1,148 +0,0 @@
import subprocess
from contextlib import suppress
from typing import Any, Set
from ereuse_devicehub.ereuse_utils import text
def run(
*cmd: Any,
out=subprocess.PIPE,
err=subprocess.DEVNULL,
to_string=True,
check=True,
shell=False,
**kwargs,
) -> subprocess.CompletedProcess:
"""subprocess.run with a better API.
:param cmd: The command and its parameters, given as separate
            arguments. Each one is passed through ``str()``, so
            any object that can handle str() works.
:param out: As ``subprocess.run.stdout``.
:param err: As ``subprocess.run.stderr``.
:param to_string: As ``subprocess.run.universal_newlines``.
:param check: As ``subprocess.run.check``.
:param shell: If True, join the parameters into a single string and run it through the shell.
:param kwargs: Any other parameters that ``subprocess.run``
accepts.
:return: The result of executing ``subprocess.run``.
"""
cmds = tuple(str(c) for c in cmd)
return subprocess.run(
' '.join(cmds) if shell else cmds,
stdout=out,
stderr=err,
universal_newlines=to_string,
check=check,
shell=shell,
**kwargs,
)
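# A small usage sketch: every parameter is passed through ``str()``, so paths
# and numbers can be given directly, and stdout comes back as decoded text
# (the device path below is only illustrative):
#
#     res = run('ls', '-l', '/tmp')
#     print(res.stdout)
#
#     run('fsck', '-n', '/dev/sda1', check=False)  # do not raise on exit != 0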
class ProgressiveCmd:
"""Executes a cmd while interpreting its completion percentage.
The completion percentage of the cmd is stored in
:attr:`.percentage` and the user can obtain percentage
increments by executing :meth:`.increment`.
This class is useful to use within a child thread, so a main
thread can request from time to time the percentage / increment
status of the running command.
"""
READ_LINE = None
DECIMALS = {4, 5, 6}
DECIMAL_NUMBERS = 2
INT = {1, 2, 3}
def __init__(
self,
*cmd: Any,
stdout=subprocess.DEVNULL,
number_chars: Set[int] = INT,
decimal_numbers: int = None,
read: int = READ_LINE,
callback=None,
check=True,
):
"""
:param cmd: The command to execute.
:param stdout: the stdout passed-in to Popen. stderr is always
               opened with PIPE; the progress is read from stdout
               when stdout is PIPE, otherwise from stderr.
:param number_chars: The number of chars used to represent
the percentage. Normalized cases are
:attr:`.DECIMALS` and :attr:`.INT`.
:param read: For commands that do not print lines, how many
characters we should read between updates.
The percentage should be between those
characters.
:param callback: If passed in, this method is executed every time
run gets an update from the command, passing
in the increment from the last execution.
If not passed-in, you can get such increment
by executing manually the ``increment`` method.
:param check: Raise error if subprocess return code is non-zero.
"""
self.cmd = tuple(str(c) for c in cmd)
self.read = read
self.step = 0
self.check = check
self.number_chars = number_chars
self.decimal_numbers = decimal_numbers
# We call subprocess in the main thread so the main thread
# can react on ``CalledProcessError`` exceptions
self.conn = conn = subprocess.Popen(
self.cmd, universal_newlines=True, stderr=subprocess.PIPE, stdout=stdout
)
self.out = conn.stdout if stdout == subprocess.PIPE else conn.stderr
self._callback = callback
self.last_update_percentage = 0
self.percentage = 0
@property
def percentage(self):
return self._percentage
@percentage.setter
def percentage(self, v):
self._percentage = v
if self._callback and self._percentage > 0:
increment = self.increment()
if (
increment > 0
): # Do not bother calling if there has not been any increment
self._callback(increment, self._percentage)
def run(self) -> None:
"""Processes the output."""
while True:
out = self.out.read(self.read) if self.read else self.out.readline()
if out:
with suppress(StopIteration):
self.percentage = next(
text.positive_percentages(
out, self.number_chars, self.decimal_numbers
)
)
else: # No more output
break
return_code = self.conn.wait() # wait until cmd ends
if self.check and return_code != 0:
raise subprocess.CalledProcessError(
self.conn.returncode, self.conn.args, stderr=self.conn.stderr.read()
)
def increment(self):
"""Returns the increment of progression from
the last time this method was executed.
"""
# for cmd badblocks the increment can be negative at the
# beginning of the second step where last_percentage
# is 100 and percentage is 0. By using max we
# kind-of reset the increment and start counting for
# the second step
increment = max(self.percentage - self.last_update_percentage, 0)
self.last_update_percentage = self.percentage
return increment
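# A hedged usage sketch: the command below ('badblocks') and its device path
# are only illustrative; any command that prints a percentage works. ``run``
# blocks until the command ends, so it is usually executed in a worker thread
# while the main thread receives increments through ``callback`` (or polls
# ``increment`` itself):
#
#     def report(increment, percentage):
#         print('+{} -> {}%'.format(increment, percentage))
#
#     cmd = ProgressiveCmd('badblocks', '-st', 'random', '-w', '/dev/sdX',
#                          number_chars=ProgressiveCmd.DECIMALS,
#                          decimal_numbers=2,
#                          read=35,
#                          callback=report)
#     threading.Thread(target=cmd.run).start()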

View File

@ -1,171 +0,0 @@
"""Functions to get values from dictionaries and list encoded key-value
strings with meaningful indentations.
Values obtained from these functions are sanitized and automatically
cast (or cast to an explicitly set type). Sanitization includes removing
unnecessary whitespace and removing useless keywords (in the context of
computer hardware) from the texts.
"""
import re
from itertools import chain
from typing import Any, Iterable, Set, Type, Union
from unittest.mock import DEFAULT
import boltons.iterutils
import yaml
from ereuse_devicehub.ereuse_utils.text import clean
def dict(
d: dict,
path: Union[str, tuple],
remove: Set[str] = set(),
default: Any = DEFAULT,
type: Type = None,
):
"""Gets a value from the dictionary and sanitizes it.
Values are patterned and compared against sets
of meaningless characters for device hardware.
:param d: A dictionary potentially containing the value.
:param path: The key or a tuple-path where the value should be.
:param remove: Remove these words if found.
:param default: A default value to return if not found. If not set,
an exception is raised.
:param type: Enforce a type on the value (like ``int``). By default
dict tries to guess the correct type.
"""
try:
v = boltons.iterutils.get_path(d, (path,) if isinstance(path, str) else path)
except KeyError:
return _default(path, default)
else:
return sanitize(v, remove, type=type)
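# A hedged usage sketch (the data is made up; exact results depend on the
# sanitization rules defined below). Within this module the function shadows
# the ``dict`` builtin; from outside it would be called through the module
# name:
#
#     data = {'System': {'Manufacturer': ' LENOVO ',
#                        'Version': 'To be filled by O.E.M.'}}
#     dict(data, ('System', 'Manufacturer'))  # -> 'LENOVO'
#     dict(data, ('System', 'Version'))       # -> None (meaningless value)
#     dict(data, 'Missing', default=None)     # -> None (key not found)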
def kv(
iterable: Iterable[str],
key: str,
default: Any = DEFAULT,
sep=':',
type: Type = None,
) -> Any:
"""Key-value. Gets a value from an iterable representing key values in the
form of a list of strings lines, for example an ``.ini`` or yaml file,
if they are opened with ``.splitlines()``.
:param iterable: An iterable of strings.
:param key: The key where the value should be.
:param default: A default value to return if not found. If not set,
an exception is raised.
:param sep: What separates the key from the value in the line.
Usually ``:`` or ``=``.
:param type: Enforce a type on the value (like ``int``). By default
dict tries to guess the correct type.
"""
for line in iterable:
try:
k, value, *_ = line.strip().split(sep)
except ValueError:
continue
else:
if key == k:
return sanitize(value, type=type)
return _default(key, default)
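# A small usage sketch with made-up lines; values are cast by yaml unless a
# ``type`` is forced:
#
#     lines = 'Vendor: Acme\nSerial Number: 1234'.splitlines()
#     kv(lines, 'Vendor')                  # -> 'Acme'
#     kv(lines, 'Serial Number')           # -> 1234 (an int)
#     kv(lines, 'Missing', default=None)   # -> None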
def indents(iterable: Iterable[str], keyword: str, indent=' '):
"""For a given iterable of strings, returns blocks of the same
left indentation.
For example:
foo1
bar1
bar2
foo2
foo2
For that text, this method would return ``[bar1, bar2]`` for passed-in
keyword ``foo1``.
:param iterable: A list of strings representing lines.
:param keyword: The title preceding the indentation.
:param indent: Which characters makes the indentation.
"""
section_pos = None
for i, line in enumerate(iterable):
if not line.startswith(indent):
if keyword in line:
section_pos = i
elif section_pos is not None:
yield iterable[section_pos:i]
section_pos = None
return
def _default(key, default):
if default is DEFAULT:
raise IndexError('Value {} not found.'.format(key))
else:
return default
"""Gets"""
TO_REMOVE = {'none', 'prod', 'o.e.m', 'oem', r'n/a', 'atapi', 'pc', 'unknown'}
"""Delete those *words* from the value"""
assert all(v.lower() == v for v in TO_REMOVE), 'All words need to be lower-case'
REMOVE_CHARS_BETWEEN = '(){}[]'
"""
Remove those *characters* from the value.
All chars inside those are removed. Ex: foo (bar) => foo
"""
CHARS_TO_REMOVE = '*'
"""Remove the characters.
'*' Needs to be removed or otherwise it is interpreted
as a glob expression by regexes.
"""
MEANINGLESS = {
'to be filled',
'system manufacturer',
'system product',
'sernum',
'xxxxx',
'system name',
'not specified',
'modulepartnumber',
'system serial',
'0001-067a-0000',
'partnum',
'manufacturer',
'0000000',
'fffff',
'jedec id:ad 00 00 00 00 00 00 00',
'012000',
'x.x',
'sku',
}
"""Discard a value if any of these values are inside it. """
assert all(v.lower() == v for v in MEANINGLESS), 'All values need to be lower-case'
def sanitize(value, remove=set(), type=None):
if value is None:
return None
remove = remove | TO_REMOVE
regex = r'({})\W'.format('|'.join(s for s in remove))
val = re.sub(regex, '', value, flags=re.IGNORECASE)
val = '' if val.lower() in remove else val # regex's `\W` != whole string
val = re.sub(r'\([^)]*\)', '', val) # Remove everything between parentheses
for char_to_remove in chain(REMOVE_CHARS_BETWEEN, CHARS_TO_REMOVE):
val = val.replace(char_to_remove, '')
val = clean(val)
if val and not any(meaningless in val.lower() for meaningless in MEANINGLESS):
return type(val) if type else yaml.load(val, Loader=yaml.SafeLoader)
else:
return None
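# A few illustrative calls (results hinge on the constants above):
#
#     sanitize('LENOVO (Type 1234)')       # -> 'LENOVO'
#     sanitize('To Be Filled By O.E.M.')   # -> None (meaningless)
#     sanitize('3200', type=int)           # -> 3200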

View File

@ -1,143 +0,0 @@
from inflection import (
camelize,
dasherize,
parameterize,
pluralize,
singularize,
underscore,
)
HID_CONVERSION_DOC = """
The HID is the result of concatenating,
in the following order: the type of device (ex. Computer),
the manufacturer name, the model name, and the S/N. It is joined
with hyphens, and adapted to comply with the URI specification, so
it can be used in the URI identifying the device on the Internet.
The conversion is done as follows:
1. non-ASCII characters are converted to their ASCII equivalent or
removed.
2. Characters that are not letters or numbers are converted to
underscores, in a way that there are no trailing underscores
and no consecutive underscores, and the result is lowercased.
Ex. ``laptop-acer-aod270-lusga_0d0242201212c7614``
"""
class Naming:
"""
In DeviceHub there are many ways to name the same resource (yay!), this is because of all the different
types of schemas we work with. But no worries, we offer easy ways to change between naming conventions.
- TypeCase (or resource-type) is the one represented with '@type' and follow PascalCase and always singular.
This is the standard preferred one.
- resource-case is the eve naming, using the standard URI conventions. This one is tricky: although the types
are represented in singular, the URI convention is to be plural (Event vs events); however, just a few of them
follow this rule (Snapshot [type] to snapshot [resource]). You can set which ones should change their
number.
- python_case is the one used by python for its folders and modules. It is underscored and always singular.
"""
TYPE_PREFIX = ':'
RESOURCE_PREFIX = '_'
@staticmethod
def resource(string: str):
"""
:param string: String can be type, resource or python case
"""
try:
prefix, resulting_type = Naming.pop_prefix(string)
prefix += Naming.RESOURCE_PREFIX
except IndexError:
prefix = ''
resulting_type = string
resulting_type = dasherize(underscore(resulting_type))
return prefix + pluralize(resulting_type)
@staticmethod
def python(string: str):
"""
:param string: String can be type, resource or python case
"""
return underscore(singularize(string))
@staticmethod
def type(string: str):
try:
prefix, resulting_type = Naming.pop_prefix(string)
prefix += Naming.TYPE_PREFIX
except IndexError:
prefix = ''
resulting_type = string
resulting_type = singularize(resulting_type)
resulting_type = resulting_type.replace(
'-', '_'
) # camelize does not convert '-' but '_'
return prefix + camelize(resulting_type)
@staticmethod
def url_word(word: str):
"""
Normalizes a full word to be inserted into a URL. If the word has spaces, etc., '_' is used instead of '-'
"""
return parameterize(word, '_')
@staticmethod
def pop_prefix(string: str):
"""Erases the prefix and returns it.
:raises IndexError: There is no prefix.
:return: A list with two elements: 1. the prefix, 2. the type without it.
"""
result = string.split(Naming.TYPE_PREFIX)
if len(result) == 1:
result = string.split(Naming.RESOURCE_PREFIX)
if len(result) == 1:
raise IndexError()
return result
@staticmethod
def new_type(type_name: str, prefix: str or None = None) -> str:
"""
Creates a resource type with optionally a prefix.
Using the rules of JSON-LD, we use prefixes to disambiguate between different types with the same name:
one can Accept a device or a project. In eReuse.org there are different events with the same names, in
linked-data terms they have different URI. In eReuse.org, we solve this with the following:
"@type": "devices:Accept" // the URI for these events is 'devices/events/accept'
"@type": "projects:Accept" // the URI for these events is 'projects/events/accept
...
Type is only used in events, when there are ambiguities. The rest of
"@type": "devices:Accept"
"@type": "Accept"
But these not:
"@type": "projects:Accept" // it is an event from a project
"@type": "Accept" // it is an event from a device
"""
if Naming.TYPE_PREFIX in type_name:
raise TypeError(
'Cannot create new type: type {} is already prefixed.'.format(type_name)
)
prefix = (prefix + Naming.TYPE_PREFIX) if prefix is not None else ''
return prefix + type_name
@staticmethod
def hid(type: str, manufacturer: str, model: str, serial_number: str) -> str:
(
"""Computes the HID for the given properties of a device.
The HID is suitable to use to an URI.
"""
+ HID_CONVERSION_DOC
)
return '{type}-{mn}-{ml}-{sn}'.format(
type=Naming.url_word(type),
mn=Naming.url_word(manufacturer),
ml=Naming.url_word(model),
sn=Naming.url_word(serial_number),
)
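# A quick usage sketch of the conversions above (outputs shown as expected,
# given the inflection rules):
#
#     Naming.type('snapshots')      # -> 'Snapshot'
#     Naming.resource('Snapshot')   # -> 'snapshots'
#     Naming.python('GraphicCard')  # -> 'graphic_card'
#     Naming.hid('Computer', 'acer', 'AOD270', 'LUSGA 0D02')
#     # -> 'computer-acer-aod270-lusga_0d02'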

View File

@ -1,85 +0,0 @@
class NestedLookup:
@staticmethod
def __new__(cls, document, references, operation):
"""Lookup a key in a nested document, return a list of values
From https://github.com/russellballestrini/nested-lookup/ but in python 3
"""
return list(NestedLookup._nested_lookup(document, references, operation))
@staticmethod
def key_equality_factory(key_to_find):
def key_equality(key, _):
return key == key_to_find
return key_equality
@staticmethod
def is_sub_type_factory(type):
def _is_sub_type(_, value):
return is_sub_type(value, type)
return _is_sub_type
@staticmethod
def key_value_equality_factory(key_to_find, value_to_find):
def key_value_equality(key, value):
return key == key_to_find and value == value_to_find
return key_value_equality
@staticmethod
def key_value_containing_value_factory(key_to_find, value_to_find):
def key_value_containing_value(key, value):
return key == key_to_find and value_to_find in value
return key_value_containing_value
@staticmethod
def _nested_lookup(document, references, operation): # noqa: C901
"""Lookup a key in a nested document, yield a value"""
if isinstance(document, list):
for d in document:
for result in NestedLookup._nested_lookup(d, references, operation):
yield result
if isinstance(document, dict):
for k, v in document.items():
if operation(k, v):
references.append((document, k))
yield v
elif isinstance(v, dict):
for result in NestedLookup._nested_lookup(v, references, operation):
yield result
elif isinstance(v, list):
for d in v:
for result in NestedLookup._nested_lookup(
d, references, operation
):
yield result
def is_sub_type(value, resource_type):
try:
return issubclass(value, resource_type)
except TypeError:
return issubclass(value.__class__, resource_type)
def get_nested_dicts_with_key_value(parent_dict: dict, key, value):
"""Return all nested dictionaries that contain a key with a specific value. A sub-case of NestedLookup."""
references = []
NestedLookup(
parent_dict, references, NestedLookup.key_value_equality_factory(key, value)
)
return (document for document, _ in references)
def get_nested_dicts_with_key_containing_value(parent_dict: dict, key, value):
"""Return all nested dictionaries that contain a key with a specific value. A sub-case of NestedLookup."""
references = []
NestedLookup(
parent_dict,
references,
NestedLookup.key_value_containing_value_factory(key, value),
)
return (document for document, _ in references)
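# A minimal usage sketch with a made-up document:
#
#     doc = {'type': 'Computer', 'components': [{'type': 'GraphicCard'}]}
#     list(get_nested_dicts_with_key_value(doc, 'type', 'GraphicCard'))
#     # -> [{'type': 'GraphicCard'}]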

View File

@ -1,285 +0,0 @@
import base64
import json
from typing import Any, Dict, Iterable, Tuple, TypeVar, Union
import boltons.urlutils
from requests import Response
from requests_toolbelt.sessions import BaseUrlSession
from urllib3 import Retry
from ereuse_devicehub import ereuse_utils
# mypy
Query = Iterable[Tuple[str, Any]]
Status = Union[int]
try:
from typing import Protocol # Only py 3.6+
except ImportError:
pass
else:
class HasStatusProperty(Protocol):
def __init__(self, *args, **kwargs) -> None:
self.status = ... # type: int
Status = Union[int, HasStatusProperty]
JSON = 'application/json'
ANY = '*/*'
AUTH = 'Authorization'
BASIC = 'Basic {}'
URL = Union[str, boltons.urlutils.URL]
Data = Union[str, dict, ereuse_utils.Dumpeable]
Res = Tuple[Union[Dict[str, Any], str], Response]
# actual code
class Session(BaseUrlSession):
"""A BaseUrlSession that always raises for status and sets a
timeout for all requests by default.
"""
def __init__(self, base_url=None, timeout=15):
"""
:param base_url:
:param timeout: Time requests will wait to receive the first
response bytes (not the whole) from the server. In seconds.
"""
super().__init__(base_url)
self.timeout = timeout
self.hooks['response'] = lambda r, *args, **kwargs: r.raise_for_status()
def request(self, method, url, *args, **kwargs):
kwargs.setdefault('timeout', self.timeout)
return super().request(method, url, *args, **kwargs)
def __repr__(self):
return '<{} base={}>.'.format(self.__class__.__name__, self.base_url)
class DevicehubClient(Session):
"""A Session pre-configured to connect to Devicehub-like APIs."""
def __init__(self, base_url: URL = None,
token: str = None,
inventory: Union[str, bool] = False,
**kwargs):
"""Initializes a session pointing to a Devicehub endpoint.
Authentication can be passed-in as a token for endpoints
that require it: now at init, later when executing a method,
or in between with ``set_auth``.
:param base_url: A URL pointing to an endpoint.
:param token: A Base64 encoded token, as given by a devicehub.
You can encode tokens by executing `encode_token`.
:param inventory: If True, use the default inventory of the user.
If False, do not use inventories (single-inventory
database, this is the option by default).
If a string, always use the set inventory.
"""
if isinstance(base_url, boltons.urlutils.URL):
base_url = base_url.to_text()
else:
base_url = str(base_url)
super().__init__(base_url, **kwargs)
assert base_url[-1] != '/', 'Do not provide a final slash to the URL'
if token:
self.set_auth(token)
self.inventory = inventory
self.user = None # type: Dict[str, object]
def set_auth(self, token):
self.headers['Authorization'] = 'Basic {}'.format(token)
@classmethod
def encode_token(cls, token: str):
"""Encodes a token suitable for a Devicehub endpoint."""
return base64.b64encode(str.encode(str(token) + ':')).decode()
def login(self, email: str, password: str) -> Dict[str, Any]:
"""Performs login, authenticating future requests.
:return: The logged-in user.
"""
user, _ = self.post('/users/login/', {'email': email, 'password': password}, status=200)
self.set_auth(user['token'])
self.user = user
self.inventory = user['inventories'][0]['id']
return user
def get(self,
base_url: URL,
uri=None,
status: Status = 200,
query: Query = tuple(),
accept=JSON,
content_type=JSON,
headers: dict = None,
token=None,
**kwargs) -> Res:
return super().get(base_url,
uri=uri,
status=status,
query=query,
accept=accept,
content_type=content_type,
headers=headers,
token=token, **kwargs)
def post(self, base_url: URL,
data: Data,
uri=None,
status: Status = 201,
query: Query = tuple(),
accept=JSON,
content_type=JSON,
headers: dict = None,
token=None,
**kwargs) -> Res:
return super().post(base_url,
data=data,
uri=uri,
status=status,
query=query,
accept=accept,
content_type=content_type,
headers=headers,
token=token, **kwargs)
def delete(self,
base_url: URL,
uri=None,
status: Status = 204,
query: Query = tuple(),
accept=JSON,
content_type=JSON,
headers: dict = None,
token=None,
**kwargs) -> Res:
return super().delete(base_url,
uri=uri,
status=status,
query=query,
accept=accept,
content_type=content_type,
headers=headers,
token=token, **kwargs)
def patch(self, base_url: URL,
data: Data,
uri=None,
status: Status = 201,
query: Query = tuple(),
accept=JSON,
content_type=JSON,
headers: dict = None,
token=None,
**kwargs) -> Res:
return super().patch(base_url,
data=data,
uri=uri,
status=status,
query=query,
accept=accept,
content_type=content_type,
headers=headers,
token=token, **kwargs)
def request(self,
method,
base_url: URL,
uri=None,
status: Status = 200,
query: Query = tuple(),
accept=JSON,
content_type=JSON,
data=None,
headers: dict = None,
token=None,
**kw) -> Res:
assert not kw.get('json', None), 'Do not use json; use data.'
# We allow uris without slashes for item endpoints
uri = str(uri) if uri else None
headers = headers or {}
headers['Accept'] = accept
headers['Content-Type'] = content_type
if token:
headers['Authorization'] = 'Basic {}'.format(token)
if data and content_type == JSON:
data = json.dumps(data, cls=ereuse_utils.JSONEncoder, sort_keys=True)
url = base_url if not isinstance(base_url, boltons.urlutils.URL) else base_url.to_text()
assert url[-1] == '/', 'base_url should end with a slash'
if self.inventory and not isinstance(self.inventory, bool):
url = '{}/{}'.format(self.inventory, base_url)
assert url[-1] == '/', 'base_url should end with a slash'
if uri:
url = self.parse_uri(url, uri)
if query:
url = self.parse_query(url, query)
response = super().request(method, url, data=data, headers=headers, **kw)
if status:
_status = getattr(status, 'code', status)
if _status != response.status_code:
raise WrongStatus('Request to {} failed because the status is {} but it should have been {}'
.format(url, response.status_code, _status))
data = response.content if not accept == JSON or not response.content else response.json()
return data, response
@staticmethod
def parse_uri(base_url, uri):
return boltons.urlutils.URL(base_url).navigate(uri).to_text()
@staticmethod
def parse_query(uri, query):
url = boltons.urlutils.URL(uri)
url.query_params = boltons.urlutils.QueryParamDict([
(k, json.dumps(v, cls=ereuse_utils.JSONEncoder) if isinstance(v, (list, dict)) else v)
for k, v in query
])
return url.to_text()
def __repr__(self):
return '<{} base={} inv={} user={}>.'.format(self.__class__.__name__, self.base_url,
self.inventory, self.user)
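# A hedged usage sketch (the endpoint, credentials and paths are made up):
#
#     dh = DevicehubClient('https://devicehub.example.com', inventory=True)
#     user = dh.login('user@example.com', 'secret')   # sets auth and inventory
#     devices, response = dh.get('devices/')
#     dh.post('tags/', {'id': 'tag-1'})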
class WrongStatus(Exception):
pass
import requests
from requests.adapters import HTTPAdapter
T = TypeVar('T', bound=requests.Session)
def retry(session: T,
retries=3,
backoff_factor=1,
status_to_retry=(500, 502, 504)) -> T:
"""Configures requests from the given session to retry in
failed requests due to connection errors, HTTP response codes
with ``status_to_retry`` and 30X redirections.
Remember that you still need
"""
# From https://www.peterbe.com/plog/best-practice-with-retries-with-requests
# Doc in https://urllib3.readthedocs.io/en/latest/reference/urllib3.util.html#module-urllib3.util.retry
session = session or requests.Session()
retry = Retry(
total=retries,
read=retries,
connect=retries,
backoff_factor=backoff_factor,
status_forcelist=status_to_retry,
method_whitelist=False # Retry too in non-idempotent methods like POST
)
adapter = HTTPAdapter(max_retries=retry)
session.mount('http://', adapter)
session.mount('https://', adapter)
return session
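# A short usage sketch:
#
#     session = retry(requests.Session(), retries=5, backoff_factor=0.5)
#     session.get('https://example.com/api/', timeout=10)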

View File

@ -1,165 +0,0 @@
from contextlib import suppress
from typing import Dict, Tuple, Union
from flask import json
from flask.testing import FlaskClient
from werkzeug.wrappers import Response
from ereuse_devicehub.ereuse_utils.session import ANY, AUTH, BASIC, DevicehubClient, JSON, Query, Status
ANY = ANY
AUTH = AUTH
BASIC = BASIC
Res = Tuple[Union[Dict[str, object], str], Response]
class Client(FlaskClient):
"""
A client for the REST servers of DeviceHub and WorkbenchServer.
- JSON first. By default it sends and expects receiving JSON files.
- Assert regular status responses, like 200 for GET.
- Auto-parses a nested dictionary of URL query params to the
URL version with nested properties to JSON.
- Meaningful headers format: a dictionary of name-values.
"""
def open(self,
uri: str,
status: Status = 200,
query: Query = tuple(),
accept=JSON,
content_type=JSON,
item=None,
headers: dict = None,
**kw) -> Res:
"""
:param uri: The URI without basename and query.
:param status: Assert the response for specified status. Set
None to avoid.
:param query: The query of the URL in the form of
[(key1, value1), (key2, value2), (key1, value3)].
If value is a list or a dict, they will be
converted to JSON.
Please see :class:`boltons.urlutils.QueryParamDict`
for more info.
:param accept: The Accept header. If 'application/json'
(default) then it will parse incoming JSON.
:param item: The last part of the path. Useful to do something
like ``get('db/accounts', item='24')``. If you
use ``item``, you can't set a final slash on
``uri`` (or the parse will fail).
:param headers: A dictionary of headers, where keys are header
names and values their values.
Ex: {'Accept': 'application/json'}.
:param kw: Kwargs passed into parent ``open``.
:return: A tuple with: 1. response data, as a string or JSON
depending on Accept, and 2. the Response object.
"""
j_encoder = self.application.json_encoder
headers = headers or {}
headers['Accept'] = accept
headers['Content-Type'] = content_type
headers = [(k, v) for k, v in headers.items()]
if 'data' in kw and content_type == JSON:
kw['data'] = json.dumps(kw['data'], cls=j_encoder)
if item:
uri = DevicehubClient.parse_uri(uri, item)
if query:
uri = DevicehubClient.parse_query(uri, query)
response = super().open(uri, headers=headers, **kw)
if status:
_status = getattr(status, 'code', status)
assert response.status_code == _status, \
'Expected status code {} but got {}. Returned data is:\n' \
'{}'.format(_status, response.status_code, response.get_data().decode())
data = response.get_data()
with suppress(UnicodeDecodeError):
data = data.decode()
if accept == JSON:
data = json.loads(data) if data else {}
return data, response
def get(self,
uri: str,
query: Query = tuple(),
item: str = None,
status: Status = 200,
accept: str = JSON,
headers: dict = None,
**kw) -> Res:
"""
Performs a GET.
See the parameters in :meth:`ereuse_utils.test.Client.open`.
Moreover:
:param query: A dictionary of query params. If a parameter is a
dict or a list, it will be parsed to JSON, then
all params are encoded with ``urlencode``.
:param kw: Kwargs passed into parent ``open``.
"""
return super().get(uri, item=item, status=status, accept=accept, headers=headers,
query=query, **kw)
def post(self,
uri: str,
data: str or dict,
query: Query = tuple(),
status: Status = 201,
content_type: str = JSON,
accept: str = JSON,
headers: dict = None,
**kw) -> Res:
"""
Performs a POST.
See the parameters in :meth:`ereuse_utils.test.Client.open`.
"""
return super().post(uri, data=data, status=status, content_type=content_type,
accept=accept, headers=headers, query=query, **kw)
def patch(self,
uri: str,
data: str or dict,
query: Query = tuple(),
status: Status = 200,
content_type: str = JSON,
item: str = None,
accept: str = JSON,
headers: dict = None,
**kw) -> Res:
"""
Performs a PATCH.
See the parameters in :meth:`ereuse_utils.test.Client.open`.
"""
return super().patch(uri, item=item, data=data, status=status, content_type=content_type,
accept=accept, headers=headers, query=query, **kw)
def put(self,
uri: str,
data: str or dict,
query: Query = tuple(),
status: Status = 201,
content_type: str = JSON,
item: str = None,
accept: str = JSON,
headers: dict = None,
**kw) -> Res:
return super().put(uri, item=item, data=data, status=status, content_type=content_type,
accept=accept, headers=headers, query=query, **kw)
def delete(self,
uri: str,
query: Query = tuple(),
item: str = None,
status: Status = 204,
accept: str = JSON,
headers: dict = None,
**kw) -> Res:
return super().delete(uri, query=query, item=item, status=status, accept=accept,
headers=headers, **kw)
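# A hedged usage sketch inside a pytest-style test (the fixture and the
# endpoints are only illustrative):
#
#     def test_device_list(client: Client):
#         user, _ = client.post('/users/login/',
#                               data={'email': 'a@a.a', 'password': 'p'},
#                               status=200)
#         devices, response = client.get('/devices/', query=[('page', 1)])
#         assert response.status_code == 200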

View File

@ -1,72 +0,0 @@
import ast
import re
from typing import Iterator, Set, Union
def grep(text: str, value: str):
"""An easy 'grep -i' that yields lines where value is found."""
for line in text.splitlines():
if value in line:
yield line
def between(text: str, begin='(', end=')'):
"""Dead easy text between two characters.
Not recursive; repetitions are not handled.
"""
return text.split(begin)[-1].split(end)[0]
def numbers(text: str) -> Iterator[Union[int, float]]:
"""Gets numbers in strings with other characters.
Integer Numbers: 1 2 3 987 +4 -8
Decimal Numbers: 0.1 2. .3 .987 +4.0 -0.8
Scientific Notation: 1e2 0.2e2 3.e2 .987e2 +4e-1 -8.e+2
Numbers with percentages: 49% 32.39%
This returns int or float.
"""
# From https://regexr.com/33jqd
for x in re.finditer(r'[+-]?(?=\.\d|\d)(?:\d+)?(?:\.?\d*)(?:[eE][+-]?\d+)?', text):
yield ast.literal_eval(x.group())
def positive_percentages(
text: str, lengths: Set[int] = None, decimal_numbers: int = None
) -> Iterator[Union[int, float]]:
"""Gets numbers postfixed with a '%' in strings with other characters.
1)100% 2)56.78% 3)56 78.90% 4)34.6789% some text
:param text: The text to search in.
:param lengths: A set of lengths that the percentage
                number should have to be considered valid.
                Ex. {5,6} would validate '90.32' and '100.00'
:param decimal_numbers: If set, the exact number of decimals
                        the percentage must have to be yielded.
"""
# From https://regexr.com/3aumh
for x in re.finditer(r'[\d|\.]+%', text):
num = x.group()[:-1]
if lengths:
if not len(num) in lengths:
continue
if decimal_numbers:
try:
pos = num.rindex('.')
except ValueError:
continue
else:
if len(num) - pos - 1 != decimal_numbers:
continue
yield float(num)
def macs(text: str) -> Iterator[str]:
"""Find MACs in strings with other characters."""
for x in re.finditer('{0}:{0}:{0}:{0}:{0}:{0}'.format(r'[a-fA-F0-9.+_-]+'), text):
yield x.group()
def clean(text: str) -> str:
"""Trims the text and replaces multiple spaces with a single space."""
return ' '.join(text.split())
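# A few illustrative calls:
#
#     list(numbers('battery at 95.5% of 4000mAh'))        # -> [95.5, 4000]
#     next(positive_percentages('erased 56.78% of disk',
#                               decimal_numbers=2))        # -> 56.78
#     list(macs('link/ether 00:1b:44:11:3a:b7 brd ...'))   # -> ['00:1b:44:11:3a:b7']
#     clean('  too   many   spaces ')                      # -> 'too many spaces'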

View File

@ -1,80 +0,0 @@
import usb.core
import usb.util
from usb import CLASS_MASS_STORAGE
from ereuse_devicehub.ereuse_utils.naming import Naming
def plugged_usbs(multiple=True) -> map or dict: # noqa: C901
"""
Gets the plugged-in USB Flash drives (pen-drives).
If multiple is true, it returns a map, and a dict otherwise.
If multiple is false, this method will raise a :class:`.NoUSBFound` if no USB is found.
"""
class FindPenDrives(object):
# From https://github.com/pyusb/pyusb/blob/master/docs/tutorial.rst
def __init__(self, class_):
self._class = class_
def __call__(self, device):
# first, let's check the device
if device.bDeviceClass == self._class:
return True
# ok, transverse all devices to find an
# interface that matches our class
for cfg in device:
# find_descriptor: what's it?
intf = usb.util.find_descriptor(cfg, bInterfaceClass=self._class)
# We don't want Card readers
if intf is not None:
try:
product = intf.device.product.lower()
except ValueError as e:
if 'langid' in str(e):
raise OSError(
'Cannot get "langid". Do you have permissions?'
)
else:
raise e
if 'crw' not in product and 'reader' not in product:
return True
return False
def get_pendrive(pen: usb.Device) -> dict:
if not pen.manufacturer or not pen.product or not pen.serial_number:
raise UsbDoesNotHaveHid()
manufacturer = pen.manufacturer.strip() or str(pen.idVendor)
model = pen.product.strip() or str(pen.idProduct)
serial_number = pen.serial_number.strip()
hid = Naming.hid('USBFlashDrive', manufacturer, model, serial_number)
return {
'id': hid, # Make life easier for DeviceHubClient by using _id
'hid': hid,
'type': 'USBFlashDrive',
'serialNumber': serial_number,
'model': model,
'manufacturer': manufacturer,
'vendorId': pen.idVendor,
'productId': pen.idProduct,
}
result = usb.core.find(
find_all=multiple, custom_match=FindPenDrives(CLASS_MASS_STORAGE)
)
if multiple:
return map(get_pendrive, result)
else:
if not result:
raise NoUSBFound()
return get_pendrive(result)
class NoUSBFound(Exception):
pass
class UsbDoesNotHaveHid(Exception):
pass
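# A usage sketch (requires pyusb and enough permissions to read the USB
# descriptors):
#
#     for pen in plugged_usbs():
#         print(pen['hid'], pen['serialNumber'])
#
#     pen = plugged_usbs(multiple=False)  # raises NoUSBFound if none is plugged in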

View File

@ -1,192 +0,0 @@
from boltons.urlutils import URL
from flask import current_app as app
from flask import g, session
from flask_wtf import FlaskForm
from werkzeug.security import generate_password_hash
from wtforms import (
BooleanField,
EmailField,
PasswordField,
StringField,
URLField,
validators,
)
from ereuse_devicehub.db import db
from ereuse_devicehub.resources.user.models import SanitizationEntity, User
class LoginForm(FlaskForm):
email = EmailField('Email Address', [validators.Length(min=6, max=35)])
password = PasswordField('Password', [validators.DataRequired()])
remember = BooleanField('Remember me')
error_messages = {
'invalid_login': (
"Please enter a correct email and password. Note that both "
"fields may be case-sensitive."
),
'inactive': "This account is inactive.",
}
def validate(self, extra_validators=None):
is_valid = super().validate(extra_validators)
if not is_valid:
return False
email = self.email.data
password = self.password.data
self.user_cache = self.authenticate(email, password)
if self.user_cache is None:
self.form_errors.append(self.error_messages['invalid_login'])
return False
return self.confirm_login_allowed(self.user_cache)
def authenticate(self, email, password):
if email is None or password is None:
return
user = User.query.filter_by(email=email).first()
if user is None:
# Run the default password hasher once to reduce the timing
# difference between an existing and a nonexistent user (#20760).
generate_password_hash(password)
else:
if user.check_password(password):
return user
def confirm_login_allowed(self, user):
"""
Controls whether the given User may log in. This is a policy setting,
independent of end-user authentication. The default behavior is to
allow login by active users, and reject login by inactive users.
If the given user cannot log in, this method should raise a
``ValidationError``.
If the given user may log in, this method should return None.
"""
if not user.is_active:
self.form_errors.append(self.error_messages['inactive'])
if 'dpp' in app.blueprints.keys():
dlt_keys = user.get_dlt_keys(
self.password.data
).get('data', {})
token_dlt = dlt_keys.get('api_token')
eth_pub_key = dlt_keys.get('eth_pub_key')
session['token_dlt'] = token_dlt
session['eth_pub_key'] = eth_pub_key
session['rols'] = user.get_rols()
return user.is_active
class PasswordForm(FlaskForm):
password = PasswordField(
'Current Password',
[validators.DataRequired()],
render_kw={'class': "form-control"},
)
newpassword = PasswordField(
'New Password',
[validators.DataRequired()],
render_kw={'class': "form-control"},
)
renewpassword = PasswordField(
'Re-enter New Password',
[validators.DataRequired()],
render_kw={'class': "form-control"},
)
def validate(self, extra_validators=None):
is_valid = super().validate(extra_validators)
if not is_valid:
return False
if not g.user.check_password(self.password.data):
return False
if self.newpassword.data != self.renewpassword.data:
return False
return True
def save(self, commit=True):
if 'dpp' in app.blueprints.keys():
keys_dlt = g.user.get_dlt_keys(self.password.data)
g.user.reset_dlt_keys(self.newpassword.data, keys_dlt)
token_dlt = (
g.user.get_dlt_keys(self.newpassword.data)
.get('data', {})
.get('api_token')
)
session['token_dlt'] = token_dlt
g.user.password = self.newpassword.data
db.session.add(g.user)
if commit:
db.session.commit()
return
class SanitizationEntityForm(FlaskForm):
logo = URLField(
'Logo',
[validators.Optional(), validators.URL()],
render_kw={
'class': "form-control",
"placeholder": "Url where is the logo - acceptd only .png, .jpg, .gif, svg",
},
)
company_name = StringField('Company Name', render_kw={'class': "form-control"})
location = StringField('Location', render_kw={'class': "form-control"})
responsable_person = StringField(
'Responsible person', render_kw={'class': "form-control"}
)
supervisor_person = StringField(
'Supervisor person', render_kw={'class': "form-control"}
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if isinstance(self.logo.data, URL):
self.logo.data = self.logo.data.to_text()
def validate(self, extra_validators=None):
is_valid = super().validate(extra_validators)
if not is_valid:
return False
if not self.logo.data:
return True
extensions = ["jpg", "jpeg", "png", "gif", "svg"]
if self.logo.data.lower().split(".")[-1] not in extensions:
txt = "Error in Url field - accepted only .PNG, .JPG and .GIF. extensions"
self.logo.errors = [txt]
return False
return True
def save(self, commit=True):
if isinstance(self.logo.data, str):
self.logo.data = URL(self.logo.data)
sanitation_data = SanitizationEntity.query.filter_by(user_id=g.user.id).first()
if not sanitation_data:
sanitation_data = SanitizationEntity(user_id=g.user.id)
self.populate_obj(sanitation_data)
db.session.add(sanitation_data)
else:
self.populate_obj(sanitation_data)
if commit:
db.session.commit()
return

File diff suppressed because it is too large

View File

@ -1,161 +0,0 @@
from uuid import uuid4
from citext import CIText
from dateutil.tz import tzutc
from flask import g
from sortedcontainers import SortedSet
from sqlalchemy import BigInteger, Column, Integer
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import backref, relationship
from ereuse_devicehub.db import db
from ereuse_devicehub.resources.models import Thing
from ereuse_devicehub.resources.user.models import User
from ereuse_devicehub.teal.db import CASCADE_OWN, URL
class Transfer(Thing):
"""
A transfer is a change of possession of devices between
a user and a code (a counterpart that is not a system user).
"""
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid4)
code = Column(CIText(), default='', nullable=False)
date = Column(db.TIMESTAMP(timezone=True))
description = Column(CIText(), default='', nullable=True)
lot_id = db.Column(
UUID(as_uuid=True),
db.ForeignKey('lot.id', use_alter=True, name='lot_transfer'),
nullable=False,
)
lot = relationship(
'Lot',
backref=backref('transfer', lazy=True, uselist=False, cascade=CASCADE_OWN),
primaryjoin='Transfer.lot_id == Lot.id',
)
user_from_id = db.Column(UUID(as_uuid=True), db.ForeignKey(User.id), nullable=True)
user_from = db.relationship(User, primaryjoin=user_from_id == User.id)
user_to_id = db.Column(UUID(as_uuid=True), db.ForeignKey(User.id), nullable=True)
user_to = db.relationship(User, primaryjoin=user_to_id == User.id)
@property
def closed(self):
if self.date:
return True
return False
def type_transfer(self):
if self.user_from == g.user:
return 'Outgoing'
if self.user_to == g.user:
return 'Incoming'
return 'Temporary'
class DeliveryNote(Thing):
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid4)
number = Column(CIText(), default='', nullable=False)
date = Column(db.TIMESTAMP(timezone=True))
units = Column(Integer, default=0)
weight = Column(Integer, default=0)
transfer_id = db.Column(
UUID(as_uuid=True),
db.ForeignKey('transfer.id'),
nullable=False,
)
transfer = relationship(
'Transfer',
backref=backref('delivery_note', lazy=True, uselist=False, cascade=CASCADE_OWN),
primaryjoin='DeliveryNote.transfer_id == Transfer.id',
)
class ReceiverNote(Thing):
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid4)
number = Column(CIText(), default='', nullable=False)
date = Column(db.TIMESTAMP(timezone=True))
units = Column(Integer, default=0)
weight = Column(Integer, default=0)
transfer_id = db.Column(
UUID(as_uuid=True),
db.ForeignKey('transfer.id'),
nullable=False,
)
transfer = relationship(
'Transfer',
backref=backref('receiver_note', lazy=True, uselist=False, cascade=CASCADE_OWN),
primaryjoin='ReceiverNote.transfer_id == Transfer.id',
)
class TransferCustomerDetails(Thing):
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid4)
company_name = Column(CIText(), nullable=True)
location = Column(CIText(), nullable=True)
logo = Column(URL(), nullable=True)
transfer_id = db.Column(
UUID(as_uuid=True),
db.ForeignKey('transfer.id'),
nullable=False,
)
transfer = relationship(
'Transfer',
backref=backref(
'customer_details', lazy=True, uselist=False, cascade=CASCADE_OWN
),
primaryjoin='TransferCustomerDetails.transfer_id == Transfer.id',
)
_sorted_documents = {
'order_by': lambda: DeviceDocument.created,
'collection_class': SortedSet,
}
class DeviceDocument(Thing):
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid4)
type = Column(db.CIText(), nullable=True)
date = Column(db.DateTime, nullable=True)
id_document = Column(db.CIText(), nullable=True)
description = Column(db.CIText(), nullable=True)
owner_id = db.Column(
UUID(as_uuid=True),
db.ForeignKey(User.id),
nullable=False,
default=lambda: g.user.id,
)
owner = db.relationship(User, primaryjoin=owner_id == User.id)
device_id = db.Column(BigInteger, db.ForeignKey('device.id'), nullable=False)
device = db.relationship(
'Device',
primaryjoin='DeviceDocument.device_id == Device.id',
backref=backref(
'documents', lazy=True, cascade=CASCADE_OWN, **_sorted_documents
),
)
file_name = Column(db.CIText(), nullable=True)
file_hash = Column(db.CIText(), nullable=True)
url = db.Column(URL(), nullable=True)
# __table_args__ = (
# db.Index('document_id', id, postgresql_using='hash'),
# db.Index('type_doc', type, postgresql_using='hash')
# )
def get_url(self) -> str:
if self.url:
return self.url.to_text()
return ''
def __lt__(self, other):
return self.created.replace(tzinfo=tzutc()) < other.created.replace(
tzinfo=tzutc()
)

File diff suppressed because it is too large

View File

@ -1,70 +0,0 @@
from flask import g
from flask_wtf import FlaskForm
from wtforms import IntegerField, StringField, validators
from ereuse_devicehub.db import db
from ereuse_devicehub.resources.device.models import Device
from ereuse_devicehub.resources.tag.model import Tag
class TagForm(FlaskForm):
code = StringField('Code', [validators.length(min=1)])
def validate(self, extra_validators=None):
error = ["This value is being used"]
is_valid = super().validate(extra_validators)
if not is_valid:
return False
tag = Tag.query.filter(Tag.id == self.code.data).all()
if tag:
self.code.errors = error
return False
return True
def save(self):
self.instance = Tag(id=self.code.data)
db.session.add(self.instance)
db.session.commit()
return self.instance
def remove(self):
if not self.instance.device and not self.instance.provider:
self.instance.delete()
db.session.commit()
return self.instance
class TagUnnamedForm(FlaskForm):
amount = IntegerField('amount')
def save(self):
num = self.amount.data
tags_id, _ = g.tag_provider.post('/', {}, query=[('num', num)])
tags = [Tag(id=tag_id, provider=g.inventory.tag_provider) for tag_id in tags_id]
db.session.add_all(tags)
db.session.commit()
return tags
class PrintLabelsForm(FlaskForm):
devices = StringField(render_kw={'class': "devicesList d-none"})
def validate(self, extra_validators=None):
is_valid = super().validate(extra_validators)
if not self.devices.data:
return False
device_ids = self.devices.data.split(",")
self._devices = (
Device.query.filter(Device.id.in_(device_ids))
.filter(Device.owner_id == g.user.id)
.distinct()
.all()
)
if not self._devices:
return False
return is_valid

View File

@ -1,172 +0,0 @@
import logging
import flask
from flask import Blueprint, request, url_for
from flask.views import View
from flask_login import current_user, login_required
from requests.exceptions import ConnectionError
from ereuse_devicehub import __version__, messages
from ereuse_devicehub.labels.forms import PrintLabelsForm, TagForm, TagUnnamedForm
from ereuse_devicehub.resources.lot.models import Lot, ShareLot
from ereuse_devicehub.resources.tag.model import Tag
labels = Blueprint('labels', __name__, url_prefix='/labels')
logger = logging.getLogger(__name__)
class TagListView(View):
methods = ['GET']
decorators = [login_required]
template_name = 'labels/label_list.html'
def dispatch_request(self):
lots = Lot.query.filter(Lot.owner_id == current_user.id)
share_lots = ShareLot.query.filter_by(user_to_id=current_user.id)
tags = Tag.query.filter(Tag.owner_id == current_user.id).order_by(
Tag.created.desc()
)
context = {
'lots': lots,
'tags': tags,
'page_title': 'Unique Identifiers Management',
'version': __version__,
'share_lots': share_lots,
}
return flask.render_template(self.template_name, **context)
class TagAddView(View):
methods = ['GET', 'POST']
decorators = [login_required]
template_name = 'labels/tag_create.html'
def dispatch_request(self):
lots = Lot.query.filter(Lot.owner_id == current_user.id)
share_lots = ShareLot.query.filter_by(user_to_id=current_user.id)
context = {
'page_title': 'New Tag',
'lots': lots,
'version': __version__,
'share_lots': share_lots,
}
form = TagForm()
if form.validate_on_submit():
form.save()
next_url = url_for('labels.label_list')
return flask.redirect(next_url)
return flask.render_template(self.template_name, form=form, **context)
class TagAddUnnamedView(View):
methods = ['GET', 'POST']
decorators = [login_required]
template_name = 'labels/tag_create_unnamed.html'
def dispatch_request(self):
lots = Lot.query.filter(Lot.owner_id == current_user.id)
share_lots = ShareLot.query.filter_by(user_to_id=current_user.id)
context = {
'page_title': 'New Unnamed Tag',
'lots': lots,
'version': __version__,
'share_lots': share_lots,
}
form = TagUnnamedForm()
if form.validate_on_submit():
try:
form.save()
except ConnectionError as e:
logger.error(
"Error while trying to connect to tag server: {}".format(e)
)
msg = (
"Sorry, we cannot create the unnamed tags requested because "
"some error happens while connecting to the tag server!"
)
messages.error(msg)
next_url = url_for('labels.label_list')
return flask.redirect(next_url)
return flask.render_template(self.template_name, form=form, **context)
class PrintLabelsView(View):
"""This View is used to print labels from multiple devices"""
methods = ['POST', 'GET']
decorators = [login_required]
template_name = 'labels/print_labels.html'
title = 'Design and implementation of labels'
def dispatch_request(self):
lots = Lot.query.filter(Lot.owner_id == current_user.id)
share_lots = ShareLot.query.filter_by(user_to_id=current_user.id)
context = {
'lots': lots,
'page_title': self.title,
'version': __version__,
'referrer': request.referrer,
'share_lots': share_lots,
}
form = PrintLabelsForm()
if form.validate_on_submit():
context['form'] = form
context['devices'] = form._devices
return flask.render_template(self.template_name, **context)
else:
messages.error('Error: you need to select one or more devices')
next_url = request.referrer or url_for('inventory.devicelist')
return flask.redirect(next_url)
class LabelDetailView(View):
"""This View is used to print labels from multiple devices"""
methods = ['POST', 'GET']
decorators = [login_required]
template_name = 'labels/print_labels.html'
title = 'Design and implementation of labels'
def dispatch_request(self, id):
lots = Lot.query.filter(Lot.owner_id == current_user.id)
share_lots = ShareLot.query.filter_by(user_to_id=current_user.id)
tag = (
Tag.query.filter(Tag.owner_id == current_user.id).filter(Tag.id == id).one()
)
context = {
'lots': lots,
'page_title': self.title,
'version': __version__,
'referrer': request.referrer,
'share_lots': share_lots,
}
devices = []
if tag.device:
form = PrintLabelsForm(devices=str(tag.device.id))
devices = [tag.device]
else:
form = PrintLabelsForm()
form._devices = devices
context['form'] = form
context['devices'] = devices
return flask.render_template(self.template_name, **context)
labels.add_url_rule('/', view_func=TagListView.as_view('label_list'))
labels.add_url_rule('/add/', view_func=TagAddView.as_view('tag_add'))
labels.add_url_rule(
'/unnamed/add/', view_func=TagAddUnnamedView.as_view('tag_unnamed_add')
)
labels.add_url_rule(
'/print',
view_func=PrintLabelsView.as_view('print_labels'),
)
labels.add_url_rule('/<string:id>/', view_func=LabelDetailView.as_view('label_details'))

View File

@ -1,622 +0,0 @@
# -*- coding: utf-8 -*-
"""
flaskext.mail
~~~~~~~~~~~~~
Flask extension for sending email.
:copyright: (c) 2010 by Dan Jacob.
:license: BSD, see LICENSE for more details.
"""
from __future__ import with_statement
__version__ = '0.9.1'
import re
import smtplib
import sys
import time
import unicodedata
from contextlib import contextmanager
from email import charset
from email.encoders import encode_base64
from email.header import Header
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formataddr, formatdate, make_msgid, parseaddr
import blinker
from flask import current_app
PY3 = sys.version_info[0] == 3
PY34 = PY3 and sys.version_info[1] >= 4
basestring = str
unicode = str
if PY3:
string_types = (str,)
text_type = str
from email import policy
message_policy = policy.SMTP
else:
string_types = (basestring,)
text_type = unicode
message_policy = None
charset.add_charset('utf-8', charset.SHORTEST, None, 'utf-8')
class FlaskMailUnicodeDecodeError(UnicodeDecodeError):
def __init__(self, obj, *args):
self.obj = obj
UnicodeDecodeError.__init__(self, *args)
def __str__(self):
original = UnicodeDecodeError.__str__(self)
return '%s. You passed in %r (%s)' % (original, self.obj, type(self.obj))
def force_text(s, encoding='utf-8', errors='strict'):
"""
Similar to smart_text, except that lazy instances are resolved to
strings, rather than kept as lazy objects.
If strings_only is True, don't convert (some) non-string-like objects.
"""
strings_only = True
if isinstance(s, text_type):
return s
try:
if not isinstance(s, string_types):
if PY3:
if isinstance(s, bytes):
s = text_type(s, encoding, errors)
else:
s = text_type(s)
elif hasattr(s, '__unicode__'):
s = s.__unicode__()
else:
s = text_type(bytes(s), encoding, errors)
else:
s = s.decode(encoding, errors)
except UnicodeDecodeError as e:
if not isinstance(s, Exception):
raise FlaskMailUnicodeDecodeError(s, *e.args)
else:
s = ' '.join([force_text(arg, encoding, errors) for arg in s])
return s
def sanitize_subject(subject, encoding='utf-8'):
try:
subject.encode('ascii')
except UnicodeEncodeError:
try:
subject = Header(subject, encoding).encode()
except UnicodeEncodeError:
subject = Header(subject, 'utf-8').encode()
return subject
def sanitize_address(addr, encoding='utf-8'):
if isinstance(addr, string_types):
addr = parseaddr(force_text(addr))
nm, addr = addr
try:
nm = Header(nm, encoding).encode()
except UnicodeEncodeError:
nm = Header(nm, 'utf-8').encode()
try:
addr.encode('ascii')
except UnicodeEncodeError: # IDN
if '@' in addr:
localpart, domain = addr.split('@', 1)
localpart = str(Header(localpart, encoding))
domain = domain.encode('idna').decode('ascii')
addr = '@'.join([localpart, domain])
else:
addr = Header(addr, encoding).encode()
return formataddr((nm, addr))
def sanitize_addresses(addresses, encoding='utf-8'):
return map(lambda e: sanitize_address(e, encoding), addresses)
def _has_newline(line):
"""Used by has_bad_header to check for \\r or \\n"""
if line and ('\r' in line or '\n' in line):
return True
return False
class Connection(object):
"""Handles connection to host."""
def __init__(self, mail):
self.mail = mail
def __enter__(self):
if self.mail.suppress:
self.host = None
else:
self.host = self.configure_host()
self.num_emails = 0
return self
def __exit__(self, exc_type, exc_value, tb):
if self.host:
self.host.quit()
def configure_host(self):
if self.mail.use_ssl:
host = smtplib.SMTP_SSL(self.mail.server, self.mail.port)
else:
host = smtplib.SMTP(self.mail.server, self.mail.port)
host.set_debuglevel(int(self.mail.debug))
if self.mail.use_tls:
host.starttls()
if self.mail.username and self.mail.password:
host.login(self.mail.username, self.mail.password)
return host
def send(self, message, envelope_from=None):
"""Verifies and sends message.
:param message: Message instance.
:param envelope_from: Email address to be used in MAIL FROM command.
"""
assert message.send_to, "No recipients have been added"
assert message.sender, (
"The message does not specify a sender and a default sender "
"has not been configured"
)
if message.has_bad_headers():
raise BadHeaderError
if message.date is None:
message.date = time.time()
if self.host:
self.host.sendmail(
sanitize_address(envelope_from or message.sender),
list(sanitize_addresses(message.send_to)),
message.as_bytes() if PY3 else message.as_string(),
message.mail_options,
message.rcpt_options,
)
email_dispatched.send(message, app=current_app._get_current_object())
self.num_emails += 1
if self.num_emails == self.mail.max_emails:
self.num_emails = 0
if self.host:
self.host.quit()
self.host = self.configure_host()
def send_message(self, *args, **kwargs):
"""Shortcut for send(msg).
Takes same arguments as Message constructor.
:versionadded: 0.3.5
"""
self.send(Message(*args, **kwargs))
class BadHeaderError(Exception):
pass
class Attachment(object):
"""Encapsulates file attachment information.
:versionadded: 0.3.5
:param filename: filename of attachment
:param content_type: file mimetype
:param data: the raw file data
:param disposition: content-disposition (if any)
"""
def __init__(
self,
filename=None,
content_type=None,
data=None,
disposition=None,
headers=None,
):
self.filename = filename
self.content_type = content_type
self.data = data
self.disposition = disposition or 'attachment'
self.headers = headers or {}
class Message(object):
"""Encapsulates an email message.
:param subject: email subject header
:param recipients: list of email addresses
:param body: plain text message
:param html: HTML message
:param sender: email sender address, or **MAIL_DEFAULT_SENDER** by default
:param cc: CC list
:param bcc: BCC list
:param attachments: list of Attachment instances
:param reply_to: reply-to address
:param date: send date
:param charset: message character set
:param extra_headers: A dictionary of additional headers for the message
:param mail_options: A list of ESMTP options to be used in MAIL FROM command
:param rcpt_options: A list of ESMTP options to be used in RCPT commands
"""
def __init__(
self,
subject='',
recipients=None,
body=None,
html=None,
sender=None,
cc=None,
bcc=None,
attachments=None,
reply_to=None,
date=None,
charset=None,
extra_headers=None,
mail_options=None,
rcpt_options=None,
):
sender = sender or current_app.extensions['mail'].default_sender
if isinstance(sender, tuple):
sender = "%s <%s>" % sender
self.recipients = recipients or []
self.subject = subject
self.sender = sender
self.reply_to = reply_to
self.cc = cc or []
self.bcc = bcc or []
self.body = body
self.html = html
self.date = date
self.msgId = make_msgid()
self.charset = charset
self.extra_headers = extra_headers
self.mail_options = mail_options or []
self.rcpt_options = rcpt_options or []
self.attachments = attachments or []
@property
def send_to(self):
return set(self.recipients) | set(self.bcc or ()) | set(self.cc or ())
def _mimetext(self, text, subtype='plain'):
"""Creates a MIMEText object with the given subtype (default: 'plain')
If the text is unicode, the utf-8 charset is used.
"""
charset = self.charset or 'utf-8'
return MIMEText(text, _subtype=subtype, _charset=charset)
def _message(self): # noqa: C901
"""Creates the email"""
ascii_attachments = current_app.extensions['mail'].ascii_attachments
encoding = self.charset or 'utf-8'
attachments = self.attachments or []
if len(attachments) == 0 and not self.html:
# No html content and zero attachments means plain text
msg = self._mimetext(self.body)
elif len(attachments) > 0 and not self.html:
# No html and at least one attachment means multipart
msg = MIMEMultipart()
msg.attach(self._mimetext(self.body))
else:
# Anything else
msg = MIMEMultipart()
alternative = MIMEMultipart('alternative')
alternative.attach(self._mimetext(self.body, 'plain'))
alternative.attach(self._mimetext(self.html, 'html'))
msg.attach(alternative)
if self.subject:
msg['Subject'] = sanitize_subject(force_text(self.subject), encoding)
msg['From'] = sanitize_address(self.sender, encoding)
msg['To'] = ', '.join(list(set(sanitize_addresses(self.recipients, encoding))))
msg['Date'] = formatdate(self.date, localtime=True)
# see RFC 5322 section 3.6.4.
msg['Message-ID'] = self.msgId
if self.cc:
msg['Cc'] = ', '.join(list(set(sanitize_addresses(self.cc, encoding))))
if self.reply_to:
msg['Reply-To'] = sanitize_address(self.reply_to, encoding)
if self.extra_headers:
for k, v in self.extra_headers.items():
msg[k] = v
SPACES = re.compile(r'[\s]+', re.UNICODE)
for attachment in attachments:
f = MIMEBase(*attachment.content_type.split('/'))
f.set_payload(attachment.data)
encode_base64(f)
filename = attachment.filename
if filename and ascii_attachments:
# force filename to ascii
filename = unicodedata.normalize('NFKD', filename)
filename = filename.encode('ascii', 'ignore').decode('ascii')
filename = SPACES.sub(u' ', filename).strip()
try:
filename and filename.encode('ascii')
except UnicodeEncodeError:
if not PY3:
filename = filename.encode('utf8')
filename = ('UTF8', '', filename)
f.add_header(
'Content-Disposition', attachment.disposition, filename=filename
)
for key, value in attachment.headers:
f.add_header(key, value)
msg.attach(f)
if message_policy:
msg.policy = message_policy
return msg
def as_string(self):
return self._message().as_string()
def as_bytes(self):
# if PY34:
# return self._message().as_bytes()
# else: # fallback for old Python (3) versions
# return self._message().as_string().encode(self.charset or 'utf-8')
return self._message().as_string().encode(self.charset or 'utf-8')
def __str__(self):
return self.as_string()
def __bytes__(self):
return self.as_bytes()
def has_bad_headers(self):
"""Checks for bad headers i.e. newlines in subject, sender or recipients.
RFC5322: Allows multiline CRLF with trailing whitespace (FWS) in headers
"""
headers = [self.sender, self.reply_to] + self.recipients
for header in headers:
if _has_newline(header):
return True
if self.subject:
if _has_newline(self.subject):
for linenum, line in enumerate(self.subject.split('\r\n')):
if not line:
return True
if linenum > 0 and line[0] not in '\t ':
return True
if _has_newline(line):
return True
if len(line.strip()) == 0:
return True
return False
def is_bad_headers(self):
from warnings import warn
msg = (
'is_bad_headers is deprecated, use the new has_bad_headers method instead.'
)
warn(DeprecationWarning(msg), stacklevel=1)
return self.has_bad_headers()
def send(self, connection):
"""Verifies and sends the message."""
connection.send(self)
def add_recipient(self, recipient):
"""Adds another recipient to the message.
:param recipient: email address of recipient.
"""
self.recipients.append(recipient)
def attach(
self,
filename=None,
content_type=None,
data=None,
disposition=None,
headers=None,
):
"""Adds an attachment to the message.
:param filename: filename of attachment
:param content_type: file mimetype
:param data: the raw file data
:param disposition: content-disposition (if any)
"""
self.attachments.append(
Attachment(filename, content_type, data, disposition, headers)
)
class _MailMixin(object):
@contextmanager
def record_messages(self):
"""Records all messages. Use in unit tests for example::
with mail.record_messages() as outbox:
response = app.test_client().get("/email-sending-view/")
assert len(outbox) == 1
assert outbox[0].subject == "testing"
You must have blinker installed in order to use this feature.
:versionadded: 0.4
"""
if not email_dispatched:
raise RuntimeError("blinker must be installed")
outbox = []
def _record(message, app):
outbox.append(message)
email_dispatched.connect(_record)
try:
yield outbox
finally:
email_dispatched.disconnect(_record)
def send(self, message):
"""Sends a single message instance. If TESTING is True the message will
not actually be sent.
:param message: a Message instance.
"""
with self.connect() as connection:
message.send(connection)
def send_message(self, *args, **kwargs):
"""Shortcut for send(msg).
Takes same arguments as Message constructor.
:versionadded: 0.3.5
"""
self.send(Message(*args, **kwargs))
def connect(self):
"""Opens a connection to the mail host."""
app = getattr(self, "app", None) or current_app
try:
return Connection(app.extensions['mail'])
except KeyError:
raise RuntimeError(
"The curent application was not configured with Flask-Mail"
)
class _Mail(_MailMixin):
def __init__(
self,
server,
username,
password,
port,
use_tls,
use_ssl,
default_sender,
debug,
max_emails,
suppress,
ascii_attachments=False,
):
self.server = server
self.username = username
self.password = password
self.port = port
self.use_tls = use_tls
self.use_ssl = use_ssl
self.default_sender = default_sender
self.debug = debug
self.max_emails = max_emails
self.suppress = suppress
self.ascii_attachments = ascii_attachments
class Mail(_MailMixin):
"""Manages email messaging
:param app: Flask instance
"""
def __init__(self, app=None):
self.app = app
if app is not None:
self.state = self.init_app(app)
else:
self.state = None
def init_mail(self, config, debug=False, testing=False):
return _Mail(
config.get('MAIL_SERVER', '127.0.0.1'),
config.get('MAIL_USERNAME'),
config.get('MAIL_PASSWORD'),
config.get('MAIL_PORT', 25),
config.get('MAIL_USE_TLS', False),
config.get('MAIL_USE_SSL', False),
config.get('MAIL_DEFAULT_SENDER'),
int(config.get('MAIL_DEBUG', debug)),
config.get('MAIL_MAX_EMAILS'),
config.get('MAIL_SUPPRESS_SEND', testing),
config.get('MAIL_ASCII_ATTACHMENTS', False),
)
def init_app(self, app):
"""Initializes your mail settings from the application settings.
You can use this if you want to set up your Mail instance
at configuration time.
:param app: Flask application instance
"""
state = self.init_mail(app.config, app.debug, app.testing)
# register extension with app
app.extensions = getattr(app, 'extensions', {})
app.extensions['mail'] = state
return state
def __getattr__(self, name):
return getattr(self.state, name, None)
signals = blinker.Namespace()
email_dispatched = signals.signal(
"email-dispatched",
doc="""
Signal sent when an email is dispatched. This signal will also be sent
in testing mode, even though the email will not actually be sent.
""",
)
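For reference, a minimal usage sketch of the vendored Flask-Mail module above. The SMTP host, addresses and configuration values are illustrative assumptions, not taken from this repository; MAIL_SUPPRESS_SEND keeps the example from opening a real SMTP connection.
from flask import Flask
from ereuse_devicehub.mail.flask_mail import Mail, Message
app = Flask(__name__)
app.config.update(
    MAIL_SERVER='smtp.example.com',        # hypothetical host
    MAIL_PORT=25,
    MAIL_DEFAULT_SENDER='noreply@example.com',
    MAIL_SUPPRESS_SEND=True,               # build but do not deliver the message
)
mail = Mail(app)                           # registers app.extensions['mail']
with app.app_context():
    msg = Message(subject='testing', recipients=['user@example.com'], body='hello')
    mail.send(msg)                         # no real SMTP connection is made while suppressed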

View File

@ -1,31 +0,0 @@
import logging
from smtplib import SMTPException
from threading import Thread
from flask import current_app as app
from ereuse_devicehub.mail.flask_mail import Message
logger = logging.getLogger(__name__)
def _send_async_email(app, msg):
with app.app_context():
try:
app.mail.send(msg)
except SMTPException:
logger.exception("An error occurred while sending the email")
def send_email(
subject, recipients, text_body, sender=None, cc=None, bcc=None, html_body=None
):
msg = Message(subject, sender=sender, recipients=recipients, cc=cc, bcc=bcc)
msg.body = text_body
if html_body:
msg.html = html_body
Thread(target=_send_async_email, args=(app._get_current_object(), msg)).start()
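A hedged sketch of a call site for the asynchronous helper above; the module path, function name, subject and addresses are assumptions for illustration, and the call must run inside an active application context with app.mail configured.
from ereuse_devicehub.mail.sends import send_email  # assumed module path
def notify_registration():
    # Hypothetical notification; send_email() spawns a thread and returns immediately.
    send_email(
        subject='Device registered',
        recipients=['user@example.com'],
        text_body='Your device was registered.',
        html_body='<p>Your device was registered.</p>',
    )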

View File

@ -1,33 +1,14 @@
from marshmallow.fields import missing_
from teal.db import SQLAlchemy
from teal.marshmallow import NestedOn as TealNestedOn
from ereuse_devicehub.db import db
from ereuse_devicehub.teal.db import SQLAlchemy
from ereuse_devicehub.teal.marshmallow import NestedOn as TealNestedOn
class NestedOn(TealNestedOn):
__doc__ = TealNestedOn.__doc__
def __init__(
self,
nested,
polymorphic_on='type',
db: SQLAlchemy = db,
collection_class=list,
default=missing_,
exclude=tuple(),
only_query: str = None,
only=None,
**kwargs,
):
super().__init__(
nested,
polymorphic_on,
db,
collection_class,
default,
exclude,
only_query,
only,
**kwargs,
)
def __init__(self, nested, polymorphic_on='type', db: SQLAlchemy = db, collection_class=list,
default=missing_, exclude=tuple(), only_query: str = None, only=None, **kwargs):
super().__init__(nested, polymorphic_on, db, collection_class, default, exclude,
only_query, only, **kwargs)
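A hedged sketch of how a NestedOn field might be declared inside a marshmallow schema; the import path, the ComputerSchema class and the 'Component' nested name are assumptions for illustration, not taken from this diff.
from marshmallow import Schema
from ereuse_devicehub.marshmallow import NestedOn  # assumed import path
class ComputerSchema(Schema):
    # Polymorphic nested field: each nested dict is dispatched to a concrete
    # schema according to its 'type' key (polymorphic_on defaults to 'type').
    components = NestedOn('Component', many=True)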

View File

@ -1,64 +0,0 @@
from flask import flash, session
DEBUG = 10
INFO = 20
SUCCESS = 25
WARNING = 30
ERROR = 40
DEFAULT_LEVELS = {
'DEBUG': DEBUG,
'INFO': INFO,
'SUCCESS': SUCCESS,
'WARNING': WARNING,
'ERROR': ERROR,
}
DEFAULT_TAGS = {
DEBUG: 'light',
INFO: 'info',
SUCCESS: 'success',
WARNING: 'warning',
ERROR: 'danger',
}
DEFAULT_ICONS = {
DEFAULT_TAGS[DEBUG]: 'tools',
DEFAULT_TAGS[INFO]: 'info-circle',
DEFAULT_TAGS[SUCCESS]: 'check-circle',
DEFAULT_TAGS[WARNING]: 'exclamation-triangle',
DEFAULT_TAGS[ERROR]: 'exclamation-octagon',
}
def add_message(level, message):
level_tag = DEFAULT_TAGS[level]
if '_message_icon' not in session:
session['_message_icon'] = DEFAULT_ICONS
flash(message, level_tag)
def debug(message):
"""Add a message with the ``DEBUG`` level."""
add_message(DEBUG, message)
def info(message):
"""Add a message with the ``INFO`` level."""
add_message(INFO, message)
def success(message):
"""Add a message with the ``SUCCESS`` level."""
add_message(SUCCESS, message)
def warning(message):
"""Add a message with the ``WARNING`` level."""
add_message(WARNING, message)
def error(message):
"""Add a message with the ``ERROR`` level."""
add_message(ERROR, message)
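A short sketch of using these flash-level helpers from a view; the import path and the surrounding function are assumptions, and flash() requires an active request context.
from ereuse_devicehub import messages  # assumed import path for the module above
def save_device_view():
    try:
        ...  # persist the changes
        messages.success('Device saved.')
    except Exception:
        messages.error('Device could not be saved.')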

View File

@ -9,7 +9,7 @@ from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
import citext
from ereuse_devicehub import teal
import teal
${imports if imports else ""}
# revision identifiers, used by Alembic.

View File

@ -1,124 +0,0 @@
"""transfer
Revision ID: 054a3aea9f08
Revises: 926865284103
Create Date: 2022-05-27 11:07:18.245322
"""
from uuid import uuid4
import citext
import sqlalchemy as sa
from alembic import context, op
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '054a3aea9f08'
down_revision = '926865284103'
branch_labels = None
depends_on = None
def get_inv():
INV = context.get_x_argument(as_dictionary=True).get('inventory')
if not INV:
raise ValueError("Inventory value is not specified")
return INV
def upgrade_datas():
sql = f'select user_from_id, user_to_id, lot_id, code from {get_inv()}.trade where confirm=False'
con = op.get_bind()
sql_phantom = 'select id from common.user where phantom=True'
phantoms = [x[0] for x in con.execute(sql_phantom)]
for ac in con.execute(sql):
id = uuid4()
user_from = ac.user_from_id
user_to = ac.user_to_id
lot = ac.lot_id
code = ac.code
columns = '(id, user_from_id, user_to_id, lot_id, code)'
values = f'(\'{id}\', \'{user_from}\', \'{user_to}\', \'{lot}\', \'{code}\')'
if user_to not in phantoms:
columns = '(id, user_to_id, lot_id, code)'
values = f'(\'{id}\', \'{user_to}\', \'{lot}\', \'{code}\')'
if user_from not in phantoms:
columns = '(id, user_from_id, lot_id, code)'
values = f'(\'{id}\', \'{user_from}\', \'{lot}\', \'{code}\')'
new_transfer = f'insert into {get_inv()}.transfer {columns} values {values}'
op.execute(new_transfer)
def upgrade():
# creating transfer table
op.create_table(
'transfer',
sa.Column(
'updated',
sa.TIMESTAMP(timezone=True),
server_default=sa.text('CURRENT_TIMESTAMP'),
nullable=False,
),
sa.Column(
'created',
sa.TIMESTAMP(timezone=True),
server_default=sa.text('CURRENT_TIMESTAMP'),
nullable=False,
),
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('code', citext.CIText(), nullable=False),
sa.Column(
'description',
citext.CIText(),
nullable=True,
comment='A comment about the action.',
),
sa.Column('date', sa.TIMESTAMP(timezone=True), nullable=True),
sa.Column('lot_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('user_to_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('user_from_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.ForeignKeyConstraint(['lot_id'], [f'{get_inv()}.lot.id']),
sa.ForeignKeyConstraint(['user_from_id'], ['common.user.id']),
sa.ForeignKeyConstraint(['user_to_id'], ['common.user.id']),
sa.PrimaryKeyConstraint('id'),
schema=f'{get_inv()}',
)
# creating index
op.create_index(
op.f('ix_transfer_created'),
'transfer',
['created'],
unique=False,
schema=f'{get_inv()}',
)
op.create_index(
op.f('ix_transfer_updated'),
'transfer',
['updated'],
unique=False,
schema=f'{get_inv()}',
)
op.create_index(
'ix_transfer_id',
'transfer',
['id'],
unique=False,
postgresql_using='hash',
schema=f'{get_inv()}',
)
upgrade_datas()
def downgrade():
op.drop_index(
op.f('ix_transfer_created'), table_name='transfer', schema=f'{get_inv()}'
)
op.drop_index(
op.f('ix_transfer_updated'), table_name='transfer', schema=f'{get_inv()}'
)
op.drop_index(op.f('ix_transfer_id'), table_name='transfer', schema=f'{get_inv()}')
op.drop_table('transfer', schema=f'{get_inv()}')
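To make the data step concrete: upgrade_datas() copies every unconfirmed trade into the new transfer table by building one plain INSERT per row, keeping only the columns for users that are not phantom accounts. A minimal sketch of the string it produces for one hypothetical row (all values made up; the schema name normally arrives through Alembic's -x inventory=<name> argument, which get_inv() reads):
from uuid import uuid4
schema = 'dbtest'                                  # hypothetical inventory name
transfer_id, user_from, lot = uuid4(), uuid4(), uuid4()
columns = '(id, user_from_id, lot_id, code)'       # user_to_id omitted in this sketch (e.g. a phantom user)
values = f"('{transfer_id}', '{user_from}', '{lot}', 'TR-0001')"
print(f'insert into {schema}.transfer {columns} values {values}')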

View File

@ -10,7 +10,7 @@ from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
import citext
from ereuse_devicehub import teal
import teal
# revision identifiers, used by Alembic.
@ -26,32 +26,11 @@ def get_inv():
raise ValueError("Inventory value is not specified")
return INV
def upgrade():
op.alter_column(
'test_data_storage',
'current_pending_sector_count',
type_=sa.Integer(),
schema=f'{get_inv()}',
)
op.alter_column(
'test_data_storage',
'offline_uncorrectable',
type_=sa.Integer(),
schema=f'{get_inv()}',
)
op.alter_column('test_data_storage', 'current_pending_sector_count', type_=sa.Integer(), schema=f'{get_inv()}')
op.alter_column('test_data_storage', 'offline_uncorrectable', type_=sa.Integer(), schema=f'{get_inv()}')
def downgrade():
op.alter_column(
'test_data_storage',
'current_pending_sector_count',
type_=sa.SmallInteger(),
schema=f'{get_inv()}',
)
op.alter_column(
'test_data_storage',
'offline_uncorrectable',
type_=sa.SmallInteger(),
schema=f'{get_inv()}',
)
op.alter_column('test_data_storage', 'current_pending_sector_count', type_=sa.SmallInteger(), schema=f'{get_inv()}')
op.alter_column('test_data_storage', 'offline_uncorrectable', type_=sa.SmallInteger(), schema=f'{get_inv()}')

View File

@ -1,42 +0,0 @@
"""change firewire
Revision ID: 17288b2a7440
Revises: 8571fb32c912
Create Date: 2022-03-29 11:49:39.270791
"""
import citext
import sqlalchemy as sa
from alembic import context, op
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '17288b2a7440'
down_revision = '8571fb32c912'
branch_labels = None
depends_on = None
def get_inv():
INV = context.get_x_argument(as_dictionary=True).get('inventory')
if not INV:
raise ValueError("Inventory value is not specified")
return INV
def upgrade():
op.add_column(
'computer',
sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=True),
schema=f'{get_inv()}',
)
op.add_column(
'snapshot',
sa.Column('wbid', citext.CIText(), nullable=True),
schema=f'{get_inv()}',
)
def downgrade():
op.drop_column('computer', 'uuid', schema=f'{get_inv()}')
op.drop_column('snapshot', 'wbid', schema=f'{get_inv()}')

View File

@ -11,7 +11,7 @@ from sqlalchemy.dialects import postgresql
import sqlalchemy as sa
import sqlalchemy_utils
import citext
from ereuse_devicehub import teal
import teal
from ereuse_devicehub.resources.enums import SessionType

View File

@ -1,56 +0,0 @@
"""add snapshot errors
Revision ID: 23d9e7ebbd7d
Revises: 17288b2a7440
Create Date: 2022-04-04 19:27:48.675387
"""
import citext
import sqlalchemy as sa
from alembic import context, op
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '23d9e7ebbd7d'
down_revision = '17288b2a7440'
branch_labels = None
depends_on = None
def get_inv():
INV = context.get_x_argument(as_dictionary=True).get('inventory')
if not INV:
raise ValueError("Inventory value is not specified")
return INV
def upgrade():
op.create_table(
'snapshot_errors',
sa.Column(
'updated',
sa.TIMESTAMP(timezone=True),
server_default=sa.text('CURRENT_TIMESTAMP'),
nullable=False,
comment='The last time Devicehub recorded a change for \n this thing.\n ',
),
sa.Column(
'created',
sa.TIMESTAMP(timezone=True),
server_default=sa.text('CURRENT_TIMESTAMP'),
nullable=False,
comment='When Devicehub created this.',
),
sa.Column('id', sa.BigInteger(), nullable=False),
sa.Column('description', citext.CIText(), nullable=False),
sa.Column('snapshot_uuid', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('severity', sa.SmallInteger(), nullable=False),
sa.PrimaryKeyConstraint('id'),
schema=f'{get_inv()}',
)
op.execute(f"CREATE SEQUENCE {get_inv()}.snapshot_errors_seq START 1;")
def downgrade():
op.drop_table('snapshot_errors', schema=f'{get_inv()}')
op.execute(f"DROP SEQUENCE {get_inv()}.snapshot_errors_seq;")

View File

@ -1,78 +0,0 @@
"""add owner to placeholder
Revision ID: d7ea9a3b2da1
Revises: 2b90b41a556a
Create Date: 2022-07-27 14:40:15.513820
"""
import citext
import sqlalchemy as sa
from alembic import context, op
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '2b90b41a556a'
down_revision = '3e3a67f62972'
branch_labels = None
depends_on = None
def get_inv():
INV = context.get_x_argument(as_dictionary=True).get('inventory')
if not INV:
raise ValueError("Inventory value is not specified")
return INV
def upgrade_data():
con = op.get_bind()
sql = f"select {get_inv()}.placeholder.id, {get_inv()}.device.owner_id from {get_inv()}.placeholder"
sql += f" join {get_inv()}.device on {get_inv()}.device.id={get_inv()}.placeholder.device_id;"
for c in con.execute(sql):
id_placeholder = c.id
id_owner = c.owner_id
sql_update = f"update {get_inv()}.placeholder set owner_id='{id_owner}', is_abstract=False where id={id_placeholder};"
con.execute(sql_update)
def upgrade():
op.add_column(
'placeholder',
sa.Column('is_abstract', sa.Boolean(), nullable=True),
schema=f'{get_inv()}',
)
op.add_column(
'placeholder',
sa.Column('components', citext.CIText(), nullable=True),
schema=f'{get_inv()}',
)
op.add_column(
'placeholder',
sa.Column('owner_id', postgresql.UUID(), nullable=True),
schema=f'{get_inv()}',
)
op.create_foreign_key(
"fk_placeholder_owner_id_user_id",
"placeholder",
"user",
["owner_id"],
["id"],
ondelete="SET NULL",
source_schema=f'{get_inv()}',
referent_schema='common',
)
upgrade_data()
def downgrade():
op.drop_constraint(
"fk_placeholder_owner_id_user_id",
"placeholder",
type_="foreignkey",
schema=f'{get_inv()}',
)
op.drop_column('placeholder', 'owner_id', schema=f'{get_inv()}')
op.drop_column('placeholder', 'is_abstract', schema=f'{get_inv()}')
op.drop_column('placeholder', 'components', schema=f'{get_inv()}')

View File

@ -1,52 +0,0 @@
"""share lot
Revision ID: 2f2ef041483a
Revises: ac476b60d952
Create Date: 2023-04-26 16:04:21.560888
"""
import sqlalchemy as sa
from alembic import context, op
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '2f2ef041483a'
down_revision = 'ac476b60d952'
branch_labels = None
depends_on = None
def get_inv():
INV = context.get_x_argument(as_dictionary=True).get('inventory')
if not INV:
raise ValueError("Inventory value is not specified")
return INV
def upgrade():
op.create_table(
'share_lot',
sa.Column(
'created',
sa.TIMESTAMP(timezone=True),
server_default=sa.text('CURRENT_TIMESTAMP'),
nullable=False,
),
sa.Column(
'updated',
sa.TIMESTAMP(timezone=True),
server_default=sa.text('CURRENT_TIMESTAMP'),
nullable=False,
),
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('user_to_id', postgresql.UUID(as_uuid=True), nullable=True),
sa.Column('lot_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.ForeignKeyConstraint(['user_to_id'], ['common.user.id']),
sa.ForeignKeyConstraint(['lot_id'], [f'{get_inv()}.lot.id']),
sa.PrimaryKeyConstraint('id'),
schema=f'{get_inv()}',
)
def downgrade():
op.drop_table('share_lot', schema=f'{get_inv()}')

View File

@ -5,12 +5,12 @@ Revises: bf600ca861a4
Create Date: 2020-12-16 11:45:13.339624
"""
import citext
import sqlalchemy as sa
import sqlalchemy_utils
from alembic import context
from alembic import op
from ereuse_devicehub import teal
import sqlalchemy as sa
import sqlalchemy_utils
import citext
import teal
# revision identifiers, used by Alembic.

View File

@ -5,14 +5,15 @@ Revises: 51439cf24be8
Create Date: 2021-06-15 14:38:59.931818
"""
import teal
import citext
import sqlalchemy as sa
from ereuse_devicehub import teal
from alembic import op
from alembic import context
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '3a3601ac8224'
down_revision = '51439cf24be8'
@ -26,143 +27,108 @@ def get_inv():
raise ValueError("Inventory value is not specified")
return INV
def upgrade():
op.create_table(
'trade_document',
sa.Column(
'updated',
sa.TIMESTAMP(timezone=True),
server_default=sa.text('CURRENT_TIMESTAMP'),
nullable=False,
comment='The last time Devicehub recorded a change for \n this thing.\n ',
),
sa.Column(
'created',
sa.TIMESTAMP(timezone=True),
server_default=sa.text('CURRENT_TIMESTAMP'),
nullable=False,
comment='When Devicehub created this.',
),
sa.Column(
'id',
sa.BigInteger(),
nullable=False,
comment='The identifier of the device for this database. Used only\n internally for software; users should not use this.\n ',
),
sa.Column(
'date',
sa.DateTime(),
nullable=True,
comment='The date of the document; some documents need to have a date\n ',
),
sa.Column(
'id_document',
citext.CIText(),
nullable=True,
comment='The id of a document, such as an invoice, so that related documents can be linked.',
),
sa.Column(
'description',
citext.CIText(),
nullable=True,
comment='A description of the document.',
),
sa.Column('owner_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('lot_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column(
'file_name',
citext.CIText(),
nullable=True,
comment='This is the name of the file when the user uploads the document.',
),
sa.Column(
'file_hash',
citext.CIText(),
nullable=True,
comment='This is the hash of the file, as produced by the frontend.',
),
sa.Column(
'url',
citext.CIText(),
teal.db.URL(),
nullable=True,
comment='This is the url where the document resides.',
),
sa.ForeignKeyConstraint(
['lot_id'],
[f'{get_inv()}.lot.id'],
),
sa.ForeignKeyConstraint(
['owner_id'],
['common.user.id'],
),
sa.PrimaryKeyConstraint('id'),
schema=f'{get_inv()}',
op.create_table('trade_document',
sa.Column(
'updated',
sa.TIMESTAMP(timezone=True),
server_default=sa.text('CURRENT_TIMESTAMP'),
nullable=False,
comment='The last time Devicehub recorded a change for \n this thing.\n '
),
sa.Column(
'created',
sa.TIMESTAMP(timezone=True),
server_default=sa.text('CURRENT_TIMESTAMP'),
nullable=False,
comment='When Devicehub created this.'
),
sa.Column(
'id',
sa.BigInteger(),
nullable=False,
comment='The identifier of the device for this database. Used only\n internally for software; users should not use this.\n '
),
sa.Column(
'date',
sa.DateTime(),
nullable=True,
comment='The date of the document; some documents need to have a date\n '
),
sa.Column(
'id_document',
citext.CIText(),
nullable=True,
comment='The id of a document, such as an invoice, so that related documents can be linked.'
),
sa.Column(
'description',
citext.CIText(),
nullable=True,
comment='A description of the document.'
),
sa.Column(
'owner_id',
postgresql.UUID(as_uuid=True),
nullable=False
),
sa.Column(
'lot_id',
postgresql.UUID(as_uuid=True),
nullable=False
),
sa.Column(
'file_name',
citext.CIText(),
nullable=True,
comment='This is the name of the file when the user uploads the document.'
),
sa.Column(
'file_hash',
citext.CIText(),
nullable=True,
comment='This is the hash of the file, as produced by the frontend.'
),
sa.Column(
'url',
citext.CIText(),
teal.db.URL(),
nullable=True,
comment='This is the url where the document resides.'
),
sa.ForeignKeyConstraint(['lot_id'], [f'{get_inv()}.lot.id'],),
sa.ForeignKeyConstraint(['owner_id'], ['common.user.id'],),
sa.PrimaryKeyConstraint('id'),
schema=f'{get_inv()}'
)
# Action document table
op.create_table(
'action_trade_document',
sa.Column('document_id', sa.BigInteger(), nullable=False),
sa.Column('action_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.ForeignKeyConstraint(
['action_id'],
[f'{get_inv()}.action.id'],
),
sa.ForeignKeyConstraint(
['document_id'],
[f'{get_inv()}.trade_document.id'],
),
sa.PrimaryKeyConstraint('document_id', 'action_id'),
schema=f'{get_inv()}',
)
op.create_table('action_trade_document',
sa.Column('document_id', sa.BigInteger(), nullable=False),
sa.Column('action_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.ForeignKeyConstraint(['action_id'], [f'{get_inv()}.action.id'], ),
sa.ForeignKeyConstraint(['document_id'], [f'{get_inv()}.trade_document.id'], ),
sa.PrimaryKeyConstraint('document_id', 'action_id'),
schema=f'{get_inv()}'
)
op.create_index(
'document_id',
'trade_document',
['id'],
unique=False,
postgresql_using='hash',
schema=f'{get_inv()}',
)
op.create_index(
op.f('ix_trade_document_created'),
'trade_document',
['created'],
unique=False,
schema=f'{get_inv()}',
)
op.create_index(
op.f('ix_trade_document_updated'),
'trade_document',
['updated'],
unique=False,
schema=f'{get_inv()}',
)
op.create_index('document_id', 'trade_document', ['id'], unique=False, postgresql_using='hash', schema=f'{get_inv()}')
op.create_index(op.f('ix_trade_document_created'), 'trade_document', ['created'], unique=False, schema=f'{get_inv()}')
op.create_index(op.f('ix_trade_document_updated'), 'trade_document', ['updated'], unique=False, schema=f'{get_inv()}')
op.create_table(
'confirm_document',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('user_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('action_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.ForeignKeyConstraint(
['id'],
[f'{get_inv()}.action.id'],
),
sa.ForeignKeyConstraint(
['action_id'],
[f'{get_inv()}.action.id'],
),
sa.ForeignKeyConstraint(
['user_id'],
['common.user.id'],
),
sa.PrimaryKeyConstraint('id'),
schema=f'{get_inv()}',
)
op.create_table('confirm_document',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('user_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('action_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.ForeignKeyConstraint(['id'], [f'{get_inv()}.action.id'], ),
sa.ForeignKeyConstraint(['action_id'], [f'{get_inv()}.action.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['common.user.id'], ),
sa.PrimaryKeyConstraint('id'),
schema=f'{get_inv()}'
)
def downgrade():
op.drop_table('action_trade_document', schema=f'{get_inv()}')
op.drop_table('confirm_document', schema=f'{get_inv()}')
op.drop_table('trade_document', schema=f'{get_inv()}')

View File

@ -1,66 +0,0 @@
"""placeholder log
Revision ID: 3e3a67f62972
Revises: aeca9fb50cc6
Create Date: 2022-07-06 18:23:54.267003
"""
import citext
import sqlalchemy as sa
from alembic import context, op
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '3e3a67f62972'
down_revision = 'aeca9fb50cc6'
branch_labels = None
depends_on = None
def get_inv():
INV = context.get_x_argument(as_dictionary=True).get('inventory')
if not INV:
raise ValueError("Inventory value is not specified")
return INV
def upgrade():
op.create_table(
'placeholders_log',
sa.Column(
'updated',
sa.TIMESTAMP(timezone=True),
server_default=sa.text('CURRENT_TIMESTAMP'),
nullable=False,
comment='The last time Devicehub recorded a change for \n this thing.\n ',
),
sa.Column(
'created',
sa.TIMESTAMP(timezone=True),
server_default=sa.text('CURRENT_TIMESTAMP'),
nullable=False,
comment='When Devicehub created this.',
),
sa.Column('id', sa.BigInteger(), nullable=False),
sa.Column('source', citext.CIText(), nullable=True),
sa.Column('type', citext.CIText(), nullable=True),
sa.Column('severity', sa.SmallInteger(), nullable=False),
sa.Column('placeholder_id', sa.BigInteger(), nullable=True),
sa.Column('owner_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.ForeignKeyConstraint(
['placeholder_id'],
[f'{get_inv()}.placeholder.id'],
),
sa.ForeignKeyConstraint(
['owner_id'],
['common.user.id'],
),
sa.PrimaryKeyConstraint('id'),
schema=f'{get_inv()}',
)
op.execute("CREATE SEQUENCE placeholders_log_seq START 1;")
def downgrade():
op.drop_table('placeholders_log', schema=f'{get_inv()}')
op.execute("DROP SEQUENCE placeholders_log_seq;")

View File

@ -1,39 +0,0 @@
"""device other
Revision ID: 410aadae7652
Revises: d65745749e34
Create Date: 2022-11-29 12:00:40.272121
"""
import sqlalchemy as sa
from alembic import context, op
# revision identifiers, used by Alembic.
revision = '410aadae7652'
down_revision = 'd65745749e34'
branch_labels = None
depends_on = None
def get_inv():
INV = context.get_x_argument(as_dictionary=True).get('inventory')
if not INV:
raise ValueError("Inventory value is not specified")
return INV
def upgrade():
op.create_table(
'other',
sa.Column('id', sa.BigInteger(), nullable=False),
sa.ForeignKeyConstraint(
['id'],
[f'{get_inv()}.device.id'],
),
sa.PrimaryKeyConstraint('id'),
schema=f'{get_inv()}',
)
def downgrade():
op.drop_table('other', schema=f'{get_inv()}')

Some files were not shown because too many files have changed in this diff.