fixed tests
This commit is contained in:
parent dd081532ba
commit ae221dadd1
@@ -1,2 +1,2 @@
Type,Chassis,Serial Number,Model,Manufacturer,Registered in,Physical state,Trading state,Price,Processor,RAM (MB),Data Storage Size (MB),Rate,Range,Processor Rate,Processor Range,RAM Rate,RAM Range,Data Storage Rate,Data Storage Range
Desktop,Microtower,d1s,d1ml,d1mr,Tue Jul 2 10:35:10 2019,,,,p1ml,0,0,1.0,Very low,1.0,Very low,1.0,Very low,1.0,Very low
Type;Chassis;Serial Number;Model;Manufacturer;Registered in;Physical state;Allocate state;Lifecycle state;Price;Processor;RAM (MB);Data Storage Size (MB);Rate;Range;Processor Rate;Processor Range;RAM Rate;RAM Range;Data Storage Rate;Data Storage Range
Desktop;Microtower;d1s;d1ml;d1mr;Mon May 16 09:34:22 2022;;;;;p1ml;0;0;1.0;Very low;1.0;Very low;1.0;Very low;1.0;Very low
File diff suppressed because one or more lines are too long (3 files)
tests/test_action.py (1193 lines changed)
File diff suppressed because it is too large
@@ -1,30 +1,30 @@
import csv
|
||||
import hashlib
|
||||
from datetime import datetime
|
||||
from io import StringIO, BytesIO
|
||||
from io import BytesIO, StringIO
|
||||
from pathlib import Path
|
||||
from flask import url_for
|
||||
|
||||
import pytest
|
||||
from werkzeug.exceptions import Unauthorized
|
||||
import teal.marshmallow
|
||||
from ereuse_utils.test import ANY
|
||||
from flask import url_for
|
||||
from werkzeug.exceptions import Unauthorized
|
||||
|
||||
from ereuse_devicehub import auth
|
||||
from ereuse_devicehub.client import Client, UserClient
|
||||
from ereuse_devicehub.db import db
|
||||
from ereuse_devicehub.devicehub import Devicehub
|
||||
from ereuse_devicehub.resources.user.models import Session
|
||||
from ereuse_devicehub.resources.action.models import Snapshot, Allocate, Live
|
||||
from ereuse_devicehub.resources.documents import documents
|
||||
from ereuse_devicehub.resources.tradedocument.models import TradeDocument
|
||||
from ereuse_devicehub.resources.action.models import Allocate, Live, Snapshot
|
||||
from ereuse_devicehub.resources.device import models as d
|
||||
from ereuse_devicehub.resources.documents import documents
|
||||
from ereuse_devicehub.resources.enums import SessionType
|
||||
from ereuse_devicehub.resources.hash_reports import ReportHash
|
||||
from ereuse_devicehub.resources.lot.models import Lot
|
||||
from ereuse_devicehub.resources.tag.model import Tag
|
||||
from ereuse_devicehub.resources.hash_reports import ReportHash
|
||||
from ereuse_devicehub.resources.enums import SessionType
|
||||
from ereuse_devicehub.db import db
|
||||
from ereuse_devicehub.resources.tradedocument.models import TradeDocument
|
||||
from ereuse_devicehub.resources.user.models import Session
|
||||
from tests import conftest
|
||||
from tests.conftest import file, yaml2json, json_encode
|
||||
from tests.conftest import file, json_encode, yaml2json
|
||||
|
||||
|
||||
@pytest.mark.mvp
|
||||
|
@@ -33,27 +33,30 @@ def test_erasure_certificate_public_one(user: UserClient, client: Client):
s = file('erase-sectors.snapshot')
|
||||
snapshot, _ = user.post(s, res=Snapshot)
|
||||
|
||||
doc, response = user.get(res=documents.DocumentDef.t,
|
||||
item='erasures/{}'.format(
|
||||
snapshot['device']['id']),
|
||||
accept=ANY)
|
||||
doc, response = user.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='erasures/{}'.format(snapshot['device']['id']),
|
||||
accept=ANY,
|
||||
)
|
||||
assert 'html' in response.content_type
|
||||
assert '<html' in doc
|
||||
assert '2018' in doc
|
||||
|
||||
doc, response = client.get(res=documents.DocumentDef.t,
|
||||
item='erasures/{}'.format(
|
||||
snapshot['device']['id']),
|
||||
query=[('format', 'PDF')],
|
||||
accept='application/pdf')
|
||||
doc, response = client.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='erasures/{}'.format(snapshot['device']['id']),
|
||||
query=[('format', 'PDF')],
|
||||
accept='application/pdf',
|
||||
)
|
||||
assert 'application/pdf' == response.content_type
|
||||
|
||||
erasure = next(e for e in snapshot['actions']
|
||||
if e['type'] == 'EraseSectors')
|
||||
erasure = next(e for e in snapshot['actions'] if e['type'] == 'EraseSectors')
|
||||
|
||||
doc, response = client.get(res=documents.DocumentDef.t,
|
||||
item='erasures/{}'.format(erasure['id']),
|
||||
accept=ANY)
|
||||
doc, response = client.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='erasures/{}'.format(erasure['id']),
|
||||
accept=ANY,
|
||||
)
|
||||
assert 'html' in response.content_type
|
||||
assert '<html' in doc
|
||||
assert '2018' in doc
|
||||
|
@@ -67,30 +70,32 @@ def test_erasure_certificate_private_query(user: UserClient):
s = file('erase-sectors.snapshot')
|
||||
snapshot, response = user.post(s, res=Snapshot)
|
||||
|
||||
doc, response = user.get(res=documents.DocumentDef.t,
|
||||
item='erasures/',
|
||||
query=[
|
||||
('filter', {'ids': [snapshot['device']['id']]})],
|
||||
accept=ANY)
|
||||
doc, response = user.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='erasures/',
|
||||
query=[('filter', {'ids': [snapshot['device']['id']]})],
|
||||
accept=ANY,
|
||||
)
|
||||
assert 'html' in response.content_type
|
||||
assert '<html' in doc
|
||||
assert '2018' in doc
|
||||
|
||||
doc, response = user.get(res=documents.DocumentDef.t,
|
||||
item='erasures/',
|
||||
query=[
|
||||
('filter', {
|
||||
'ids': [snapshot['device']['id']]}),
|
||||
('format', 'PDF')
|
||||
],
|
||||
accept='application/pdf')
|
||||
doc, response = user.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='erasures/',
|
||||
query=[('filter', {'ids': [snapshot['device']['id']]}), ('format', 'PDF')],
|
||||
accept='application/pdf',
|
||||
)
|
||||
assert 'application/pdf' == response.content_type
|
||||
|
||||
|
||||
@pytest.mark.mvp
|
||||
def test_erasure_certificate_wrong_id(client: Client):
|
||||
client.get(res=documents.DocumentDef.t, item='erasures/this-is-not-an-id',
|
||||
status=teal.marshmallow.ValidationError)
|
||||
client.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='erasures/this-is-not-an-id',
|
||||
status=teal.marshmallow.ValidationError,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.mvp
|
||||
|
@@ -98,20 +103,27 @@ def test_export_csv_permitions(user: UserClient, user2: UserClient, client: Clie
"""test export device information in a csv file with others users."""
|
||||
snapshot, _ = user.post(file('basic.snapshot'), res=Snapshot)
|
||||
dev_id = snapshot['device']['id']
|
||||
csv_user, _ = user.get(res=documents.DocumentDef.t,
|
||||
item='devices/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer'], 'ids': [dev_id]})])
|
||||
csv_user, _ = user.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='devices/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer'], 'ids': [dev_id]})],
|
||||
)
|
||||
|
||||
csv_user2, _ = user2.get(res=documents.DocumentDef.t,
|
||||
item='devices/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer'], 'ids': [dev_id]})])
|
||||
csv_user2, _ = user2.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='devices/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer'], 'ids': [dev_id]})],
|
||||
)
|
||||
|
||||
_, res = client.get(res=documents.DocumentDef.t,
|
||||
item='devices/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer'], 'ids': [dev_id]})], status=401)
|
||||
_, res = client.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='devices/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer'], 'ids': [dev_id]})],
|
||||
status=401,
|
||||
)
|
||||
assert res.status_code == 401
|
||||
|
||||
assert len(csv_user) > 0
|
||||
|
@@ -124,36 +136,46 @@ def test_export_csv_actions(user: UserClient, user2: UserClient, client: Client)
acer = yaml2json('acer.happy.battery.snapshot')
|
||||
snapshot, _ = user.post(json_encode(acer), res=Snapshot)
|
||||
device_id = snapshot['device']['id']
|
||||
post_request = {"transaction": "ccc", "name": "John", "endUsers": 1,
|
||||
"devices": [device_id], "description": "aaa",
|
||||
"finalUserCode": "abcdefjhi",
|
||||
"startTime": "2020-11-01T02:00:00+00:00",
|
||||
"endTime": "2020-12-01T02:00:00+00:00"
|
||||
}
|
||||
post_request = {
|
||||
"transaction": "ccc",
|
||||
"name": "John",
|
||||
"endUsers": 1,
|
||||
"devices": [device_id],
|
||||
"description": "aaa",
|
||||
"finalUserCode": "abcdefjhi",
|
||||
"startTime": "2020-11-01T02:00:00+00:00",
|
||||
"endTime": "2020-12-01T02:00:00+00:00",
|
||||
}
|
||||
|
||||
user.post(res=Allocate, data=post_request)
|
||||
hdd = [c for c in acer['components'] if c['type'] == 'HardDrive'][0]
|
||||
hdd_action = [a for a in hdd['actions']
|
||||
if a['type'] == 'TestDataStorage'][0]
|
||||
hdd_action = [a for a in hdd['actions'] if a['type'] == 'TestDataStorage'][0]
|
||||
hdd_action['lifetime'] += 1000
|
||||
acer.pop('elapsed')
|
||||
acer['licence_version'] = '1.0.0'
|
||||
snapshot, _ = client.post(acer, res=Live)
|
||||
|
||||
csv_user, _ = user.get(res=documents.DocumentDef.t,
|
||||
item='actions/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer'], 'ids': [device_id]})])
|
||||
csv_user, _ = user.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='actions/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer'], 'ids': [device_id]})],
|
||||
)
|
||||
|
||||
csv_user2, _ = user2.get(res=documents.DocumentDef.t,
|
||||
item='actions/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer']})])
|
||||
csv_user2, _ = user2.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='actions/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer']})],
|
||||
)
|
||||
|
||||
_, res = client.get(res=documents.DocumentDef.t,
|
||||
item='actions/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer']})], status=401)
|
||||
_, res = client.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='actions/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer']})],
|
||||
status=401,
|
||||
)
|
||||
assert res.status_code == 401
|
||||
|
||||
assert len(csv_user) > 0
|
||||
|
@@ -167,21 +189,27 @@ def test_live_export_csv2(user: UserClient, client: Client, app: Devicehub):
acer = yaml2json('acer-happy.snapshot-test1')
|
||||
snapshot, _ = user.post(json_encode(acer), res=Snapshot)
|
||||
device_id = snapshot['device']['id']
|
||||
post_request = {"transaction": "ccc", "name": "John", "endUsers": 1,
|
||||
"devices": [device_id], "description": "aaa",
|
||||
"finalUserCode": "abcdefjhi",
|
||||
"startTime": "2020-11-01T02:00:00+00:00",
|
||||
"endTime": "2020-12-01T02:00:00+00:00"
|
||||
}
|
||||
post_request = {
|
||||
"transaction": "ccc",
|
||||
"name": "John",
|
||||
"endUsers": 1,
|
||||
"devices": [device_id],
|
||||
"description": "aaa",
|
||||
"finalUserCode": "abcdefjhi",
|
||||
"startTime": "2020-11-01T02:00:00+00:00",
|
||||
"endTime": "2020-12-01T02:00:00+00:00",
|
||||
}
|
||||
|
||||
user.post(res=Allocate, data=post_request)
|
||||
|
||||
acer = yaml2json('acer-happy.live-test1')
|
||||
live, _ = client.post(acer, res=Live)
|
||||
csv_user, _ = user.get(res=documents.DocumentDef.t,
|
||||
item='actions/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer'], 'ids': [device_id]})])
|
||||
csv_user, _ = user.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='actions/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer'], 'ids': [device_id]})],
|
||||
)
|
||||
|
||||
assert "4692" in csv_user
|
||||
assert "8692" in csv_user
|
||||
|
@@ -195,11 +223,15 @@ def test_live_example2(user: UserClient, client: Client, app: Devicehub):
acer = yaml2json('acer-happy.snapshot-test1')
|
||||
snapshot, _ = user.post(json_encode(acer), res=Snapshot)
|
||||
device_id = snapshot['device']['id']
|
||||
post_request = {"transaction": "ccc", "name": "John", "endUsers": 1,
|
||||
"devices": [device_id], "description": "aaa",
|
||||
"finalUserCode": "abcdefjhi",
|
||||
"startTime": "2020-11-01T02:00:00+00:00",
|
||||
"endTime": "2020-12-01T02:00:00+00:00"
|
||||
post_request = {
|
||||
"transaction": "ccc",
|
||||
"name": "John",
|
||||
"endUsers": 1,
|
||||
"devices": [device_id],
|
||||
"description": "aaa",
|
||||
"finalUserCode": "abcdefjhi",
|
||||
"startTime": "2020-11-01T02:00:00+00:00",
|
||||
"endTime": "2020-12-01T02:00:00+00:00",
|
||||
}
|
||||
|
||||
user.post(res=Allocate, data=post_request)
|
||||
|
@@ -217,27 +249,36 @@ def test_export_basic_snapshot(user: UserClient):
"""Test export device information in a csv file."""
|
||||
snapshot, _ = user.post(file('basic.snapshot'), res=Snapshot)
|
||||
dev_id = snapshot['device']['id']
|
||||
csv_str, _ = user.get(res=documents.DocumentDef.t,
|
||||
item='devices/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer'], 'ids': [dev_id]})])
|
||||
csv_str, _ = user.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='devices/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer'], 'ids': [dev_id]})],
|
||||
)
|
||||
|
||||
f = StringIO(csv_str)
|
||||
obj_csv = csv.reader(f, f, delimiter=';', quotechar='"')
|
||||
export_csv = list(obj_csv)
|
||||
|
||||
# Open fixture csv and transform to list
|
||||
with Path(__file__).parent.joinpath('files').joinpath('basic.csv').open() as csv_file:
|
||||
with Path(__file__).parent.joinpath('files').joinpath(
|
||||
'basic.csv'
|
||||
).open() as csv_file:
|
||||
obj_csv = csv.reader(csv_file, delimiter=';', quotechar='"')
|
||||
fixture_csv = list(obj_csv)
|
||||
|
||||
assert isinstance(datetime.strptime(export_csv[1][19], '%c'), datetime), \
|
||||
'Register in field is not a datetime'
|
||||
assert isinstance(
|
||||
datetime.strptime(export_csv[1][19], '%c'), datetime
|
||||
), 'Register in field is not a datetime'
|
||||
|
||||
assert fixture_csv[0] == export_csv[0], 'Headers are not equal'
|
||||
assert fixture_csv[1][:19] == export_csv[1][:19], 'Computer information are not equal'
|
||||
assert (
|
||||
fixture_csv[1][:19] == export_csv[1][:19]
|
||||
), 'Computer information are not equal'
|
||||
assert fixture_csv[1][20] == export_csv[1][20], 'Computer information are not equal'
|
||||
assert fixture_csv[1][22:] == export_csv[1][22:], 'Computer information are not equal'
|
||||
assert (
|
||||
fixture_csv[1][22:] == export_csv[1][22:]
|
||||
), 'Computer information are not equal'
|
||||
|
||||
|
||||
@pytest.mark.mvp
|
||||
|
@@ -245,13 +286,17 @@ def test_export_basic_snapshot(user: UserClient):
def test_check_insert_hash(app: Devicehub, user: UserClient, client: Client):
|
||||
"""Test export device information in a csv file."""
|
||||
snapshot, _ = user.post(file('basic.snapshot'), res=Snapshot)
|
||||
csv_str, _ = user.get(res=documents.DocumentDef.t,
|
||||
item='devices/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer']})])
|
||||
csv_str, _ = user.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='devices/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer']})],
|
||||
)
|
||||
hash3 = hashlib.sha3_256(csv_str.encode('utf-8')).hexdigest()
|
||||
assert ReportHash.query.filter_by(hash3=hash3).count() == 1
|
||||
result, status = client.get(res=documents.DocumentDef.t, item='check/', query=[('hash', hash3)])
|
||||
result, status = client.get(
|
||||
res=documents.DocumentDef.t, item='check/', query=[('hash', hash3)]
|
||||
)
|
||||
assert status.status_code == 200
|
||||
assert result
|
||||
|
||||
|
@@ -266,7 +311,9 @@ def test_check_insert_hash(app: Devicehub, user: UserClient, client: Client):
@pytest.mark.mvp
|
||||
def test_export_extended(app: Devicehub, user: UserClient):
|
||||
"""Test a export device with all information and a lot of components."""
|
||||
snapshot1, _ = user.post(file('real-eee-1001pxd.snapshot.12'), res=Snapshot, status=201)
|
||||
snapshot1, _ = user.post(
|
||||
file('real-eee-1001pxd.snapshot.12'), res=Snapshot, status=201
|
||||
)
|
||||
snapshot2, _ = user.post(file('complete.export.snapshot'), res=Snapshot, status=201)
|
||||
dev1_id = snapshot1['device']['id']
|
||||
dev2_id = snapshot2['device']['id']
|
||||
|
@@ -276,10 +323,12 @@ def test_export_extended(app: Devicehub, user: UserClient):
db.session.add(pc)
|
||||
db.session.commit()
|
||||
|
||||
csv_str, _ = user.get(res=documents.DocumentDef.t,
|
||||
item='devices/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer'], 'ids': [dev1_id, dev2_id]})])
|
||||
csv_str, _ = user.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='devices/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer'], 'ids': [dev1_id, dev2_id]})],
|
||||
)
|
||||
|
||||
f = StringIO(csv_str)
|
||||
obj_csv = csv.reader(f, f, delimiter=';', quotechar='"')
|
||||
|
@@ -287,28 +336,50 @@ def test_export_extended(app: Devicehub, user: UserClient):
|
||||
# Open fixture csv and transform to list
|
||||
with Path(__file__).parent.joinpath('files').joinpath(
|
||||
'proposal_extended_csv_report.csv').open() as csv_file:
|
||||
'proposal_extended_csv_report.csv'
|
||||
).open() as csv_file:
|
||||
obj_csv = csv.reader(csv_file, delimiter=';', quotechar='"')
|
||||
fixture_csv = list(obj_csv)
|
||||
|
||||
assert isinstance(datetime.strptime(export_csv[1][19], '%c'), datetime), \
|
||||
'Register in field is not a datetime'
|
||||
assert isinstance(
|
||||
datetime.strptime(export_csv[1][19], '%c'), datetime
|
||||
), 'Register in field is not a datetime'
|
||||
|
||||
assert fixture_csv[0] == export_csv[0], 'Headers are not equal'
|
||||
assert fixture_csv[1][:19] == export_csv[1][:19], 'Computer information are not equal'
|
||||
assert (
|
||||
fixture_csv[1][:19] == export_csv[1][:19]
|
||||
), 'Computer information are not equal'
|
||||
assert fixture_csv[1][20] == export_csv[1][20], 'Computer information are not equal'
|
||||
assert fixture_csv[1][22:82] == export_csv[1][22:82], 'Computer information are not equal'
|
||||
assert fixture_csv[1][83] == export_csv[1][83], 'Computer information are not equal'
|
||||
assert fixture_csv[1][86:] == export_csv[1][86:], 'Computer information are not equal'
|
||||
assert fixture_csv[2][:19] == export_csv[2][:19], 'Computer information are not equal'
|
||||
assert (
|
||||
fixture_csv[1][22:83] == export_csv[1][22:83]
|
||||
), 'Computer information are not equal'
|
||||
assert fixture_csv[1][84] == export_csv[1][84], 'Computer information are not equal'
|
||||
assert (
|
||||
fixture_csv[1][87:] == export_csv[1][87:]
|
||||
), 'Computer information are not equal'
|
||||
assert (
|
||||
fixture_csv[2][:19] == export_csv[2][:19]
|
||||
), 'Computer information are not equal'
|
||||
assert fixture_csv[2][20] == export_csv[2][20], 'Computer information are not equal'
|
||||
assert fixture_csv[2][22:82] == export_csv[2][22:82], 'Computer information are not equal'
|
||||
assert fixture_csv[2][83] == export_csv[2][83], 'Computer information are not equal'
|
||||
assert fixture_csv[2][86:106] == export_csv[2][86:106], 'Computer information are not equal'
|
||||
assert fixture_csv[2][109] == export_csv[2][109], 'Computer information are not equal'
|
||||
assert fixture_csv[2][112:133] == export_csv[2][112:133], 'Computer information are not equal'
|
||||
assert fixture_csv[2][135] == export_csv[2][135], 'Computer information are not equal'
|
||||
assert fixture_csv[2][138:] == export_csv[2][138:], 'Computer information are not equal'
|
||||
assert (
|
||||
fixture_csv[2][22:83] == export_csv[2][22:83]
|
||||
), 'Computer information are not equal'
|
||||
assert fixture_csv[2][84] == export_csv[2][84], 'Computer information are not equal'
|
||||
assert (
|
||||
fixture_csv[2][86:107] == export_csv[2][86:107]
|
||||
), 'Computer information are not equal'
|
||||
assert (
|
||||
fixture_csv[2][110] == export_csv[2][110]
|
||||
), 'Computer information are not equal'
|
||||
assert (
|
||||
fixture_csv[2][113:134] == export_csv[2][113:134]
|
||||
), 'Computer information are not equal'
|
||||
assert (
|
||||
fixture_csv[2][136] == export_csv[2][136]
|
||||
), 'Computer information are not equal'
|
||||
assert (
|
||||
fixture_csv[2][139:] == export_csv[2][139:]
|
||||
), 'Computer information are not equal'
|
||||
|
||||
|
||||
@pytest.mark.mvp
|
||||
|
@@ -316,9 +387,9 @@ def test_export_empty(user: UserClient):
"""Test to check works correctly exporting csv without any information,
|
||||
export a placeholder device.
|
||||
"""
|
||||
csv_str, _ = user.get(res=documents.DocumentDef.t,
|
||||
accept='text/csv',
|
||||
item='devices/')
|
||||
csv_str, _ = user.get(
|
||||
res=documents.DocumentDef.t, accept='text/csv', item='devices/'
|
||||
)
|
||||
f = StringIO(csv_str)
|
||||
obj_csv = csv.reader(f, f)
|
||||
export_csv = list(obj_csv)
|
||||
|
@@ -330,16 +401,19 @@ def test_export_empty(user: UserClient):
def test_export_computer_monitor(user: UserClient):
|
||||
"""Test a export device type computer monitor."""
|
||||
snapshot, _ = user.post(file('computer-monitor.snapshot'), res=Snapshot)
|
||||
csv_str, _ = user.get(res=documents.DocumentDef.t,
|
||||
item='devices/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['ComputerMonitor']})])
|
||||
csv_str, _ = user.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='devices/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['ComputerMonitor']})],
|
||||
)
|
||||
f = StringIO(csv_str)
|
||||
obj_csv = csv.reader(f, f)
|
||||
export_csv = list(obj_csv)
|
||||
# Open fixture csv and transform to list
|
||||
with Path(__file__).parent.joinpath('files').joinpath('computer-monitor.csv').open() \
|
||||
as csv_file:
|
||||
with Path(__file__).parent.joinpath('files').joinpath(
|
||||
'computer-monitor.csv'
|
||||
).open() as csv_file:
|
||||
obj_csv = csv.reader(csv_file)
|
||||
fixture_csv = list(obj_csv)
|
||||
|
||||
|
@@ -355,15 +429,19 @@ def test_export_computer_monitor(user: UserClient):
def test_export_keyboard(user: UserClient):
|
||||
"""Test a export device type keyboard."""
|
||||
snapshot, _ = user.post(file('keyboard.snapshot'), res=Snapshot)
|
||||
csv_str, _ = user.get(res=documents.DocumentDef.t,
|
||||
item='devices/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Keyboard']})])
|
||||
csv_str, _ = user.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='devices/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Keyboard']})],
|
||||
)
|
||||
f = StringIO(csv_str)
|
||||
obj_csv = csv.reader(f, f)
|
||||
export_csv = list(obj_csv)
|
||||
# Open fixture csv and transform to list
|
||||
with Path(__file__).parent.joinpath('files').joinpath('keyboard.csv').open() as csv_file:
|
||||
with Path(__file__).parent.joinpath('files').joinpath(
|
||||
'keyboard.csv'
|
||||
).open() as csv_file:
|
||||
obj_csv = csv.reader(csv_file)
|
||||
fixture_csv = list(obj_csv)
|
||||
|
||||
|
@@ -381,8 +459,9 @@ def test_export_multiple_different_devices(user: UserClient):
computers, keyboards, monitors, etc..)
|
||||
"""
|
||||
# Open fixture csv and transform to list
|
||||
with Path(__file__).parent.joinpath('files').joinpath('multiples_devices.csv').open() \
|
||||
as csv_file:
|
||||
with Path(__file__).parent.joinpath('files').joinpath(
|
||||
'multiples_devices.csv'
|
||||
).open() as csv_file:
|
||||
fixture_csv = list(csv.reader(csv_file))
|
||||
for row in fixture_csv:
|
||||
del row[8] # We remove the 'Registered in' column
|
||||
|
@@ -393,10 +472,12 @@ def test_export_multiple_different_devices(user: UserClient):
snapshot_keyboard, _ = user.post(file('keyboard.snapshot'), res=Snapshot)
|
||||
snapshot_monitor, _ = user.post(file('computer-monitor.snapshot'), res=Snapshot)
|
||||
|
||||
csv_str, _ = user.get(res=documents.DocumentDef.t,
|
||||
item='devices/',
|
||||
query=[('filter', {'type': ['Computer', 'Keyboard', 'Monitor']})],
|
||||
accept='text/csv')
|
||||
csv_str, _ = user.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='devices/',
|
||||
query=[('filter', {'type': ['Computer', 'Keyboard', 'Monitor']})],
|
||||
accept='text/csv',
|
||||
)
|
||||
f = StringIO(csv_str)
|
||||
obj_csv = csv.reader(f, f)
|
||||
export_csv = list(obj_csv)
|
||||
|
@@ -409,20 +490,25 @@ def test_export_multiple_different_devices(user: UserClient):
|
||||
@pytest.mark.mvp
|
||||
def test_report_devices_stock_control(user: UserClient, user2: UserClient):
|
||||
# TODO
|
||||
"""Test export device information in a csv file."""
|
||||
snapshot, _ = user.post(file('basic.snapshot'), res=Snapshot)
|
||||
snapshot2, _ = user2.post(file('basic.snapshot2'), res=Snapshot)
|
||||
|
||||
csv_str, _ = user.get(res=documents.DocumentDef.t,
|
||||
item='stock/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer']})])
|
||||
csv_str, _ = user.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='stock/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer']})],
|
||||
)
|
||||
f = StringIO(csv_str)
|
||||
obj_csv = csv.reader(f, f)
|
||||
export_csv = list(obj_csv)
|
||||
|
||||
# Open fixture csv and transform to list
|
||||
with Path(__file__).parent.joinpath('files').joinpath('basic-stock.csv').open() as csv_file:
|
||||
with Path(__file__).parent.joinpath('files').joinpath(
|
||||
'basic-stock.csv'
|
||||
).open() as csv_file:
|
||||
obj_csv = csv.reader(csv_file)
|
||||
fixture_csv = list(obj_csv)
|
||||
|
||||
|
@@ -431,8 +517,12 @@ def test_report_devices_stock_control(user: UserClient, user2: UserClient):
|
||||
export_csv[0] = export_csv[0][0].split(';')
|
||||
export_csv[1] = export_csv[1][0].split(';')
|
||||
assert isinstance(datetime.strptime(export_csv[1][5], '%c'), datetime), \
|
||||
'Register in field is not a datetime'
|
||||
fixture_csv[0] = fixture_csv[0][0].split(';')
|
||||
fixture_csv[1] = fixture_csv[1][0].split(';')
|
||||
|
||||
assert isinstance(
|
||||
datetime.strptime(export_csv[1][5], '%c'), datetime
|
||||
), 'Register in field is not a datetime'
|
||||
|
||||
# Pop dates fields from csv lists to compare them
|
||||
fixture_csv[1] = fixture_csv[1][:5] + fixture_csv[1][6:]
|
||||
|
@@ -447,17 +537,22 @@ def test_report_devices_stock_control(user: UserClient, user2: UserClient):
def test_get_document_lots(user: UserClient, user2: UserClient):
|
||||
"""Tests submitting and retreiving all lots."""
|
||||
|
||||
l, _ = user.post({'name': 'Lot1', 'description': 'comments,lot1,testcomment-lot1,'}, res=Lot)
|
||||
l, _ = user.post({'name': 'Lot2', 'description': 'comments,lot2,testcomment-lot2,'}, res=Lot)
|
||||
l, _ = user2.post({'name': 'Lot3-User2', 'description': 'comments,lot3,testcomment-lot3,'}, res=Lot)
|
||||
l, _ = user.post(
|
||||
{'name': 'Lot1', 'description': 'comments,lot1,testcomment-lot1,'}, res=Lot
|
||||
)
|
||||
l, _ = user.post(
|
||||
{'name': 'Lot2', 'description': 'comments,lot2,testcomment-lot2,'}, res=Lot
|
||||
)
|
||||
l, _ = user2.post(
|
||||
{'name': 'Lot3-User2', 'description': 'comments,lot3,testcomment-lot3,'},
|
||||
res=Lot,
|
||||
)
|
||||
|
||||
csv_str, _ = user.get(res=documents.DocumentDef.t,
|
||||
item='lots/',
|
||||
accept='text/csv')
|
||||
csv_str, _ = user.get(res=documents.DocumentDef.t, item='lots/', accept='text/csv')
|
||||
|
||||
csv2_str, _ = user2.get(res=documents.DocumentDef.t,
|
||||
item='lots/',
|
||||
accept='text/csv')
|
||||
csv2_str, _ = user2.get(
|
||||
res=documents.DocumentDef.t, item='lots/', accept='text/csv'
|
||||
)
|
||||
|
||||
f = StringIO(csv_str)
|
||||
obj_csv = csv.reader(f, f)
|
||||
|
@@ -470,10 +565,17 @@ def test_get_document_lots(user: UserClient, user2: UserClient):
assert len(export_csv) == 3
|
||||
assert len(export2_csv) == 2
|
||||
|
||||
assert export_csv[0] == export2_csv[0] == ['Id', 'Name', 'Registered in', 'Description']
|
||||
assert (
|
||||
export_csv[0]
|
||||
== export2_csv[0]
|
||||
== ['Id', 'Name', 'Registered in', 'Description']
|
||||
)
|
||||
|
||||
assert export_csv[1][1] == 'Lot1' or 'Lot2'
|
||||
assert export_csv[1][3] == 'comments,lot1,testcomment-lot1,' or 'comments,lot2,testcomment-lot2,'
|
||||
assert (
|
||||
export_csv[1][3] == 'comments,lot1,testcomment-lot1,'
|
||||
or 'comments,lot2,testcomment-lot2,'
|
||||
)
|
||||
assert export2_csv[1][1] == 'Lot3-User2'
|
||||
assert export2_csv[1][3] == 'comments,lot3,testcomment-lot3,'
|
||||
|
||||
|
@@ -482,34 +584,39 @@ def test_verify_stamp(user: UserClient, client: Client):
def test_verify_stamp(user: UserClient, client: Client):
|
||||
"""Test verify stamp of one export device information in a csv file."""
|
||||
snapshot, _ = user.post(file('basic.snapshot'), res=Snapshot)
|
||||
csv_str, _ = user.get(res=documents.DocumentDef.t,
|
||||
item='devices/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer']})])
|
||||
csv_str, _ = user.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='devices/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer']})],
|
||||
)
|
||||
|
||||
response, _ = client.post(res=documents.DocumentDef.t,
|
||||
item='stamps/',
|
||||
content_type='multipart/form-data',
|
||||
accept='text/html',
|
||||
data={'docUpload': [(BytesIO(bytes(csv_str, 'utf-8')), 'example.csv')]},
|
||||
status=200)
|
||||
response, _ = client.post(
|
||||
res=documents.DocumentDef.t,
|
||||
item='stamps/',
|
||||
content_type='multipart/form-data',
|
||||
accept='text/html',
|
||||
data={'docUpload': [(BytesIO(bytes(csv_str, 'utf-8')), 'example.csv')]},
|
||||
status=200,
|
||||
)
|
||||
assert "alert alert-info" in response
|
||||
assert not "alert alert-danger" in response
|
||||
|
||||
response, _ = client.post(res=documents.DocumentDef.t,
|
||||
item='stamps/',
|
||||
content_type='multipart/form-data',
|
||||
accept='text/html',
|
||||
data={'docUpload': [(BytesIO(b'abc'), 'example.csv')]},
|
||||
status=200)
|
||||
response, _ = client.post(
|
||||
res=documents.DocumentDef.t,
|
||||
item='stamps/',
|
||||
content_type='multipart/form-data',
|
||||
accept='text/html',
|
||||
data={'docUpload': [(BytesIO(b'abc'), 'example.csv')]},
|
||||
status=200,
|
||||
)
|
||||
|
||||
assert not "alert alert-info" in response
|
||||
assert "alert alert-danger" in response
|
||||
|
||||
response, _ = client.get(res=documents.DocumentDef.t,
|
||||
item='stamps/',
|
||||
accept='text/html',
|
||||
status=200)
|
||||
response, _ = client.get(
|
||||
res=documents.DocumentDef.t, item='stamps/', accept='text/html', status=200
|
||||
)
|
||||
|
||||
assert not "alert alert-info" in response
|
||||
assert not "alert alert-danger" in response
|
||||
|
@@ -519,20 +626,23 @@ def test_verify_stamp(user: UserClient, client: Client):
def test_verify_stamp_log_info(user: UserClient, client: Client):
|
||||
"""Test verify stamp of one export lots-info in a csv file."""
|
||||
|
||||
l, _ = user.post({'name': 'Lot1', 'description': 'comments,lot1,testcomment-lot1,'}, res=Lot)
|
||||
l, _ = user.post({'name': 'Lot2', 'description': 'comments,lot2,testcomment-lot2,'}, res=Lot)
|
||||
l, _ = user.post(
|
||||
{'name': 'Lot1', 'description': 'comments,lot1,testcomment-lot1,'}, res=Lot
|
||||
)
|
||||
l, _ = user.post(
|
||||
{'name': 'Lot2', 'description': 'comments,lot2,testcomment-lot2,'}, res=Lot
|
||||
)
|
||||
|
||||
csv_str, _ = user.get(res=documents.DocumentDef.t,
|
||||
item='lots/',
|
||||
accept='text/csv')
|
||||
csv_str, _ = user.get(res=documents.DocumentDef.t, item='lots/', accept='text/csv')
|
||||
|
||||
response, _ = client.post(res=documents.DocumentDef.t,
|
||||
item='stamps/',
|
||||
content_type='multipart/form-data',
|
||||
accept='text/html',
|
||||
data={'docUpload': [(BytesIO(bytes(csv_str, 'utf-8')),
|
||||
'example.csv')]},
|
||||
status=200)
|
||||
response, _ = client.post(
|
||||
res=documents.DocumentDef.t,
|
||||
item='stamps/',
|
||||
content_type='multipart/form-data',
|
||||
accept='text/html',
|
||||
data={'docUpload': [(BytesIO(bytes(csv_str, 'utf-8')), 'example.csv')]},
|
||||
status=200,
|
||||
)
|
||||
assert "alert alert-info" in response
|
||||
|
||||
|
||||
|
@@ -542,18 +652,21 @@ def test_verify_stamp_devices_stock(user: UserClient, client: Client):
|
||||
snapshot, _ = user.post(file('basic.snapshot'), res=Snapshot)
|
||||
|
||||
csv_str, _ = user.get(res=documents.DocumentDef.t,
|
||||
item='stock/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer']})])
|
||||
csv_str, _ = user.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='stock/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer']})],
|
||||
)
|
||||
|
||||
response, _ = client.post(res=documents.DocumentDef.t,
|
||||
item='stamps/',
|
||||
content_type='multipart/form-data',
|
||||
accept='text/html',
|
||||
data={'docUpload': [(BytesIO(bytes(csv_str, 'utf-8')),
|
||||
'example.csv')]},
|
||||
status=200)
|
||||
response, _ = client.post(
|
||||
res=documents.DocumentDef.t,
|
||||
item='stamps/',
|
||||
content_type='multipart/form-data',
|
||||
accept='text/html',
|
||||
data={'docUpload': [(BytesIO(bytes(csv_str, 'utf-8')), 'example.csv')]},
|
||||
status=200,
|
||||
)
|
||||
assert "alert alert-info" in response
|
||||
|
||||
|
||||
|
@@ -563,11 +676,15 @@ def test_verify_stamp_csv_actions(user: UserClient, client: Client):
acer = yaml2json('acer.happy.battery.snapshot')
|
||||
snapshot, _ = user.post(json_encode(acer), res=Snapshot)
|
||||
device_id = snapshot['device']['id']
|
||||
post_request = {"transaction": "ccc", "name": "John", "endUsers": 1,
|
||||
"devices": [device_id], "description": "aaa",
|
||||
"finalUserCode": "abcdefjhi",
|
||||
"startTime": "2020-11-01T02:00:00+00:00",
|
||||
"endTime": "2020-12-01T02:00:00+00:00"
|
||||
post_request = {
|
||||
"transaction": "ccc",
|
||||
"name": "John",
|
||||
"endUsers": 1,
|
||||
"devices": [device_id],
|
||||
"description": "aaa",
|
||||
"finalUserCode": "abcdefjhi",
|
||||
"startTime": "2020-11-01T02:00:00+00:00",
|
||||
"endTime": "2020-12-01T02:00:00+00:00",
|
||||
}
|
||||
|
||||
user.post(res=Allocate, data=post_request)
|
||||
|
@@ -578,18 +695,21 @@ def test_verify_stamp_csv_actions(user: UserClient, client: Client):
acer['licence_version'] = '1.0.0'
|
||||
snapshot, _ = client.post(acer, res=Live)
|
||||
|
||||
csv_str, _ = user.get(res=documents.DocumentDef.t,
|
||||
item='actions/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer']})])
|
||||
csv_str, _ = user.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='actions/',
|
||||
accept='text/csv',
|
||||
query=[('filter', {'type': ['Computer']})],
|
||||
)
|
||||
|
||||
response, _ = client.post(res=documents.DocumentDef.t,
|
||||
item='stamps/',
|
||||
content_type='multipart/form-data',
|
||||
accept='text/html',
|
||||
data={'docUpload': [(BytesIO(bytes(csv_str, 'utf-8')),
|
||||
'example.csv')]},
|
||||
status=200)
|
||||
response, _ = client.post(
|
||||
res=documents.DocumentDef.t,
|
||||
item='stamps/',
|
||||
content_type='multipart/form-data',
|
||||
accept='text/html',
|
||||
data={'docUpload': [(BytesIO(bytes(csv_str, 'utf-8')), 'example.csv')]},
|
||||
status=200,
|
||||
)
|
||||
assert "alert alert-info" in response
|
||||
|
||||
|
||||
|
@@ -599,35 +719,38 @@ def test_verify_stamp_erasure_certificate(user: UserClient, client: Client):
s = file('erase-sectors.snapshot')
|
||||
snapshot, response = user.post(s, res=Snapshot)
|
||||
|
||||
doc, _ = user.get(res=documents.DocumentDef.t,
|
||||
item='erasures/',
|
||||
query=[('filter', {'ids': [snapshot['device']['id']]})],
|
||||
accept=ANY)
|
||||
doc, _ = user.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='erasures/',
|
||||
query=[('filter', {'ids': [snapshot['device']['id']]})],
|
||||
accept=ANY,
|
||||
)
|
||||
|
||||
response, _ = client.post(res=documents.DocumentDef.t,
|
||||
item='stamps/',
|
||||
content_type='multipart/form-data',
|
||||
accept='text/html',
|
||||
data={'docUpload': [(BytesIO(bytes(doc, 'utf-8')),
|
||||
'example.csv')]},
|
||||
status=200)
|
||||
response, _ = client.post(
|
||||
res=documents.DocumentDef.t,
|
||||
item='stamps/',
|
||||
content_type='multipart/form-data',
|
||||
accept='text/html',
|
||||
data={'docUpload': [(BytesIO(bytes(doc, 'utf-8')), 'example.csv')]},
|
||||
status=200,
|
||||
)
|
||||
assert "alert alert-danger" in response
|
||||
|
||||
doc, _ = user.get(res=documents.DocumentDef.t,
|
||||
item='erasures/',
|
||||
query=[
|
||||
('filter', {'ids': [snapshot['device']['id']]}),
|
||||
('format', 'PDF')
|
||||
],
|
||||
accept='application/pdf')
|
||||
doc, _ = user.get(
|
||||
res=documents.DocumentDef.t,
|
||||
item='erasures/',
|
||||
query=[('filter', {'ids': [snapshot['device']['id']]}), ('format', 'PDF')],
|
||||
accept='application/pdf',
|
||||
)
|
||||
|
||||
response, _ = client.post(res=documents.DocumentDef.t,
|
||||
item='stamps/',
|
||||
content_type='multipart/form-data',
|
||||
accept='text/html',
|
||||
data={'docUpload': [(BytesIO(doc),
|
||||
'example.csv')]},
|
||||
status=200)
|
||||
response, _ = client.post(
|
||||
res=documents.DocumentDef.t,
|
||||
item='stamps/',
|
||||
content_type='multipart/form-data',
|
||||
accept='text/html',
|
||||
data={'docUpload': [(BytesIO(doc), 'example.csv')]},
|
||||
status=200,
|
||||
)
|
||||
assert "alert alert-info" in response
|
||||
|
||||
|
||||
|
@@ -636,11 +759,10 @@ def test_get_document_internal_stats(user: UserClient, user2: UserClient):
"""Tests for get the internal stats."""
|
||||
|
||||
# csv_str, _ = user.get(res=documents.DocumentDef.t,
|
||||
# item='internalstats/')
|
||||
csv_str, _ = user.get(res=documents.DocumentDef.t,
|
||||
item='internalstats/',
|
||||
accept='text/csv',
|
||||
query=[])
|
||||
# item='internalstats/')
|
||||
csv_str, _ = user.get(
|
||||
res=documents.DocumentDef.t, item='internalstats/', accept='text/csv', query=[]
|
||||
)
|
||||
|
||||
f = StringIO(csv_str)
|
||||
obj_csv = csv.reader(f, f)
|
||||
|
@@ -648,10 +770,9 @@ def test_get_document_internal_stats(user: UserClient, user2: UserClient):
|
||||
assert len(export_csv) == 1
|
||||
|
||||
csv_str, _ = user2.get(res=documents.DocumentDef.t,
|
||||
item='internalstats/',
|
||||
accept='text/csv',
|
||||
query=[])
|
||||
csv_str, _ = user2.get(
|
||||
res=documents.DocumentDef.t, item='internalstats/', accept='text/csv', query=[]
|
||||
)
|
||||
|
||||
f = StringIO(csv_str)
|
||||
obj_csv = csv.reader(f, f)
|
||||
|
@@ -672,8 +793,9 @@ def test_get_wbconf(user: UserClient):
assert 'WB_ERASE =' in env
|
||||
# assert 'WB_ERASE = True' in env
|
||||
|
||||
session = Session.query.filter_by(user_id=user.user['id'],
|
||||
type=SessionType.Internal).first()
|
||||
session = Session.query.filter_by(
|
||||
user_id=user.user['id'], type=SessionType.Internal
|
||||
).first()
|
||||
token = session.token
|
||||
token = auth.Auth.encode(session.token)
|
||||
assert token in env
|
||||
|
@@ -691,7 +813,7 @@ def test_trade_documents(user: UserClient):
'filename': 'test.pdf',
|
||||
'hash': 'bbbbbbbb',
|
||||
'url': 'http://www.ereuse.org/',
|
||||
'lot': lot['id']
|
||||
'lot': lot['id'],
|
||||
}
|
||||
doc, _ = user.post(res=TradeDocument, data=request_post)
|
||||
assert doc['filename'] == request_post['filename']
|
||||
|
@@ -707,13 +829,15 @@ def test_trade_documents_with_weight(user: UserClient):
|
||||
lot, _ = user.post({'name': 'MyLot'}, res=Lot)
|
||||
# long url
|
||||
url = 'http://www.ereuse.org/apapaapaapaapaapaapaapaapaapaapapaapaapaapaapaapaapaapaapaapapaapaapaapaapaapaapaapaapaapaaaa',
|
||||
url = (
|
||||
'http://www.ereuse.org/apapaapaapaapaapaapaapaapaapaapapaapaapaapaapaapaapaapaapaapapaapaapaapaapaapaapaapaapaapaaaa',
|
||||
)
|
||||
request_post = {
|
||||
'filename': 'test.pdf',
|
||||
'hash': 'bbbbbbbb',
|
||||
'url': url,
|
||||
'weight': 15,
|
||||
'lot': lot['id']
|
||||
'lot': lot['id'],
|
||||
}
|
||||
doc, _ = user.post(res=TradeDocument, data=request_post)
|
||||
assert doc['weight'] == request_post['weight']
@@ -224,11 +224,12 @@ def test_export_devices(user3: UserClientFlask):
), 'Computer information are not equal'
|
||||
assert fixture_csv[1][20] == export_csv[1][20], 'Computer information are not equal'
|
||||
assert (
|
||||
fixture_csv[1][22:82] == export_csv[1][22:82]
|
||||
fixture_csv[1][22:83] == export_csv[1][22:83]
|
||||
), 'Computer information are not equal'
|
||||
assert fixture_csv[1][83] == export_csv[1][83], 'Computer information are not equal'
|
||||
|
||||
assert fixture_csv[1][84] == export_csv[1][84], 'Computer information are not equal'
|
||||
assert (
|
||||
fixture_csv[1][86:] == export_csv[1][86:]
|
||||
fixture_csv[1][88:] == export_csv[1][88:]
|
||||
), 'Computer information are not equal'