Better reporting for user limits (#5225)
- Added explanatory messages for actions denied for user limits - Fixed few rules and checks - Upgraded OPA versionmain
parent
aa4980eea5
commit
ec3e1f34a4
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,129 @@
|
|||||||
|
package limits

import future.keywords.if
import future.keywords.in
import future.keywords.contains

import data.utils

# Capability identifiers. The names match the Limits enum of the
# limit manager and the keys of input.resource.limits.
CAP_USER_SANDBOX_TASKS = "USER_SANDBOX_TASKS"
CAP_USER_SANDBOX_PROJECTS = "USER_SANDBOX_PROJECTS"
CAP_TASKS_IN_USER_SANDBOX_PROJECT = "TASKS_IN_USER_SANDBOX_PROJECT"
CAP_USER_OWNED_ORGS = "USER_OWNED_ORGS"
CAP_USER_SANDBOX_CLOUD_STORAGES = "USER_SANDBOX_CLOUD_STORAGES"
CAP_ORG_TASKS = "ORG_TASKS"
CAP_ORG_PROJECTS = "ORG_PROJECTS"
CAP_TASKS_IN_ORG_PROJECT = "TASKS_IN_ORG_PROJECT"
CAP_ORG_CLOUD_STORAGES = "ORG_CLOUD_STORAGES"
CAP_ORG_COMMON_WEBHOOKS = "ORG_COMMON_WEBHOOKS"
CAP_PROJECT_WEBHOOKS = "PROJECT_WEBHOOKS"

# True when a limit is configured (max != null) and already consumed.
# A null max means "unlimited": the first expression fails, so the
# limit can never be exceeded.
check_limit_exceeded(current, max) {
    null != max
    current >= max
}

# Each rule below contributes one human-readable reason to the
# "problems" set when the corresponding limit is exceeded.

problems contains "user tasks limit reached" if {
    check_limit_exceeded(
        input.resource.limits[CAP_USER_SANDBOX_TASKS].used,
        input.resource.limits[CAP_USER_SANDBOX_TASKS].max
    )
}

problems contains "user projects limit reached" if {
    check_limit_exceeded(
        input.resource.limits[CAP_USER_SANDBOX_PROJECTS].used,
        input.resource.limits[CAP_USER_SANDBOX_PROJECTS].max
    )
}

problems contains "user project tasks limit reached" if {
    check_limit_exceeded(
        input.resource.limits[CAP_TASKS_IN_USER_SANDBOX_PROJECT].used,
        input.resource.limits[CAP_TASKS_IN_USER_SANDBOX_PROJECT].max
    )
}

problems contains "org tasks limit reached" if {
    check_limit_exceeded(
        input.resource.limits[CAP_ORG_TASKS].used,
        input.resource.limits[CAP_ORG_TASKS].max
    )
}

problems contains "org projects limit reached" if {
    check_limit_exceeded(
        input.resource.limits[CAP_ORG_PROJECTS].used,
        input.resource.limits[CAP_ORG_PROJECTS].max
    )
}

problems contains "org project tasks limit reached" if {
    check_limit_exceeded(
        input.resource.limits[CAP_TASKS_IN_ORG_PROJECT].used,
        input.resource.limits[CAP_TASKS_IN_ORG_PROJECT].max
    )
}

problems contains "project webhooks limit reached" if {
    check_limit_exceeded(
        input.resource.limits[CAP_PROJECT_WEBHOOKS].used,
        input.resource.limits[CAP_PROJECT_WEBHOOKS].max
    )
}

problems contains "org webhooks limit reached" if {
    check_limit_exceeded(
        input.resource.limits[CAP_ORG_COMMON_WEBHOOKS].used,
        input.resource.limits[CAP_ORG_COMMON_WEBHOOKS].max
    )
}

problems contains "user orgs limit reached" if {
    check_limit_exceeded(
        input.resource.limits[CAP_USER_OWNED_ORGS].used,
        input.resource.limits[CAP_USER_OWNED_ORGS].max
    )
}

problems contains "user cloud storages limit reached" if {
    check_limit_exceeded(
        input.resource.limits[CAP_USER_SANDBOX_CLOUD_STORAGES].used,
        input.resource.limits[CAP_USER_SANDBOX_CLOUD_STORAGES].max
    )
}

problems contains "org cloud storages limit reached" if {
    check_limit_exceeded(
        input.resource.limits[CAP_ORG_CLOUD_STORAGES].used,
        input.resource.limits[CAP_ORG_CLOUD_STORAGES].max
    )
}

# In the case of invalid input or no applicable limits,
# we deny the request. We suppose that we always check at least 1
# limit, and this package is queried by IAM only when there are
# limits to check in the input scope.
default result = {
    "allow": false,
    "reasons": []
}

# Admins bypass all limit checks; everyone else is allowed only when
# no limit produced a problem, and the problems become the reasons.
result := {
    "allow": true,
    "reasons": [],
} if {
    utils.is_admin
} else := {
    "allow": count(problems) == 0,
    "reasons": problems
} if {
    not utils.is_admin
    count(input.resource.limits) != 0
}

allow := result.allow
|
||||||
@ -0,0 +1,14 @@
|
|||||||
|
TestKind,Capability,CapKind
|
||||||
|
single,USER_SANDBOX_TASKS,max
|
||||||
|
single,USER_SANDBOX_PROJECTS,max
|
||||||
|
single,TASKS_IN_USER_SANDBOX_PROJECT,max
|
||||||
|
single,USER_OWNED_ORGS,max
|
||||||
|
single,USER_SANDBOX_CLOUD_STORAGES,max
|
||||||
|
single,ORG_TASKS,max
|
||||||
|
single,ORG_PROJECTS,max
|
||||||
|
single,TASKS_IN_ORG_PROJECT,max
|
||||||
|
single,ORG_CLOUD_STORAGES,max
|
||||||
|
single,ORG_COMMON_WEBHOOKS,max
|
||||||
|
single,PROJECT_WEBHOOKS,max
|
||||||
|
multi,"USER_SANDBOX_TASKS,USER_SANDBOX_PROJECTS",N/A
|
||||||
|
multi,,N/A
|
||||||
|
|
|
|
|
@ -0,0 +1,211 @@
|
|||||||
|
# Copyright (C) 2022 CVAT.ai Corporation
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
import csv
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import textwrap
|
||||||
|
from enum import Enum
|
||||||
|
from itertools import product
|
||||||
|
|
||||||
|
NAME = "limits"
|
||||||
|
|
||||||
|
|
||||||
|
class TestKinds(str, Enum):
    """Kinds of generated tests: one capability per test or several."""

    single = "single"
    multi = "multi"

    def __str__(self) -> str:
        # Lowercased value, used when members are formatted as text.
        return self.value.lower()
|
||||||
|
|
||||||
|
|
||||||
|
class CapKinds(str, Enum):
    """Kinds of capability checks; currently only "max" (upper limit)."""

    max = "max"

    def __str__(self) -> str:
        # Lowercased value, used when members are formatted as text.
        return self.value.lower()
|
||||||
|
|
||||||
|
|
||||||
|
def read_test_table(name):
|
||||||
|
# The table describes positive cases and test configurations
|
||||||
|
table = []
|
||||||
|
with open(os.path.join(sys.argv[1], f"{name}.csv")) as f:
|
||||||
|
for row in csv.DictReader(f):
|
||||||
|
table.append(row)
|
||||||
|
|
||||||
|
return table
|
||||||
|
|
||||||
|
|
||||||
|
# Test configuration table, loaded at import time from <NAME>.csv in the
# directory passed as the first command-line argument.
test_table = read_test_table(NAME)

# Capability name -> capability kind, taken from the "single" rows.
CAPABILITIES = {
    entry["Capability"]: entry["CapKind"]
    for entry in test_table
    if entry["TestKind"] == TestKinds.single
}

# User roles every generated test is repeated for.
ROLES = ["user", "admin"]

# Value grid for "max"-kind capabilities: None means "unlimited";
# the "used" values straddle the non-null limit (one below, one above).
MAX_CAPABILITY_LIMIT_VALUES = [None, 5]
MAX_CAPABILITY_USED_VALUES = [2, 7]
|
||||||
|
|
||||||
|
|
||||||
|
def eval_rule(test_kind, role, capabilities, *, data):
    """Compute the expected policy decision for one generated test case.

    Mirrors the ``limits`` Rego package: admins are always allowed, an
    empty set of limits is denied, and every exceeded "max" capability
    contributes one denial message.

    Returns a dict with "allow" (bool) and "messages" (message count).
    """
    # Admins bypass all limit checks.
    if role == "admin":
        return {"allow": True, "messages": 0}

    # No limits to check -> deny with no messages (the Rego default rule).
    if not capabilities:
        return {"allow": False, "messages": 0}

    allow = True
    messages = 0
    for capability in capabilities:
        kind = CAPABILITIES[capability["name"]]
        if kind != CapKinds.max:
            raise ValueError(f"Unknown capability kind {kind}")
        limit_data = data["resource"]["limits"][capability["name"]]
        # null/None max means unlimited.
        within = (limit_data["max"] is None) or (limit_data["used"] < limit_data["max"])
        if not within:
            allow = False
            messages += 1

    return {"allow": allow, "messages": messages}
|
||||||
|
|
||||||
|
|
||||||
|
def _get_name(prefix, **kwargs):
|
||||||
|
name = prefix
|
||||||
|
for k, v in kwargs.items():
|
||||||
|
prefix = "_" + str(k)
|
||||||
|
if isinstance(v, dict):
|
||||||
|
if "id" in v:
|
||||||
|
v = v.copy()
|
||||||
|
v.pop("id")
|
||||||
|
if v:
|
||||||
|
name += _get_name(prefix, **v)
|
||||||
|
else:
|
||||||
|
name += "".join(
|
||||||
|
map(
|
||||||
|
lambda c: c if c.isalnum() else {"@": "_IN_"}.get(c, "_"),
|
||||||
|
f"{prefix}_{str(v).upper()}",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
return name
|
||||||
|
|
||||||
|
|
||||||
|
def get_name(*args, **kwargs):
|
||||||
|
return _get_name("test", *args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def generate_capability_cases(capability: str):
    """Yield parameter dicts covering the (used, max) grid for a capability."""
    capability_kind = CAPABILITIES[capability]
    if capability_kind != CapKinds.max:
        raise ValueError(f"Unknown capability kind {capability_kind}")

    # Outer loop over "used", inner over "max" (same order as product()).
    for used in MAX_CAPABILITY_USED_VALUES:
        for maximum in MAX_CAPABILITY_LIMIT_VALUES:
            yield {"name": capability, "used": used, "max": maximum}
|
||||||
|
|
||||||
|
|
||||||
|
def generate_test_data(test_kind, role, capabilities):
    """Build the OPA input document for one generated test case."""
    limits = {}
    for cap_case in capabilities:
        cap_name = cap_case["name"]
        cap_kind = CAPABILITIES[cap_name]
        if cap_kind != CapKinds.max:
            raise ValueError(f"Unknown capability type {cap_kind}")
        limits[cap_name] = {
            "used": cap_case["used"],
            "max": cap_case["max"],
        }

    return {
        "auth": {"user": {"privilege": role}},
        "resource": {
            "limits": limits,
        },
    }
|
||||||
|
|
||||||
|
|
||||||
|
def generate_test_cases():
    """Yield kwargs for every generated test, driven by the CSV table."""
    for config in test_table:
        test_kind = config["TestKind"]

        if test_kind == TestKinds.single:
            # One capability per test, all roles x all value grids.
            cases = generate_capability_cases(config["Capability"])
            for role, cap_case in product(ROLES, cases):
                yield dict(test_kind=test_kind, role=role, capabilities=[cap_case])

        elif test_kind == TestKinds.multi:
            # Several (possibly zero) comma-separated capabilities per test.
            names = config["Capability"].split(",") if config["Capability"] else []
            per_capability = [generate_capability_cases(n) for n in names]

            for params in product(ROLES, *per_capability):
                yield dict(test_kind=test_kind, role=params[0], capabilities=params[1:])

        else:
            raise ValueError(f"Unknown test kind {test_kind}")
|
||||||
|
|
||||||
|
|
||||||
|
def gen_test_rego(name):
    """Write <name>_test.gen.rego with one Rego test per generated case.

    Expected results come from eval_rule(), which mirrors the policy
    logic. The generator script and the CSV table are appended as
    comments so the output records how it was produced.
    """
    with open(f"{name}_test.gen.rego", "wt") as f:
        f.write(f"package {name}\n\n")

        for test_params in generate_test_cases():
            test_data = generate_test_data(**test_params)
            test_result = eval_rule(**test_params, data=test_data)
            test_name = get_name(**test_params)
            f.write(
                textwrap.dedent(
                    """
                    {test_name} {{
                        r := result with input as {data}
                        r.allow == {allow}
                        count(r.reasons) == {messages}
                    }}
                    """
                ).format(
                    test_name=test_name,
                    allow=str(test_result["allow"]).lower(),
                    messages=test_result["messages"],
                    data=json.dumps(test_data),
                )
            )

        # Write the script which is used to generate the file
        with open(sys.argv[0]) as this_file:
            f.write(f"\n\n# {os.path.split(sys.argv[0])[1]}\n")
            for line in this_file:
                if line.strip():
                    f.write(f"# {line}")
                else:
                    f.write(f"#\n")

        # Write rules which are used to generate the file
        with open(os.path.join(sys.argv[1], f"{name}.csv")) as rego_file:
            f.write(f"\n\n# {name}.csv\n")
            for line in rego_file:
                if line.strip():
                    f.write(f"# {line}")
                else:
                    f.write(f"#\n")
||||||
|
|
||||||
|
|
||||||
|
gen_test_rego(NAME)
|
||||||
@ -0,0 +1,6 @@
|
|||||||
|
from django.apps import AppConfig
|
||||||
|
|
||||||
|
|
||||||
|
class LimitManagerConfig(AppConfig):
    """Django application configuration for the limit_manager app."""

    default_auto_field = 'django.db.models.BigAutoField'
    name = 'limit_manager'
|
||||||
@ -0,0 +1,230 @@
|
|||||||
|
# Copyright (C) 2022 CVAT.ai Corporation
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
from enum import Enum, auto
|
||||||
|
from typing import Optional, cast
|
||||||
|
|
||||||
|
from attrs import define
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
from cvat.apps.engine.models import CloudStorage, Project, Task
|
||||||
|
from cvat.apps.organizations.models import Organization
|
||||||
|
from cvat.apps.webhooks.models import Webhook
|
||||||
|
|
||||||
|
|
||||||
|
class Limits(Enum):
    """
    Represents a capability which has an upper limit, and can be consumed.

    Each capability is also supposed to have a separate CapabilityContext class,
    representing its parameters. Different parameter combinations should each have
    a different enum member, no member reuse is supposed for different limits.
    """

    # TODO: for a capability with N params, there are O(k^N)
    # possible limitation combinations. Not all are meaningful, but even though
    # it is quite a large number. Example:

    # A "task create" capability [user_id, org_id, project_id]
    # yields the following possible limitations:
    # - tasks from the user
    # - tasks from the user outside orgs
    # - tasks from the user inside orgs
    # - tasks from the user in the org
    # - tasks from the user in the project
    # - tasks in the org
    # - tasks in the org projects
    # ...
    #
    # Currently, we will cover all of this with a single request to the limit manager.
    # For each meaningful combination class a capability enum entry is supposed.

    # Per-user (sandbox) capabilities.
    USER_SANDBOX_TASKS = auto()
    USER_SANDBOX_PROJECTS = auto()
    TASKS_IN_USER_SANDBOX_PROJECT = auto()
    USER_OWNED_ORGS = auto()
    USER_SANDBOX_CLOUD_STORAGES = auto()

    # Per-organization capabilities.
    ORG_TASKS = auto()
    ORG_PROJECTS = auto()
    TASKS_IN_ORG_PROJECT = auto()
    ORG_CLOUD_STORAGES = auto()
    ORG_COMMON_WEBHOOKS = auto()

    # Per-project capabilities.
    PROJECT_WEBHOOKS = auto()
|
||||||
|
|
||||||
|
class CapabilityContext:
    """Base class for the parameters of a single Limits capability check."""
    pass

@define(kw_only=True)
class UserCapabilityContext(CapabilityContext):
    """Context for capabilities scoped to a user."""
    user_id: int

@define(kw_only=True)
class OrgCapabilityContext(CapabilityContext):
    """Context for capabilities scoped to an organization."""
    org_id: int

@define(kw_only=True)
class UserSandboxTasksContext(UserCapabilityContext):
    pass

@define(kw_only=True)
class OrgTasksContext(OrgCapabilityContext):
    pass

@define(kw_only=True)
class TasksInUserSandboxProjectContext(UserCapabilityContext):
    project_id: int

@define(kw_only=True)
class TasksInOrgProjectContext(OrgCapabilityContext):
    project_id: int

@define(kw_only=True)
class UserSandboxProjectsContext(UserCapabilityContext):
    pass

@define(kw_only=True)
class OrgProjectsContext(OrgCapabilityContext):
    pass

@define(kw_only=True)
class UserSandboxCloudStoragesContext(UserCapabilityContext):
    pass

@define(kw_only=True)
class OrgCloudStoragesContext(OrgCapabilityContext):
    pass

@define(kw_only=True)
class UserOrgsContext(UserCapabilityContext):
    pass

@define(kw_only=True)
class ProjectWebhooksContext(CapabilityContext):
    # Scoped to a project directly, not to a user or org.
    project_id: int

@define(kw_only=True)
class OrgCommonWebhooksContext(OrgCapabilityContext):
    pass
|
||||||
|
|
||||||
|
|
||||||
|
@define(frozen=True)
class LimitStatus:
    """Current consumption of a capability: used count vs. configured max.

    A None max means the capability is unlimited.
    """

    # Current number of consumed resources.
    used: Optional[int]
    # Configured upper bound, or None for unlimited.
    max: Optional[int]
|
||||||
|
|
||||||
|
class LimitManager:
    """Answers "how much of capability X is used, and what is its limit?".

    Each Limits member maps to one ORM count query plus a default limit
    from settings.DEFAULT_LIMITS.
    """

    def get_status(self,
        limit: Limits, *,
        context: Optional[CapabilityContext] = None,
    ) -> LimitStatus:
        """Return the LimitStatus for the given capability.

        `context` must be the CapabilityContext subclass matching `limit`
        (asserted non-None; the cast is for type checkers only).
        Raises NotImplementedError for an unknown capability.
        """
        if limit == Limits.USER_OWNED_ORGS:
            assert context is not None
            context = cast(UserOrgsContext, context)

            return LimitStatus(
                Organization.objects.filter(owner_id=context.user_id).count(),
                settings.DEFAULT_LIMITS["USER_OWNED_ORGS"],
            )

        elif limit == Limits.USER_SANDBOX_PROJECTS:
            assert context is not None
            context = cast(UserSandboxProjectsContext, context)

            return LimitStatus(
                # TODO: check about active/removed projects
                # organization=None restricts the count to the sandbox.
                Project.objects.filter(owner=context.user_id, organization=None).count(),
                settings.DEFAULT_LIMITS["USER_SANDBOX_PROJECTS"],
            )

        elif limit == Limits.ORG_PROJECTS:
            assert context is not None
            context = cast(OrgProjectsContext, context)

            return LimitStatus(
                # TODO: check about active/removed projects
                Project.objects.filter(organization=context.org_id).count(),
                settings.DEFAULT_LIMITS["ORG_PROJECTS"],
            )

        elif limit == Limits.USER_SANDBOX_TASKS:
            assert context is not None
            context = cast(UserSandboxTasksContext, context)

            return LimitStatus(
                # TODO: check about active/removed tasks
                Task.objects.filter(owner=context.user_id, organization=None).count(),
                settings.DEFAULT_LIMITS["USER_SANDBOX_TASKS"],
            )

        elif limit == Limits.ORG_TASKS:
            assert context is not None
            context = cast(OrgTasksContext, context)

            return LimitStatus(
                # TODO: check about active/removed tasks
                Task.objects.filter(organization=context.org_id).count(),
                settings.DEFAULT_LIMITS["ORG_TASKS"],
            )

        elif limit == Limits.TASKS_IN_USER_SANDBOX_PROJECT:
            assert context is not None
            context = cast(TasksInUserSandboxProjectContext, context)

            return LimitStatus(
                # TODO: check about active/removed tasks
                Task.objects.filter(project=context.project_id).count(),
                settings.DEFAULT_LIMITS["TASKS_IN_USER_SANDBOX_PROJECT"]
            )

        elif limit == Limits.TASKS_IN_ORG_PROJECT:
            assert context is not None
            context = cast(TasksInOrgProjectContext, context)

            return LimitStatus(
                # TODO: check about active/removed tasks
                Task.objects.filter(project=context.project_id).count(),
                settings.DEFAULT_LIMITS["TASKS_IN_ORG_PROJECT"]
            )

        elif limit == Limits.PROJECT_WEBHOOKS:
            assert context is not None
            context = cast(ProjectWebhooksContext, context)

            return LimitStatus(
                # We only limit webhooks per project, not per user
                # TODO: think over this limit, maybe we should limit per user
                Webhook.objects.filter(project=context.project_id).count(),
                settings.DEFAULT_LIMITS["PROJECT_WEBHOOKS"]
            )

        elif limit == Limits.ORG_COMMON_WEBHOOKS:
            assert context is not None
            context = cast(OrgCommonWebhooksContext, context)

            return LimitStatus(
                # project=None selects org-level (common) webhooks only.
                Webhook.objects.filter(organization=context.org_id, project=None).count(),
                settings.DEFAULT_LIMITS["ORG_COMMON_WEBHOOKS"]
            )

        elif limit == Limits.USER_SANDBOX_CLOUD_STORAGES:
            assert context is not None
            context = cast(UserSandboxCloudStoragesContext, context)

            return LimitStatus(
                CloudStorage.objects.filter(owner=context.user_id, organization=None).count(),
                settings.DEFAULT_LIMITS["USER_SANDBOX_CLOUD_STORAGES"]
            )

        elif limit == Limits.ORG_CLOUD_STORAGES:
            assert context is not None
            context = cast(OrgCloudStoragesContext, context)

            return LimitStatus(
                CloudStorage.objects.filter(organization=context.org_id).count(),
                settings.DEFAULT_LIMITS["ORG_CLOUD_STORAGES"]
            )

        raise NotImplementedError(f"Unknown capability {limit.name}")
|
||||||
@ -0,0 +1,11 @@
|
|||||||
|
# Copyright (C) 2022 CVAT.ai Corporation
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
from django.db import models
|
||||||
|
|
||||||
|
import cvat.apps.limit_manager.core.limits as core
|
||||||
|
|
||||||
|
|
||||||
|
class Limits(core.Limits, models.TextChoices):
    """Django choices view over the core Limits capabilities.

    NOTE(review): core.Limits already declares members, and Python enums
    with members normally cannot be subclassed — confirm this import-time
    mixin actually works with the TextChoices metaclass.
    """
    pass
|
||||||
@ -0,0 +1,560 @@
|
|||||||
|
# Copyright (C) 2022 CVAT.ai Corporation
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
import json
|
||||||
|
from contextlib import contextmanager
|
||||||
|
from functools import partial
|
||||||
|
from http import HTTPStatus
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional
|
||||||
|
from uuid import uuid4
|
||||||
|
|
||||||
|
import boto3
|
||||||
|
import pytest
|
||||||
|
from cvat_sdk import Client, exceptions
|
||||||
|
from cvat_sdk.api_client import ApiClient, models
|
||||||
|
from cvat_sdk.core.client import Config
|
||||||
|
from cvat_sdk.core.proxies.projects import Project
|
||||||
|
from cvat_sdk.core.proxies.tasks import ResourceType, Task
|
||||||
|
|
||||||
|
from shared.utils.config import (
|
||||||
|
BASE_URL,
|
||||||
|
MINIO_ENDPOINT_URL,
|
||||||
|
MINIO_KEY,
|
||||||
|
MINIO_SECRET_KEY,
|
||||||
|
USER_PASS,
|
||||||
|
post_method,
|
||||||
|
)
|
||||||
|
from shared.utils.helpers import generate_image_file
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
def fxt_image_file(tmp_path: Path):
    """Write a small generated PNG into tmp_path and return its path."""
    img_path = tmp_path / "img.png"
    with img_path.open("wb") as f:
        f.write(generate_image_file(filename=str(img_path), size=(5, 10)).getvalue())

    return img_path
|
||||||
|
|
||||||
|
|
||||||
|
def get_common_storage_params():
    """Return the cloud-storage request fields shared by these tests.

    Points at the test MinIO service via an S3-compatible configuration.
    """
    params = {
        "provider_type": "AWS_S3_BUCKET",
        "credentials_type": "KEY_SECRET_KEY_PAIR",
    }
    params["key"] = "minio_access_key"
    params["secret_key"] = "minio_secret_key"
    params["specific_attributes"] = "endpoint_url=http://minio:9000"
    return params
|
||||||
|
|
||||||
|
|
||||||
|
def define_s3_client():
    """Create a boto3 S3 client bound to the test MinIO endpoint."""
    s3 = boto3.resource(
        "s3",
        aws_access_key_id=MINIO_KEY,
        aws_secret_access_key=MINIO_SECRET_KEY,
        endpoint_url=MINIO_ENDPOINT_URL,
    )
    return s3.meta.client
|
||||||
|
|
||||||
|
|
||||||
|
class TestUserLimits:
|
||||||
|
    @classmethod
    def _create_user(cls, api_client: ApiClient, email: str) -> str:
        """Register a new user via the API and return their username.

        The username is the local part of the email address.
        """
        username = email.split("@", maxsplit=1)[0]
        with api_client:
            (user, _) = api_client.auth_api.create_register(
                models.RegisterSerializerExRequest(
                    username=username, password1=USER_PASS, password2=USER_PASS, email=email
                )
            )

        # Presumably registration leaves session cookies on the shared
        # client; clear them to avoid implicit auth — TODO confirm.
        api_client.cookies.clear()

        return user.username
|
||||||
|
|
||||||
|
    def _make_client(self) -> Client:
        """Create an SDK client with a short status polling period."""
        return Client(BASE_URL, config=Config(status_check_period=0.01))
|
||||||
|
|
||||||
|
    @pytest.fixture(autouse=True)
    def setup(self, restore_db_per_function, tmp_path: Path, fxt_image_file: Path):
        """Create a fresh user and logged-in client for each test."""
        self.tmp_dir = tmp_path
        self.image_file = fxt_image_file

        self.client = self._make_client()
        self.user = self._create_user(self.client.api_client, email="test_user_limits@localhost")

        with self.client:
            self.client.login((self.user, USER_PASS))
|
||||||
|
|
||||||
|
    @pytest.fixture
    def fxt_another_client(self) -> Client:
        """A second, independently registered and logged-in client."""
        client = self._make_client()
        user = self._create_user(self.client.api_client, email="test_user_limits2@localhost")

        with client:
            client.login((user, USER_PASS))
            yield client
|
||||||
|
|
||||||
|
    # Server-side default limits these tests are written against.
    # NOTE(review): assumed to match settings.DEFAULT_LIMITS — confirm.
    _DEFAULT_TASKS_LIMIT = 10
    _DEFAULT_PROJECT_TASKS_LIMIT = 5
    _DEFAULT_PROJECTS_LIMIT = 3
    _DEFAULT_ORGS_LIMIT = 1
    _DEFAULT_CLOUD_STORAGES_LIMIT = 10

    # Denial reasons produced by the "limits" OPA policy.
    _TASK_LIMIT_MESSAGE = "user tasks limit reached"
    _PROJECT_TASK_LIMIT_MESSAGE = "user project tasks limit reached"
    _PROJECTS_LIMIT_MESSAGE = "user projects limit reached"
    _ORGS_LIMIT_MESSAGE = "user orgs limit reached"
    _CLOUD_STORAGES_LIMIT_MESSAGE = "user cloud storages limit reached"
|
||||||
|
|
||||||
|
    def _create_task(
        self, *, project: Optional[int] = None, client: Optional[Client] = None
    ) -> Task:
        """Create a one-image local task, optionally inside a project.

        Uses self.client unless another client is given. When a project
        is given, no labels are sent with the task.
        """
        if client is None:
            client = self.client

        return client.tasks.create_from_data(
            spec=models.TaskWriteRequest(
                name="test_task",
                labels=[models.PatchedLabelRequest(name="cat")] if not project else [],
                project_id=project,
            ),
            resource_type=ResourceType.LOCAL,
            resources=[str(self.image_file)],
        )
|
||||||
|
|
||||||
|
    def _create_project(self, *, client: Optional[Client] = None) -> Project:
        """Create an empty project, using self.client unless another is given."""
        if client is None:
            client = self.client

        return client.projects.create(models.ProjectWriteRequest(name="test_project"))
|
||||||
|
|
||||||
|
    def test_can_reach_tasks_limit(self):
        """One task over the sandbox tasks limit is rejected with 403."""
        for _ in range(self._DEFAULT_TASKS_LIMIT):
            self._create_task()

        with pytest.raises(exceptions.ApiException) as capture:
            self._create_task()

        assert capture.value.status == HTTPStatus.FORBIDDEN
        assert set(json.loads(capture.value.body)) == {self._TASK_LIMIT_MESSAGE}
|
||||||
|
|
||||||
|
    def test_can_reach_tasks_limit_when_importing_backup(self):
        """Importing a task backup also counts against the tasks limit."""
        for _ in range(self._DEFAULT_TASKS_LIMIT):
            task = self._create_task()

        backup_filename = self.tmp_dir / "task_backup.zip"
        task.download_backup(backup_filename)

        with pytest.raises(exceptions.ApiException) as capture:
            self.client.tasks.create_from_backup(backup_filename)

        assert capture.value.status == HTTPStatus.FORBIDDEN
        assert set(json.loads(capture.value.body)) == {self._TASK_LIMIT_MESSAGE}
|
||||||
|
|
||||||
|
    def test_can_reach_tasks_limit_when_creating_in_project(self):
        """One task over the per-project tasks limit is rejected with 403."""
        project = self._create_project().id

        for _ in range(self._DEFAULT_PROJECT_TASKS_LIMIT):
            self._create_task(project=project)

        with pytest.raises(exceptions.ApiException) as capture:
            self._create_task(project=project)

        assert capture.value.status == HTTPStatus.FORBIDDEN
        assert set(json.loads(capture.value.body)) == {self._PROJECT_TASK_LIMIT_MESSAGE}
|
||||||
|
|
||||||
|
    def test_can_reach_tasks_limit_when_creating_in_different_projects(self):
        """Tasks spread across projects still count toward the user tasks limit."""
        project1 = self._create_project().id
        project2 = self._create_project().id

        # Fill the total tasks limit using two projects.
        for _ in range(self._DEFAULT_PROJECT_TASKS_LIMIT):
            self._create_task(project=project1)
        for _ in range(self._DEFAULT_TASKS_LIMIT - self._DEFAULT_PROJECT_TASKS_LIMIT):
            self._create_task(project=project2)

        with pytest.raises(exceptions.ApiException) as capture:
            self._create_task()

        assert capture.value.status == HTTPStatus.FORBIDDEN
        assert set(json.loads(capture.value.body)) == {self._TASK_LIMIT_MESSAGE}
|
||||||
|
|
||||||
|
    def test_can_reach_tasks_limit_when_creating_in_filled_project(self):
        """Both the user tasks limit and the project tasks limit are reported."""
        project = self._create_project().id

        # Reach the project tasks limit, then top up to the user tasks limit.
        for _ in range(self._DEFAULT_PROJECT_TASKS_LIMIT):
            self._create_task(project=project)
        for _ in range(self._DEFAULT_TASKS_LIMIT - self._DEFAULT_PROJECT_TASKS_LIMIT):
            self._create_task()

        with pytest.raises(exceptions.ApiException) as capture:
            self._create_task(project=project)

        assert capture.value.status == HTTPStatus.FORBIDDEN
        assert set(json.loads(capture.value.body)) == {
            self._TASK_LIMIT_MESSAGE,
            self._PROJECT_TASK_LIMIT_MESSAGE,
        }
|
||||||
|
|
||||||
|
    def test_can_reach_project_tasks_limit_when_moving_into_filled_project(self):
        """Moving a task into a project at its tasks limit is rejected."""
        project = self._create_project().id
        for _ in range(self._DEFAULT_PROJECT_TASKS_LIMIT):
            self._create_task(project=project)

        task = self._create_task()

        with pytest.raises(exceptions.ApiException) as capture:
            task.update(models.PatchedTaskWriteRequest(project_id=project))

        assert capture.value.status == HTTPStatus.FORBIDDEN
        assert set(json.loads(capture.value.body)) == {self._PROJECT_TASK_LIMIT_MESSAGE}
|
||||||
|
|
||||||
|
@pytest.mark.xfail(
|
||||||
|
raises=AssertionError, reason="only admins can change ownership, but they ignore limits"
|
||||||
|
)
|
||||||
|
def test_can_reach_tasks_limit_when_giving_away_to_another_user(
|
||||||
|
self, fxt_another_client: Client
|
||||||
|
):
|
||||||
|
for _ in range(self._DEFAULT_TASKS_LIMIT):
|
||||||
|
self._create_task(client=fxt_another_client)
|
||||||
|
|
||||||
|
task = self._create_task()
|
||||||
|
|
||||||
|
with pytest.raises(exceptions.ApiException) as capture:
|
||||||
|
task.update(
|
||||||
|
models.PatchedTaskWriteRequest(
|
||||||
|
owner_id=fxt_another_client.users.retrieve_current_user().id
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
assert capture.value.status == HTTPStatus.FORBIDDEN
|
||||||
|
assert set(json.loads(capture.value.body)) == {self._PROJECT_TASK_LIMIT_MESSAGE}
|
||||||
|
|
||||||
|
@pytest.mark.xfail(
|
||||||
|
raises=AssertionError, reason="only admins can change ownership, but they ignore limits"
|
||||||
|
)
|
||||||
|
def test_can_reach_project_tasks_limit_when_giving_away_to_another_users_filled_project(
|
||||||
|
self, fxt_another_client: Client
|
||||||
|
):
|
||||||
|
project = self._create_project(client=fxt_another_client).id
|
||||||
|
|
||||||
|
for _ in range(self._DEFAULT_PROJECT_TASKS_LIMIT):
|
||||||
|
self._create_task(client=fxt_another_client, project=project)
|
||||||
|
for _ in range(self._DEFAULT_TASKS_LIMIT - self._DEFAULT_PROJECT_TASKS_LIMIT):
|
||||||
|
self._create_task(client=fxt_another_client)
|
||||||
|
|
||||||
|
task = self._create_task()
|
||||||
|
|
||||||
|
with pytest.raises(exceptions.ApiException) as capture:
|
||||||
|
task.update(
|
||||||
|
models.PatchedTaskWriteRequest(
|
||||||
|
owner_id=fxt_another_client.users.retrieve_current_user().id
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
assert capture.value.status == HTTPStatus.FORBIDDEN
|
||||||
|
assert set(json.loads(capture.value.body)) == {
|
||||||
|
self._DEFAULT_TASKS_LIMIT,
|
||||||
|
self._PROJECT_TASK_LIMIT_MESSAGE,
|
||||||
|
}
|
||||||
|
|
||||||
|
@pytest.mark.xfail(
|
||||||
|
raises=AssertionError, reason="only admins can change ownership, but they ignore limits"
|
||||||
|
)
|
||||||
|
def test_can_reach_projects_limit_when_giving_away_to_another_user(
|
||||||
|
self, fxt_another_client: Client
|
||||||
|
):
|
||||||
|
for _ in range(self._DEFAULT_PROJECTS_LIMIT):
|
||||||
|
self._create_project(client=fxt_another_client)
|
||||||
|
|
||||||
|
project = self._create_project()
|
||||||
|
|
||||||
|
with pytest.raises(exceptions.ApiException) as capture:
|
||||||
|
project.update(
|
||||||
|
models.PatchedProjectWriteRequest(
|
||||||
|
owner_id=fxt_another_client.users.retrieve_current_user().id
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
assert capture.value.status == HTTPStatus.FORBIDDEN
|
||||||
|
assert set(json.loads(capture.value.body)) == {self._PROJECT_TASK_LIMIT_MESSAGE}
|
||||||
|
|
||||||
|
def test_can_reach_projects_limit(self):
|
||||||
|
for _ in range(self._DEFAULT_PROJECTS_LIMIT):
|
||||||
|
self._create_project()
|
||||||
|
|
||||||
|
with pytest.raises(exceptions.ApiException) as capture:
|
||||||
|
self._create_project()
|
||||||
|
|
||||||
|
assert capture.value.status == HTTPStatus.FORBIDDEN
|
||||||
|
assert set(json.loads(capture.value.body)) == {self._PROJECTS_LIMIT_MESSAGE}
|
||||||
|
|
||||||
|
def test_can_reach_projects_limit_when_importing_backup(self):
|
||||||
|
for _ in range(self._DEFAULT_PROJECTS_LIMIT):
|
||||||
|
project = self._create_project()
|
||||||
|
|
||||||
|
backup_filename = self.tmp_dir / (project.name + "_backup.zip")
|
||||||
|
project.download_backup(backup_filename)
|
||||||
|
|
||||||
|
with pytest.raises(exceptions.ApiException) as capture:
|
||||||
|
self.client.projects.create_from_backup(backup_filename)
|
||||||
|
|
||||||
|
assert capture.value.status == HTTPStatus.FORBIDDEN
|
||||||
|
assert set(json.loads(capture.value.body)) == {self._PROJECTS_LIMIT_MESSAGE}
|
||||||
|
|
||||||
|
def test_can_reach_orgs_limit(self):
|
||||||
|
for i in range(self._DEFAULT_ORGS_LIMIT):
|
||||||
|
(_, response) = self.client.api_client.organizations_api.create(
|
||||||
|
models.OrganizationWriteRequest(slug=f"test_user_orgs_{i}"), _parse_response=False
|
||||||
|
)
|
||||||
|
assert response.status == HTTPStatus.CREATED
|
||||||
|
|
||||||
|
with pytest.raises(exceptions.ApiException) as capture:
|
||||||
|
self.client.api_client.organizations_api.create(
|
||||||
|
models.OrganizationWriteRequest(slug=f"test_user_orgs_{i}"), _parse_response=False
|
||||||
|
)
|
||||||
|
|
||||||
|
assert capture.value.status == HTTPStatus.FORBIDDEN
|
||||||
|
assert set(json.loads(capture.value.body)) == {self._ORGS_LIMIT_MESSAGE}
|
||||||
|
|
||||||
|
    @pytest.mark.with_external_services
    def test_can_reach_cloud_storages_limit(self, request: pytest.FixtureRequest):
        """One cloud storage beyond the user's limit must be rejected with 403."""
        storage_params = get_common_storage_params()

        # TODO: refactor after https://github.com/opencv/cvat/pull/4819
        s3_client = define_s3_client()

        def _create_bucket(name: str) -> str:
            # Randomize the bucket name so repeated runs don't collide, and
            # register cleanup so the bucket is removed when the test ends.
            name = name + str(uuid4())
            s3_client.create_bucket(Bucket=name)
            request.addfinalizer(partial(s3_client.delete_bucket, Bucket=name))
            return name

        def _add_storage(idx: int):
            # Register a cloud storage in the user's personal sandbox
            # (no org context passed).
            response = post_method(
                self.user,
                "cloudstorages",
                {
                    "display_name": f"test_storage{idx}",
                    "resource": _create_bucket(f"testbucket{idx}"),
                    **storage_params,
                },
            )
            return response

        # Fill the sandbox up to the cloud storages limit.
        for i in range(self._DEFAULT_CLOUD_STORAGES_LIMIT):
            response = _add_storage(i)
            assert response.status_code == HTTPStatus.CREATED

        # One more storage beyond the limit. The last loop index is reused,
        # but the bucket name stays unique thanks to the uuid suffix.
        response = _add_storage(i)

        assert response.status_code == HTTPStatus.FORBIDDEN
        assert set(response.json()) == {self._CLOUD_STORAGES_LIMIT_MESSAGE}
class TestOrgLimits:
|
||||||
|
@classmethod
|
||||||
|
def _create_org(cls, api_client: ApiClient) -> str:
|
||||||
|
with api_client:
|
||||||
|
(_, response) = api_client.organizations_api.create(
|
||||||
|
models.OrganizationWriteRequest(slug="test_org_limits"), _parse_response=False
|
||||||
|
)
|
||||||
|
|
||||||
|
return json.loads(response.data)
|
||||||
|
|
||||||
|
def _make_client(self) -> Client:
|
||||||
|
return Client(BASE_URL, config=Config(status_check_period=0.01))
|
||||||
|
|
||||||
|
    @pytest.fixture(autouse=True)
    def setup(
        self, restore_db_per_function, tmp_path: Path, regular_user: str, fxt_image_file: Path
    ):
        # Per-test state: scratch directory and a sample image used as task data.
        self.tmp_dir = tmp_path
        self.image_file = fxt_image_file

        self.client = self._make_client()
        self.user = regular_user

        # Keep the client open for the whole test.
        with self.client:
            self.client.login((self.user, USER_PASS))

            # Every test runs inside a freshly created organization.
            org = self._create_org(self.client.api_client)
            self.org = org["id"]
            self.org_slug = org["slug"]

            # Route all of this client's requests through the org context
            # (via the X-Organization header) for the duration of the test.
            with self._patch_client_with_org(self.client):
                yield
    # Default org limits expected to be configured on the test server.
    _DEFAULT_TASKS_LIMIT = 10
    _DEFAULT_PROJECT_TASKS_LIMIT = 5
    _DEFAULT_PROJECTS_LIMIT = 3
    _DEFAULT_CLOUD_STORAGES_LIMIT = 10

    # Denial messages the server returns when an org limit is reached.
    _TASK_LIMIT_MESSAGE = "org tasks limit reached"
    _PROJECT_TASK_LIMIT_MESSAGE = "org project tasks limit reached"
    _PROJECTS_LIMIT_MESSAGE = "org projects limit reached"
    _CLOUD_STORAGES_LIMIT_MESSAGE = "org cloud storages limit reached"
@contextmanager
|
||||||
|
def _patch_client_with_org(self, client: Optional[Client] = None):
|
||||||
|
if client is None:
|
||||||
|
client = self.client
|
||||||
|
|
||||||
|
new_headers = self.client.api_client.default_headers.copy()
|
||||||
|
new_headers["X-Organization"] = self.org_slug
|
||||||
|
with pytest.MonkeyPatch.context() as monkeypatch:
|
||||||
|
monkeypatch.setattr(client.api_client, "default_headers", new_headers)
|
||||||
|
yield client
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def fxt_patch_client_with_org(self):
|
||||||
|
with self._patch_client_with_org(self.client):
|
||||||
|
yield
|
||||||
|
|
||||||
|
def _create_task(
|
||||||
|
self, *, project: Optional[int] = None, client: Optional[Client] = None
|
||||||
|
) -> Task:
|
||||||
|
if client is None:
|
||||||
|
client = self.client
|
||||||
|
|
||||||
|
return client.tasks.create_from_data(
|
||||||
|
spec=models.TaskWriteRequest(
|
||||||
|
name="test_task",
|
||||||
|
labels=[models.PatchedLabelRequest(name="cat")] if not project else [],
|
||||||
|
project_id=project,
|
||||||
|
),
|
||||||
|
resource_type=ResourceType.LOCAL,
|
||||||
|
resources=[str(self.image_file)],
|
||||||
|
)
|
||||||
|
|
||||||
|
def _create_project(self, *, client: Optional[Client] = None) -> Project:
|
||||||
|
if client is None:
|
||||||
|
client = self.client
|
||||||
|
|
||||||
|
return client.projects.create(models.ProjectWriteRequest(name="test_project"))
|
||||||
|
|
||||||
|
def test_can_reach_tasks_limit(self):
|
||||||
|
for _ in range(self._DEFAULT_TASKS_LIMIT):
|
||||||
|
self._create_task()
|
||||||
|
|
||||||
|
with pytest.raises(exceptions.ApiException) as capture:
|
||||||
|
self._create_task()
|
||||||
|
|
||||||
|
assert capture.value.status == HTTPStatus.FORBIDDEN
|
||||||
|
assert set(json.loads(capture.value.body)) == {self._TASK_LIMIT_MESSAGE}
|
||||||
|
|
||||||
|
def test_can_reach_tasks_limit_when_importing_backup(self):
|
||||||
|
for _ in range(self._DEFAULT_TASKS_LIMIT):
|
||||||
|
task = self._create_task()
|
||||||
|
|
||||||
|
backup_filename = self.tmp_dir / "task_backup.zip"
|
||||||
|
task.download_backup(backup_filename)
|
||||||
|
|
||||||
|
with pytest.raises(exceptions.ApiException) as capture:
|
||||||
|
self.client.tasks.create_from_backup(backup_filename)
|
||||||
|
|
||||||
|
assert capture.value.status == HTTPStatus.FORBIDDEN
|
||||||
|
assert set(json.loads(capture.value.body)) == {self._TASK_LIMIT_MESSAGE}
|
||||||
|
|
||||||
|
def test_can_reach_tasks_limit_when_creating_in_project(self):
|
||||||
|
project = self._create_project().id
|
||||||
|
|
||||||
|
for _ in range(self._DEFAULT_PROJECT_TASKS_LIMIT):
|
||||||
|
self._create_task(project=project)
|
||||||
|
|
||||||
|
with pytest.raises(exceptions.ApiException) as capture:
|
||||||
|
self._create_task(project=project)
|
||||||
|
|
||||||
|
assert capture.value.status == HTTPStatus.FORBIDDEN
|
||||||
|
assert set(json.loads(capture.value.body)) == {self._PROJECT_TASK_LIMIT_MESSAGE}
|
||||||
|
|
||||||
|
def test_can_reach_tasks_limit_when_creating_in_different_projects(self):
|
||||||
|
project1 = self._create_project().id
|
||||||
|
project2 = self._create_project().id
|
||||||
|
|
||||||
|
for _ in range(self._DEFAULT_PROJECT_TASKS_LIMIT):
|
||||||
|
self._create_task(project=project1)
|
||||||
|
for _ in range(self._DEFAULT_TASKS_LIMIT - self._DEFAULT_PROJECT_TASKS_LIMIT):
|
||||||
|
self._create_task(project=project2)
|
||||||
|
|
||||||
|
with pytest.raises(exceptions.ApiException) as capture:
|
||||||
|
self._create_task()
|
||||||
|
|
||||||
|
assert capture.value.status == HTTPStatus.FORBIDDEN
|
||||||
|
assert set(json.loads(capture.value.body)) == {self._TASK_LIMIT_MESSAGE}
|
||||||
|
|
||||||
|
def test_can_reach_tasks_limit_when_creating_in_filled_project(self):
|
||||||
|
project = self._create_project().id
|
||||||
|
|
||||||
|
for _ in range(self._DEFAULT_PROJECT_TASKS_LIMIT):
|
||||||
|
self._create_task(project=project)
|
||||||
|
for _ in range(self._DEFAULT_TASKS_LIMIT - self._DEFAULT_PROJECT_TASKS_LIMIT):
|
||||||
|
self._create_task()
|
||||||
|
|
||||||
|
with pytest.raises(exceptions.ApiException) as capture:
|
||||||
|
self._create_task(project=project)
|
||||||
|
|
||||||
|
assert capture.value.status == HTTPStatus.FORBIDDEN
|
||||||
|
assert set(json.loads(capture.value.body)) == {
|
||||||
|
self._TASK_LIMIT_MESSAGE,
|
||||||
|
self._PROJECT_TASK_LIMIT_MESSAGE,
|
||||||
|
}
|
||||||
|
|
||||||
|
def test_can_reach_projects_limit(self):
|
||||||
|
for _ in range(self._DEFAULT_PROJECTS_LIMIT):
|
||||||
|
self._create_project()
|
||||||
|
|
||||||
|
with pytest.raises(exceptions.ApiException) as capture:
|
||||||
|
self._create_project()
|
||||||
|
|
||||||
|
assert capture.value.status == HTTPStatus.FORBIDDEN
|
||||||
|
assert set(json.loads(capture.value.body)) == {self._PROJECTS_LIMIT_MESSAGE}
|
||||||
|
|
||||||
|
def test_can_reach_projects_limit_when_importing_backup(self):
|
||||||
|
for _ in range(self._DEFAULT_PROJECTS_LIMIT):
|
||||||
|
project = self._create_project()
|
||||||
|
|
||||||
|
backup_filename = self.tmp_dir / "test_project_backup.zip"
|
||||||
|
project.download_backup(str(backup_filename))
|
||||||
|
|
||||||
|
with pytest.raises(exceptions.ApiException) as capture:
|
||||||
|
self.client.projects.create_from_backup(str(backup_filename))
|
||||||
|
|
||||||
|
assert capture.value.status == HTTPStatus.FORBIDDEN
|
||||||
|
assert set(json.loads(capture.value.body)) == {self._PROJECTS_LIMIT_MESSAGE}
|
||||||
|
|
||||||
|
    @pytest.mark.with_external_services
    def test_can_reach_cloud_storages_limit(self, request: pytest.FixtureRequest):
        """One cloud storage beyond the org's limit must be rejected with 403."""
        storage_params = get_common_storage_params()

        # TODO: refactor after https://github.com/opencv/cvat/pull/4819
        s3_client = define_s3_client()

        def _create_bucket(name: str) -> str:
            # Randomize the bucket name so repeated runs don't collide, and
            # register cleanup so the bucket is removed when the test ends.
            name = name + str(uuid4())
            s3_client.create_bucket(Bucket=name)
            request.addfinalizer(partial(s3_client.delete_bucket, Bucket=name))
            return name

        def _add_storage(idx: int):
            # Register a cloud storage inside the test organization
            # (org_id passed explicitly).
            response = post_method(
                self.user,
                "cloudstorages",
                {
                    "display_name": f"test_storage{idx}",
                    "resource": _create_bucket(f"testbucket{idx}"),
                    **storage_params,
                },
                org_id=self.org,
            )
            return response

        # Fill the org up to the cloud storages limit.
        for i in range(self._DEFAULT_CLOUD_STORAGES_LIMIT):
            response = _add_storage(i)
            assert response.status_code == HTTPStatus.CREATED

        # One more storage beyond the limit. The last loop index is reused,
        # but the bucket name stays unique thanks to the uuid suffix.
        response = _add_storage(i)

        assert response.status_code == HTTPStatus.FORBIDDEN
        assert set(response.json()) == {self._CLOUD_STORAGES_LIMIT_MESSAGE}
Loading…
Reference in New Issue