Mirror of https://github.com/fuckpiracyshield/service.git (synced 2024-11-21 12:49:46 +01:00)

Commit 9d6a79bb2c: "Latest updates."
Parent commit: 579ed1de04

32 changed files with 1006 additions and 300 deletions
@@ -1,12 +1,5 @@
[build-system]
requires = [
    "setuptools>=54",
    "setuptools-rust"
]
build-backend = "setuptools.build_meta"

[[tool.setuptools-rust.ext-modules]]
target = "rs_cidr_verifier"
path = "src/piracyshield_service/whitelist/cidr/Cargo.toml"
binding = "PyO3"
py-limited-api = "auto"
@@ -29,7 +29,7 @@ class AccountExistsByIdentifierService(BaseService):
    def execute(self, account_id: str) -> bool | Exception:
        try:
            response = self.data_storage.exists_by_identifier(
                account_id = account_id
                identifier = account_id
            )

            batch = response.batch()
src/piracyshield_service/account/get_active.py (new file, 57 lines)

@@ -0,0 +1,57 @@
from __future__ import annotations

from piracyshield_service.base import BaseService

from piracyshield_component.exception import ApplicationException

from piracyshield_data_storage.account.storage import AccountStorageGetException

from piracyshield_service.account.errors import AccountErrorCode, AccountErrorMessage

class AccountGetActiveService(BaseService):

    """
    Fetches all active accounts.
    """

    data_storage = None

    def __init__(self, data_storage: AccountStorage):
        """
        Inizialize logger and required modules.
        """

        # child data storage class
        self.data_storage = data_storage()

        super().__init__()

    def execute(self) -> list | Exception:
        try:
            response = self.data_storage.get_active()

            batch = response.batch()

            if not len(batch):
                self.logger.debug(f'No account found')

                raise ApplicationException(AccountErrorCode.ACCOUNT_NOT_FOUND, AccountErrorMessage.ACCOUNT_NOT_FOUND)

            return list(batch)

        except AccountStorageGetException as e:
            self.logger.error(f'Could not retrieve any account')

            raise ApplicationException(AccountErrorCode.GENERIC, AccountErrorMessage.GENERIC, e)

    def _schedule_task(self):
        pass

    def _validate_parameters(self):
        pass

    def _prepare_configs(self):
        pass

    def _prepare_modules(self):
        pass
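The new service takes the storage class itself rather than an instance and instantiates it in its own constructor, so account subtypes can reuse it by passing their storage. A minimal usage sketch, not part of the diff; treating ProviderStorage as a compatible AccountStorage subclass is an assumption drawn from provider/get_active.py further down:

    # Sketch only: wire the generic service to a concrete storage class.
    from piracyshield_service.account.get_active import AccountGetActiveService
    from piracyshield_data_storage.provider.storage import ProviderStorage
    from piracyshield_component.exception import ApplicationException

    def list_active_accounts() -> list:
        service = AccountGetActiveService(ProviderStorage)

        try:
            return service.execute()

        except ApplicationException:
            # raised for ACCOUNT_NOT_FOUND as well as storage-level GENERIC errors
            return []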
@@ -11,17 +11,17 @@ class AuthenticationErrorCode:

    USER_NON_ACTIVE = '2001'

    PASSWORD_NON_VALID = '2002'
    PASSWORD_NON_VALID = '2001'

    PASSWORD_MISMATCH = '2002'
    PASSWORD_MISMATCH = '2001'

    TOKEN_REFRESH_USER_NON_ACTIVE = '2003'
    TOKEN_REFRESH_USER_NON_ACTIVE = '2002'

    TOKEN_MISMATCH = '2003'
    TOKEN_MISMATCH = '2002'

    TOKEN_EXPIRED = '2003'
    TOKEN_EXPIRED = '2002'

    MAX_LOGIN_ATTEMPTS = '2004'
    MAX_LOGIN_ATTEMPTS = '2003'

class AuthenticationErrorMessage:
@@ -17,13 +17,15 @@ class DDAErrorCode:

    NON_VALID_ACCOUNT_ROLE = '7007'

    INSTANCE_EXISTS = '7008'
    UNKNOWN_DDA_IDENTIFIER = '7008'

    CANNOT_REMOVE = '7009'
    INSTANCE_EXISTS = '7009'

    INSTANCE_USED = '7010'
    CANNOT_REMOVE = '7010'

    CANNOT_SET_STATUS = '7011'
    INSTANCE_USED = '7011'

    CANNOT_SET_STATUS = '7012'

class DDAErrorMessage:
@@ -43,6 +45,8 @@ class DDAErrorMessage:

    NON_VALID_ACCOUNT_ROLE = 'Account with wrong role. Only Reporter accounts are allowed to obtain and use a DDA instance.'

    UNKNOWN_DDA_IDENTIFIER = 'DDA identifier not found.'

    INSTANCE_EXISTS = 'This item has been already created.'

    CANNOT_REMOVE = 'The item could not be removed. Ensure you have proper permissions or to specify a valid item.'
src/piracyshield_service/dda/get_by_identifier.py (new file, 58 lines)

@@ -0,0 +1,58 @@
from __future__ import annotations

from piracyshield_service.base import BaseService

from piracyshield_component.exception import ApplicationException

from piracyshield_data_storage.dda.storage import DDAStorage, DDAStorageGetException

from piracyshield_service.dda.errors import DDAErrorCode, DDAErrorMessage

class DDAGetByIdentifierService(BaseService):

    """
    Get a single DDA by its identifier.
    """

    data_storage = None

    def __init__(self):
        """
        Inizialize logger and required modules.
        """

        super().__init__()

        self._prepare_modules()

    def execute(self, dda_id: str) -> dict | Exception:
        try:
            response = self.data_storage.get_by_identifier(
                dda_id = dda_id
            )

            if response.empty():
                self.logger.debug(f'No DDA found for this identifier `{dda_id}`')

                raise ApplicationException(DDAErrorCode.UNKNOWN_DDA_IDENTIFIER, DDAErrorMessage.UNKNOWN_DDA_IDENTIFIER)

            document = next(response, None)

            return document

        except DDAStorageGetException as e:
            self.logger.error(f'Cannot get the DDA')

            raise ApplicationException(DDAErrorCode.GENERIC, DDAErrorMessage.GENERIC, e)

    def _schedule_task(self):
        pass

    def _validate_parameters(self):
        pass

    def _prepare_configs(self):
        pass

    def _prepare_modules(self):
        self.data_storage = DDAStorage()
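Both the "not found" case and storage failures surface to callers as ApplicationException, distinguished only by the error code passed in. A hedged caller sketch, not part of the commit:

    # Sketch only: resolve a DDA document or fall back to None.
    from piracyshield_service.dda.get_by_identifier import DDAGetByIdentifierService
    from piracyshield_component.exception import ApplicationException

    def find_dda(dda_id: str) -> dict | None:
        try:
            return DDAGetByIdentifierService().execute(dda_id = dda_id)

        except ApplicationException:
            # covers UNKNOWN_DDA_IDENTIFIER as well as GENERIC storage errors
            return None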
@@ -0,0 +1,59 @@
from __future__ import annotations

from piracyshield_service.base import BaseService

from piracyshield_component.exception import ApplicationException

from piracyshield_data_storage.dda.storage import DDAStorage, DDAStorageGetException

from piracyshield_service.dda.errors import DDAErrorCode, DDAErrorMessage

class DDAGetByIdentifierForReporterService(BaseService):

    """
    Get a single DDA by its identifier.
    """

    data_storage = None

    def __init__(self):
        """
        Inizialize logger and required modules.
        """

        super().__init__()

        self._prepare_modules()

    def execute(self, dda_id: str, reporter_id: str) -> dict | Exception:
        try:
            response = self.data_storage.get_by_identifier_for_reporter(
                dda_id = dda_id,
                reporter_id = reporter_id
            )

            if response.empty():
                self.logger.debug(f'No DDA found for this identifier `{dda_id}`')

                raise ApplicationException(DDAErrorCode.UNKNOWN_DDA_IDENTIFIER, DDAErrorMessage.UNKNOWN_DDA_IDENTIFIER)

            document = next(response, None)

            return document

        except DDAStorageGetException as e:
            self.logger.error(f'Cannot get the DDA')

            raise ApplicationException(DDAErrorCode.GENERIC, DDAErrorMessage.GENERIC, e)

    def _schedule_task(self):
        pass

    def _validate_parameters(self):
        pass

    def _prepare_configs(self):
        pass

    def _prepare_modules(self):
        self.data_storage = DDAStorage()
@@ -2,6 +2,7 @@ from __future__ import annotations

from piracyshield_service.base import BaseService

from piracyshield_component.config import Config
from piracyshield_component.utils.time import Time
from piracyshield_component.exception import ApplicationException
@@ -9,14 +10,16 @@ from piracyshield_data_model.forensic.archive.model import ForensicArchiveModel,

from piracyshield_data_storage.forensic.storage import ForensicStorage, ForensicStorageCreateException, ForensicStorageGetException, ForensicStorageUpdateException

from piracyshield_service.log.ticket.create import LogTicketCreateService
from piracyshield_data_storage.cache.storage import CacheStorage

from piracyshield_service.importer.save_file import ImporterSaveFileService
from piracyshield_service.log.ticket.create import LogTicketCreateService

from piracyshield_service.forensic.tasks.analyze_forensic_archive import analyze_forensic_archive_task_caller

from piracyshield_service.forensic.errors import ForensicErrorCode, ForensicErrorMessage

import os

class ForensicCreateArchiveService(BaseService):

    """
@@ -27,10 +30,14 @@ class ForensicCreateArchiveService(BaseService):

    log_ticket_create_service = None

    data_storage_cache = None

    data_storage = None

    data_model = None

    application_archive_config = None

    def __init__(self):
        """
        Inizialize logger and required modules.
@@ -38,9 +45,11 @@ class ForensicCreateArchiveService(BaseService):

        super().__init__()

        self._prepare_configs()

        self._prepare_modules()

    def execute(self, ticket_id: str, archive_name: str, archive_content: bytes) -> bool | Exception:
    def execute(self, ticket_id: str, archive_name: str) -> bool | Exception:
        """
        :param ticket_id: the ticket identifier related to the archive.
        :param archive_name: the name of the archive.
@@ -54,19 +63,24 @@ class ForensicCreateArchiveService(BaseService):
        if not self._ticket_id_exists(ticket_id):
            raise ApplicationException(ForensicErrorCode.NO_HASH_FOR_TICKET, ForensicErrorMessage.NO_HASH_FOR_TICKET)

        # put the file in cache
        cache_filename = self.importer_save_file_service.execute(
            filename = model.get('name'),
            content = archive_content
        )
        # the file is already in the cache
        if not self.data_storage_cache.exists(archive_name):
            self.logger.debug(f'Forensic evidence `{archive_name}` not found in cache')

        self.logger.debug(f'Forensic evidence `{cache_filename}` moved to cache for ticket `{ticket_id}`')
            raise ApplicationException(ForensicErrorCode.GENERIC, ForensicErrorMessage.GENERIC)

        extension = self._get_extension(archive_name)

        if self._has_supported_extension(extension) == False:
            raise ApplicationException(ForensicErrorCode.EXTENSION_NOT_SUPPORTED, ForensicErrorMessage.EXTENSION_NOT_SUPPORTED)

        self.logger.debug(f'New forensic evidence `{archive_name}` in cache for ticket `{ticket_id}`')

        try:
            # update ticket with the archive name
            self.data_storage.update_archive_name(
                ticket_id = ticket_id,
                archive_name = cache_filename,
                archive_name = archive_name,
                status = model.get('status'),
                updated_at = Time.now_iso8601()
            )
@@ -114,6 +128,28 @@ class ForensicCreateArchiveService(BaseService):

            raise ApplicationException(ForensicErrorCode.GENERIC, ForensicErrorMessage.GENERIC, e)

    def _get_extension(self, filename: str) -> str:
        """
        Extracts the extension from the filename.

        :param filename: the name of the file.
        :return: the extension of the file.
        """

        _, extension = os.path.splitext(filename)

        return extension

    def _has_supported_extension(self, extension: str) -> bool:
        """
        Checks wether the extension is supported.

        :param extension: the extension of the file.
        :return: true if supported.
        """

        return extension.lower() in self.application_archive_config.get('supported_extensions')

    def _schedule_task(self, ticket_id: str) -> bool | Exception:
        """
        Schedules the archive analysis.
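The two helpers reduce to a suffix check against the configured archive extension whitelist: os.path.splitext keeps the leading dot, and the lowercased result is matched against the values read from Config('application').get('archive'). A standalone sketch with illustrative extensions (the real list lives in the application config, not here):

    import os

    # Illustrative values only; the service reads these from its archive config.
    SUPPORTED_EXTENSIONS = {'.zip', '.rar', '.7z'}

    def get_extension(filename: str) -> str:
        # 'evidence.ZIP' -> '.ZIP'
        _, extension = os.path.splitext(filename)

        return extension

    def has_supported_extension(extension: str) -> bool:
        return extension.lower() in SUPPORTED_EXTENSIONS

    assert has_supported_extension(get_extension('evidence.ZIP')) is True
    assert has_supported_extension(get_extension('evidence.exe')) is False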
@@ -155,10 +191,14 @@ class ForensicCreateArchiveService(BaseService):
            return model.to_dict()

        except ForensicArchiveModelNameException:
            raise ApplicationException(ForensicErrorCode.ARCHIVE_NAME, TicketErrorMessage.ARCHIVE_NAME)
            raise ApplicationException(ForensicErrorCode.ARCHIVE_NAME, ForensicErrorMessage.ARCHIVE_NAME)

    def _prepare_configs(self):
        pass
    def _prepare_configs(self) -> None:
        """
        Loads the configs.
        """

        self.application_archive_config = Config('application').get('archive')

    def _prepare_modules(self):
        """
@@ -169,6 +209,6 @@ class ForensicCreateArchiveService(BaseService):

        self.data_storage = ForensicStorage()

        self.importer_save_file_service = ImporterSaveFileService()
        self.data_storage_cache = CacheStorage()

        self.log_ticket_create_service = LogTicketCreateService()
@@ -42,12 +42,6 @@ class ForensicCreateHashService(BaseService):
            hash_string = hash_string
        )

        # check for already inserted hashes strings
        if self._hash_string_exists(hash_string):
            raise ApplicationException(ForensicErrorCode.HASH_STRING_EXISTS, ForensicErrorMessage.HASH_STRING_EXISTS)

        self.logger.info(f'Created new hash `{hash_string}` for ticket `{ticket_id}`')

        document = self._build_document(
            model = model,
            forensic_id = self._generate_forensic_id(),
@@ -56,6 +50,20 @@ class ForensicCreateHashService(BaseService):
            now = Time.now_iso8601()
        )

        # let's search for a pre-existent hash
        existent_hash = self._hash_string_exists(hash_string)

        # we got one, let's update our new entry with those values
        if existent_hash:
            if 'archive_name' in existent_hash:
                document['archive_name'] = existent_hash.get('archive_name')

            if 'status' in existent_hash:
                document['status'] = existent_hash.get('status')

            if 'reason' in existent_hash:
                document['reason'] = existent_hash.get('reason')

        try:
            self.data_storage.insert(document)
@@ -64,6 +72,8 @@ class ForensicCreateHashService(BaseService):

            raise ApplicationException(ForensicErrorCode.GENERIC, ForensicErrorMessage.GENERIC, e)

        self.logger.info(f'Created hash `{hash_string}` for ticket `{ticket_id}`')

        return True

    def _hash_string_exists(self, hash_string: str) -> bool | Exception:
@@ -72,12 +82,14 @@ class ForensicCreateHashService(BaseService):
                hash_string = hash_string
            )

            if response.next():
                self.logger.debug(f'Hash string found for `{hash_string}`')
            if response.empty():
                return False

                return True
            self.logger.debug(f'Found pre-existent hash for `{hash_string}`')

            return False
            document = next(response, None)

            return document

        except ForensicStorageGetException as e:
            self.logger.error(f'Could not verify `{hash_string}` existence')
@@ -1,23 +1,24 @@

class ForensicErrorCode:

    GENERIC = '9001'
    GENERIC = '9000'

    ARCHIVE_NAME = '9002'
    ARCHIVE_NAME = '9001'

    HASH_TYPE_NOT_SUPPORTED = '9003'
    HASH_TYPE_NOT_SUPPORTED = '9002'

    HASH_STRING_EMPTY = '9004'
    HASH_STRING_EMPTY = '9003'

    HASH_STRING_NON_VALID = '9005'
    HASH_STRING_NON_VALID = '9004'

    HASH_STRING_EXISTS = '9006'
    NO_HASH_FOR_TICKET = '9005'

    NO_HASH_FOR_TICKET = '9007'
    EXTENSION_NOT_SUPPORTED = '9006'

class ForensicErrorMessage:

    GENERIC = 'Error during the handling of the forensic evidence.'
    # error during the handling of the forensic evidence
    GENERIC = 'Generic error.'

    ARCHIVE_NAME = 'The archive name contains non valid characters.'
@@ -27,6 +28,6 @@ class ForensicErrorMessage:

    HASH_STRING_NON_VALID = 'Forensic evidence hash non valid.'

    HASH_STRING_EXISTS = 'The hash string value is already present, meaning that this forensic evidence archive has already been submitted.'

    NO_HASH_FOR_TICKET = 'This ticket does not have any forensic evidence hash.'

    EXTENSION_NOT_SUPPORTED = 'The file extension is not supported.'
@@ -1,14 +1,18 @@
from piracyshield_service.task.base import BaseTask

from piracyshield_component.utils.time import Time
from piracyshield_component.exception import ApplicationException

from piracyshield_data_model.forensic.archive.status.model import ForensicArchiveStatusModel

from piracyshield_forensic.analyze import ForensicAnalysis

from piracyshield_service.forensic.update_archive_status import ForensicUpdateArchiveStatusService
from piracyshield_service.forensic.get_by_ticket import ForensicGetByTicketService

from piracyshield_service.log.ticket.create import LogTicketCreateService

from piracyshield_component.exception import ApplicationException
from piracyshield_data_storage.forensic.storage import ForensicStorage, ForensicStorageUpdateException

class AnalyzeForensicArchiveTask(BaseTask):
@@ -22,8 +26,12 @@ class AnalyzeForensicArchiveTask(BaseTask):

    update_archive_status_service = None

    get_by_ticket_service = None

    ticket_id = None

    data_storage = None

    def __init__(self, ticket_id: str):
        super().__init__()
@@ -66,11 +74,24 @@ class AnalyzeForensicArchiveTask(BaseTask):

        # NOTE: we won't clean the package from the cache here as we might want to schedule a cleaning process separately.

        self.update_archive_status_service.execute(
            ticket_id = self.ticket_id,
            status = ForensicArchiveStatusModel.APPROVED.value
        # retrieve forensic data associated to this ticket identifier
        forensic_data = self.get_by_ticket_service.execute(
            ticket_id = self.ticket_id
        )

        # get other tickets with the same hash, if any
        tickets = self._hash_string_exists(
            hash_string = forensic_data.get('hash_string')
        )

        for ticket_data in tickets:
            self.data_storage.update_archive_name(
                ticket_id = ticket_data.get('ticket_id'),
                archive_name = forensic_data.get('archive_name'),
                status = ForensicArchiveStatusModel.APPROVED.value,
                updated_at = Time.now_iso8601()
            )

        # log success operation
        self.log_ticket_create_service.execute(
            ticket_id = self.ticket_id,
@@ -79,15 +100,35 @@ class AnalyzeForensicArchiveTask(BaseTask):

        return True

    def _hash_string_exists(self, hash_string: str) -> bool | Exception:
        try:
            response = self.data_storage.exists_hash_string(
                hash_string = hash_string
            )

            if response.empty():
                return False

            return list(response.batch())

        except ForensicStorageGetException as e:
            self.logger.error(f'Could not verify `{hash_string}` existence')

            raise ApplicationException(ForensicErrorCode.GENERIC, ForensicErrorMessage.GENERIC, e)

    def before_run(self):
        """
        Initialize required modules.
        """

        self.data_storage = ForensicStorage()

        self.log_ticket_create_service = LogTicketCreateService()

        self.forensic_analysis = ForensicAnalysis()

        self.get_by_ticket_service = ForensicGetByTicketService()

        self.update_archive_status_service = ForensicUpdateArchiveStatusService()

    def after_run(self):
@@ -1,12 +1,15 @@

# TODO: this extension will be merged into forensic.

class ImporterErrorCode:

    GENERIC = '9000'

    EXTENSION_NOT_SUPPORTED = '9001'
    EXTENSION_NOT_SUPPORTED = '9006'

class ImporterErrorMessage:

    GENERIC = 'Error during the saving of the file.'
    # error during the saving of the file
    GENERIC = 'Generic error.'

    EXTENSION_NOT_SUPPORTED = 'The file extension is not supported.'
@@ -1,7 +1,7 @@

class LogErrorCode:

    GENERIC = '8000'
    GENERIC = '1300'

class LogErrorMessage:
src/piracyshield_service/provider/get_active.py (new file, 18 lines)

@@ -0,0 +1,18 @@
from __future__ import annotations

from piracyshield_service.account.get_all import AccountGetAllService

from piracyshield_data_storage.provider.storage import ProviderStorage

class ProviderGetActiveService(AccountGetAllService):

    """
    Retrieves all the active provider accounts.
    """

    def __init__(self):
        """
        Pass the data storage to the parent class.
        """

        super().__init__(ProviderStorage)
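A minimal usage sketch, mirroring how TicketCreateService consumes this service further down: every active provider's account_id ends up in the ticket's assigned_to list.

    # Sketch only: collect the identifiers of all active provider accounts.
    from piracyshield_service.provider.get_active import ProviderGetActiveService

    provider_get_active_service = ProviderGetActiveService()

    assigned_to = [
        provider.get('account_id')
        for provider in provider_get_active_service.execute()
    ]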
@@ -1 +0,0 @@
@@ -22,18 +22,27 @@ from piracyshield_data_model.ticket.model import (
    TicketModelAssignedToNonValidException
)

from piracyshield_data_model.ticket.status.model import TicketStatusModel

from piracyshield_service.ticket.relation.establish import TicketRelationEstablishService
from piracyshield_service.ticket.relation.abandon import TicketRelationAbandonService
from piracyshield_data_model.ticket.item.model import (
    TicketItemModel,
    TicketItemModelTicketIdentifierNonValidException,
    TicketItemModelTicketItemIdentifierNonValidException,
    TicketItemModelGenreNonValidException,
    TicketItemModelFQDNMissingException,
    TicketItemModelFQDNNonValidException,
    TicketItemModelIPv4MissingException,
    TicketItemModelIPv4NonValidException,
    TicketItemModelIPv6MissingException,
    TicketItemModelIPv6NonValidException,
    TicketItemModelProviderIdentifierMissingException,
    TicketItemModelProviderIdentifierNonValidException
)

from piracyshield_data_storage.ticket.storage import TicketStorage, TicketStorageCreateException

from piracyshield_service.provider.get_all import ProviderGetAllService
from piracyshield_service.provider.get_active import ProviderGetActiveService
from piracyshield_service.provider.exists_by_identifier import ProviderExistsByIdentifierService

from piracyshield_service.ticket.tasks.ticket_initialize import ticket_initialize_task_caller
from piracyshield_service.ticket.tasks.ticket_autoclose import ticket_autoclose_task_caller
from piracyshield_service.ticket.tasks.ticket_create import ticket_create_task_caller

from piracyshield_service.log.ticket.create import LogTicketCreateService
@@ -43,6 +52,7 @@ from piracyshield_service.forensic.remove_by_ticket import ForensicRemoveByTicke
from piracyshield_service.dda.is_assigned_to_account import DDAIsAssignedToAccountService

from piracyshield_service.ticket.errors import TicketErrorCode, TicketErrorMessage
from piracyshield_service.ticket.item.errors import TicketItemErrorCode, TicketItemErrorMessage

from datetime import datetime, timedelta
@@ -58,13 +68,11 @@ class TicketCreateService(BaseService):

    log_ticket_create_service = None

    ticket_relation_establish_service = None

    ticket_relation_abandon_service = None

    provider_exists_by_identifier_service = None

    provider_get_all_service = None
    provider_get_active_service = None

    ticket_item_data_model = None

    data_model = None
@@ -92,6 +100,23 @@ class TicketCreateService(BaseService):
        created_by: str,
        description: str = None
    ) -> tuple | Exception:
        # filter duplicates
        fqdn = list(set(fqdn))
        ipv4 = list(set(ipv4))
        ipv6 = list(set(ipv6))
        assigned_to = list(set(assigned_to))

        # TODO: do not hardcode this.
        # do not procees if ticket items exceed maximum limits
        if len(fqdn) > 1000:
            raise ApplicationException(TicketErrorCode.TOO_MANY_FQDN, TicketErrorMessage.TOO_MANY_FQDN)

        if len(ipv4) > 1000:
            raise ApplicationException(TicketErrorCode.TOO_MANY_IPV4, TicketErrorMessage.TOO_MANY_IPV4)

        if len(ipv6) > 1000:
            raise ApplicationException(TicketErrorCode.TOO_MANY_IPV6, TicketErrorMessage.TOO_MANY_IPV6)

        model = self._validate_parameters(
            ticket_id = self._generate_ticket_id(),
            dda_id = dda_id,
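The pre-checks are plain Python: list(set(...)) drops duplicate entries (at the cost of their original order) and a hardcoded ceiling rejects oversized tickets before any validation work starts. The same guard in isolation, with the limit as an illustrative constant and a plain ValueError standing in for ApplicationException:

    MAX_ITEMS = 1000  # mirrors the hardcoded limit above

    def prepare_items(values: list, label: str) -> list:
        # set() removes duplicates but does not preserve ordering
        unique_values = list(set(values))

        if len(unique_values) > MAX_ITEMS:
            raise ValueError(f'Too many {label} items: {len(unique_values)}')

        return unique_values

    fqdn = prepare_items(['example.com', 'example.com', 'cdn.example.com'], 'fqdn')
    assert sorted(fqdn) == ['cdn.example.com', 'example.com']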
@@ -102,25 +127,53 @@ class TicketCreateService(BaseService):
            assigned_to = assigned_to
        )

        # check if the DDA identifier is assigned to this account
        if self.dda_is_assigned_to_account_service.execute(
            dda_id = dda_id,
            account_id = created_by
        ) == False:
            raise ApplicationException(TicketErrorCode.UNKNOWN_DDA_IDENTIFIER, TicketErrorMessage.UNKNOWN_DDA_IDENTIFIER)
        # formal validation before going on with the creation process
        if fqdn:
            for fqdn_value in fqdn:
                self._validate_ticket_item(
                    ticket_id = model.get('ticket_id'),
                    value = fqdn_value,
                    genre = 'fqdn'
                )

        # if specified, validate each provider_id
        if assigned_to:
        if ipv4:
            for ipv4_value in ipv4:
                self._validate_ticket_item(
                    ticket_id = model.get('ticket_id'),
                    value = ipv4_value,
                    genre = 'ipv4'
                )

        if ipv6:
            for ipv6_value in ipv6:
                self._validate_ticket_item(
                    ticket_id = model.get('ticket_id'),
                    value = ipv6_value,
                    genre = 'ipv6'
                )

        # verify DDA
        self._verify_dda(
            dda_id = dda_id,
            created_by = created_by
        )

        if len(assigned_to):
            # if specified, validate each provider identifier
            self._verify_assigned_to(assigned_to)

            model['assigned_to'] = assigned_to

        # otherwise collect all the providers and assign them to the ticket
        else:
            providers = self.provider_get_all_service.execute()

            model['assigned_to'] = []

            for provider in providers:
                model['assigned_to'].append(provider.get('account_id'))
            active_providers = self.provider_get_active_service.execute()

            for provider in active_providers:
                provider_id = provider.get('account_id')

                model['assigned_to'].append(provider_id)

        self.forensic_create_hash_service.execute(
            ticket_id = model.get('ticket_id'),
@@ -128,34 +181,13 @@ class TicketCreateService(BaseService):
            reporter_id = created_by
        )

        # proceed to build the relation item <-> provider
        # this part provides a check for duplicates, whitelisted items and error tickets
        (fqdn_ticket_items, ipv4_ticket_items, ipv6_ticket_items) = self.ticket_relation_establish_service.execute(
            ticket_id = model.get('ticket_id'),
            providers = model.get('assigned_to'),
            fqdn = model.get('fqdn') or None,
            ipv4 = model.get('ipv4') or None,
            ipv6 = model.get('ipv6') or None
        )

        (initialize_job_id, autoclose_job_id) = self._schedule_task(
            ticket_id = model.get('ticket_id'),
            revoke_time = model.get('settings').get('revoke_time'),
            autoclose_time = model.get('settings').get('autoclose_time')
        )

        document = self._build_document(
            model = model,
            fqdn = fqdn,
            ipv4 = ipv4,
            ipv6 = ipv6,
            now = Time.now_iso8601(),
            created_by = created_by,
            tasks = [
                # append the task id so we can cancel its execution if needed
                initialize_job_id,
                autoclose_job_id
            ]
            created_by = created_by
        )

        try:
@@ -163,14 +195,8 @@ class TicketCreateService(BaseService):
            self.data_storage.insert(document)

        except TicketStorageCreateException as e:
            self.ticket_relation_abandon_service.execute(model.get('ticket_id'))

            self.forensic_remove_by_ticket_service.execute(model.get('ticket_id'))

            # clean created tasks
            for single_task in document.get('tasks'):
                self.task_service.remove(single_task)

            self.logger.error(f'Could not create the ticket')

            raise ApplicationException(TicketErrorCode.GENERIC, TicketErrorMessage.GENERIC, e)
@@ -183,6 +209,11 @@ class TicketCreateService(BaseService):

        self.logger.info(f'Ticket `{model.get("ticket_id")}` created by `{document.get("metadata").get("created_by")}`')

        # initialize the creation of the ticket items
        self._schedule_task(
            ticket_data = model
        )

        return (
            # ticket identifier
            model.get('ticket_id'),
@@ -198,6 +229,16 @@ class TicketCreateService(BaseService):

        return self.identifier.generate()

    def _verify_dda(self, dda_id: str, created_by: str) -> bool | Exception:
        # check if the DDA identifier is assigned to this account
        if self.dda_is_assigned_to_account_service.execute(
            dda_id = dda_id,
            account_id = created_by
        ) == False:
            raise ApplicationException(TicketErrorCode.UNKNOWN_DDA_IDENTIFIER, TicketErrorMessage.UNKNOWN_DDA_IDENTIFIER)

        return True

    def _verify_assigned_to(self, assigned_to: list) -> bool | Exception:
        """
        Verifies each provider in the list.
@@ -206,27 +247,22 @@ class TicketCreateService(BaseService):
        :return: true if correct, exception if not.
        """

        try:
            for provider_id in assigned_to:
                if self.provider_exists_by_identifier_service.execute(
                    account_id = provider_id
                ) == False:
                    raise ApplicationException(TicketErrorCode.NON_EXISTENT_ASSIGNED_TO, TicketErrorMessage.NON_EXISTENT_ASSIGNED_TO)
        for provider_id in assigned_to:
            if self.provider_exists_by_identifier_service.execute(
                account_id = provider_id
            ) == False:
                self.logger.error(f'Could not get assigned accounts: `{assigned_to}`')

                raise ApplicationException(TicketErrorCode.NON_EXISTENT_ASSIGNED_TO, TicketErrorMessage.NON_EXISTENT_ASSIGNED_TO)

        return True

        except:
            self.logger.error(f'Could not get assigned accounts: `{assigned_to}`')

            raise ApplicationException(TicketErrorCode.GENERIC, TicketErrorMessage.GENERIC, e)

    def _build_document(
        self,
        model: dict,
        fqdn: list,
        ipv4: list,
        ipv6: list,
        tasks: list,
        now: str,
        created_by: str
    ):
@@ -241,7 +277,7 @@ class TicketCreateService(BaseService):
            'status': model.get('status'),
            'assigned_to': model.get('assigned_to'),
            'settings': model.get('settings'),
            'tasks': tasks,
            'tasks': [],
            'metadata': {
                # creation date
                'created_at': now,
@@ -254,31 +290,16 @@ class TicketCreateService(BaseService):
            }
        }

    def _schedule_task(self, ticket_id: str, revoke_time: int, autoclose_time: int) -> tuple | Exception:
        # schedule the initialization of the ticket
        # move the ticket status to open after X seconds and perform the initial operations
    def _schedule_task(self, ticket_data: dict) -> None | Exception:
        try:
            initialize_job_id = self.task_service.create(
                task_caller = ticket_initialize_task_caller,
                delay = revoke_time,
                ticket_id = ticket_id
            self.task_service.create(
                task_caller = ticket_create_task_caller,
                delay = 1,
                ticket_data = ticket_data
            )

            # move the ticket status to close after X seconds
            autoclose_job_id = self.task_service.create(
                task_caller = ticket_autoclose_task_caller,
                delay = autoclose_time,
                ticket_id = ticket_id
            )

            return (initialize_job_id, autoclose_job_id)

        except Exception as e:
            self.ticket_relation_abandon_service.execute(ticket_id)

            self.forensic_remove_by_ticket_service.execute(ticket_id)

            self.logger.error(f'Could not create the task for `{ticket_id}`')
            self.logger.error(f'Could not create ticket items for `{ticket_data.get("ticket_id")}`')

            raise ApplicationException(TicketErrorCode.GENERIC, TicketErrorMessage.GENERIC, e)
@@ -335,25 +356,44 @@ class TicketCreateService(BaseService):
        except TicketModelAssignedToNonValidException:
            raise ApplicationException(TicketErrorCode.NON_VALID_ASSIGNED_TO, TicketErrorMessage.NON_VALID_ASSIGNED_TO)

    def _validate_ticket_item(self, ticket_id: str, value: str, genre: str) -> dict | Exception:
        try:
            self.ticket_item_data_model(
                ticket_id = ticket_id, # placeholder
                ticket_item_id = ticket_id, # placeholder
                provider_id = ticket_id, # placeholder
                value = value,
                genre = genre,
                is_active = False, # placeholder
                is_duplicate = False, # placeholder
                is_whitelisted = False, # placeholder
                is_error = False # placeholder
            )

            return True

        except Exception as e:
            self.logger.error(f'Could not create the ticket item `{value}` for `{ticket_id}`')

            raise ApplicationException(TicketItemErrorCode.GENERIC, TicketItemErrorMessage.GENERIC, e)

    def _prepare_configs(self):
        pass

    def _prepare_modules(self):
        self.data_model = TicketModel

        self.ticket_item_data_model = TicketItemModel

        self.data_storage = TicketStorage()

        self.identifier = Identifier()

        self.ticket_relation_establish_service = TicketRelationEstablishService()

        self.ticket_relation_abandon_service = TicketRelationAbandonService()

        self.forensic_create_hash_service = ForensicCreateHashService()

        self.forensic_remove_by_ticket_service = ForensicRemoveByTicketService()

        self.provider_get_all_service = ProviderGetAllService()
        self.provider_get_active_service = ProviderGetActiveService()

        self.provider_exists_by_identifier_service = ProviderExistsByIdentifierService()
@@ -37,6 +37,12 @@ class TicketErrorCode:

    TICKET_NOT_FOUND = '4017'

    TOO_MANY_FQDN = '4018'

    TOO_MANY_IPV4 = '4019'

    TOO_MANY_IPV6 = '4020'

class TicketErrorMessage:

    GENERIC = 'Error during the handling of the ticket.'
@@ -74,3 +80,9 @@ class TicketErrorMessage:
    REPORT_ERROR_TIME_EXCEEDED = 'Cannot create the ticket, exceeded max error reporting time.'

    TICKET_NOT_FOUND = 'Ticket not found.'

    TOO_MANY_FQDN = 'Too many ticket FQDN items.'

    TOO_MANY_IPV4 = 'Too many IPv4 items.'

    TOO_MANY_IPV6 = 'Too many IPv6 items.'
src/piracyshield_service/ticket/item/create_batch.py (new file, 122 lines)

@@ -0,0 +1,122 @@
from __future__ import annotations

from piracyshield_service.base import BaseService

from piracyshield_component.utils.time import Time
from piracyshield_component.exception import ApplicationException

from piracyshield_data_model.ticket.item.model import TicketItemModel

from piracyshield_data_storage.ticket.item.storage import TicketItemStorage, TicketItemStorageCreateException

from piracyshield_service.ticket.item.errors import TicketItemErrorCode, TicketItemErrorMessage

class TicketItemCreateBatchService(BaseService):

    """
    Create multiple ticket items in a single batch.
    """

    data_model = None

    data_storage = None

    def __init__(self):
        """
        Inizialize logger and required modules.
        """

        super().__init__()

        self._prepare_modules()

    def execute(self, ticket_items: list) -> bool | Exception:
        """
        :param ticket_items: a list of ticket items dictionaries.
        :return
        """

        batch = []

        for ticket_item in ticket_items:
            model = self._validate_parameters(**ticket_item)

            batch.append(self._build_document(
                model = model,
                now = Time.now_iso8601()
            ))

        try:
            # insert the data into the database
            self.data_storage.insert_many(batch)

        except TicketItemStorageCreateException as e:
            self.logger.error(f'Could not massively create ticket items')

            raise ApplicationException(TicketItemErrorCode.GENERIC, TicketItemErrorMessage.GENERIC, e)

        self.logger.info(f'Created {len(batch)} ticket items')

        return True

    def _build_document(self, model: dict, now: str) -> dict:
        return {
            'ticket_id': model.get('ticket_id'),
            'ticket_item_id': model.get('ticket_item_id'),
            'provider_id': model.get('provider_id'),
            'value': model.get('value'),
            'genre': model.get('genre'),
            'status': model.get('status'),
            'is_active': model.get('is_active'),
            'is_duplicate': model.get('is_duplicate'),
            'is_whitelisted': model.get('is_whitelisted'),
            'is_error': model.get('is_error'),
            'settings': model.get('settings'),
            'metadata': {
                'created_at': now,
                'updated_at': now
            }
        }

    def _schedule_task(self):
        pass

    def _validate_parameters(self,
        ticket_id: str,
        ticket_item_id: str,
        provider_id: str,
        value: str,
        genre: str,
        is_active: bool,
        is_duplicate: bool,
        is_whitelisted: bool,
        is_error: bool
    ):
        try:
            model = self.data_model(
                ticket_id = ticket_id,
                ticket_item_id = ticket_item_id,
                provider_id = provider_id,
                value = value,
                genre = genre,
                is_active = is_active,
                is_duplicate = is_duplicate,
                is_whitelisted = is_whitelisted,
                is_error = is_error
            )

            return model.to_dict()

        # this has been already validated by the ticket service, but better safe than sorry
        except Exception as e:
            self.logger.error(f'Could not validate ticket item `{value}` for `{ticket_id}`')

            raise ApplicationException(TicketItemErrorCode.GENERIC, TicketItemErrorMessage.GENERIC, e)

    def _prepare_configs(self):
        pass

    def _prepare_modules(self):
        self.data_model = TicketItemModel

        self.data_storage = TicketItemStorage()
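A hedged usage sketch: the caller (TicketRelationEstablishService, shown below) accumulates one plain dictionary per value/provider pair and hands the whole list to execute(), which validates each entry against TicketItemModel before a single insert_many(). All identifiers below are invented placeholders:

    # Sketch only: batch-create ticket items in one storage round trip.
    from piracyshield_service.ticket.item.create_batch import TicketItemCreateBatchService

    ticket_item_create_batch_service = TicketItemCreateBatchService()

    batch = [
        {
            'ticket_id': 'TICKET-0001',        # placeholder
            'ticket_item_id': 'ITEM-0001',     # placeholder
            'provider_id': 'PROVIDER-0001',    # placeholder
            'value': 'example.com',
            'genre': 'fqdn',
            'is_active': True,
            'is_duplicate': False,
            'is_whitelisted': False,
            'is_error': False
        }
    ]

    ticket_item_create_batch_service.execute(batch)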
@@ -27,7 +27,7 @@ class TicketItemErrorCode:

class TicketItemErrorMessage:

    GENERIC = 'Error during the creation of the ticket item.'
    GENERIC = 'Generic error.'

    TICKET_ITEM_NOT_FOUND = 'Ticket item not found.'
src/piracyshield_service/ticket/item/get_active.py (new file, 52 lines)

@@ -0,0 +1,52 @@
from __future__ import annotations

from piracyshield_service.base import BaseService

from piracyshield_component.exception import ApplicationException

from piracyshield_data_storage.ticket.item.storage import TicketItemStorage, TicketItemStorageGetException

from piracyshield_service.ticket.item.errors import TicketItemErrorCode, TicketItemErrorMessage

class TicketItemGetActiveService(BaseService):

    """
    Returns all the blockable ticket items, grouped by genre.
    This is currently used to build a cache.
    """

    data_storage = None

    def __init__(self):
        """
        Inizialize logger and required modules.
        """

        super().__init__()

        self._prepare_modules()

    def execute(self) -> list | Exception:
        try:
            response = self.data_storage.get_active()

            document = next(response, None)

            return document

        except TicketItemStorageGetException as e:
            self.logger.error(f'Could not get all the ticket items')

            raise ApplicationException(TicketItemErrorCode.GENERIC, TicketItemErrorMessage.GENERIC, e)

    def _schedule_task(self):
        pass

    def _validate_parameters(self):
        pass

    def _prepare_configs(self):
        pass

    def _prepare_modules(self):
        self.data_storage = TicketItemStorage()
@@ -6,30 +6,35 @@ from piracyshield_component.security.identifier import Identifier
from piracyshield_component.exception import ApplicationException

from piracyshield_data_model.ticket.item.genre.model import TicketItemGenreModel
from piracyshield_data_model.whitelist.genre.model import WhitelistGenreModel

from piracyshield_service.ticket.item.create import TicketItemCreateService
from piracyshield_service.ticket.item.exists_by_value import TicketItemExistsByValueService
from piracyshield_service.ticket.item.create_batch import TicketItemCreateBatchService
from piracyshield_service.ticket.item.get_active import TicketItemGetActiveService

from piracyshield_service.ticket.error.get_by_ticket import TicketErrorGetByTicketService

from piracyshield_service.whitelist.exists_by_value import WhitelistExistsByValueService
from piracyshield_service.whitelist.get_active import WhitelistGetActiveService

from piracyshield_service.ticket.item.errors import TicketItemErrorCode, TicketItemErrorMessage

from piracyshield_component_cidr_verifier import is_ipv4_in_cidr, is_ipv6_in_cidr

class TicketRelationEstablishService(BaseService):

    """
    Builds the relation of a ticket items and assigned providers.
    Each ticket item is duplicated and associated to a single provider.
    Each ticket item is created and associated to a single provider.
    """

    batch = []

    ticket_item_cache = {}

    whitelist_cache = {}

    ticket_item_create_service = None

    whitelist_exists_by_value_service = None
    ticket_item_get_active_service = None

    ticket_error_get_by_ticket_service = None

    ticket_item_exists_by_value_service = None
    whitelist_get_active_service = None

    identifier = None
@@ -42,7 +47,15 @@ class TicketRelationEstablishService(BaseService):

        self._prepare_modules()

        # build ticket item cache
        self._build_ticket_item_cache()

        # build whitelist cache
        self._build_whitelist_cache()

    def execute(self, ticket_id: str, providers: list, fqdn: list = None, ipv4: list = None, ipv6: list = None) -> bool | Exception:
        self.batch = []

        self.logger.debug(f'Establishing relations for `{ticket_id}`')

        fqdn_ticket_items = None
@@ -50,7 +63,7 @@ class TicketRelationEstablishService(BaseService):
        ipv6_ticket_items = None

        if fqdn:
            fqdn_ticket_items = self._establish_relation(
            fqdn_ticket_items = self._generate_relation(
                ticket_id = ticket_id,
                genre = TicketItemGenreModel.FQDN.value,
                items = fqdn,
@@ -58,7 +71,7 @@ class TicketRelationEstablishService(BaseService):
            )

        if ipv4:
            ipv4_ticket_items = self._establish_relation(
            ipv4_ticket_items = self._generate_relation(
                ticket_id = ticket_id,
                genre = TicketItemGenreModel.IPV4.value,
                items = ipv4,
@@ -66,18 +79,21 @@ class TicketRelationEstablishService(BaseService):
            )

        if ipv6:
            ipv6_ticket_items = self._establish_relation(
            ipv6_ticket_items = self._generate_relation(
                ticket_id = ticket_id,
                genre = TicketItemGenreModel.IPV6.value,
                items = ipv6,
                providers = providers
            )

        # insert batch
        self.ticket_item_create_batch_service.execute(self.batch)

        self.logger.info(f'Ticket relations completed')

        return (fqdn_ticket_items, ipv4_ticket_items, ipv6_ticket_items)

    def _establish_relation(self, ticket_id: str, genre: str, items: list, providers: list) -> dict:
    def _generate_relation(self, ticket_id: str, genre: str, items: list, providers: list) -> dict:
        ticket_items = []

        for value in items:
@@ -92,23 +108,24 @@ class TicketRelationEstablishService(BaseService):
            )

            is_whitelisted = self._is_whitelisted(
                genre = genre,
                value = value
            )

            is_error = False

            for provider_id in providers:
                self.ticket_item_create_service.execute(
                    ticket_id = ticket_id,
                    ticket_item_id = ticket_item_id,
                    provider_id = provider_id,
                    value = value,
                    genre = genre,
                    is_active = is_active,
                    is_duplicate = is_duplicate,
                    is_whitelisted = is_whitelisted,
                    is_error = is_error
                )
                self.batch.append({
                    'ticket_id': ticket_id,
                    'ticket_item_id': ticket_item_id,
                    'value': value,
                    'genre': genre,
                    'provider_id': provider_id,
                    'is_active': is_active,
                    'is_duplicate': is_duplicate,
                    'is_whitelisted': is_whitelisted,
                    'is_error': is_error
                })

            ticket_items.append({
                'value': value,
@@ -122,36 +139,40 @@ class TicketRelationEstablishService(BaseService):
        return ticket_items

    def _is_duplicate(self, genre: str, value: str) -> bool:
        found_tickets = self.ticket_item_exists_by_value_service.execute(
            genre = genre,
            value = value
        )
        if genre == TicketItemGenreModel.FQDN.value and TicketItemGenreModel.FQDN.value in self.ticket_item_cache:
            return value in self.ticket_item_cache.get(TicketItemGenreModel.FQDN.value)

        # no ticket item found
        if not len(found_tickets):
            return False
        elif genre == TicketItemGenreModel.IPV4.value and TicketItemGenreModel.IPV4.value in self.ticket_item_cache:
            return value in self.ticket_item_cache.get(TicketItemGenreModel.IPV4.value)

        # we have a ticket that contains this ticket item
        if len(found_tickets):
            # but let's search for a ticket error in this case
            for ticket_blocking in found_tickets:
                ticket_errors = self.ticket_error_get_by_ticket_service.execute(
                    ticket_blocking.get('ticket_id')
                )
        elif genre == TicketItemGenreModel.IPV6.value and TicketItemGenreModel.IPV6.value in self.ticket_item_cache:
            return value in self.ticket_item_cache.get(TicketItemGenreModel.IPV6.value)

                # found one, let's search for our item
                if len(ticket_errors):
                    for ticket_error_response in ticket_errors:
                        # found the item, so we don't have any duplicate
                        if genre in ticket_error_response and ticket_error_response.get(genre) and value in ticket_error_response.get(genre):
                            return False
        return False

        return True
    def _is_whitelisted(self, genre: str, value: str) -> bool:
        if genre == TicketItemGenreModel.FQDN.value and TicketItemGenreModel.FQDN.value in self.whitelist_cache:
            return value in self.whitelist_cache.get(WhitelistGenreModel.FQDN.value)

    def _is_whitelisted(self, value: str) -> bool:
        return self.whitelist_exists_by_value_service.execute(
            value = value
        )
        elif genre == TicketItemGenreModel.IPV4.value and TicketItemGenreModel.IPV4.value in self.whitelist_cache:
            if value in self.whitelist_cache.get(WhitelistGenreModel.IPV4.value):
                return True

            if WhitelistGenreModel.CIDR_IPV4.value in self.whitelist_cache:
                for cidr_ipv4 in self.whitelist_cache.get(WhitelistGenreModel.CIDR_IPV4.value):
                    if is_ipv4_in_cidr(value, cidr_ipv4) == True:
                        return True

        elif genre == TicketItemGenreModel.IPV6.value and TicketItemGenreModel.IPV6.value in self.whitelist_cache:
            if value in self.whitelist_cache.get(WhitelistGenreModel.IPV6.value):
                return True

            if WhitelistGenreModel.CIDR_IPV6.value in self.whitelist_cache:
                for cidr_ipv6 in self.whitelist_cache.get(WhitelistGenreModel.CIDR_IPV6.value):
                    if is_ipv6_in_cidr(value, cidr_ipv6) == True:
                        return True

        return False

    def _generate_ticket_item_id(self) -> str:
        """
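The IPv4/IPv6 CIDR membership tests delegate to the piracyshield_component_cidr_verifier extension imported above. For reference only, an equivalent check can be sketched with the standard-library ipaddress module; this is not the code path the service uses:

    import ipaddress

    def is_ip_in_cidr(ip: str, cidr: str) -> bool:
        # strict=False also accepts CIDRs with host bits set, e.g. 10.0.0.1/24
        return ipaddress.ip_address(ip) in ipaddress.ip_network(cidr, strict=False)

    assert is_ip_in_cidr('192.0.2.10', '192.0.2.0/24') is True
    assert is_ip_in_cidr('2001:db8::1', '2001:db8::/32') is True
    assert is_ip_in_cidr('198.51.100.1', '192.0.2.0/24') is False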
@@ -160,6 +181,12 @@ class TicketRelationEstablishService(BaseService):

        return self.identifier.generate()

    def _build_ticket_item_cache(self):
        self.ticket_item_cache = self.ticket_item_get_active_service.execute()

    def _build_whitelist_cache(self):
        self.whitelist_cache = self.whitelist_get_active_service.execute()

    def _schedule_task(self):
        pass
@@ -170,12 +197,10 @@ class TicketRelationEstablishService(BaseService):
        pass

    def _prepare_modules(self):
        self.ticket_item_exists_by_value_service = TicketItemExistsByValueService()
        self.ticket_item_get_active_service = TicketItemGetActiveService()

        self.ticket_error_get_by_ticket_service = TicketErrorGetByTicketService()
        self.ticket_item_create_batch_service = TicketItemCreateBatchService()

        self.whitelist_exists_by_value_service = WhitelistExistsByValueService()

        self.ticket_item_create_service = TicketItemCreateService()
        self.whitelist_get_active_service = WhitelistGetActiveService()

        self.identifier = Identifier()
@@ -32,19 +32,9 @@ class TicketAutocloseTask(BaseTask):
        # TODO: must check if the ticket exists.

        # change status
        try:
            self.ticket_storage.update_status(
                ticket_id = self.ticket_id,
                ticket_status = TicketStatusModel.CLOSED.value
            )

        except TicketStorageUpdateException:
            self.logger.error(f'Could not update the ticket `{self.ticket_id}`')

        # log the operation
        self.log_ticket_create_service.execute(
        self.ticket_storage.update_status(
            ticket_id = self.ticket_id,
            message = f'Changed status to `{TicketStatusModel.CLOSED.value}`.'
            ticket_status = TicketStatusModel.CLOSED.value
        )

    def before_run(self):
@@ -56,10 +46,19 @@ class TicketAutocloseTask(BaseTask):
        self.log_ticket_create_service = LogTicketCreateService()

    def after_run(self):
        pass
        # log the operation
        self.log_ticket_create_service.execute(
            ticket_id = self.ticket_id,
            message = f'Changed status to `{TicketStatusModel.CLOSED.value}`.'
        )

    def on_failure(self):
        pass
        self.logger.error(f'Could not update the ticket `{self.ticket_id}`')

        self.ticket_storage.update_status(
            ticket_id = self.ticket_id,
            ticket_status = TicketStatusModel.CREATED.value
        )

def ticket_autoclose_task_caller(**kwargs):
    t = TicketAutocloseTask(**kwargs)
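The refactor leans on the BaseTask lifecycle hooks: logging moves out of run() into after_run(), which only fires on success, while on_failure() now rolls the ticket status back to CREATED. A hypothetical illustration of the dispatch order these tasks appear to rely on; this is an assumption about BaseTask, not code from this commit:

    class SketchTask:
        """Hypothetical stand-in for the assumed BaseTask dispatch order."""

        def before_run(self): ...
        def run(self): ...
        def after_run(self): ...
        def on_failure(self): ...

        def execute(self):
            self.before_run()

            try:
                result = self.run()

            except Exception:
                # TicketAutocloseTask/TicketInitializeTask reset the status here
                self.on_failure()

                raise

            # success-only work, e.g. writing the ticket log entry
            self.after_run()

            return result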
134
src/piracyshield_service/ticket/tasks/ticket_create.py
Normal file
134
src/piracyshield_service/ticket/tasks/ticket_create.py
Normal file
|
@ -0,0 +1,134 @@
|
|||
from piracyshield_service.task.base import BaseTask
|
||||
|
||||
from piracyshield_component.utils.time import Time
|
||||
|
||||
from piracyshield_data_model.ticket.status.model import TicketStatusModel
|
||||
|
||||
from piracyshield_data_storage.ticket.storage import TicketStorage
|
||||
|
||||
from piracyshield_service.ticket.relation.establish import TicketRelationEstablishService
|
||||
from piracyshield_service.ticket.relation.abandon import TicketRelationAbandonService
|
||||
from piracyshield_service.ticket.get import TicketGetService
|
||||
|
||||
from piracyshield_service.forensic.remove_by_ticket import ForensicRemoveByTicketService
|
||||
|
||||
from piracyshield_service.ticket.tasks.ticket_initialize import ticket_initialize_task_caller
|
||||
from piracyshield_service.ticket.tasks.ticket_autoclose import ticket_autoclose_task_caller

from piracyshield_service.log.ticket.create import LogTicketCreateService

from piracyshield_service.task.service import TaskService

class TicketCreateTask(BaseTask):

    """
    Creation of ticket items and subsequent tasks.
    """

    pending_tasks = None

    ticket_data = None

    ticket_relation_establish_service = None

    ticket_relation_abandon_service = None

    ticket_get_service = None

    ticket_storage = None

    forensic_remove_by_ticket_service = None

    log_ticket_create_service = None

    task_service = None

    def __init__(self, ticket_data: dict):
        super().__init__()

        # per-instance list of scheduled task identifiers (kept off the class to avoid sharing state between tasks)
        self.pending_tasks = []

        self.ticket_data = ticket_data

    def run(self) -> bool:
        """
        Starts the operations for the new ticket.

        The status gets set to `open` as we make it visible for API pulls and notifications.
        """

        # proceed to build the relation item <-> provider
        # this step also checks for duplicates, whitelisted items and error tickets
        (fqdn_ticket_items, ipv4_ticket_items, ipv6_ticket_items) = self.ticket_relation_establish_service.execute(
            ticket_id = self.ticket_data.get('ticket_id'),
            providers = self.ticket_data.get('assigned_to'),
            fqdn = self.ticket_data.get('fqdn') or None,
            ipv4 = self.ticket_data.get('ipv4') or None,
            ipv6 = self.ticket_data.get('ipv6') or None
        )

        self.pending_tasks.append(self.task_service.create(
            task_caller = ticket_initialize_task_caller,
            delay = self.ticket_data.get('settings').get('revoke_time'),
            ticket_id = self.ticket_data.get('ticket_id')
        ))

        self.pending_tasks.append(self.task_service.create(
            task_caller = ticket_autoclose_task_caller,
            delay = self.ticket_data.get('settings').get('autoclose_time'),
            ticket_id = self.ticket_data.get('ticket_id')
        ))

        self.ticket_storage.update_task_list(
            ticket_id = self.ticket_data.get('ticket_id'),
            task_ids = self.pending_tasks,
            updated_at = Time.now_iso8601()
        )

        return True

    def before_run(self):
        """
        Initialize required modules.
        """

        self.ticket_storage = TicketStorage()

        self.ticket_get_service = TicketGetService()

        self.ticket_relation_establish_service = TicketRelationEstablishService()

        self.ticket_relation_abandon_service = TicketRelationAbandonService()

        self.forensic_remove_by_ticket_service = ForensicRemoveByTicketService()

        self.log_ticket_create_service = LogTicketCreateService()

        self.task_service = TaskService()

    def after_run(self):
        self.ticket_storage.update_status(
            ticket_id = self.ticket_data.get('ticket_id'),
            ticket_status = TicketStatusModel.CREATED.value
        )

        # log the operation
        self.log_ticket_create_service.execute(
            ticket_id = self.ticket_data.get('ticket_id'),
            message = 'Completed the creation of all ticket items.'
        )

    def on_failure(self):
        self.logger.error(f'Could not initialize ticket items for `{self.ticket_data.get("ticket_id")}`')

        self.ticket_relation_abandon_service.execute(self.ticket_data.get('ticket_id'))

        self.forensic_remove_by_ticket_service.execute(self.ticket_data.get('ticket_id'))

        for single_task in self.pending_tasks:
            self.task_service.remove(single_task)

        self.ticket_storage.update_status(
            ticket_id = self.ticket_data.get('ticket_id'),
            ticket_status = TicketStatusModel.FAILED.value
        )

def ticket_create_task_caller(**kwargs):
    t = TicketCreateTask(**kwargs)

    return t.execute()
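For reference, a minimal sketch of how this caller could be invoked; every key in the payload is inferred from the fields read in `run()`, while the module path, the concrete values and the delay units are assumptions rather than part of this commit.

# Hypothetical invocation of ticket_create_task_caller (illustrative only).
from piracyshield_service.ticket.tasks.ticket_create import ticket_create_task_caller  # assumed module path

ticket_create_task_caller(
    ticket_data = {
        'ticket_id': 'TICKET-0001',                   # placeholder identifier
        'assigned_to': ['provider-a', 'provider-b'],  # providers that receive the relations
        'fqdn': ['blocked-domain.example'],           # any of fqdn/ipv4/ipv6 may be empty
        'ipv4': [],
        'ipv6': [],
        'settings': {
            'revoke_time': 300,       # delay before ticket_initialize_task_caller fires
            'autoclose_time': 86400   # delay before ticket_autoclose_task_caller fires
        }
    }
)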
@@ -2,7 +2,7 @@ from piracyshield_service.task.base import BaseTask

from piracyshield_data_model.ticket.status.model import TicketStatusModel

from piracyshield_data_storage.ticket.storage import TicketStorage, TicketStorageUpdateException
from piracyshield_data_storage.ticket.storage import TicketStorage

from piracyshield_service.log.ticket.create import LogTicketCreateService

@@ -32,19 +32,9 @@ class TicketInitializeTask(BaseTask):

        # TODO: must check if the ticket exists.

        # change status
        try:
            self.ticket_storage.update_status(
                ticket_id = self.ticket_id,
                ticket_status = TicketStatusModel.OPEN.value
            )

        except TicketStorageUpdateException:
            self.logger.error(f'Could not update the ticket `{self.ticket_id}`')

        # log the operation
        self.log_ticket_create_service.execute(
        self.ticket_storage.update_status(
            ticket_id = self.ticket_id,
            message = f'Changed status to `{TicketStatusModel.OPEN.value}`.'
            ticket_status = TicketStatusModel.OPEN.value
        )

    def before_run(self):

@@ -56,10 +46,19 @@ class TicketInitializeTask(BaseTask):

        self.log_ticket_create_service = LogTicketCreateService()

    def after_run(self):
        pass
        # log the operation
        self.log_ticket_create_service.execute(
            ticket_id = self.ticket_id,
            message = f'Changed status to `{TicketStatusModel.OPEN.value}`.'
        )

    def on_failure(self):
        pass
        self.logger.error(f'Could not update the ticket `{self.ticket_id}`')

        self.ticket_storage.update_status(
            ticket_id = self.ticket_id,
            ticket_status = TicketStatusModel.CREATED.value
        )

def ticket_initialize_task_caller(**kwargs):
    t = TicketInitializeTask(**kwargs)
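Both of these modules follow the same shape: a BaseTask subclass plus a module-level caller that a scheduler can reference as a plain function. A hypothetical sketch of the lifecycle the subclasses appear to rely on follows; the real BaseTask.execute() is not part of this commit, so the ordering and error handling below are assumptions.

# Hypothetical illustration of the task lifecycle assumed by TicketCreateTask and
# TicketInitializeTask; the real BaseTask is not shown in this commit.
class ExampleTask:

    def __init__(self, **kwargs):
        self.kwargs = kwargs

    def execute(self) -> bool:
        try:
            self.before_run()    # wire up storages and services

            result = self.run()  # perform the actual work

            self.after_run()     # final status update and logging

            return result

        except Exception:
            self.on_failure()    # roll back relations, forensic data and scheduled tasks

            return False

    def before_run(self): ...

    def run(self) -> bool: return True

    def after_run(self): ...

    def on_failure(self): ...

def example_task_caller(**kwargs):
    # Same convention as ticket_create_task_caller / ticket_initialize_task_caller:
    # build the task and run it, so a scheduler only needs a picklable function reference.
    t = ExampleTask(**kwargs)

    return t.execute()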
@@ -1,17 +0,0 @@
[package]
name = "rs_cidr_verifier"
version = "0.1.0"
edition = "2021"

[profile.release]
opt-level = 3
lto = true

[lib]
name = "rs_cidr_verifier"
path = "src/lib.rs"
crate-type = ["cdylib"]

[dependencies]
pyo3 = { version = "0.20.2", features = ["extension-module"] }
ipnetwork = "0.20.0"
@@ -1,2 +0,0 @@
include Cargo.toml
recursive-include src *
@@ -1,34 +0,0 @@
use pyo3::prelude::*;
use ipnetwork::{Ipv4Network, Ipv6Network};
use std::net::{Ipv4Addr, Ipv6Addr};
use std::str::FromStr;

#[pyfunction]
fn is_ipv4_in_cidr(ip: &str, cidr: &str) -> PyResult<bool> {
    let ip_addr = ip.parse::<Ipv4Addr>()
        .map_err(|_| PyErr::new::<pyo3::exceptions::PyValueError, _>("Invalid IPv4 address"))?;

    let cidr_net = Ipv4Network::from_str(cidr)
        .map_err(|_| PyErr::new::<pyo3::exceptions::PyValueError, _>("Invalid IPv4 CIDR notation"))?;

    Ok(cidr_net.contains(ip_addr))
}

#[pyfunction]
fn is_ipv6_in_cidr(ip: &str, cidr: &str) -> PyResult<bool> {
    let ip_addr = ip.parse::<Ipv6Addr>()
        .map_err(|_| PyErr::new::<pyo3::exceptions::PyValueError, _>("Invalid IPv6 address"))?;

    let cidr_net = Ipv6Network::from_str(cidr)
        .map_err(|_| PyErr::new::<pyo3::exceptions::PyValueError, _>("Invalid IPv6 CIDR notation"))?;

    Ok(cidr_net.contains(ip_addr))
}

#[pymodule]
fn rs_cidr_verifier(_py: Python, m: &PyModule) -> PyResult<()> {
    m.add_function(wrap_pyfunction!(is_ipv4_in_cidr, m)?)?;
    m.add_function(wrap_pyfunction!(is_ipv6_in_cidr, m)?)?;

    Ok(())
}
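The removed extension exposed only these two CIDR containment checks to Python. For context, a minimal pure-Python sketch of the same checks built on the standard-library ipaddress module; whether the project actually replaces the extension this way is an assumption, since the replacement code is not part of this section.

# Hypothetical pure-Python equivalent of the removed is_ipv4_in_cidr / is_ipv6_in_cidr
# helpers; not taken from this commit.
import ipaddress

def is_ipv4_in_cidr(ip: str, cidr: str) -> bool:
    # Raises ValueError on a malformed address or CIDR, roughly mirroring the
    # PyValueError raised by the removed Rust functions.
    return ipaddress.IPv4Address(ip) in ipaddress.IPv4Network(cidr, strict=False)

def is_ipv6_in_cidr(ip: str, cidr: str) -> bool:
    return ipaddress.IPv6Address(ip) in ipaddress.IPv6Network(cidr, strict=False)

# Example: is_ipv4_in_cidr('192.168.1.10', '192.168.1.0/24') -> True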
@@ -14,6 +14,10 @@ from piracyshield_data_model.whitelist.model import (
    WhitelistModelIPv4NonValidException,
    WhitelistModelIPv6MissingException,
    WhitelistModelIPv6NonValidException,
    WhitelistModelCIDRIPv4MissingException,
    WhitelistModelCIDRIPv4NonValidException,
    WhitelistModelCIDRIPv6MissingException,
    WhitelistModelCIDRIPv6NonValidException,
    WhitelistModelRegistrarMissingException,
    WhitelistModelRegistrarNonValidException,
    WhitelistModelASCodeMissingException,

@@ -121,6 +125,12 @@ class WhitelistCreateService(BaseService):
            case 'ipv6':
                document['as_code'] = model.get('as_code')

            case 'cidr_ipv4':
                document['as_code'] = model.get('as_code')

            case 'cidr_ipv6':
                document['as_code'] = model.get('as_code')

        return document

    def _schedule_task(self):

@@ -159,6 +169,18 @@ class WhitelistCreateService(BaseService):
        except WhitelistModelIPv6NonValidException:
            raise ApplicationException(WhitelistErrorCode.NON_VALID_IPV6, WhitelistErrorMessage.NON_VALID_IPV6)

        except WhitelistModelCIDRIPv4MissingException:
            raise ApplicationException(WhitelistErrorCode.MISSING_CIDR_IPV4, WhitelistErrorMessage.MISSING_CIDR_IPV4)

        except WhitelistModelCIDRIPv4NonValidException:
            raise ApplicationException(WhitelistErrorCode.NON_VALID_CIDR_IPV4, WhitelistErrorMessage.NON_VALID_CIDR_IPV4)

        except WhitelistModelCIDRIPv6MissingException:
            raise ApplicationException(WhitelistErrorCode.MISSING_CIDR_IPV6, WhitelistErrorMessage.MISSING_CIDR_IPV6)

        except WhitelistModelCIDRIPv6NonValidException:
            raise ApplicationException(WhitelistErrorCode.NON_VALID_CIDR_IPV6, WhitelistErrorMessage.NON_VALID_CIDR_IPV6)

        except WhitelistModelRegistrarMissingException:
            raise ApplicationException(WhitelistErrorCode.MISSING_REGISTRAR, WhitelistErrorMessage.MISSING_REGISTRAR)
@@ -17,21 +17,29 @@ class WhitelistErrorCode:

    NON_VALID_IPV6 = '6007'

    MISSING_REGISTRAR = '6008'
    MISSING_CIDR_IPV4 = '6008'

    NON_VALID_REGISTRAR = '6009'
    NON_VALID_CIDR_IPV4 = '6009'

    MISSING_AS_CODE = '6010'
    MISSING_CIDR_IPV6 = '6010'

    NON_VALID_AS_CODE = '6011'
    NON_VALID_CIDR_IPV6 = '6011'

    ITEM_EXISTS = '6012'
    MISSING_REGISTRAR = '6012'

    ITEM_HAS_TICKET = '6013'
    NON_VALID_REGISTRAR = '6013'

    CANNOT_REMOVE = '6014'
    MISSING_AS_CODE = '6014'

    CANNOT_SET_STATUS = '6015'
    NON_VALID_AS_CODE = '6015'

    ITEM_EXISTS = '6016'

    ITEM_HAS_TICKET = '6017'

    CANNOT_REMOVE = '6018'

    CANNOT_SET_STATUS = '6019'

class WhitelistErrorMessage:

@@ -51,9 +59,17 @@ class WhitelistErrorMessage:

    NON_VALID_IPV6 = 'Non valid IPv6.'

    MISSING_CIDR_IPV4 = 'Missing CIDR IPv4 class'

    NON_VALID_CIDR_IPV4 = 'Non valid CIDR IPv4 class'

    MISSING_CIDR_IPV6 = 'Missing CIDR IPv6 class'

    NON_VALID_CIDR_IPV6 = 'Non valid CIDR IPv6 class'

    MISSING_REGISTRAR = 'Missing registrar.'

    NON_VALID_REGISTRAR = 'Non valid registrar. A registrar must be a string (allowed characters: ` -`)'
    NON_VALID_REGISTRAR = 'Non valid registrar. A registrar must be a string (allowed characters: ` .,-_`)'

    MISSING_AS_CODE = 'Missing AS code.'
@@ -6,7 +6,7 @@ from piracyshield_component.exception import ApplicationException

from piracyshield_data_storage.whitelist.storage import WhitelistStorage, WhitelistStorageGetException

from piracyshield_service.ticket.errors import TicketErrorCode, TicketErrorMessage
from piracyshield_service.whitelist.errors import WhitelistErrorCode, WhitelistErrorMessage

class WhitelistExistsByValueService(BaseService):
53 src/piracyshield_service/whitelist/get_active.py Normal file
@@ -0,0 +1,53 @@
from __future__ import annotations

from piracyshield_service.base import BaseService

from piracyshield_component.exception import ApplicationException

from piracyshield_data_storage.whitelist.storage import WhitelistStorage, WhitelistStorageGetException

from piracyshield_service.whitelist.errors import WhitelistErrorCode, WhitelistErrorMessage

class WhitelistGetActiveService(BaseService):

    """
    Get all the active whitelist items.

    This is currently used to build a cache.
    """

    data_storage = None

    def __init__(self):
        """
        Initialize logger and required modules.
        """

        super().__init__()

        self._prepare_modules()

    def execute(self) -> list | Exception:
        try:
            response = self.data_storage.get_active()

            document = next(response, None)

            return document

        except WhitelistStorageGetException as e:
            self.logger.error('Cannot get all the whitelist items')

            raise ApplicationException(WhitelistErrorCode.GENERIC, WhitelistErrorMessage.GENERIC, e)

    def _schedule_task(self):
        pass

    def _validate_parameters(self):
        pass

    def _prepare_configs(self):
        pass

    def _prepare_modules(self):
        self.data_storage = WhitelistStorage()
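Since the docstring says the service feeds a cache, a short hypothetical usage sketch follows; the import path mirrors the sibling service modules, and the cache handling is an assumption, as no consumer of the service appears in this section.

# Hypothetical consumer of WhitelistGetActiveService (not part of this commit).
from piracyshield_service.whitelist.get_active import WhitelistGetActiveService  # assumed path

def build_whitelist_cache() -> list:
    # execute() returns whatever the storage cursor yields first, or None when empty;
    # normalize to a list so callers can iterate safely.
    active_items = WhitelistGetActiveService().execute()

    return active_items or []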
@@ -6,7 +6,7 @@ from piracyshield_component.exception import ApplicationException

from piracyshield_data_storage.whitelist.storage import WhitelistStorage, WhitelistStorageGetException

from piracyshield_service.ticket.errors import TicketErrorCode, TicketErrorMessage
from piracyshield_service.whitelist.errors import WhitelistErrorCode, WhitelistErrorMessage

class WhitelistGetGlobalService(BaseService):