Skip to content

Commit

Permalink
Merge branch 'master' into pr/11627
Browse files Browse the repository at this point in the history
  • Loading branch information
v-prasadboke committed Jan 9, 2025
2 parents a116c9c + cb54692 commit 6f9cc80
Show file tree
Hide file tree
Showing 99 changed files with 10,235 additions and 2,686 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ on:
workflow_dispatch:
jobs:
DetectionTemplateSchemaValidation:
runs-on: ubuntu-latest
runs-on: ubuntu-22.04
env:
buildConfiguration: Release
dotnetSdkVersion: 3.1.401
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/non-ascii-validations.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ on:
workflow_dispatch:
jobs:
NonAsciiValidations:
runs-on: ubuntu-latest
runs-on: ubuntu-22.04
env:
buildConfiguration: Release
dotnetSdkVersion: 3.1.401
Expand Down
3 changes: 2 additions & 1 deletion .script/tests/asimParsersTest/ASimFilteringTest.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
# Workspace ID for the Log Analytics workspace where the ASim filtering tests will be performed.
WORKSPACE_ID = "e9beceee-7d61-429f-a177-ee5e2b7f481a"
# Timespan for the parser query
TIME_SPAN_IN_DAYS = 7
TIME_SPAN_IN_DAYS = 2

# exclusion_file_path refers to the CSV file path containing a list of parsers. Despite failing tests, these parsers will not cause the overall workflow to fail
exclusion_file_path = '.script/tests/asimParsersTest/ExclusionListForASimTests.csv'
Expand Down Expand Up @@ -309,6 +309,7 @@ def main():
if parser_file['EquivalentBuiltInParser'] in read_exclusion_list_from_csv():
print(f"{YELLOW}The parser {parser_file_path} is listed in the exclusions file. Therefore, this workflow run will not fail because of it. To allow this parser to cause the workflow to fail, please remove its name from the exclusions list file located at: {exclusion_file_path}{RESET}")
sys.stdout.flush()
continue
# Check for exception cases where the failure can be ignored
# Check if the failure message and schema match the exception cases
if len(result.failures) == 1:
Expand Down
8 changes: 8 additions & 0 deletions .script/tests/asimParsersTest/VerifyASimParserTemplate.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,6 +125,10 @@ def extract_and_check_properties(Parser_file, Union_Parser__file, FileType, Pars
if match:
event_product = match.group(1)
results.append((event_product, '"EventProduct" field is mapped in parser', 'Pass'))
# if equivalent_built_in_parser ends with '_Native', then use the static value 'NativeTable' for 'EventProduct'
elif equivalent_built_in_parser.endswith('_Native'):
event_product = 'NativeTable'
results.append((event_product, '"EventProduct" field is not required since this is a native table parser. Static value will be used for "EventProduct".', 'Pass'))
# If 'EventProduct' was not found in the KQL query, add to results
else:
results.append((f'{RED}EventProduct{RESET}', f'{RED}"EventProduct" field not mapped in parser. Please map it in parser query.{RESET}', f'{RED}Fail{RESET}'))
Expand All @@ -136,6 +140,10 @@ def extract_and_check_properties(Parser_file, Union_Parser__file, FileType, Pars
if match:
event_vendor = match.group(1)
results.append((event_vendor, '"EventVendor" field is mapped in parser', 'Pass'))
# if equivalent_built_in_parser ends with '_Native', then use 'Microsoft' as the 'EventVendor'
elif equivalent_built_in_parser.endswith('_Native'):
event_vendor = 'Microsoft'
results.append((event_vendor, '"EventVendor" field is not required since this is a native table parser. Static value will be used for "EventVendor".', 'Pass'))
# If 'EventVendor' was not found in the KQL query, add to results
else:
results.append((f'{RED}EventVendor{RESET}', f'{RED}"EventVendor" field not mapped in parser. Please map it in parser query.{RESET}', f'{RED}Fail{RESET}'))
Expand Down
7 changes: 7 additions & 0 deletions .script/tests/asimParsersTest/ingestASimSampleData.py
Original file line number Diff line number Diff line change
Expand Up @@ -259,12 +259,18 @@ def extract_event_vendor_product(parser_query,parser_file):
match = re.search(r'EventVendor\s*=\s*[\'"]([^\'"]+)[\'"]', parser_query)
if match:
event_vendor = match.group(1)
# if equivalent_built_in_parser ends with '_Native', then use 'Microsoft' as the 'EventVendor'
elif equivalent_built_in_parser.endswith('_Native'):
event_vendor = 'Microsoft'
else:
print(f'EventVendor field not mapped in parser. Please map it in parser query.{parser_file}')

match = re.search(r'EventProduct\s*=\s*[\'"]([^\'"]+)[\'"]', parser_query)
if match:
event_product = match.group(1)
# if equivalent_built_in_parser ends with '_Native', then use the static value 'NativeTable' for 'EventProduct'
elif equivalent_built_in_parser.endswith('_Native'):
event_product = 'NativeTable'
else:
print(f'Event Product field not mapped in parser. Please map it in parser query.{parser_file}')
return event_vendor, event_product ,schema_name
Expand Down Expand Up @@ -332,6 +338,7 @@ def convert_data_type(schema_result, data_result):
parser_query = asim_parser.get('ParserQuery', '')
normalization = asim_parser.get('Normalization', {})
schema = normalization.get('Schema')
equivalent_built_in_parser = asim_parser.get('EquivalentBuiltInParser')
event_vendor, event_product, schema_name = extract_event_vendor_product(parser_query, file)

SampleDataFile = f'{event_vendor}_{event_product}_{schema}_IngestedLogs.csv'
Expand Down
12 changes: 12 additions & 0 deletions Logos/Druva_Logo.svg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file modified Solutions/AbnormalSecurity/Data Connectors/AbnormalSecurityConn.zip
Binary file not shown.
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ def _get_header(self):
return {
"Authorization": f"Bearer {self.api_key}",
"Soar-Integration-Origin": "AZURE SENTINEL",
"Azure-Sentinel-Version": "2024-11-29"
"Azure-Sentinel-Version": "2024-12-24"
}

def _get_filter_query(self, filter_param, gte_datetime=None, lte_datetime=None):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ def get_headers(ctx: Context) -> Dict[str, str]:
"X-Abnormal-Trace-Id": str(ctx.TRACE_ID),
"Authorization": f"Bearer {ctx.API_TOKEN}",
"Soar-Integration-Origin": "AZURE SENTINEL",
"Azure-Sentinel-Version": "2024-11-29 V2",
"Azure-Sentinel-Version": "2024-12-24 V2",
}


Expand All @@ -50,7 +50,7 @@ def compute_url(base_url: str, pathname: str, params: Dict[str, str]) -> str:
return endpoint


async def fetch_with_retries(url, retries=3, backoff=4, timeout=10, headers=None):
async def fetch_with_retries(url, retries=3, backoff=8, timeout=60, headers=None):
logging.info(f"Fetching url: {url}")
async def fetch(session, url):
async with session.get(url, headers=headers, timeout=timeout) as response:
Expand All @@ -68,23 +68,27 @@ async def fetch(session, url):
logging.info(f"API Response Status for URL: `{url}` is `{response.status}`")
return json.loads(text)

async with aiohttp.ClientSession() as session:
for attempt in range(1, retries + 1):
for attempt in range(1, retries + 1):
async with aiohttp.ClientSession() as session:
try:
logging.info(f"Fetch Attempt `{attempt}` for url: `{url}`")
response = await fetch(session, url)
return response
except aiohttp.ClientResponseError as e:
if 500 <= e.status < 600:
logging.error("Attempt {attempt} failed with error", exc_info=e)
logging.error(f"Attempt {attempt} for {url} failed with error", exc_info=e)
if attempt == retries:
raise
else:
await asyncio.sleep(backoff**attempt)
else:
raise
except aiohttp.ClientError as e:
logging.error("Request failed with non-retryable error", exc_info=e)
raise
except Exception as e:
logging.error(f"Attempt {attempt} for {url} failed with error", exc_info=e)
if attempt == retries:
raise
else:
await asyncio.sleep(backoff**attempt)


async def call_threat_campaigns_endpoint(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ def try_str_to_datetime(time: str) -> datetime:
return datetime.strptime(time, TIME_FORMAT)
except Exception as _:
pass
return datetime.strptime(time, TIME_FORMAT_WITHMS)
return datetime.strptime((time[:26] + 'Z') if len(time) > 26 else time, TIME_FORMAT_WITHMS)


class TimeRange(BaseModel):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ def test_valid_headers(self):
"X-Abnormal-Trace-Id": str(self.trace_id),
"Authorization": f"Bearer {self.api_token}",
"Soar-Integration-Origin": "AZURE SENTINEL",
"Azure-Sentinel-Version": "2024-11-29 V2",
"Azure-Sentinel-Version": "2024-12-24 V2",
}
self.maxDiff = None
self.assertEqual(headers, expected_headers)
Expand Down
24 changes: 24 additions & 0 deletions Solutions/AbnormalSecurity/Data Connectors/Tests/utils_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
)
from pydantic import ValidationError
from uuid import uuid4
import random


class TestTryStrToDateTime(unittest.TestCase):
Expand All @@ -27,6 +28,29 @@ def test_format_with_ms(self):
expected = datetime.strptime(time_str, TIME_FORMAT_WITHMS)
result = try_str_to_datetime(time_str)
self.assertEqual(result, expected)

def test_format_with_ns(self):
# Test case for a nanosecond-precision timestamp, expected to be truncated to microseconds
time_str_ns = "2024-10-01T12:34:56.123456789Z"
time_str_ms = "2024-10-01T12:34:56.123456Z"
expected = datetime.strptime(time_str_ms, TIME_FORMAT_WITHMS)
result = try_str_to_datetime(time_str_ns)
self.assertEqual(result, expected)

def test_format_with_ns_2(self):
# Test case for a timestamp with more than nine fractional-second digits
time_str_ns = "2024-10-01T12:34:56.12345678913Z"
result = try_str_to_datetime(time_str_ns)
self.assertIsNotNone(result)

def test_format_with_ns_3(self):
# Test case for a timestamp with a very long (100-digit) fractional-seconds field
f = ""
for i in range(100):
f += random.choice("1234567890")
time_str_ns = f"2024-10-01T12:34:56.{f}Z"
result = try_str_to_datetime(time_str_ns)
self.assertIsNotNone(result)

def test_invalid_format(self):
# Test case for invalid format
Expand Down
Binary file modified Solutions/Check Point CloudGuard CNAPP/Package/3.0.0.zip
Binary file not shown.
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@
"_email": "[variables('email')]",
"_solutionName": "Check Point CloudGuard CNAPP",
"_solutionVersion": "3.0.0",
"solutionId": "checkpoint-cloudguard.checkpoint-sentinel-solutions-cloud-guard",
"solutionId": "checkpoint.checkpoint-sentinel-solutions-cloud-guard",
"_solutionId": "[variables('solutionId')]",
"workspaceResourceId": "[resourceId('microsoft.OperationalInsights/Workspaces', parameters('workspace'))]",
"dataConnectorCCPVersion": "1.0.0",
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
{
"publisherId": "checkpoint-cloudguard",
"publisherId": "checkpoint",
"offerId": "checkpoint-sentinel-solutions-cloud-guard",
"firstPublishDate": "2024-11-12",
"providers": [
Expand Down
Loading

0 comments on commit 6f9cc80

Please sign in to comment.